diff --git a/services/document-updater/app.coffee b/services/document-updater/app.coffee
index 36c0cb3a72..31e8ebb3b3 100644
--- a/services/document-updater/app.coffee
+++ b/services/document-updater/app.coffee
@@ -8,7 +8,6 @@ if Settings.sentry?.dsn?
 
 RedisManager = require('./app/js/RedisManager')
 DispatchManager = require('./app/js/DispatchManager')
-Keys = require('./app/js/RedisKeyBuilder')
 Errors = require "./app/js/Errors"
 HttpController = require "./app/js/HttpController"
 
@@ -63,15 +62,18 @@ app.get '/status', (req, res)->
   else
     res.send('document updater is alive')
 
-redisCheck = require("redis-sharelatex").activeHealthCheckRedis(Settings.redis.web)
-app.get "/health_check/redis", (req, res, next)->
-  if redisCheck.isAlive()
-    res.send 200
-  else
-    res.send 500
+webRedisClient = require("redis-sharelatex").createClient(Settings.redis.web)
+app.get "/health_check/redis", (req, res, next) ->
+  webRedisClient.healthCheck (error) ->
+    if error?
+      logger.err {err: error}, "failed redis health check"
+      res.send 500
+    else
+      res.send 200
 
+docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
 app.get "/health_check/redis_cluster", (req, res, next) ->
-  RedisManager.rclient.healthCheck (error, alive) ->
+  docUpdaterRedisClient.healthCheck (error) ->
     if error?
       logger.err {err: error}, "failed redis cluster health check"
       res.send 500
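Both health-check endpoints now go through the callback-style healthCheck exposed by the redis-sharelatex client instead of the old activeHealthCheckRedis poller. A minimal sketch of that shared pattern, assuming the logger from app.coffee is in scope (the sendHealthCheck helper name is illustrative, not part of this change):

# Sketch only: wrap a redis-sharelatex client's callback-style healthCheck
# in an Express handler, mirroring the two endpoints above.
sendHealthCheck = (rclient, label) ->
  (req, res, next) ->
    rclient.healthCheck (error) ->
      if error?
        logger.err {err: error}, "failed #{label} health check"
        res.send 500
      else
        res.send 200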
diff --git a/services/document-updater/app/coffee/RedisBackend.coffee b/services/document-updater/app/coffee/RedisBackend.coffee
deleted file mode 100644
index d69cd21a6e..0000000000
--- a/services/document-updater/app/coffee/RedisBackend.coffee
+++ /dev/null
@@ -1,206 +0,0 @@
-Settings = require "settings-sharelatex"
-async = require "async"
-_ = require "underscore"
-logger = require "logger-sharelatex"
-Metrics = require "metrics-sharelatex"
-
-class Client
-  constructor: (@clients) ->
-    @SECONDARY_TIMEOUT = 600
-    @HEARTBEAT_TIMEOUT = 2000
-
-  multi: () ->
-    return new MultiClient(
-      @clients.map (client) -> {
-        rclient: client.rclient.multi()
-        key_schema: client.key_schema
-        primary: client.primary
-        driver: client.driver
-      }
-    )
-
-  healthCheck: (callback) ->
-    jobs = @clients.map (client) =>
-      (cb) => @_healthCheckClient(client, cb)
-    async.parallel jobs, callback
-
-  _healthCheckClient: (client, callback) ->
-    if client.driver == "ioredis"
-      @_healthCheckClusterClient(client, callback)
-    else
-      @_healthCheckNodeRedisClient(client, callback)
-
-  _healthCheckNodeRedisClient: (client, callback) ->
-    client.healthCheck ?= require("redis-sharelatex").activeHealthCheckRedis(Settings.redis.web)
-    if client.healthCheck.isAlive()
-      return callback()
-    else
-      return callback(new Error("node-redis client failed health check"))
-
-  _healthCheckClusterClient: (client, callback) ->
-    jobs = client.rclient.nodes("all").map (n) =>
-      (cb) => @_checkNode(n, cb)
-    async.parallel jobs, callback
-
-  _checkNode: (node, _callback) ->
-    callback = (args...) ->
-      _callback(args...)
-      _callback = () ->
-    timer = setTimeout () ->
-      error = new Error("ioredis node ping check timed out")
-      logger.error {err: error, key: node.options.key}, "node timed out"
-      callback(error)
-    , @HEARTBEAT_TIMEOUT
-    node.ping (err) ->
-      clearTimeout timer
-      callback(err)
-
-class MultiClient
-  constructor: (@clients) ->
-    @SECONDARY_TIMEOUT = 600
-
-  exec: (callback) ->
-    primaryError = null
-    primaryResult = null
-    jobs = @clients.map (client) =>
-      (cb) =>
-        cb = _.once(cb)
-        timer = new Metrics.Timer("redis.#{client.driver}.exec")
-
-        timeout = null
-        if !client.primary
-          timeout = setTimeout () ->
-            logger.error {err: new Error("#{client.driver} backend timed out")}, "backend timed out"
-            cb()
-          , @SECONDARY_TIMEOUT
-
-        client.rclient.exec (error, result) =>
-          timer.done()
-          if client.driver == "ioredis"
-            # ioredis returns an results like:
-            # [ [null, 42], [null, "foo"] ]
-            # where the first entries in each 2-tuple are
-            # presumably errors for each individual command,
-            # and the second entry is the result. We need to transform
-            # this into the same result as the old redis driver:
-            # [ 42, "foo" ]
-            filtered_result = []
-            for entry in result or []
-              if entry[0]?
-                return cb(entry[0])
-              else
-                filtered_result.push entry[1]
-            result = filtered_result
-
-          if client.primary
-            primaryError = error
-            primaryResult = result
-          if timeout?
-            clearTimeout(timeout)
-          cb(error, result)
-    async.parallel jobs, (error, results) ->
-      if error?
-        # suppress logging of errors
-        # logger.error {err: error}, "error in redis backend"
-      else
-        compareResults(results, "exec")
-      callback(primaryError, primaryResult)
-
-COMMANDS = {
-  "get": 0,
-  "smembers": 0,
-  "set": 0,
-  "srem": 0,
-  "sadd": 0,
-  "del": 0,
-  "lrange": 0,
-  "llen": 0,
-  "rpush": 0,
-  "expire": 0,
-  "ltrim": 0,
-  "incr": 0,
-  "eval": 2
-}
-for command, key_pos of COMMANDS
-  do (command, key_pos) ->
-    Client.prototype[command] = (args..., callback) ->
-      primaryError = null
-      primaryResult = []
-      jobs = @clients.map (client) =>
-        (cb) =>
-          cb = _.once(cb)
-          key_builder = args[key_pos]
-          key = key_builder(client.key_schema)
-          args_with_key = args.slice(0)
-          args_with_key[key_pos] = key
-          timer = new Metrics.Timer("redis.#{client.driver}.#{command}")
-
-          timeout = null
-          if !client.primary
-            timeout = setTimeout () ->
-              logger.error {err: new Error("#{client.driver} backend timed out")}, "backend timed out"
-              cb()
-            , @SECONDARY_TIMEOUT
-
-          client.rclient[command] args_with_key..., (error, result...) =>
-            timer.done()
-            if client.primary
-              primaryError = error
-              primaryResult = result
-            if timeout?
-              clearTimeout(timeout)
-            cb(error, result...)
-      async.parallel jobs, (error, results) ->
-        if error?
-          logger.error {err: error}, "error in redis backend"
-        else
-          compareResults(results, command)
-        callback(primaryError, primaryResult...)
-
-    MultiClient.prototype[command] = (args...) ->
-      for client in @clients
-        key_builder = args[key_pos]
-        key = key_builder(client.key_schema)
-        args_with_key = args.slice(0)
-        args_with_key[key_pos] = key
-        client.rclient[command] args_with_key...
-
-compareResults = (results, command) ->
-  return if results.length < 2
-  first = results[0]
-  if command == "smembers" and first?
-    first = first.slice().sort()
-  for result in results.slice(1)
-    if command == "smembers" and result?
-      result = result.slice().sort()
-    if not _.isEqual(first, result)
-      logger.error results: results, "redis backend conflict"
-      Metrics.inc "backend-conflict"
-    else
-      Metrics.inc "backend-match"
-
-module.exports =
-  createClient: () ->
-    client_configs = Settings.redis.documentupdater
-    unless client_configs instanceof Array
-      client_configs.primary = true
-      client_configs = [client_configs]
-    clients = client_configs.map (config) ->
-      if config.cluster?
-        Redis = require("ioredis")
-        rclient = new Redis.Cluster(config.cluster)
-        driver = "ioredis"
-      else
-        redis_config = {}
-        for key in ["host", "port", "password", "endpoints", "masterName"]
-          if config[key]?
-            redis_config[key] = config[key]
-        rclient = require("redis-sharelatex").createClient(redis_config)
-        driver = "noderedis"
-      return {
-        rclient: rclient
-        key_schema: config.key_schema
-        primary: config.primary
-        driver: driver
-      }
-    return new Client(clients)
\ No newline at end of file
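For reference, the deleted MultiClient.exec normalised ioredis exec() replies into the flat array shape returned by node_redis before comparing backends, as the inline comment in the removed code describes. A standalone sketch of that transform (the function name is illustrative):

# Sketch: flatten ioredis exec() replies ([ [err, value], ... ])
# into the flat [ value, ... ] shape node_redis returns,
# surfacing the first per-command error if there is one.
flattenIoredisExecResult = (result, callback) ->
  filtered = []
  for [err, value] in result or []
    return callback(err) if err?
    filtered.push value
  callback null, filtered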
diff --git a/services/document-updater/app/coffee/RedisKeyBuilder.coffee b/services/document-updater/app/coffee/RedisKeyBuilder.coffee
deleted file mode 100644
index adde3ee1c9..0000000000
--- a/services/document-updater/app/coffee/RedisKeyBuilder.coffee
+++ /dev/null
@@ -1,44 +0,0 @@
-# The default key schema looks like:
-#   doclines:foo
-#   DocVersion:foo
-# but if we use redis cluster, we want all 'foo' keys to map to the same
-# node, so we must use:
-#   doclines:{foo}
-#   DocVersion:{foo}
-# since redis hashes on the contents of {...}.
-#
-# To transparently support different key schemas for different clients
-# (potential writing/reading to both a cluster and single instance
-# while we migrate), instead of keys, we now pass around functions which
-# will build the key when passed a schema.
-#
-# E.g.
-#   key_schema = Settings.redis.keys
-#   key_schema == { docLines: ({doc_id}) -> "doclines:#{doc_id}", ... }
-#   key_builder = RedisKeyBuilder.docLines({doc_id: "foo"})
-#   key_builder == (key_schema) -> key_schema.docLines({doc_id: "foo"})
-#   key = key_builder(key_schema)
-#   key == "doclines:foo"
-module.exports = RedisKeyBuilder =
-  blockingKey: ({doc_id}) ->
-    return (key_schema) -> key_schema.blockingKey({doc_id})
-  docLines: ({doc_id}) ->
-    return (key_schema) -> key_schema.docLines({doc_id})
-  docOps: ({doc_id}) ->
-    return (key_schema) -> key_schema.docOps({doc_id})
-  docVersion: ({doc_id}) ->
-    return (key_schema) -> key_schema.docVersion({doc_id})
-  docHash: ({doc_id}) ->
-    return (key_schema) -> key_schema.docHash({doc_id})
-  projectKey: ({doc_id}) ->
-    return (key_schema) -> key_schema.projectKey({doc_id})
-  uncompressedHistoryOp: ({doc_id}) ->
-    return (key_schema) -> key_schema.uncompressedHistoryOp({doc_id})
-  pendingUpdates: ({doc_id}) ->
-    return (key_schema) -> key_schema.pendingUpdates({doc_id})
-  ranges: ({doc_id}) ->
-    return (key_schema) -> key_schema.ranges({doc_id})
-  docsInProject: ({project_id}) ->
-    return (key_schema) -> key_schema.docsInProject({project_id})
-  docsWithHistoryOps: ({project_id}) ->
-    return (key_schema) -> key_schema.docsWithHistoryOps({project_id})
diff --git a/services/document-updater/app/coffee/RedisManager.coffee b/services/document-updater/app/coffee/RedisManager.coffee
index cf8249dbd7..3359a36231 100644
--- a/services/document-updater/app/coffee/RedisManager.coffee
+++ b/services/document-updater/app/coffee/RedisManager.coffee
@@ -1,8 +1,7 @@
 Settings = require('settings-sharelatex')
 async = require('async')
-rclient = require("./RedisBackend").createClient()
+rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
 _ = require('underscore')
-keys = require('./RedisKeyBuilder')
 logger = require('logger-sharelatex')
 metrics = require('./Metrics')
 Errors = require "./Errors"
@@ -25,6 +24,8 @@ logHashWriteErrors = logHashErrors?.write
 MEGABYTES = 1024 * 1024
 MAX_RANGES_SIZE = 3 * MEGABYTES
 
+keys = Settings.redis.documentupdater.key_schema
+
 module.exports = RedisManager =
   rclient: rclient
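With RedisKeyBuilder removed, RedisManager resolves keys directly from the schema in settings instead of passing builder functions to a dual backend. A small before/after sketch of the call pattern (the doc id is illustrative):

# Old: pass a builder that each backend resolves against its own schema
key_builder = RedisKeyBuilder.docLines({doc_id: "foo"})   # (key_schema) -> key_schema.docLines(...)
# New: look the key up once from the single documentupdater schema
keys = Settings.redis.documentupdater.key_schema
key = keys.docLines({doc_id: "foo"})                      # "doclines:foo"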
diff --git a/services/document-updater/config/settings.defaults.coffee b/services/document-updater/config/settings.defaults.coffee
index ae0f9fe681..d638329622 100755
--- a/services/document-updater/config/settings.defaults.coffee
+++ b/services/document-updater/config/settings.defaults.coffee
@@ -20,11 +20,10 @@ module.exports =
       port:"6379"
       host:"localhost"
       password:""
-    documentupdater: [{
-      primary: true
-      port:"6379"
-      host:"localhost"
-      password:""
+    documentupdater:
+      port: "6379"
+      host: "localhost"
+      password: ""
       key_schema:
         blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
         docLines: ({doc_id}) -> "doclines:#{doc_id}"
@@ -34,20 +33,19 @@ module.exports =
         projectKey: ({doc_id}) -> "ProjectId:#{doc_id}"
         docsInProject: ({project_id}) -> "DocsIn:#{project_id}"
         ranges: ({doc_id}) -> "Ranges:#{doc_id}"
-    # }, {
-    #   cluster: [{
-    #     port: "7000"
-    #     host: "localhost"
-    #   }]
-    #   key_schema:
-    #     blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}"
-    #     docLines: ({doc_id}) -> "doclines:{#{doc_id}}"
-    #     docOps: ({doc_id}) -> "DocOps:{#{doc_id}}"
-    #     docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}"
-    #     projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}"
-    #     docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}"
-    #     ranges: ({doc_id}) -> "Ranges:{#{doc_id}}"
-  }]
+      # cluster: [{
+      #   port: "7000"
+      #   host: "localhost"
+      # }]
+      # key_schema:
+      #   blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}"
+      #   docLines: ({doc_id}) -> "doclines:{#{doc_id}}"
+      #   docOps: ({doc_id}) -> "DocOps:{#{doc_id}}"
+      #   docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}"
+      #   docHash: ({doc_id}) -> "DocHash:{#{doc_id}}"
+      #   projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}"
+      #   docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}"
+      #   ranges: ({doc_id}) -> "Ranges:{#{doc_id}}"
 
   max_doc_length: 2 * 1024 * 1024 # 2mb
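The commented-out cluster block keeps the hash-tagged variant of the schema for later use. As the removed RedisKeyBuilder header explained, Redis Cluster hashes only the contents of {...}, so wrapping the doc id keeps all keys for one doc in the same slot. A sketch of the difference:

# Single-instance schema vs. cluster schema for the same doc id
keys_single  = docLines: ({doc_id}) -> "doclines:#{doc_id}"
keys_cluster = docLines: ({doc_id}) -> "doclines:{#{doc_id}}"
keys_single.docLines({doc_id: "foo"})   # "doclines:foo"
keys_cluster.docLines({doc_id: "foo"})  # "doclines:{foo}" - only "foo" is hashed, so a doc's keys share a slot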
diff --git a/services/document-updater/package.json b/services/document-updater/package.json
index 94e8881810..fecda6f936 100644
--- a/services/document-updater/package.json
+++ b/services/document-updater/package.json
@@ -14,7 +14,7 @@
   "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6",
   "lynx": "0.0.11",
   "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0",
-  "redis-sharelatex": "0.0.9",
+  "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.0",
   "request": "2.25.0",
   "sandboxed-module": "~0.2.0",
   "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0",
diff --git a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee
index bdfe89b990..b0cca1d18b 100644
--- a/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee
+++ b/services/document-updater/test/acceptance/coffee/ApplyingUpdatesToADocTests.coffee
@@ -4,7 +4,9 @@ chai.should()
 expect = chai.expect
 async = require "async"
 Settings = require('settings-sharelatex')
-rclient = require("redis-sharelatex").createClient(Settings.redis.web)
+rclient_web = require("redis-sharelatex").createClient(Settings.redis.web)
+rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
+Keys = Settings.redis.documentupdater.key_schema
 
 MockTrackChangesApi = require "./helpers/MockTrackChangesApi"
 MockWebApi = require "./helpers/MockWebApi"
@@ -47,10 +49,10 @@ describe "Applying updates to a doc", ->
         done()
 
     it "should push the applied updates to the track changes api", (done) ->
-      rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) =>
+      rclient_web.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) =>
         throw error if error?
         JSON.parse(updates[0]).op.should.deep.equal @update.op
-        rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) =>
+        rclient_web.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) =>
           throw error if error?
           result.should.equal 1
           done()
@@ -80,9 +82,9 @@ describe "Applying updates to a doc", ->
         done()
 
     it "should push the applied updates to the track changes api", (done) ->
-      rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) =>
+      rclient_web.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) =>
         JSON.parse(updates[0]).op.should.deep.equal @update.op
-        rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) =>
+        rclient_web.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) =>
           result.should.equal 1
           done()
@@ -125,17 +127,17 @@ describe "Applying updates to a doc", ->
         done()
 
     it "should push the applied updates to the track changes api", (done) ->
-      rclient.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) =>
+      rclient_web.lrange "UncompressedHistoryOps:#{@doc_id}", 0, -1, (error, updates) =>
         updates = (JSON.parse(u) for u in updates)
         for appliedUpdate, i in @updates
           appliedUpdate.op.should.deep.equal updates[i].op
 
-        rclient.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) =>
+        rclient_web.sismember "DocsWithHistoryOps:#{@project_id}", @doc_id, (error, result) =>
          result.should.equal 1
          done()
 
     it "should store the doc ops in the correct order", (done) ->
-      rclient.lrange "DocOps:#{@doc_id}", 0, -1, (error, updates) =>
+      rclient_du.lrange Keys.docOps({doc_id: @doc_id}), 0, -1, (error, updates) =>
         updates = (JSON.parse(u) for u in updates)
         for appliedUpdate, i in @updates
           appliedUpdate.op.should.deep.equal updates[i].op
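The updated acceptance tests split their Redis assertions across two clients: history keys are still read from the web instance by name, while document-updater keys go through rclient_du and the shared key schema. Roughly (doc_id illustrative):

# Sketch: history keys stay on the web client, document keys go through the schema
rclient_web.lrange "UncompressedHistoryOps:#{doc_id}", 0, -1, (error, historyOps) ->
  throw error if error?
rclient_du.lrange Keys.docOps({doc_id}), 0, -1, (error, docOps) ->
  throw error if error?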
diff --git a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee
index 1a5d790be8..97fae5cf14 100644
--- a/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee
+++ b/services/document-updater/test/acceptance/coffee/SettingADocumentTests.coffee
@@ -3,7 +3,8 @@ chai = require("chai")
 chai.should()
 expect = require("chai").expect
 Settings = require('settings-sharelatex')
-rclient = require("redis-sharelatex").createClient(Settings.redis.web)
+rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
+Keys = Settings.redis.documentupdater.key_schema
 
 MockTrackChangesApi = require "./helpers/MockTrackChangesApi"
 MockWebApi = require "./helpers/MockWebApi"
@@ -65,7 +66,7 @@ describe "Setting a document", ->
         done()
 
     it "should leave the document in redis", (done) ->
-      rclient.get "doclines:#{@doc_id}", (error, lines) =>
+      rclient_du.get Keys.docLines({doc_id: @doc_id}), (error, lines) =>
         throw error if error?
         expect(JSON.parse(lines)).to.deep.equal @newLines
         done()
@@ -90,7 +91,7 @@ describe "Setting a document", ->
       MockTrackChangesApi.flushDoc.calledWith(@doc_id).should.equal true
 
     it "should remove the document from redis", (done) ->
-      rclient.get "doclines:#{@doc_id}", (error, lines) =>
+      rclient_du.get Keys.docLines({doc_id: @doc_id}), (error, lines) =>
        throw error if error?
        expect(lines).to.not.exist
        done()
diff --git a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee b/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee
deleted file mode 100644
index 4a136baae1..0000000000
--- a/services/document-updater/test/unit/coffee/RedisBackend/RedisBackendTests.coffee
+++ /dev/null
@@ -1,504 +0,0 @@
-sinon = require('sinon')
-chai = require('chai')
-should = chai.should()
-modulePath = "../../../../app/js/RedisBackend.js"
-SandboxedModule = require('sandboxed-module')
-RedisKeyBuilder = require "../../../../app/js/RedisKeyBuilder"
-
-describe "RedisBackend", ->
-  beforeEach ->
-    @Settings =
-      redis:
-        documentupdater: [{
-          primary: true
-          port: "6379"
-          host: "localhost"
-          password: "single-password"
-          key_schema:
-            blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
-            docLines: ({doc_id}) -> "doclines:#{doc_id}"
-            docOps: ({doc_id}) -> "DocOps:#{doc_id}"
-            docVersion: ({doc_id}) -> "DocVersion:#{doc_id}"
-            docHash: ({doc_id}) -> "DocHash:#{doc_id}"
-            projectKey: ({doc_id}) -> "ProjectId:#{doc_id}"
-            pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}"
-            docsInProject: ({project_id}) -> "DocsIn:#{project_id}"
-        }, {
-          cluster: [{
-            port: "7000"
-            host: "localhost"
-          }]
-          password: "cluster-password"
-          key_schema:
-            blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}"
-            docLines: ({doc_id}) -> "doclines:{#{doc_id}}"
-            docOps: ({doc_id}) -> "DocOps:{#{doc_id}}"
-            docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}"
-            docHash: ({doc_id}) -> "DocHash:{#{doc_id}}"
-            projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}"
-            pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}"
-            docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}"
-        }]
-
-    test_context = @
-    class Cluster
-      constructor: (@config) ->
-        test_context.rclient_ioredis = @
-
-      nodes: sinon.stub()
-
-    @timer = timer = sinon.stub()
-    class Timer
-      constructor: (args...) -> timer(args...)
-      done: () ->
-
-    @RedisBackend = SandboxedModule.require modulePath, requires:
-      "settings-sharelatex": @Settings
-      "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() }
-      "redis-sharelatex": @redis =
-        createClient: sinon.stub().returns @rclient_redis = {}
-        activeHealthCheck: sinon.stub()
-      "ioredis": @ioredis =
-        Cluster: Cluster
-      "metrics-sharelatex":
-        @Metrics =
-          inc: sinon.stub()
-          Timer: Timer
-
-    @client = @RedisBackend.createClient()
-
-    @doc_id = "mock-doc-id"
-    @project_id = "mock-project-id"
-
-  it "should create a redis client", ->
-    @redis.createClient
-      .calledWith({
-        port: "6379"
-        host: "localhost"
-        password: "single-password"
-      })
-      .should.equal true
-
-  it "should create an ioredis cluster client", ->
-    @rclient_ioredis.config.should.deep.equal [{
-      port: "7000"
-      host: "localhost"
-    }]
-
-  describe "individual commands", ->
-    describe "with the same results", ->
-      beforeEach (done) ->
-        @content = "bar"
-        @rclient_redis.get = sinon.stub()
-        @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(null, @content)
-        @rclient_ioredis.get = sinon.stub()
-        @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(null, @content)
-        @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) =>
-          setTimeout () -> # Let all background requests complete
-            done(error)
-
-      it "should return the result", ->
-        @result.should.equal @content
-
-      it "should have called the redis client with the appropriate key", ->
-        @rclient_redis.get
-          .calledWith("doclines:#{@doc_id}")
-          .should.equal true
-
-      it "should have called the ioredis cluster client with the appropriate key", ->
-        @rclient_ioredis.get
-          .calledWith("doclines:{#{@doc_id}}")
-          .should.equal true
-
-      it "should send a metric", ->
-        @Metrics.inc
-          .calledWith("backend-match")
-          .should.equal true
-
-      it "should time the commands", ->
-        @timer
-          .calledWith("redis.ioredis.get")
-          .should.equal true
-        @timer
-          .calledWith("redis.noderedis.get")
-          .should.equal true
-
-    describe "with different results", ->
-      beforeEach (done) ->
-        @rclient_redis.get = sinon.stub()
-        @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(null, "primary-result")
-        @rclient_ioredis.get = sinon.stub()
-        @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(null, "secondary-result")
-        @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) =>
-          setTimeout () -> # Let all background requests complete
-            done(error)
-
-      it "should return the primary result", ->
-        @result.should.equal "primary-result"
-
-      it "should send a metric", ->
-        @Metrics.inc
-          .calledWith("backend-conflict")
-          .should.equal true
-
-    describe "with differently ordered results from smembers", ->
-      beforeEach (done) ->
-        @rclient_redis.smembers = sinon.stub()
-        @rclient_redis.smembers.withArgs("DocsIn:#{@project_id}").yields(null, ["one", "two"])
-        @rclient_ioredis.smembers = sinon.stub()
-        @rclient_ioredis.smembers.withArgs("DocsIn:{#{@project_id}}").yields(null, ["two", "one"])
-        @client.smembers RedisKeyBuilder.docsInProject({project_id: @project_id}), (error, @result) =>
-          setTimeout () -> # Let all background requests complete
-            done(error)
-
-      it "should return the primary result", ->
-        @result.should.deep.equal ["one", "two"]
-
-      it "should send a metric indicating a match", ->
-        @Metrics.inc
-          .calledWith("backend-match")
-          .should.equal true
-
-    describe "when the secondary errors", ->
-      beforeEach (done) ->
-        @rclient_redis.get = sinon.stub()
-        @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(null, "primary-result")
-        @rclient_ioredis.get = sinon.stub()
-        @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(@error = new Error("oops"))
-        @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) =>
-          setTimeout () -> # Let all background requests complete
-            done(error)
-
-      it "should return the primary result", ->
-        @result.should.equal "primary-result"
-
-      it "should log out the secondary error", ->
-        @logger.error
-          .calledWith({
-            err: @error
-          }, "error in redis backend")
-          .should.equal true
-
-    describe "when the primary errors", ->
-      beforeEach (done) ->
-        @rclient_redis.get = sinon.stub()
-        @rclient_redis.get.withArgs("doclines:#{@doc_id}").yields(@error = new Error("oops"))
-        @rclient_ioredis.get = sinon.stub()
-        @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(null, "secondary-result")
-        @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (@returned_error, @result) =>
-          setTimeout () -> # Let all background requests complete
-            done()
-
-      it "should return the error", ->
-        @returned_error.should.equal @error
-
-      it "should log out the error", ->
-        @logger.error
-          .calledWith({
-            err: @error
-          }, "error in redis backend")
-          .should.equal true
-
-    describe "when the command has the key in a non-zero argument index", ->
-      beforeEach (done) ->
-        @script = "mock-script"
-        @key_count = 1
-        @value = "mock-value"
-        @rclient_redis.eval = sinon.stub()
-        @rclient_redis.eval.withArgs(@script, @key_count, "Blocking:#{@doc_id}", @value).yields(null)
-        @rclient_ioredis.eval = sinon.stub()
-        @rclient_ioredis.eval.withArgs(@script, @key_count, "Blocking:{#{@doc_id}}", @value).yields(null, @content)
-        @client.eval @script, @key_count, RedisKeyBuilder.blockingKey({doc_id: @doc_id}), @value, (error) =>
-          setTimeout () -> # Let all background requests complete
-            done(error)
-
-      it "should have called the redis client with the appropriate key", ->
-        @rclient_redis.eval
-          .calledWith(@script, @key_count, "Blocking:#{@doc_id}", @value)
-          .should.equal true
-
-      it "should have called the ioredis cluster client with the appropriate key", ->
-        @rclient_ioredis.eval
-          .calledWith(@script, @key_count, "Blocking:{#{@doc_id}}", @value)
-          .should.equal true
-
-    describe "when the secondary takes longer than SECONDARY_TIMEOUT", ->
-      beforeEach (done) ->
-        @client.SECONDARY_TIMEOUT = 10
-        @content = "bar"
-        @rclient_redis.get = (key, cb) =>
-          key.should.equal "doclines:#{@doc_id}"
-          setTimeout () =>
-            cb(null, @content)
-          , @client.SECONDARY_TIMEOUT * 3 # If the secondary errors first, don't affect the primary result
-        @rclient_ioredis.get = (key, cb) =>
-          key.should.equal "doclines:{#{@doc_id}}"
-          setTimeout () =>
-            cb(null, @content)
-          , @client.SECONDARY_TIMEOUT * 2
-        @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) =>
-          done(error)
-
-      it "should log out an error for the backend", ->
-        @logger.error
-          .calledWith({err: new Error("backend timed out")}, "backend timed out")
-          .should.equal true
-
-      it "should return the primary result", ->
-        @result.should.equal @content
-
-    describe "when the primary takes longer than SECONDARY_TIMEOUT", ->
-      beforeEach (done) ->
-        @client.SECONDARY_TIMEOUT = 10
-        @content = "bar"
-        @rclient_ioredis.get = sinon.stub()
-        @rclient_ioredis.get.withArgs("doclines:{#{@doc_id}}").yields(null, @content)
-        @rclient_redis.get = (key, cb) =>
-          key.should.equal "doclines:#{@doc_id}"
-          setTimeout () =>
-            cb(null, @content)
-          , @client.SECONDARY_TIMEOUT * 2
-        @client.get RedisKeyBuilder.docLines({doc_id: @doc_id}), (error, @result) =>
-          done(error)
-
-      it "should not consider this an error", ->
-        @logger.error
-          .called
-          .should.equal false
-
-  describe "multi commands", ->
-    beforeEach ->
-      # We will test with:
-      # rclient.multi()
-      #   .get("doclines:foo")
-      #   .get("DocVersion:foo")
-      #   .exec (...) ->
-      @doclines = "mock-doclines"
-      @version = "42"
-      @rclient_redis.multi = sinon.stub().returns @rclient_redis
-      @rclient_ioredis.multi = sinon.stub().returns @rclient_ioredis
-
-    describe "with the same results", ->
-      beforeEach (done) ->
-        @rclient_redis.get = sinon.stub()
-        @rclient_redis.exec = sinon.stub().yields(null, [@doclines, @version])
-        @rclient_ioredis.get = sinon.stub()
-        @rclient_ioredis.exec = sinon.stub().yields(null, [ [null, @doclines], [null, @version] ])
-
-        multi = @client.multi()
-        multi.get RedisKeyBuilder.docLines({doc_id: @doc_id})
-        multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id})
-        multi.exec (error, @result) =>
-          setTimeout () ->
-            done(error)
-
-      it "should return the result", ->
-        @result.should.deep.equal [@doclines, @version]
-
-      it "should have called the redis client with the appropriate keys", ->
-        @rclient_redis.get
-          .calledWith("doclines:#{@doc_id}")
-          .should.equal true
-        @rclient_redis.get
-          .calledWith("DocVersion:#{@doc_id}")
-          .should.equal true
-        @rclient_ioredis.exec
-          .called
-          .should.equal true
-
-      it "should have called the ioredis cluster client with the appropriate keys", ->
-        @rclient_ioredis.get
-          .calledWith("doclines:{#{@doc_id}}")
-          .should.equal true
-        @rclient_ioredis.get
-          .calledWith("DocVersion:{#{@doc_id}}")
-          .should.equal true
-        @rclient_ioredis.exec
-          .called
-          .should.equal true
-
-      it "should send a metric", ->
-        @Metrics.inc
-          .calledWith("backend-match")
-          .should.equal true
-
-      it "should time the exec", ->
-        @timer
-          .calledWith("redis.ioredis.exec")
-          .should.equal true
-        @timer
-          .calledWith("redis.noderedis.exec")
-          .should.equal true
-
-    describe "with different results", ->
-      beforeEach (done) ->
-        @rclient_redis.get = sinon.stub()
-        @rclient_redis.exec = sinon.stub().yields(null, [@doclines, @version])
-        @rclient_ioredis.get = sinon.stub()
-        @rclient_ioredis.exec = sinon.stub().yields(null, [ [null, "different-doc-lines"], [null, @version] ])
-
-        multi = @client.multi()
-        multi.get RedisKeyBuilder.docLines({doc_id: @doc_id})
-        multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id})
-        multi.exec (error, @result) =>
-          setTimeout () ->
-            done(error)
-
-      it "should return the primary result", ->
-        @result.should.deep.equal [@doclines, @version]
-
-      it "should send a metric", ->
-        @Metrics.inc
-          .calledWith("backend-conflict")
-          .should.equal true
-
-    describe "when the secondary errors", ->
-      beforeEach (done) ->
-        @rclient_redis.get = sinon.stub()
-        @rclient_redis.exec = sinon.stub().yields(null, [@doclines, @version])
-        @rclient_ioredis.get = sinon.stub()
-        @rclient_ioredis.exec = sinon.stub().yields(@error = new Error("oops"))
-
-        multi = @client.multi()
-        multi.get RedisKeyBuilder.docLines({doc_id: @doc_id})
-        multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id})
-        multi.exec (error, @result) =>
-          setTimeout () ->
-            done(error)
-
-      it "should return the primary result", ->
-        @result.should.deep.equal [@doclines, @version]
-
-    describe "when the primary errors", ->
-      beforeEach (done) ->
-        @rclient_redis.get = sinon.stub()
-        @rclient_redis.exec = sinon.stub().yields(@error = new Error("oops"))
-        @rclient_ioredis.get = sinon.stub()
-        @rclient_ioredis.exec = sinon.stub().yields([ [null, @doclines], [null, @version] ])
-
-        multi = @client.multi()
-        multi.get RedisKeyBuilder.docLines({doc_id: @doc_id})
-        multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id})
-        multi.exec (@returned_error) =>
-          setTimeout () -> done()
-
-      it "should return the error", ->
-        @returned_error.should.equal @error
-
-    describe "when the secondary takes longer than SECONDARY_TIMEOUT", ->
-      beforeEach (done) ->
-        @rclient_redis.get = sinon.stub()
-        @rclient_redis.exec = (cb) =>
-          setTimeout () =>
-            cb(null, [@doclines, @version])
-          , 30 # If secondary errors first, don't affect the primary result
-        @rclient_ioredis.get = sinon.stub()
-        @rclient_ioredis.exec = (cb) =>
-          setTimeout () =>
-            cb(null, [ [null, @doclines], [null, @version] ])
-          , 20
-
-        multi = @client.multi()
-        multi.SECONDARY_TIMEOUT = 10
-        multi.get RedisKeyBuilder.docLines({doc_id: @doc_id})
-        multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id})
-        multi.exec (error, @result) =>
-          done(error)
-
-      it "should log out an error for the backend", ->
-        @logger.error
-          .calledWith({err: new Error("backend timed out")}, "backend timed out")
-          .should.equal true
-
-      it "should return the primary result", ->
-        @result.should.deep.equal [@doclines, @version]
-
-    describe "when the primary takes longer than SECONDARY_TIMEOUT", ->
-      beforeEach (done) ->
-        @rclient_redis.get = sinon.stub()
-        @rclient_redis.exec = (cb) =>
-          setTimeout () =>
-            cb(null, [@doclines, @version])
-          , 20
-        @rclient_ioredis.get = sinon.stub()
-        @rclient_ioredis.exec = sinon.stub().yields(null, [ [null, @doclines], [null, @version] ])
-
-        multi = @client.multi()
-        multi.SECONDARY_TIMEOUT = 10
-        multi.get RedisKeyBuilder.docLines({doc_id: @doc_id})
-        multi.get RedisKeyBuilder.docVersion({doc_id: @doc_id})
-        multi.exec (error, @result) =>
-          done(error)
-
-      it "should not consider this an error", ->
-        @logger.error
-          .called
-          .should.equal false
-
-  describe "_healthCheckNodeRedisClient", ->
-    beforeEach ->
-      @redis.activeHealthCheckRedis = sinon.stub().returns @healthCheck = {
-        isAlive: sinon.stub()
-      }
-
-    describe "successfully", ->
-      beforeEach (done) ->
-        @healthCheck.isAlive.returns true
-        @redis_client = {}
-        @client._healthCheckNodeRedisClient(@redis_client, done)
-
-      it "should check the status of the node redis client", ->
-        @healthCheck.isAlive.called.should.equal true
-
-      it "should only create one health check when called multiple times", (done) ->
-        @client._healthCheckNodeRedisClient @redis_client, () =>
-          @redis.activeHealthCheckRedis.calledOnce.should.equal true
-          @healthCheck.isAlive.calledTwice.should.equal true
-          done()
-
-    describe "when failing", ->
-      beforeEach ->
-        @healthCheck.isAlive.returns false
-        @redis_client = {}
-
-      it "should return an error", (done) ->
-        @client._healthCheckNodeRedisClient @redis_client, (error) ->
-          error.message.should.equal "node-redis client failed health check"
-          done()
-
-  describe "_healthCheckClusterClient", ->
-    beforeEach ->
-      @client.HEARTBEAT_TIMEOUT = 10
-      @nodes = [{
-        options: key: "node-0"
-        stream: destroy: sinon.stub()
-      }, {
-        options: key: "node-1"
-        stream: destroy: sinon.stub()
-      }]
-      @rclient_ioredis.nodes = sinon.stub().returns(@nodes)
-
-    describe "when both clients are successful", ->
-      beforeEach (done) ->
-        @nodes[0].ping = sinon.stub().yields()
-        @nodes[1].ping = sinon.stub().yields()
-        @client._healthCheckClusterClient({ rclient: @rclient_ioredis }, done)
-
-      it "should get all cluster nodes", ->
-        @rclient_ioredis.nodes
-          .calledWith("all")
-          .should.equal true
-
- it "should ping each cluster node", -> - for node in @nodes - node.ping.called.should.equal true - - describe "when ping fails to a node", -> - beforeEach -> - @nodes[0].ping = (cb) -> cb() - @nodes[1].ping = (cb) -> # Just hang - - it "should return an error", (done) -> - @client._healthCheckClusterClient { rclient: @rclient_ioredis }, (error) -> - error.message.should.equal "ioredis node ping check timed out" - done() diff --git a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee index 258603be9b..070abd859a 100644 --- a/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/RedisManager/RedisManagerTests.coffee @@ -14,20 +14,24 @@ describe "RedisManager", -> @rclient.multi = () => @rclient @RedisManager = SandboxedModule.require modulePath, requires: - "./RedisBackend": - createClient: () => @rclient - "./RedisKeyBuilder": - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - docLines: ({doc_id}) -> "doclines:#{doc_id}" - docOps: ({doc_id}) -> "DocOps:#{doc_id}" - docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" - docHash: ({doc_id}) -> "DocHash:#{doc_id}" - projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" - pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" - docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - ranges: ({doc_id}) -> "Ranges:#{doc_id}" "logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() } - "settings-sharelatex": {documentupdater: {logHashErrors: {write:true, read:true}}} + "settings-sharelatex": { + documentupdater: {logHashErrors: {write:true, read:true}} + redis: + documentupdater: + key_schema: + blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" + docLines: ({doc_id}) -> "doclines:#{doc_id}" + docOps: ({doc_id}) -> "DocOps:#{doc_id}" + docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" + docHash: ({doc_id}) -> "DocHash:#{doc_id}" + projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" + pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" + docsInProject: ({project_id}) -> "DocsIn:#{project_id}" + ranges: ({doc_id}) -> "Ranges:#{doc_id}" + } + "redis-sharelatex": + createClient: () => @rclient "./Metrics": @metrics = inc: sinon.stub() Timer: class Timer diff --git a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee index f3f0d8afdc..a0f88b33f1 100644 --- a/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee +++ b/services/document-updater/test/unit/coffee/WebRedisManager/WebRedisManagerTests.coffee @@ -14,6 +14,7 @@ describe "WebRedisManager", -> @WebRedisManager = SandboxedModule.require modulePath, requires: "redis-sharelatex": createClient: () => @rclient "settings-sharelatex": redis: web: @settings = {"mock": "settings"} + "logger-sharelatex": { log: () -> } @doc_id = "doc-id-123" @project_id = "project-id-123" @callback = sinon.stub()