Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-07 20:31:06 -05:00

Merge pull request #127 from overleaf/bg-add-queue-metrics
add queue throughput metrics

Commit a6bee0c16a
8 changed files with 53 additions and 18 deletions
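
The change adds summary metrics at the points where data enters and leaves Redis in the document-updater service: ops pushed onto the project history queue, updates popped off the pending-updates queue, doclines written, read and deleted, and applied-ops messages published. A minimal sketch of the core push/pop pattern in CoffeeScript; metrics.summary, the redis-sharelatex client and the push/pop status labels come from the diff below, while the queue key, metric name and helper functions are invented for illustration.

    # Sketch only: illustrates the push/pop instrumentation this PR adds.
    Settings = require('settings-sharelatex')
    metrics = require('./Metrics')
    rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater)

    EXAMPLE_QUEUE = "example:queue"   # hypothetical Redis list key

    pushOp = (jsonOp, callback) ->
      # record the serialized payload size as it enters the queue
      metrics.summary "redis.exampleQueue", jsonOp.length, {status: "push"}
      rclient.rpush EXAMPLE_QUEUE, jsonOp, callback

    popOps = (callback) ->
      multi = rclient.multi()
      multi.lrange EXAMPLE_QUEUE, 0, 7   # read up to 8 queued ops
      multi.ltrim EXAMPLE_QUEUE, 8, -1   # and remove them from the queue
      multi.exec (error, replys) ->
        return callback(error) if error?
        jsonOps = replys[0]
        # record the payload size of each op as it leaves the queue
        for jsonOp in jsonOps
          metrics.summary "redis.exampleQueue", jsonOp.length, {status: "pop"}
        callback null, jsonOps
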
@@ -2,9 +2,13 @@ Settings = require('settings-sharelatex')
 projectHistoryKeys = Settings.redis?.project_history?.key_schema
 rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
 logger = require('logger-sharelatex')
+metrics = require('./Metrics')

 module.exports = ProjectHistoryRedisManager =
   queueOps: (project_id, ops..., callback = (error, projectUpdateCount) ->) ->
+    # Record metric for ops pushed onto queue
+    for op in ops
+      metrics.summary "redis.projectHistoryOps", op.length, {status: "push"}
     multi = rclient.multi()
     # Push the ops onto the project history queue
     multi.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops...

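The ops passed to queueOps are already JSON strings (they are rpush'ed to Redis unchanged), so op.length is roughly the number of bytes pushed per op. A hedged usage sketch; the project id, doc id and op contents below are invented.

    # Hypothetical caller: the op is serialized before queueing, so the
    # "push" summary above observes the size of exactly this string.
    ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager')

    jsonOp = JSON.stringify {doc: "doc-id-123", op: [{i: "foo", p: 3}], v: 42}
    ProjectHistoryRedisManager.queueOps "project-id-123", jsonOp, (error, projectUpdateCount) ->
      throw error if error?
      console.log "queued 1 op, projectUpdateCount=#{projectUpdateCount}"
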
@@ -5,6 +5,7 @@ Keys = Settings.redis.documentupdater.key_schema
 logger = require('logger-sharelatex')
 os = require "os"
 crypto = require "crypto"
+metrics = require('./Metrics')

 HOST = os.hostname()
 RND = crypto.randomBytes(4).toString('hex') # generate a random key for this process

@@ -20,6 +21,9 @@ module.exports = RealTimeRedisManager =
     multi.exec (error, replys) ->
       return callback(error) if error?
       jsonUpdates = replys[0]
+      for jsonUpdate in jsonUpdates
+        # record metric for each update removed from queue
+        metrics.summary "redis.pendingUpdates", jsonUpdate.length, {status: "pop"}
       updates = []
       for jsonUpdate in jsonUpdates
         try

@@ -36,9 +40,13 @@ module.exports = RealTimeRedisManager =
     # create a unique message id using a counter
     message_id = "doc:#{HOST}:#{RND}-#{COUNT++}"
     data?._id = message_id
+
+    blob = JSON.stringify(data)
+    metrics.summary "redis.publish.applied-ops", blob.length
+
     # publish on separate channels for individual projects and docs when
     # configured (needs realtime to be configured for this too).
     if Settings.publishOnIndividualChannels
-      pubsubClient.publish "applied-ops:#{data.doc_id}", JSON.stringify(data)
+      pubsubClient.publish "applied-ops:#{data.doc_id}", blob
     else
-      pubsubClient.publish "applied-ops", JSON.stringify(data)
+      pubsubClient.publish "applied-ops", blob

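sendData now serializes the message once into blob, records its size, and publishes that same string on whichever channel scheme is configured, instead of calling JSON.stringify twice; this also guarantees the recorded size matches the bytes actually published. A small sketch of the same stringify-once-and-measure idea; the helper and its metric name are illustrative, not part of the change.

    metrics = require('./Metrics')

    # Sketch: serialize once, measure, then reuse the same string when publishing.
    publishWithMetric = (pubsubClient, channel, data) ->
      blob = JSON.stringify(data)
      metrics.summary "redis.publish.#{channel}", blob.length
      pubsubClient.publish channel, blob
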
@@ -41,6 +41,8 @@ module.exports = RedisManager =
       logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message
       return callback(error)
     docHash = RedisManager._computeHash(docLines)
+    # record bytes sent to redis
+    metrics.summary "redis.docLines", docLines.length, {status: "set"}
     logger.log {project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis"
     RedisManager._serializeRanges ranges, (error, ranges) ->
       if error?

@@ -73,6 +75,7 @@ module.exports = RedisManager =
         _callback()

     multi = rclient.multi()
+    multi.strlen keys.docLines(doc_id:doc_id)
     multi.del keys.docLines(doc_id:doc_id)
     multi.del keys.projectKey(doc_id:doc_id)
     multi.del keys.docVersion(doc_id:doc_id)

@@ -84,8 +87,12 @@ module.exports = RedisManager =
     multi.del keys.unflushedTime(doc_id:doc_id)
     multi.del keys.lastUpdatedAt(doc_id: doc_id)
     multi.del keys.lastUpdatedBy(doc_id: doc_id)
-    multi.exec (error) ->
+    multi.exec (error, response) ->
       return callback(error) if error?
+      length = response?[0]
+      if length > 0
+        # record bytes freed in redis
+        metrics.summary "redis.docLines", length, {status: "del"}
       multi = rclient.multi()
       multi.srem keys.docsInProject(project_id:project_id), doc_id
       multi.del keys.projectState(project_id:project_id)

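Because the doclines key is deleted inside the same MULTI, its size has to be read in that transaction as well: the new strlen is queued first, and its reply (response[0]) gives the number of bytes about to be freed. A minimal sketch of the same pattern for a single hypothetical key; only the strlen/del/exec sequence and the metrics.summary call mirror the change above.

    metrics = require('./Metrics')

    # Sketch: measure a value's size and delete it atomically, recording the
    # freed bytes afterwards. "some:key" and "redis.someKey" are placeholders.
    deleteWithMetric = (rclient, callback) ->
      multi = rclient.multi()
      multi.strlen "some:key"   # reply 0: current length in bytes
      multi.del "some:key"      # reply 1: number of keys removed
      multi.exec (error, response) ->
        return callback(error) if error?
        length = response?[0]
        # skip the observation when the key did not exist (length 0)
        metrics.summary "redis.someKey", length, {status: "del"} if length > 0
        callback()
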
@@ -125,6 +132,9 @@ module.exports = RedisManager =
       if timeSpan > MAX_REDIS_REQUEST_LENGTH
         error = new Error("redis getDoc exceeded timeout")
         return callback(error)
+      # record bytes loaded from redis
+      if docLines?
+        metrics.summary "redis.docLines", docLines.length, {status: "get"}
       # check sha1 hash value if present
       if docLines? and storedHash?
         computedHash = RedisManager._computeHash(docLines)

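On the read path the summary is only recorded when docLines is actually present, so misses (doc not in Redis) do not show up as zero-byte observations. A hypothetical extension, not part of this commit, showing how the same guard could also feed hit/miss counters through metrics.inc, which the Metrics module already exposes (see the test stubs further down).

    metrics = require('./Metrics')

    # Hypothetical, for illustration only: count hits and misses alongside
    # the byte summary recorded on each successful read.
    recordDocLinesRead = (docLines) ->
      if docLines?
        metrics.inc "redis.docLines.cache-hit"
        metrics.summary "redis.docLines", docLines.length, {status: "get"}
      else
        metrics.inc "redis.docLines.cache-miss"
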
@@ -240,7 +250,8 @@ module.exports = RedisManager =

       opVersions = appliedOps.map (op) -> op?.v
       logger.log doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis"
-
+      # record bytes sent to redis in update
+      metrics.summary "redis.docLines", newDocLines.length, {status: "update"}
       RedisManager._serializeRanges ranges, (error, ranges) ->
         if error?
           logger.error {err: error, doc_id}, error.message

services/document-updater/package-lock.json (generated): 12 changes

@@ -853,9 +853,9 @@
       }
     },
     "acorn": {
-      "version": "6.4.0",
-      "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.0.tgz",
-      "integrity": "sha512-gac8OEcQ2Li1dxIEWGZzsp2BitJxwkwcOm0zHAJLcPJaVvm58FRnk6RkuLRpU1EujipU2ZFODv2P9DLMfnV8mw=="
+      "version": "6.4.1",
+      "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz",
+      "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA=="
     },
     "agent-base": {
       "version": "6.0.0",

@@ -2183,9 +2183,9 @@
       "integrity": "sha512-2403MfnVypWSNIEpmQ26/ObZ5kSUx37E8NHRvriw0+I8Sne7k0HGuLGCk0OrCqURh4UIygD0cSsYq+Ll+kzNqA=="
     },
     "metrics-sharelatex": {
-      "version": "2.5.1",
-      "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.5.1.tgz",
-      "integrity": "sha512-C2gmkl/tUnq3IlSX/x3dixGhdvfD6H9FR9mBf9lnkeyy2arafxhCU6u+1IQj6byjBM7vGpYHyjwWnmoi3Vb+ZQ==",
+      "version": "2.6.2",
+      "resolved": "https://registry.npmjs.org/metrics-sharelatex/-/metrics-sharelatex-2.6.2.tgz",
+      "integrity": "sha512-bOLfkSCexiPgB96hdXhoOWyvvrwscgjeZPEqdcJ7BTGxY59anzvymNf5hTGJ1RtS4sblDKxITw3L5a+gYKhRYQ==",
       "requires": {
         "@google-cloud/debug-agent": "^3.0.0",
         "@google-cloud/profiler": "^0.2.3",

@@ -26,7 +26,7 @@
     "express": "3.11.0",
     "lodash": "^4.17.13",
     "logger-sharelatex": "^1.9.1",
-    "metrics-sharelatex": "^2.5.1",
+    "metrics-sharelatex": "^2.6.2",
     "mongojs": "^2.6.0",
     "redis-sharelatex": "^1.0.12",
     "request": "^2.47.0",

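The metrics-sharelatex bump from ^2.5.1 to ^2.6.2 accompanies the new metrics.summary calls, presumably because the summary helper only became available in the newer release. If older versions had to be tolerated, a defensive no-op fallback would be one option; a sketch only, not part of this commit.

    # Defensive sketch: degrade summary to a no-op when the installed metrics
    # library predates it, so the instrumentation never breaks callers.
    metrics = require('metrics-sharelatex')
    metrics.summary ?= ->
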
@@ -26,6 +26,7 @@ describe "ProjectHistoryRedisManager", ->
           createClient: () => @rclient
         "logger-sharelatex":
           log:->
+        "./Metrics": @metrics = { summary: sinon.stub()}
       globals:
         JSON: @JSON = JSON

@@ -11,7 +11,7 @@ describe "RealTimeRedisManager", ->
       auth: () ->
       exec: sinon.stub()
     @rclient.multi = () => @rclient
-    @pubsubClient =
+    @pubsubClient =
       publish: sinon.stub()
     @RealTimeRedisManager = SandboxedModule.require modulePath, requires:
       "redis-sharelatex": createClient: (config) => if (config.name is 'pubsub') then @pubsubClient else @rclient

@@ -25,11 +25,12 @@ describe "RealTimeRedisManager", ->
       "logger-sharelatex": { log: () -> }
       "crypto": @crypto = { randomBytes: sinon.stub().withArgs(4).returns(Buffer.from([0x1, 0x2, 0x3, 0x4])) }
       "os": @os = {hostname: sinon.stub().returns("somehost")}
+      "./Metrics": @metrics = { summary: sinon.stub()}

     @doc_id = "doc-id-123"
     @project_id = "project-id-123"
     @callback = sinon.stub()

   describe "getPendingUpdatesForDoc", ->
     beforeEach ->
       @rclient.lrange = sinon.stub()

@@ -44,7 +45,7 @@ describe "RealTimeRedisManager", ->
       @jsonUpdates = @updates.map (update) -> JSON.stringify update
       @rclient.exec = sinon.stub().callsArgWith(0, null, [@jsonUpdates])
       @RealTimeRedisManager.getPendingUpdatesForDoc @doc_id, @callback

     it "should get the pending updates", ->
       @rclient.lrange
         .calledWith("PendingUpdates:#{@doc_id}", 0, 7)

@@ -75,10 +76,10 @@ describe "RealTimeRedisManager", ->
     beforeEach ->
       @rclient.llen = sinon.stub().yields(null, @length = 3)
       @RealTimeRedisManager.getUpdatesLength @doc_id, @callback

     it "should look up the length", ->
       @rclient.llen.calledWith("PendingUpdates:#{@doc_id}").should.equal true

     it "should return the length", ->
       @callback.calledWith(null, @length).should.equal true

@@ -86,6 +87,9 @@ describe "RealTimeRedisManager", ->
     beforeEach ->
       @message_id = "doc:somehost:01020304-0"
       @RealTimeRedisManager.sendData({op: "thisop"})

     it "should send the op with a message id", ->
-      @pubsubClient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:@message_id})).should.equal true
+      @pubsubClient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:@message_id})).should.equal true
+
+    it "should track the payload size", ->
+      @metrics.summary.calledWith("redis.publish.applied-ops", JSON.stringify({op:"thisop",_id:@message_id}).length).should.equal true

@@ -48,6 +48,7 @@ describe "RedisManager", ->
           createClient: () => @rclient
         "./Metrics": @metrics =
           inc: sinon.stub()
+          summary: sinon.stub()
           Timer: class Timer
             constructor: () ->
               this.start = new Date()

@@ -670,11 +671,17 @@ describe "RedisManager", ->

   describe "removeDocFromMemory", ->
     beforeEach (done) ->
+      @multi.strlen = sinon.stub()
       @multi.del = sinon.stub()
       @multi.srem = sinon.stub()
       @multi.exec.yields()
       @RedisManager.removeDocFromMemory @project_id, @doc_id, done

+    it "should check the length of the current doclines", ->
+      @multi.strlen
+        .calledWith("doclines:#{@doc_id}")
+        .should.equal true
+
     it "should delete the lines", ->
       @multi.del
         .calledWith("doclines:#{@doc_id}")