Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-21 20:47:08 -05:00
Merge pull request #39 from sharelatex/bg-fix-message-ids

fix message id parsing

Commit 61ec1d195b
3 changed files with 58 additions and 32 deletions
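The core of the fix is how message ids are parsed in the EventLogger module below. An id carries a per-host key followed by a numeric counter, and the old code split on the first "-", which breaks as soon as the key itself contains a hyphen. A minimal standalone sketch of the before/after behaviour, using the id format from the updated tests (the snippet is illustrative and not part of the commit):

    # Illustrative sketch only -- not part of the diff below.
    message_id = "random-hostname:abc-2"      # id format used in the updated tests

    # Old parsing: split on the first "-"; a hyphenated host breaks it.
    [key, count] = message_id.split("-", 2)
    console.log key, parseInt(count, 10)      # -> "random" NaN

    # New parsing: anchor the numeric counter to the end of the id.
    if result = message_id.match(/^(.*)-(\d+)$/)
      console.log result[1], parseInt(result[2], 10)   # -> "random-hostname:abc" 2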
@@ -14,14 +14,15 @@ module.exports = EventLogger =
   checkEventOrder: (channel, message_id, message) ->
     return if typeof(message_id) isnt 'string'
-    [key, count] = message_id.split("-", 2)
-    count = parseInt(count, 10)
+    return if !(result = message_id.match(/^(.*)-(\d+)$/))
+    key = result[1]
+    count = parseInt(result[2], 0)
     if !(count >= 0)# ignore checks if counter is not present
       return
     # store the last count in a hash for each host
     previous = EventLogger._storeEventCount(key, count)
     if !previous? || count == (previous + 1)
-      metrics.inc "event.#{channel}.valid", 0.001
+      metrics.inc "event.#{channel}.valid", 0.001 # downsample high rate docupdater events
       return # order is ok
     if (count == previous)
       metrics.inc "event.#{channel}.duplicate"
@@ -30,7 +31,7 @@ module.exports = EventLogger =
     else
       metrics.inc "event.#{channel}.out-of-order"
       # logger.error {key:key, previous: previous, count:count, message:message}, "events out of order"
-      return # out of order
+      return "out-of-order"

   _storeEventCount: (key, count) ->
     previous = EVENT_LOG_COUNTER[key]
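Taken together, the two hunks above make checkEventOrder report its outcome to the caller as well as to metrics: undefined for an in-order event, "duplicate" for a repeated counter, and now the string "out-of-order" instead of a bare return. A hedged usage sketch, with the channel and id values borrowed from the updated tests and the surrounding variables purely illustrative:

    # Illustrative usage only; the argument order follows the updated tests.
    EventLogger = require "./EventLogger"
    channel = "applied-ops"
    status = EventLogger.checkEventOrder(channel, "random-hostname:abc-2", "message-2")
    switch status
      when "duplicate"    then console.log "counter repeated for this host"
      when "out-of-order" then console.log "counter skipped for this host"
      else console.log "event is in order"    # status is undefined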
@@ -22,6 +22,7 @@ describe "DocumentUpdaterController", ->
       @rclient = {}
       "./SafeJsonParse": @SafeJsonParse =
         parse: (data, cb) => cb null, JSON.parse(data)
+      "./EventLogger": @EventLogger = {checkEventOrder: sinon.stub()}

   describe "listenForUpdatesFromDocumentUpdater", ->
     beforeEach ->
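The controller test above now stubs ./EventLogger with a sinon stub so the ordering check can be asserted without touching real counter state. As a reminder of how such a stub behaves (a generic sinon sketch, not code from this commit; the argument values are the ones used in the EventLogger tests below):

    # Generic sinon sketch; not part of the diff.
    sinon = require "sinon"
    EventLogger = {checkEventOrder: sinon.stub()}
    EventLogger.checkEventOrder("applied-ops", "random-hostname:abc-1", "message-1")
    console.log EventLogger.checkEventOrder.calledWith("applied-ops", "random-hostname:abc-1", "message-1")   # -> true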
@@ -11,9 +11,11 @@ describe 'EventLogger', ->
     tk.freeze(new Date(@start))
     @EventLogger = SandboxedModule.require modulePath, requires:
       "logger-sharelatex": @logger = {error: sinon.stub()}
-    @id_1 = "abc-1"
+      "metrics-sharelatex": @metrics = {inc: sinon.stub()}
+    @channel = "applied-ops"
+    @id_1 = "random-hostname:abc-1"
     @message_1 = "message-1"
-    @id_2 = "abc-2"
+    @id_2 = "random-hostname:abc-2"
     @message_2 = "message-2"

   afterEach ->
@@ -21,32 +23,54 @@ describe 'EventLogger', ->
   describe 'checkEventOrder', ->

+    describe 'when the events are in order', ->
+      beforeEach ->
+        @EventLogger.checkEventOrder(@channel, @id_1, @message_1)
+        @status = @EventLogger.checkEventOrder(@channel, @id_2, @message_2)
+
       it 'should accept events in order', ->
-        @EventLogger.checkEventOrder(@id_1, @message_1)
-        status = @EventLogger.checkEventOrder(@id_2, @message_2)
-        expect(status).to.be.undefined
+        expect(@status).to.be.undefined
+
+      it 'should increment the valid event metric', ->
+        @metrics.inc.calledWith("event.#{@channel}.valid", 1)
+          .should.equal.true
+
+    describe 'when there is a duplicate events', ->
+      beforeEach ->
+        @EventLogger.checkEventOrder(@channel, @id_1, @message_1)
+        @status = @EventLogger.checkEventOrder(@channel, @id_1, @message_1)

       it 'should return "duplicate" for the same event', ->
-        @EventLogger.checkEventOrder(@id_1, @message_1)
-        status = @EventLogger.checkEventOrder(@id_1, @message_1)
-        expect(status).to.equal "duplicate"
+        expect(@status).to.equal "duplicate"

-      it 'should log an error for out of order events', ->
-        @EventLogger.checkEventOrder(@id_1, @message_1)
-        @EventLogger.checkEventOrder(@id_2, @message_2)
-        status = @EventLogger.checkEventOrder(@id_1, @message_1)
-        expect(status).to.be.undefined
+      it 'should increment the duplicate event metric', ->
+        @metrics.inc.calledWith("event.#{@channel}.duplicate", 1)
+          .should.equal.true
+
+    describe 'when there are out of order events', ->
+      beforeEach ->
+        @EventLogger.checkEventOrder(@channel, @id_1, @message_1)
+        @EventLogger.checkEventOrder(@channel, @id_2, @message_2)
+        @status = @EventLogger.checkEventOrder(@channel, @id_1, @message_1)
+
+      it 'should return "out-of-order" for the event', ->
+        expect(@status).to.equal "out-of-order"
+
+      it 'should increment the out-of-order event metric', ->
+        @metrics.inc.calledWith("event.#{@channel}.out-of-order", 1)
+          .should.equal.true
+
+    describe 'after MAX_STALE_TIME_IN_MS', ->
       it 'should flush old entries', ->
         @EventLogger.MAX_EVENTS_BEFORE_CLEAN = 10
-        @EventLogger.checkEventOrder(@id_1, @message_1)
+        @EventLogger.checkEventOrder(@channel, @id_1, @message_1)
         for i in [1..8]
-          status = @EventLogger.checkEventOrder(@id_1, @message_1)
+          status = @EventLogger.checkEventOrder(@channel, @id_1, @message_1)
           expect(status).to.equal "duplicate"
         # the next event should flush the old entries aboce
         @EventLogger.MAX_STALE_TIME_IN_MS=1000
         tk.freeze(new Date(@start + 5 * 1000))
         # because we flushed the entries this should not be a duplicate
-        @EventLogger.checkEventOrder('other-1', @message_2)
-        status = @EventLogger.checkEventOrder(@id_1, @message_1)
+        @EventLogger.checkEventOrder(@channel, 'other-1', @message_2)
+        status = @EventLogger.checkEventOrder(@channel, @id_1, @message_1)
         expect(status).to.be.undefined