2016-06-17 07:17:22 -04:00
|
|
|
Settings = require('settings-sharelatex')
|
2019-07-10 04:42:05 -04:00
|
|
|
rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
|
|
|
|
pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub)
|
2019-07-03 05:21:25 -04:00
|
|
|
Keys = Settings.redis.documentupdater.key_schema
|
2017-03-30 06:20:41 -04:00
|
|
|
logger = require('logger-sharelatex')
|
2019-03-21 08:10:15 -04:00
|
|
|
os = require "os"
|
|
|
|
crypto = require "crypto"
|
2020-03-25 08:15:35 -04:00
|
|
|
metrics = require('./Metrics')
|
2019-03-21 08:10:15 -04:00
|
|
|
|
|
|
|
# Identity of this process, used to build message ids that are unique
# across the cluster (see sendData below).
HOST = os.hostname()

RND = crypto.randomBytes(4).toString('hex') # generate a random key for this process

# Per-process monotonically increasing counter; combined with HOST and RND
# it makes each message id unique.
COUNT = 0

MAX_OPS_PER_ITERATION = 8 # process a limited number of ops for safety
|
|
|
|
|
2017-05-02 10:38:33 -04:00
|
|
|
module.exports = RealTimeRedisManager =
	# Atomically pop up to MAX_OPS_PER_ITERATION pending updates for a doc
	# off the head of its Redis list and call back with the parsed objects.
	# callback signature: (error, updates)
	getPendingUpdatesForDoc : (doc_id, callback)->
		pendingKey = Keys.pendingUpdates({doc_id})
		multi = rclient.multi()
		# read and trim inside one MULTI so no update is lost or seen twice
		multi.lrange pendingKey, 0, (MAX_OPS_PER_ITERATION-1)
		multi.ltrim pendingKey, MAX_OPS_PER_ITERATION, -1
		multi.exec (error, results) ->
			return callback(error) if error?
			[jsonUpdates] = results
			updates = []
			for jsonUpdate in jsonUpdates
				try
					update = JSON.parse jsonUpdate
				catch parseError
					return callback parseError
				updates.push update
				# record metric for updates removed from queue
				metrics.summary "redis.pendingUpdates", jsonUpdate.length, {status: "pop"}
			callback error, updates
|
|
|
|
|
|
|
|
getUpdatesLength: (doc_id, callback)->
|
2017-04-13 12:00:42 -04:00
|
|
|
rclient.llen Keys.pendingUpdates({doc_id}), callback
|
2016-06-17 07:17:22 -04:00
|
|
|
|
2016-11-28 05:14:42 -05:00
|
|
|
sendData: (data) ->
|
2019-03-21 08:10:15 -04:00
|
|
|
# create a unique message id using a counter
|
|
|
|
message_id = "doc:#{HOST}:#{RND}-#{COUNT++}"
|
|
|
|
data?._id = message_id
|
2020-03-30 05:31:43 -04:00
|
|
|
|
|
|
|
blob = JSON.stringify(data)
|
|
|
|
metrics.summary "redis.publish.applied-ops", blob.length
|
|
|
|
|
2019-07-22 07:20:06 -04:00
|
|
|
# publish on separate channels for individual projects and docs when
|
|
|
|
# configured (needs realtime to be configured for this too).
|
2019-07-24 11:57:43 -04:00
|
|
|
if Settings.publishOnIndividualChannels
|
2020-03-30 05:31:43 -04:00
|
|
|
pubsubClient.publish "applied-ops:#{data.doc_id}", blob
|
2019-07-22 07:20:06 -04:00
|
|
|
else
|
2020-03-30 05:31:43 -04:00
|
|
|
pubsubClient.publish "applied-ops", blob
|