/* eslint-disable
    camelcase,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
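// RealTimeRedisManager wraps the two Redis connections used in this file: the
// documentupdater client for the per-doc queue of pending updates, and the
// pubsub client for broadcasting applied ops (see sendData below).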
let RealTimeRedisManager
const Settings = require('settings-sharelatex')
const rclient = require('redis-sharelatex').createClient(
  Settings.redis.documentupdater
)
const pubsubClient = require('redis-sharelatex').createClient(
  Settings.redis.pubsub
)
const Keys = Settings.redis.documentupdater.key_schema
const logger = require('logger-sharelatex')
const os = require('os')
const crypto = require('crypto')
const metrics = require('./Metrics')

const HOST = os.hostname()
const RND = crypto.randomBytes(4).toString('hex') // generate a random key for this process
let COUNT = 0

const MAX_OPS_PER_ITERATION = 8 // process a limited number of ops for safety

module.exports = RealTimeRedisManager = {
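  // Atomically take up to MAX_OPS_PER_ITERATION pending updates off the doc's
  // Redis queue (LRANGE + LTRIM in a MULTI), parse them from JSON and pass the
  // resulting array to the callback.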
  getPendingUpdatesForDoc(doc_id, callback) {
    const multi = rclient.multi()
    multi.lrange(Keys.pendingUpdates({ doc_id }), 0, MAX_OPS_PER_ITERATION - 1)
    multi.ltrim(Keys.pendingUpdates({ doc_id }), MAX_OPS_PER_ITERATION, -1)
    return multi.exec(function (error, replys) {
      let jsonUpdate
      if (error != null) {
        return callback(error)
      }
      const jsonUpdates = replys[0]
      for (jsonUpdate of Array.from(jsonUpdates)) {
        // record metric for each update removed from queue
        metrics.summary('redis.pendingUpdates', jsonUpdate.length, {
          status: 'pop'
        })
      }
      const updates = []
      for (jsonUpdate of Array.from(jsonUpdates)) {
        var update
        try {
          update = JSON.parse(jsonUpdate)
        } catch (e) {
          return callback(e)
        }
        updates.push(update)
      }
      return callback(error, updates)
    })
  },

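  // Return the current length of the doc's pending-updates queue.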
  getUpdatesLength(doc_id, callback) {
    return rclient.llen(Keys.pendingUpdates({ doc_id }), callback)
  },

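  // Publish data on the applied-ops pub/sub channel (or a per-doc channel when
  // Settings.publishOnIndividualChannels is set), tagging each message with a
  // unique _id built from the host name, a per-process random key and a counter.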
  sendData(data) {
    // create a unique message id using a counter
    const message_id = `doc:${HOST}:${RND}-${COUNT++}`
    if (data != null) {
      data._id = message_id
    }

    const blob = JSON.stringify(data)
    metrics.summary('redis.publish.applied-ops', blob.length)

    // publish on separate channels for individual projects and docs when
    // configured (needs realtime to be configured for this too).
    if (Settings.publishOnIndividualChannels) {
      return pubsubClient.publish(`applied-ops:${data.doc_id}`, blob)
    } else {
      return pubsubClient.publish('applied-ops', blob)
    }
  }
}
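
// Usage sketch (illustrative only; real callers live in other modules and
// define the exact payload shapes — doc_id and handleError are placeholders):
//
//   RealTimeRedisManager.getPendingUpdatesForDoc(doc_id, function (error, updates) {
//     if (error != null) { return handleError(error) }
//     // apply `updates` to the in-memory document here
//   })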