// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let RedisManager
|
2021-07-12 16:47:16 +00:00
|
|
|
const Settings = require('@overleaf/settings')
|
2020-11-10 11:32:05 +00:00
|
|
|
const redis = require('@overleaf/redis-wrapper')
|
2020-02-17 17:34:28 +00:00
|
|
|
const rclient = redis.createClient(Settings.redis.history)
|
|
|
|
const Keys = Settings.redis.history.key_schema
|
|
|
|
const async = require('async')
|
2014-02-25 16:27:14 +00:00
|
|
|
|
2020-02-17 17:34:28 +00:00
|
|
|
module.exports = RedisManager = {
|
2023-03-21 12:21:51 +00:00
|
|
|
getOldestDocUpdates(docId, batchSize, callback) {
|
2020-02-17 17:34:28 +00:00
|
|
|
if (callback == null) {
|
2021-10-27 09:49:18 +00:00
|
|
|
callback = function () {}
|
2020-02-17 17:34:28 +00:00
|
|
|
}
|
2023-03-21 12:21:51 +00:00
|
|
|
const key = Keys.uncompressedHistoryOps({ doc_id: docId })
|
2020-02-17 17:34:28 +00:00
|
|
|
return rclient.lrange(key, 0, batchSize - 1, callback)
|
|
|
|
},
|
2015-10-08 13:40:42 +00:00
|
|
|
|
2020-02-17 17:34:28 +00:00
|
|
|
expandDocUpdates(jsonUpdates, callback) {
|
|
|
|
let rawUpdates
|
|
|
|
if (callback == null) {
|
2021-10-27 09:49:18 +00:00
|
|
|
callback = function () {}
|
2020-02-17 17:34:28 +00:00
|
|
|
}
|
|
|
|
try {
|
2021-07-13 11:04:43 +00:00
|
|
|
rawUpdates = Array.from(jsonUpdates || []).map(update =>
|
2020-02-17 17:34:28 +00:00
|
|
|
JSON.parse(update)
|
|
|
|
)
|
|
|
|
} catch (e) {
|
|
|
|
return callback(e)
|
|
|
|
}
|
|
|
|
return callback(null, rawUpdates)
|
|
|
|
},
|
2015-10-08 13:40:42 +00:00
|
|
|
|
2023-03-21 12:21:51 +00:00
|
|
|
deleteAppliedDocUpdates(projectId, docId, docUpdates, callback) {
|
2020-02-17 17:34:28 +00:00
|
|
|
if (callback == null) {
|
2021-10-27 09:49:18 +00:00
|
|
|
callback = function () {}
|
2020-02-17 17:34:28 +00:00
|
|
|
}
|
|
|
|
const multi = rclient.multi()
|
|
|
|
// Delete all the updates which have been applied (exact match)
|
|
|
|
for (const update of Array.from(docUpdates || [])) {
|
2023-03-21 12:21:51 +00:00
|
|
|
multi.lrem(Keys.uncompressedHistoryOps({ doc_id: docId }), 1, update)
|
2020-02-17 17:34:28 +00:00
|
|
|
}
|
2020-06-04 08:24:21 +00:00
|
|
|
return multi.exec(function (error, results) {
|
2020-02-17 17:34:28 +00:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
|
|
|
// It's ok to delete the doc_id from the set here. Even though the list
|
|
|
|
// of updates may not be empty, we will continue to process it until it is.
|
|
|
|
return rclient.srem(
|
2023-03-21 12:21:51 +00:00
|
|
|
Keys.docsWithHistoryOps({ project_id: projectId }),
|
|
|
|
docId,
|
2020-06-04 08:24:21 +00:00
|
|
|
function (error) {
|
2020-02-17 17:34:28 +00:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
|
|
|
return callback(null)
|
|
|
|
}
|
|
|
|
)
|
|
|
|
})
|
|
|
|
},
|
2014-02-25 16:27:14 +00:00
|
|
|
|
2023-03-21 12:21:51 +00:00
|
|
|
getDocIdsWithHistoryOps(projectId, callback) {
|
2020-02-17 17:34:28 +00:00
|
|
|
if (callback == null) {
|
2021-10-27 09:49:18 +00:00
|
|
|
callback = function () {}
|
2020-02-17 17:34:28 +00:00
|
|
|
}
|
2023-03-21 12:21:51 +00:00
|
|
|
return rclient.smembers(
|
|
|
|
Keys.docsWithHistoryOps({ project_id: projectId }),
|
|
|
|
callback
|
|
|
|
)
|
2020-02-17 17:34:28 +00:00
|
|
|
},
|
2014-03-21 13:48:14 +00:00
|
|
|
|
2020-02-17 17:34:28 +00:00
|
|
|
// iterate over keys asynchronously using redis scan (non-blocking)
|
|
|
|
// handle all the cluster nodes or single redis server
|
|
|
|
_getKeys(pattern, callback) {
|
|
|
|
const nodes = (typeof rclient.nodes === 'function'
|
|
|
|
? rclient.nodes('master')
|
|
|
|
: undefined) || [rclient]
|
|
|
|
const doKeyLookupForNode = (node, cb) =>
|
|
|
|
RedisManager._getKeysFromNode(node, pattern, cb)
|
|
|
|
return async.concatSeries(nodes, doKeyLookupForNode, callback)
|
|
|
|
},
|
2017-05-02 13:19:49 +00:00
|
|
|
|
2020-02-17 17:34:28 +00:00
|
|
|
_getKeysFromNode(node, pattern, callback) {
|
|
|
|
let cursor = 0 // redis iterator
|
|
|
|
const keySet = {} // use hash to avoid duplicate results
|
|
|
|
// scan over all keys looking for pattern
|
2021-10-26 08:08:56 +00:00
|
|
|
const doIteration = cb =>
|
2021-07-13 11:04:43 +00:00
|
|
|
node.scan(
|
|
|
|
cursor,
|
|
|
|
'MATCH',
|
|
|
|
pattern,
|
|
|
|
'COUNT',
|
|
|
|
1000,
|
|
|
|
function (error, reply) {
|
|
|
|
let keys
|
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
|
|
|
;[cursor, keys] = Array.from(reply)
|
|
|
|
for (const key of Array.from(keys)) {
|
|
|
|
keySet[key] = true
|
|
|
|
}
|
|
|
|
if (cursor === '0') {
|
|
|
|
// note redis returns string result not numeric
|
|
|
|
return callback(null, Object.keys(keySet))
|
|
|
|
} else {
|
|
|
|
return doIteration()
|
|
|
|
}
|
2020-02-17 17:34:28 +00:00
|
|
|
}
|
2021-07-13 11:04:43 +00:00
|
|
|
)
|
2020-02-17 17:34:28 +00:00
|
|
|
return doIteration()
|
|
|
|
},
|
2017-05-05 10:30:11 +00:00
|
|
|
|
2020-02-17 17:34:28 +00:00
|
|
|
// extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b
|
|
|
|
// or DocsWithHistoryOps:{57fd0b1f53a8396d22b2c24b} (for redis cluster)
|
|
|
|
_extractIds(keyList) {
|
|
|
|
const ids = (() => {
|
|
|
|
const result = []
|
|
|
|
for (const key of Array.from(keyList)) {
|
|
|
|
const m = key.match(/:\{?([0-9a-f]{24})\}?/) // extract object id
|
|
|
|
result.push(m[1])
|
|
|
|
}
|
|
|
|
return result
|
|
|
|
})()
|
|
|
|
return ids
|
|
|
|
},
|
2017-05-02 13:19:49 +00:00
|
|
|
|
2020-02-17 17:34:28 +00:00
|
|
|
getProjectIdsWithHistoryOps(callback) {
|
|
|
|
if (callback == null) {
|
2021-10-27 09:49:18 +00:00
|
|
|
callback = function () {}
|
2020-02-17 17:34:28 +00:00
|
|
|
}
|
|
|
|
return RedisManager._getKeys(
|
|
|
|
Keys.docsWithHistoryOps({ project_id: '*' }),
|
2023-03-21 12:21:51 +00:00
|
|
|
function (error, projectKeys) {
|
2020-02-17 17:34:28 +00:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
2023-03-21 12:21:51 +00:00
|
|
|
const projectIds = RedisManager._extractIds(projectKeys)
|
|
|
|
return callback(error, projectIds)
|
2020-02-17 17:34:28 +00:00
|
|
|
}
|
|
|
|
)
|
|
|
|
},
|
2017-05-02 13:19:49 +00:00
|
|
|
|
2020-02-17 17:34:28 +00:00
|
|
|
getAllDocIdsWithHistoryOps(callback) {
|
|
|
|
// return all the docids, to find dangling history entries after
|
|
|
|
// everything is flushed.
|
|
|
|
if (callback == null) {
|
2021-10-27 09:49:18 +00:00
|
|
|
callback = function () {}
|
2020-02-17 17:34:28 +00:00
|
|
|
}
|
|
|
|
return RedisManager._getKeys(
|
|
|
|
Keys.uncompressedHistoryOps({ doc_id: '*' }),
|
2023-03-21 12:21:51 +00:00
|
|
|
function (error, docKeys) {
|
2020-02-17 17:34:28 +00:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
2023-03-21 12:21:51 +00:00
|
|
|
const docIds = RedisManager._extractIds(docKeys)
|
|
|
|
return callback(error, docIds)
|
2020-02-17 17:34:28 +00:00
|
|
|
}
|
|
|
|
)
|
2021-07-13 11:04:43 +00:00
|
|
|
},
|
2020-02-17 17:34:28 +00:00
|
|
|
}
|