// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let RedisManager
const Settings = require('@overleaf/settings')
const redis = require('@overleaf/redis-wrapper')
const rclient = redis.createClient(Settings.redis.history)
const Keys = Settings.redis.history.key_schema
const async = require('async')

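// Redis helpers for the history queues: raw (uncompressed) ops are queued in
// a list per doc, and the docs with pending ops are tracked in a set per
// project.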
module.exports = RedisManager = {
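  // Fetch up to `batchSize` of the oldest raw (JSON string) updates queued
  // for a doc, without removing them from the list.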
  getOldestDocUpdates(docId, batchSize, callback) {
    if (callback == null) {
      callback = function () {}
    }
    const key = Keys.uncompressedHistoryOps({ doc_id: docId })
    return rclient.lrange(key, 0, batchSize - 1, callback)
  },
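
  // Parse a batch of JSON-encoded updates; any parse failure aborts the whole
  // batch and is passed to the callback.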
  expandDocUpdates(jsonUpdates, callback) {
    let rawUpdates
    if (callback == null) {
      callback = function () {}
    }
    try {
      rawUpdates = Array.from(jsonUpdates || []).map(update =>
        JSON.parse(update)
      )
    } catch (e) {
      return callback(e)
    }
    return callback(null, rawUpdates)
  },
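
  // Remove updates that have already been processed, matching each raw update
  // string exactly (LREM count 1), then drop the doc from the project's
  // docsWithHistoryOps set.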
  deleteAppliedDocUpdates(projectId, docId, docUpdates, callback) {
    if (callback == null) {
      callback = function () {}
    }
    const multi = rclient.multi()
    // Delete all the updates which have been applied (exact match)
    for (const update of Array.from(docUpdates || [])) {
      multi.lrem(Keys.uncompressedHistoryOps({ doc_id: docId }), 1, update)
    }
    return multi.exec(function (error, results) {
      if (error != null) {
        return callback(error)
      }
      // It's ok to delete the doc_id from the set here. Even though the list
      // of updates may not be empty, we will continue to process it until it is.
      return rclient.srem(
        Keys.docsWithHistoryOps({ project_id: projectId }),
        docId,
        function (error) {
          if (error != null) {
            return callback(error)
          }
          return callback(null)
        }
      )
    })
  },
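
  // List the doc ids in a project that still have queued history ops.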
  getDocIdsWithHistoryOps(projectId, callback) {
    if (callback == null) {
      callback = function () {}
    }
    return rclient.smembers(
      Keys.docsWithHistoryOps({ project_id: projectId }),
      callback
    )
  },

  // iterate over keys asynchronously using redis scan (non-blocking)
  // handle all the cluster nodes or single redis server
  _getKeys(pattern, callback) {
    const nodes = (typeof rclient.nodes === 'function'
      ? rclient.nodes('master')
      : undefined) || [rclient]
    const doKeyLookupForNode = (node, cb) =>
      RedisManager._getKeysFromNode(node, pattern, cb)
    return async.concatSeries(nodes, doKeyLookupForNode, callback)
  },
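
  // cursor-based SCAN loop on a single node; collects matching keys in a hash
  // until the cursor wraps back to '0'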
  _getKeysFromNode(node, pattern, callback) {
    let cursor = 0 // redis iterator
    const keySet = {} // use hash to avoid duplicate results
    // scan over all keys looking for pattern
    const doIteration = cb =>
      node.scan(
        cursor,
        'MATCH',
        pattern,
        'COUNT',
        1000,
        function (error, reply) {
          let keys
          if (error != null) {
            return callback(error)
          }
          ;[cursor, keys] = Array.from(reply)
          for (const key of Array.from(keys)) {
            keySet[key] = true
          }
          if (cursor === '0') {
            // note redis returns string result not numeric
            return callback(null, Object.keys(keySet))
          } else {
            return doIteration()
          }
        }
      )
    return doIteration()
  },

  // extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b
  // or DocsWithHistoryOps:{57fd0b1f53a8396d22b2c24b} (for redis cluster)
  _extractIds(keyList) {
    const ids = (() => {
      const result = []
      for (const key of Array.from(keyList)) {
        const m = key.match(/:\{?([0-9a-f]{24})\}?/) // extract object id
        result.push(m[1])
      }
      return result
    })()
    return ids
  },
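
  // Scan for all project ids that currently have docs with queued history ops.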
  getProjectIdsWithHistoryOps(callback) {
    if (callback == null) {
      callback = function () {}
    }
    return RedisManager._getKeys(
      Keys.docsWithHistoryOps({ project_id: '*' }),
      function (error, projectKeys) {
        if (error != null) {
          return callback(error)
        }
        const projectIds = RedisManager._extractIds(projectKeys)
        return callback(error, projectIds)
      }
    )
  },

  getAllDocIdsWithHistoryOps(callback) {
    // return all the docids, to find dangling history entries after
    // everything is flushed.
    if (callback == null) {
      callback = function () {}
    }
    return RedisManager._getKeys(
      Keys.uncompressedHistoryOps({ doc_id: '*' }),
      function (error, docKeys) {
        if (error != null) {
          return callback(error)
        }
        const docIds = RedisManager._extractIds(docKeys)
        return callback(error, docIds)
      }
    )
  },
}