diff --git a/services/track-changes/app.coffee b/services/track-changes/app.coffee
index 0a43cd1503..43cddca498 100644
--- a/services/track-changes/app.coffee
+++ b/services/track-changes/app.coffee
@@ -50,6 +50,9 @@ app.post "/project/:project_id/doc/:doc_id/version/:version/restore", HttpContro
 app.post '/project/:project_id/doc/:doc_id/push', HttpController.pushDocHistory
 app.post '/project/:project_id/doc/:doc_id/pull', HttpController.pullDocHistory
 
+app.post '/flush/all', HttpController.flushAll
+app.post '/check/dangling', HttpController.checkDanglingUpdates
+
 packWorker = null # use a single packing worker
 
 app.post "/pack", (req, res, next) ->
diff --git a/services/track-changes/app/coffee/HttpController.coffee b/services/track-changes/app/coffee/HttpController.coffee
index eecc618330..0b59a6e1ec 100644
--- a/services/track-changes/app/coffee/HttpController.coffee
+++ b/services/track-changes/app/coffee/HttpController.coffee
@@ -22,6 +22,28 @@ module.exports = HttpController =
 			return next(error) if error?
 			res.send 204
 
+	flushAll: (req, res, next = (error) ->) ->
+		logger.log "flushing all projects"
+		UpdatesManager.flushAll (error, result) ->
+			return next(error) if error?
+			{failed, succeeded} = result
+			status = "#{succeeded.length} succeeded, #{failed.length} failed"
+			if failed.length > 0
+				logger.log {failed: failed, succeeded: succeeded}, "error flushing projects"
+				res.status(500).send "#{status}\nfailed to flush:\n#{failed.join('\n')}\n"
+			else
+				res.status(200).send "#{status}\nflushed all #{succeeded.length} projects\n"
+
+	checkDanglingUpdates: (req, res, next = (error) ->) ->
+		logger.log "checking dangling updates"
+		UpdatesManager.getDanglingUpdates (error, result) ->
+			return next(error) if error?
+			if result.length > 0
+				logger.log {dangling: result}, "found dangling updates"
+				res.status(500).send "dangling updates:\n#{result.join('\n')}\n"
+			else
+				res.status(200).send "no dangling updates found\n"
+
 	checkDoc: (req, res, next = (error) ->) ->
 		doc_id = req.params.doc_id
 		project_id = req.params.project_id
diff --git a/services/track-changes/app/coffee/RedisManager.coffee b/services/track-changes/app/coffee/RedisManager.coffee
index a634bbfed9..b58b99f11f 100644
--- a/services/track-changes/app/coffee/RedisManager.coffee
+++ b/services/track-changes/app/coffee/RedisManager.coffee
@@ -32,3 +32,41 @@ module.exports = RedisManager =
 
 	getDocIdsWithHistoryOps: (project_id, callback = (error, doc_ids) ->) ->
 		rclient.smembers docsWithHistoryOpsKey(project_id), callback
+
+	# iterate over keys asynchronously using redis scan (non-blocking)
+	_getKeys: (pattern, callback) ->
+		cursor = 0 # redis iterator
+		keySet = {} # use hash to avoid duplicate results
+		# scan over all keys looking for pattern
+		doIteration = (cb) ->
+			rclient.scan cursor, "MATCH", pattern, "COUNT", 1000, (error, reply) ->
+				return callback(error) if error?
+				[cursor, keys] = reply
+				for key in keys
+					keySet[key] = true
+				if cursor == '0' # note redis returns string result not numeric
+					return callback(null, Object.keys(keySet))
+				else
+					doIteration()
+		doIteration()
+
+	# extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b
+	_extractIds: (keyList) ->
+		ids = (key.split(":")[1] for key in keyList)
+		return ids
+
+	# this will only work on single node redis, not redis cluster
+	getProjectIdsWithHistoryOps: (callback = (error, project_ids) ->) ->
+		RedisManager._getKeys docsWithHistoryOpsKey("*"), (error, project_keys) ->
+			return callback(error) if error?
+			project_ids = RedisManager._extractIds project_keys
+			callback(error, project_ids)
+
+	# this will only work on single node redis, not redis cluster
+	getAllDocIdsWithHistoryOps: (callback = (error, doc_ids) ->) ->
+		# return all the docids, to find dangling history entries after
+		# everything is flushed.
+		RedisManager._getKeys rawUpdatesKey("*"), (error, doc_keys) ->
+			return callback(error) if error?
+			doc_ids = RedisManager._extractIds doc_keys
+			callback(error, doc_ids)
diff --git a/services/track-changes/app/coffee/UpdatesManager.coffee b/services/track-changes/app/coffee/UpdatesManager.coffee
index f01681e5f0..ca79d26d59 100644
--- a/services/track-changes/app/coffee/UpdatesManager.coffee
+++ b/services/track-changes/app/coffee/UpdatesManager.coffee
@@ -144,6 +144,37 @@ module.exports = UpdatesManager =
 				UpdatesManager._processUncompressedUpdatesForDocWithLock project_id, doc_id, temporary, cb
 		async.parallelLimit jobs, 5, callback
 
+	# flush all outstanding changes
+	flushAll: (callback = (error, result) ->) ->
+		RedisManager.getProjectIdsWithHistoryOps (error, project_ids) ->
+			return callback(error) if error?
+			logger.log {count: project_ids?.length, project_ids: project_ids}, "found projects"
+			jobs = []
+			for project_id in project_ids
+				do (project_id) ->
+					jobs.push (cb) ->
+						UpdatesManager.processUncompressedUpdatesForProject project_id, (err) ->
+							return cb(null, {failed: err?, project_id: project_id})
+			async.series jobs, (error, result) ->
+				return callback(error) if error?
+				failedProjects = (x.project_id for x in result when x.failed)
+				succeededProjects = (x.project_id for x in result when not x.failed)
+				RedisManager.getAllDocIdsWithHistoryOps (error, doc_ids) ->
+					callback(null, {failed: failedProjects, succeeded: succeededProjects})
+
+	getDanglingUpdates: (callback = (error, doc_ids) ->) ->
+		RedisManager.getAllDocIdsWithHistoryOps (error, all_doc_ids) ->
+			return callback(error) if error?
+			RedisManager.getProjectIdsWithHistoryOps (error, all_project_ids) ->
+				return callback(error) if error?
+				# function to get doc_ids for each project
+				task = (cb) -> async.concatSeries all_project_ids, RedisManager.getDocIdsWithHistoryOps, cb
+				# find the dangling doc ids
+				task (error, project_doc_ids) ->
+					dangling_doc_ids = _.difference(all_doc_ids, project_doc_ids)
+					logger.log {all_doc_ids: all_doc_ids, all_project_ids: all_project_ids, project_doc_ids: project_doc_ids, dangling_doc_ids: dangling_doc_ids}, "checking for dangling doc ids"
+					callback(null, dangling_doc_ids)
+
 	getDocUpdates: (project_id, doc_id, options = {}, callback = (error, updates) ->) ->
 		UpdatesManager.processUncompressedUpdatesWithLock project_id, doc_id, (error) ->
 			return callback(error) if error?