diff --git a/services/track-changes/app/coffee/HttpController.coffee b/services/track-changes/app/coffee/HttpController.coffee
index 0b59a6e1ec..9ede86f3ee 100644
--- a/services/track-changes/app/coffee/HttpController.coffee
+++ b/services/track-changes/app/coffee/HttpController.coffee
@@ -23,16 +23,20 @@ module.exports = HttpController =
 		res.send 204
 
 	flushAll: (req, res, next = (error) ->) ->
-		logger.log "flushing all projects"
-		UpdatesManager.flushAll (error, result) ->
+		# limit on projects to flush or -1 for all (default)
+		limit = if req.query.limit? then parseInt(req.query.limit, 10) else -1
+		logger.log {limit: limit}, "flushing all projects"
+		UpdatesManager.flushAll limit, (error, result) ->
 			return next(error) if error?
-			{failed, succeeded} = result
+			{failed, succeeded, all} = result
 			status = "#{succeeded.length} succeeded, #{failed.length} failed"
-			if failed.length > 0
+			if limit == 0
+				res.status(200).send "#{status}\nwould flush:\n#{all.join('\n')}\n"
+			else if failed.length > 0
 				logger.log {failed: failed, succeeded: succeeded}, "error flushing projects"
 				res.status(500).send "#{status}\nfailed to flush:\n#{failed.join('\n')}\n"
 			else
-				res.status(200).send "#{status}\nflushed all #{succeeded.length} projects\n"
+				res.status(200).send "#{status}\nflushed #{succeeded.length} projects of #{all.length}\n"
 
 	checkDanglingUpdates: (req, res, next = (error) ->) ->
 		logger.log "checking dangling updates"
diff --git a/services/track-changes/app/coffee/UpdatesManager.coffee b/services/track-changes/app/coffee/UpdatesManager.coffee
index ca79d26d59..f3571dc16d 100644
--- a/services/track-changes/app/coffee/UpdatesManager.coffee
+++ b/services/track-changes/app/coffee/UpdatesManager.coffee
@@ -145,12 +145,13 @@ module.exports = UpdatesManager =
 		async.parallelLimit jobs, 5, callback
 
 	# flush all outstanding changes
-	flushAll: (callback = (error, result) ->) ->
+	flushAll: (limit, callback = (error, result) ->) ->
 		RedisManager.getProjectIdsWithHistoryOps (error, project_ids) ->
 			return callback(error) if error?
 			logger.log {count: project_ids?.length, project_ids: project_ids}, "found projects"
 			jobs = []
-			for project_id in project_ids
+			selectedProjects = if limit < 0 then project_ids else project_ids[0...limit]
+			for project_id in selectedProjects
 				do (project_id) ->
 					jobs.push (cb) ->
 						UpdatesManager.processUncompressedUpdatesForProject project_id, (err) ->
@@ -159,8 +160,7 @@ module.exports = UpdatesManager =
 				return callback(error) if error?
 				failedProjects = (x.project_id for x in result when x.failed)
 				succeededProjects = (x.project_id for x in result when not x.failed)
-				RedisManager.getAllDocIdsWithHistoryOps (error, doc_ids) ->
-					callback(null, {failed: failedProjects, succeeded: succeededProjects})
+				callback(null, {failed: failedProjects, succeeded: succeededProjects, all: project_ids})
 
 	getDanglingUpdates: (callback = (error, doc_ids) ->) ->
 		RedisManager.getAllDocIdsWithHistoryOps (error, all_doc_ids) ->
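
A minimal CoffeeScript sketch of the new limit semantics introduced above (not part of the diff; selectProjects is a hypothetical name used only for illustration). It mirrors the `project_ids[0...limit]` slice added to UpdatesManager.flushAll: -1 (or an omitted ?limit query parameter) selects every project, 0 selects none and so acts as a dry run whose response lists what would be flushed, and N > 0 selects the first N project ids returned by Redis.

    # Hypothetical helper illustrating the slice behaviour in flushAll
    selectProjects = (project_ids, limit = -1) ->
    	if limit < 0 then project_ids else project_ids[0...limit]

    # selectProjects ['a', 'b', 'c'], -1   # -> ['a', 'b', 'c']  (flush everything)
    # selectProjects ['a', 'b', 'c'], 0    # -> []               (dry run: nothing flushed, response lists all)
    # selectProjects ['a', 'b', 'c'], 2    # -> ['a', 'b']       (flush only the first two)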