mirror of
https://github.com/overleaf/overleaf.git
synced 2024-12-27 22:02:05 +00:00
Merge pull request #30 from sharelatex/bg-limit-flush
allow limit on flushed projects
This commit is contained in:
commit
7ba45cfc73
2 changed files with 13 additions and 9 deletions
|
@@ -23,16 +23,20 @@ module.exports = HttpController =
|
|||
res.send 204
|
||||
|
||||
flushAll: (req, res, next = (error) ->) ->
|
||||
logger.log "flushing all projects"
|
||||
UpdatesManager.flushAll (error, result) ->
|
||||
# limit on projects to flush or -1 for all (default)
|
||||
limit = if req.query.limit? then parseInt(req.query.limit, 10) else -1
|
||||
logger.log {limit: limit}, "flushing all projects"
|
||||
UpdatesManager.flushAll limit, (error, result) ->
|
||||
return next(error) if error?
|
||||
{failed, succeeded} = result
|
||||
{failed, succeeded, all} = result
|
||||
status = "#{succeeded.length} succeeded, #{failed.length} failed"
|
||||
if failed.length > 0
|
||||
if limit == 0
|
||||
res.status(200).send "#{status}\nwould flush:\n#{all.join('\n')}\n"
|
||||
else if failed.length > 0
|
||||
logger.log {failed: failed, succeeded: succeeded}, "error flushing projects"
|
||||
res.status(500).send "#{status}\nfailed to flush:\n#{failed.join('\n')}\n"
|
||||
else
|
||||
res.status(200).send "#{status}\nflushed all #{succeeded.length} projects\n"
|
||||
res.status(200).send "#{status}\nflushed #{succeeded.length} projects of #{all.length}\n"
|
||||
|
||||
checkDanglingUpdates: (req, res, next = (error) ->) ->
|
||||
logger.log "checking dangling updates"
|
||||
|
|
|
@@ -145,12 +145,13 @@ module.exports = UpdatesManager =
|
|||
async.parallelLimit jobs, 5, callback
|
||||
|
||||
# flush all outstanding changes
|
||||
flushAll: (callback = (error, result) ->) ->
|
||||
flushAll: (limit, callback = (error, result) ->) ->
|
||||
RedisManager.getProjectIdsWithHistoryOps (error, project_ids) ->
|
||||
return callback(error) if error?
|
||||
logger.log {count: project_ids?.length, project_ids: project_ids}, "found projects"
|
||||
jobs = []
|
||||
for project_id in project_ids
|
||||
selectedProjects = if limit < 0 then project_ids else project_ids[0...limit]
|
||||
for project_id in selectedProjects
|
||||
do (project_id) ->
|
||||
jobs.push (cb) ->
|
||||
UpdatesManager.processUncompressedUpdatesForProject project_id, (err) ->
|
||||
|
@@ -159,8 +160,7 @@ module.exports = UpdatesManager =
|
|||
return callback(error) if error?
|
||||
failedProjects = (x.project_id for x in result when x.failed)
|
||||
succeededProjects = (x.project_id for x in result when not x.failed)
|
||||
RedisManager.getAllDocIdsWithHistoryOps (error, doc_ids) ->
|
||||
callback(null, {failed: failedProjects, succeeded: succeededProjects})
|
||||
callback(null, {failed: failedProjects, succeeded: succeededProjects, all: project_ids})
|
||||
|
||||
getDanglingUpdates: (callback = (error, doc_ids) ->) ->
|
||||
RedisManager.getAllDocIdsWithHistoryOps (error, all_doc_ids) ->
|
||||
|
|
Loading…
Reference in a new issue