mirror of https://github.com/overleaf/overleaf.git
synced 2025-04-11 18:55:26 +00:00

add check for dangling updates

This commit is contained in:
parent d9ed026d91
commit 9ce6d77cca

4 changed files with 40 additions and 3 deletions
@@ -51,6 +51,7 @@ app.post '/project/:project_id/doc/:doc_id/push', HttpController.pushDocHistory
 app.post '/project/:project_id/doc/:doc_id/pull', HttpController.pullDocHistory

 app.post '/flush/all', HttpController.flushAll
+app.post '/check/dangling', HttpController.checkDanglingUpdates

 packWorker = null # use a single packing worker
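The new route wires the dangling-updates check into the HTTP API. A minimal sketch of how the endpoint might be exercised from CoffeeScript, assuming the service listens on localhost:3015 (host and port here are assumptions, not part of this commit):

	http = require "http"

	req = http.request {method: "POST", host: "localhost", port: 3015, path: "/check/dangling"}, (res) ->
		body = ""
		res.on "data", (chunk) -> body += chunk
		res.on "end", ->
			# 200 => no dangling updates; 500 carries the list of dangling doc ids
			console.log res.statusCode, body
	req.end()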
@@ -34,6 +34,16 @@ module.exports = HttpController =
 		else
 			res.status(200).send "#{status}\nflushed all #{succeeded.length} projects\n"

+	checkDanglingUpdates: (req, res, next = (error) ->) ->
+		logger.log "checking dangling updates"
+		UpdatesManager.getDanglingUpdates (error, result) ->
+			return next(error) if error?
+			if result.length > 0
+				logger.log {dangling: result}, "found dangling updates"
+				res.status(500).send "dangling updates:\n#{result.join('\n')}\n"
+			else
+				res.status(200).send "no dangling updates found\n"
+
 	checkDoc: (req, res, next = (error) ->) ->
 		doc_id = req.params.doc_id
 		project_id = req.params.project_id
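The handler reports a clean state with a 200 and flags dangling updates with a 500 whose body lists the offending doc ids. A hedged sketch of a test for that contract, re-wiring the handler inline with hand-rolled stubs (the real module path and test harness are not shown in this diff; the logger calls are dropped for brevity):

	assert = require "assert"

	# stub: pretend two doc ids are dangling
	UpdatesManager =
		getDanglingUpdates: (callback) -> callback(null, ["doc-1", "doc-2"])

	# fake express-style response that records status and body
	res =
		status: (@statusCode) -> this
		send: (@body) -> this

	checkDanglingUpdates = (req, res, next = (error) ->) ->
		UpdatesManager.getDanglingUpdates (error, result) ->
			return next(error) if error?
			if result.length > 0
				res.status(500).send "dangling updates:\n#{result.join('\n')}\n"
			else
				res.status(200).send "no dangling updates found\n"

	# the stub is synchronous, so the assertions can run straight after the call
	checkDanglingUpdates {}, res
	assert.equal res.statusCode, 500
	assert.ok res.body.indexOf("doc-1") >= 0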
@@ -33,11 +33,23 @@ module.exports = RedisManager =
 	getDocIdsWithHistoryOps: (project_id, callback = (error, doc_ids) ->) ->
 		rclient.smembers docsWithHistoryOpsKey(project_id), callback

+	# extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b
+	_extractIds: (keyList) ->
+		ids = (key.split(":")[1] for key in keyList)
+		return ids
+
 	# this will only work on single node redis, not redis cluster
 	getProjectIdsWithHistoryOps: (callback = (error, project_ids) ->) ->
 		rclient.keys docsWithHistoryOpsKey("*"), (error, project_keys) ->
 			return callback(error) if error?
-			project_ids = for key in project_keys
-				[prefix, project_id] = key.split(":")
-				project_id
+			project_ids = RedisManager._extractIds project_keys
 			callback(error, project_ids)
+
+	# this will only work on single node redis, not redis cluster
+	getAllDocIdsWithHistoryOps: (callback = (error, doc_ids) ->) ->
+		# return all the docids, to find dangling history entries after
+		# everything is flushed.
+		rclient.keys rawUpdatesKey("*"), (error, doc_keys) ->
+			return callback(error) if error?
+			doc_ids = RedisManager._extractIds doc_keys
+			callback(error, doc_ids)
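As the comments note, KEYS only works against a single redis node, and it also blocks the server while it walks the keyspace. A hedged sketch of a cursor-based alternative using SCAN (still per-node, so a cluster would need to iterate every node; rclient is assumed to be a node_redis client and the key pattern is illustrative):

	redis = require "redis"
	rclient = redis.createClient()

	# walk the keyspace with SCAN and collect the id part of each matching key
	scanIds = (pattern, callback) ->
		ids = []
		iterate = (cursor) ->
			rclient.scan cursor, "MATCH", pattern, "COUNT", 100, (error, reply) ->
				return callback(error) if error?
				[nextCursor, keys] = reply
				ids = ids.concat(key.split(":")[1] for key in keys)
				if nextCursor is "0"  # SCAN signals completion by returning cursor "0"
					callback(null, ids)
				else
					iterate(nextCursor)
		iterate("0")

	scanIds "DocsWithHistoryOps:*", (error, project_ids) ->
		console.log project_ids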
@@ -159,8 +159,22 @@ module.exports = UpdatesManager =
 			return callback(error) if error?
 			failedProjects = (x.project_id for x in result when x.failed)
 			succeededProjects = (x.project_id for x in result when not x.failed)
+			RedisManager.getAllDocIdsWithHistoryOps (error, doc_ids) ->
 			callback(null, {failed: failedProjects, succeeded: succeededProjects})

+	getDanglingUpdates: (callback = (error, doc_ids) ->) ->
+		RedisManager.getAllDocIdsWithHistoryOps (error, all_doc_ids) ->
+			return callback(error) if error?
+			RedisManager.getProjectIdsWithHistoryOps (error, all_project_ids) ->
+				return callback(error) if error?
+				# function to get doc_ids for each project
+				task = (cb) -> async.concatSeries all_project_ids, RedisManager.getDocIdsWithHistoryOps, cb
+				# find the dangling doc ids
+				task (error, project_doc_ids) ->
+					dangling_doc_ids = _.difference(all_doc_ids, project_doc_ids)
+					logger.log {all_doc_ids: all_doc_ids, all_project_ids: all_project_ids, project_doc_ids: project_doc_ids, dangling_doc_ids: dangling_doc_ids}, "checking for dangling doc ids"
+					callback(null, dangling_doc_ids)
+
 	getDocUpdates: (project_id, doc_id, options = {}, callback = (error, updates) ->) ->
 		UpdatesManager.processUncompressedUpdatesWithLock project_id, doc_id, (error) ->
 			return callback(error) if error?
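getDanglingUpdates boils down to a set difference: every doc id that still has raw history ops, minus every doc id reachable through some project's DocsWithHistoryOps set. A small self-contained sketch of that logic with stubbed lookups (the async and underscore imports mirror the `async` and `_` used in the module above):

	async = require "async"
	_ = require "underscore"

	all_doc_ids = ["a", "b", "c", "d"]
	docs_by_project = {p1: ["a", "b"], p2: ["c"]}
	getDocIdsWithHistoryOps = (project_id, cb) -> cb(null, docs_by_project[project_id])

	# concatSeries flattens the per-project doc id lists into one array
	async.concatSeries ["p1", "p2"], getDocIdsWithHistoryOps, (error, project_doc_ids) ->
		# "d" has history ops but belongs to no tracked project, so it is dangling
		console.log _.difference(all_doc_ids, project_doc_ids)  # => [ 'd' ]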