Merge pull request #45 from sharelatex/bg-auto-doc-flush

Allow the docupdater to flush docs in the background (connects to overleaf/sharelatex#190)
Commit 18370076a2 by Brian Gough, 2017-10-12 14:48:16 +01:00 (committed by GitHub)
2 changed files with 4 additions and 9 deletions
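
The change in a nutshell: rather than the web service synchronously flushing every project to mongo before each compile, the docupdater is expected to flush docs itself once they have sat in redis too long. A minimal CoffeeScript sketch of that idea, purely illustrative: the names, the threshold, and the fire-and-forget error handling are assumptions, since this PR contains only the web-side changes.

# Hypothetical docupdater-side sketch: when a doc has been in redis longer
# than some threshold, flush it to mongo in the background rather than
# waiting for the web service to ask. Names and threshold are assumptions.
MAX_UNFLUSHED_AGE = 5 * 60 * 1000 # ms

flushDocIfOld = (project_id, doc_id, lastFlushedAt) ->
  if Date.now() - lastFlushedAt > MAX_UNFLUSHED_AGE
    # fire-and-forget: the compile does not block on this flush
    DocManager.flushDocIfLoaded project_id, doc_id, (error) ->
      if error?
        logger.error err: error, project_id: project_id, doc_id: doc_id, "background doc flush failed"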


@@ -146,13 +146,8 @@ module.exports = ClsiManager =
       logger.log project_id: project_id, projectStateHash: projectStateHash, docs: docUpdaterDocs?, "checked project state"
       # see if we can send an incremental update to the CLSI
       if docUpdaterDocs? and (options.syncType isnt "full") and not error?
-        # Workaround: for now, always flush project to mongo on compile
-        # until we have automatic periodic flushing on the docupdater
-        # side, to prevent documents staying in redis too long.
-        DocumentUpdaterHandler.flushProjectToMongo project_id, (error) ->
-          return callback(error) if error?
-          Metrics.inc "compile-from-redis"
-          ClsiManager._buildRequestFromDocupdater project_id, options, project, projectStateHash, docUpdaterDocs, callback
+        Metrics.inc "compile-from-redis"
+        ClsiManager._buildRequestFromDocupdater project_id, options, project, projectStateHash, docUpdaterDocs, callback
       else
         Metrics.inc "compile-from-mongo"
         ClsiManager._buildRequestFromMongo project_id, options, project, projectStateHash, callback
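
The workaround can go because the docupdater is assumed to expose an endpoint that both returns the project docs and takes care of flushing stale ones. A hypothetical express-style sketch of that route follows; the path matches the URL in the next hunk, but the handler and manager names and the 409-on-state-mismatch convention are assumptions, not part of this PR.

# Hypothetical sketch of the docupdater route the web service will POST to.
app.post "/project/:project_id/get_and_flush_if_old", (req, res, next) ->
  {project_id} = req.params
  projectStateHash = req.query.state
  ProjectManager.getProjectDocsAndFlushIfOld project_id, projectStateHash, (error, docs) ->
    if error?.name is "ProjectStateChangedError"
      res.sendStatus 409 # state hash stale; caller falls back to mongo
    else if error?
      next(error)
    else
      res.json docs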


@@ -128,9 +128,9 @@ module.exports = DocumentUpdaterHandler =
     # docs from redis via the docupdater. Otherwise we will need to
     # fall back to getting them from mongo.
     timer = new metrics.Timer("get-project-docs")
-    url = "#{settings.apis.documentupdater.url}/project/#{project_id}/doc?state=#{projectStateHash}"
+    url = "#{settings.apis.documentupdater.url}/project/#{project_id}/get_and_flush_if_old?state=#{projectStateHash}"
     logger.log project_id:project_id, "getting project docs from document updater"
-    request.get url, (error, res, body)->
+    request.post url, (error, res, body)->
       timer.done()
       if error?
         logger.error err:error, url:url, project_id:project_id, "error getting project docs from doc updater"
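
For completeness, the rest of this callback (outside the hunk) still has to interpret the response. A sketch of the handling this change implies, where a 2xx carries the docs as JSON and a 409 signals a state mismatch so the caller compiles from mongo; the status-code contract is an assumption, not shown in this diff.

# Hypothetical sketch of interpreting the docupdater response in the
# callback above: parsed docs on 2xx, null docs on 409 (state mismatch).
handleDocUpdaterResponse = (res, body, callback) ->
  if res.statusCode is 409
    callback(null, null) # project changed under us; fall back to mongo
  else if 200 <= res.statusCode < 300
    try
      docs = JSON.parse(body)
    catch parseError
      return callback(parseError)
    callback(null, docs)
  else
    callback(new Error("doc updater returned failure status: #{res.statusCode}"))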