Merge pull request #92 from overleaf/bg-flush-queue-prod-fixes

add continuous background flush
Brian Gough authored on 2019-10-02 13:11:00 +01:00 (committed by GitHub)
commit c1454bc4ac
5 changed files with 33 additions and 7 deletions
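Taken together, the five files wire up a continuous background flush for deleted projects: RedisManager queues each deleted project in a Redis sorted set scored by (smoothed) deletion time, a new DeleteQueueManager.startBackgroundFlush loop repeatedly flushes entries older than three minutes, the main app starts that loop at boot behind a new setting, and the acceptance test is simplified to match. Both knobs are read from the environment at startup (see the settings change below):

	# Assumed deployment usage, based on the new settings in this diff:
	#   CONTINUOUS_BACKGROUND_FLUSH=true   # start the flush loop on boot
	#   SMOOTHING_OFFSET=1000              # spread queued flush times over up to 1s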

app.coffee

@@ -14,6 +14,7 @@ if Settings.sentry?.dsn?
 RedisManager = require('./app/js/RedisManager')
 DispatchManager = require('./app/js/DispatchManager')
+DeleteQueueManager = require('./app/js/DeleteQueueManager')
 Errors = require "./app/js/Errors"
 HttpController = require "./app/js/HttpController"
 mongojs = require "./app/js/mongojs"
@@ -142,7 +143,12 @@ host = Settings.internal.documentupdater.host or "localhost"
 if !module.parent # Called directly
 	app.listen port, host, ->
 		logger.info "Document-updater starting up, listening on #{host}:#{port}"
+		if Settings.continuousBackgroundFlush
+			logger.info "Starting continuous background flush"
+			DeleteQueueManager.startBackgroundFlush()
 
 module.exports = app
 
 for signal in ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT']
 	process.on signal, shutdownCleanly(signal)
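The loop is only started when the server is run directly (not when the module is required by tests) and, as DeleteQueueManager below shows, it checks Settings.shuttingDown on every pass. The shutdownCleanly handler registered here is outside this diff; a minimal sketch, assuming it simply raises that flag and exits after a grace period (names and delay are assumptions, not the repo's actual code):

	# Hypothetical sketch -- shutdownCleanly is not shown in this diff.
	shutdownCleanly = (signal) ->
		() ->
			logger.info {signal: signal}, "received interrupt, flagging shutdown"
			Settings.shuttingDown = true  # assumption: this is what stops the flush loop
			setTimeout () ->
				logger.info {signal: signal}, "shutting down"
				process.exit()
			, 10000  # grace period is a guess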

app/coffee/DeleteQueueManager.coffee

@@ -1,3 +1,4 @@
+Settings = require('settings-sharelatex')
 RedisManager = require "./RedisManager"
 ProjectManager = require "./ProjectManager"
 logger = require "logger-sharelatex"
@@ -22,7 +23,7 @@ async = require "async"
 module.exports = DeleteQueueManager =
 	flushAndDeleteOldProjects: (options, callback) ->
 		startTime = Date.now()
-		cutoffTime = startTime - options.min_delete_age
+		cutoffTime = startTime - options.min_delete_age + 100 * (Math.random() - 0.5)
 		count = 0
 
 		flushProjectIfNotModified = (project_id, flushTimestamp, cb) ->
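The added jitter widens the cutoff by up to ±50 ms, presumably so that several worker processes scanning the queue at the same instant do not all settle on exactly the same boundary. A worked example of the arithmetic:

	# Illustrative numbers only: with the loop's min_delete_age of 3 minutes,
	# the cutoff lands uniformly in a 100 ms window centred on "3 minutes ago".
	startTime = Date.now()
	min_delete_age = 3 * 60 * 1000
	jitter = 100 * (Math.random() - 0.5)   # uniform in [-50, +50) ms
	cutoffTime = startTime - min_delete_age + jitter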
@@ -61,3 +62,18 @@ module.exports = DeleteQueueManager =
 					flushNextProject()
 		flushNextProject()
 
+	startBackgroundFlush: () ->
+		SHORT_DELAY = 10
+		LONG_DELAY = 1000
+		doFlush = () ->
+			if Settings.shuttingDown
+				logger.warn "discontinuing background flush due to shutdown"
+				return
+			DeleteQueueManager.flushAndDeleteOldProjects {
+				timeout: 1000,
+				min_delete_age: 3 * 60 * 1000,
+				limit: 1000 # high value, to ensure we always flush enough projects
+			}, (err, flushed) ->
+				setTimeout doFlush, (if flushed > 10 then SHORT_DELAY else LONG_DELAY)
+		doFlush()
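The callback drives an adaptive poll: another pass after 10 ms while the previous pass flushed more than ten projects, otherwise a one-second back-off. Note that err is ignored here: if a pass fails, flushed is undefined, `undefined > 10` is false, and the loop simply retries on the long delay rather than spinning or dying. Condensed:

	# Illustrative only: how the next polling interval falls out of the
	# comparison above, including the error path.
	chooseDelay = (flushed) -> if flushed > 10 then 10 else 1000
	chooseDelay 50          # => 10    backlog: poll again almost immediately
	chooseDelay 0           # => 1000  queue quiet: poll once per second
	chooseDelay undefined   # => 1000  previous pass errored: back off, do not spin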

app/coffee/RedisManager.coffee

@@ -295,8 +295,9 @@ module.exports = RedisManager =
 		, callback
 
 	queueFlushAndDeleteProject: (project_id, callback) ->
-		# store the project id in a sorted set ordered by time
-		rclient.zadd keys.flushAndDeleteQueue(), Date.now(), project_id, callback
+		# store the project id in a sorted set ordered by time with a random offset to smooth out spikes
+		SMOOTHING_OFFSET = if Settings.smoothingOffset > 0 then Math.round(Settings.smoothingOffset * Math.random()) else 0
+		rclient.zadd keys.flushAndDeleteQueue(), Date.now() + SMOOTHING_OFFSET, project_id, callback
 
 	getNextProjectToFlushAndDelete: (cutoffTime, callback = (error, key, timestamp) ->) ->
 		# find the oldest queued flush that is before the cutoff time
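With the default smoothingOffset of 1000 ms, a burst of deletes arriving in the same instant is spread over the following second of queue scores rather than forming a spike. getNextProjectToFlushAndDelete itself is unchanged and its body is not part of this diff; given its comment, one plausible shape (an assumption, not the repo's code) is a bounded range query on the same sorted set:

	# Sketch under assumptions -- the real body is outside this diff, and the
	# real version presumably also removes the entry it hands out.
	getNextProjectToFlushAndDelete: (cutoffTime, callback = (error, key, timestamp) ->) ->
		# oldest entry whose score (queue time) is before the cutoff
		rclient.zrangebyscore keys.flushAndDeleteQueue(), 0, cutoffTime, "WITHSCORES", "LIMIT", 0, 1, (error, reply) ->
			return callback(error) if error?
			return callback() if !reply? or reply.length == 0  # nothing old enough yet
			[key, timestamp] = reply
			callback null, key, parseInt(timestamp, 10)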

config/settings.defaults.coffee

@@ -95,3 +95,7 @@ module.exports =
 		dsn: process.env.SENTRY_DSN
 
 	publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] or false
+
+	continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] or false
+
+	smoothingOffset: process.env['SMOOTHING_OFFSET'] or 1000 # milliseconds
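Both values arrive as raw environment strings. Two consequences worth noting: any non-empty string is truthy in JavaScript, so CONTINUOUS_BACKGROUND_FLUSH=false still enables the flush (only an unset variable leaves it off); and SMOOTHING_OFFSET stays a string, which the `> 0` comparison and the multiplication in RedisManager silently coerce to a number:

	# Illustrative CoffeeScript/JS semantics, not code from the repo:
	console.log ('false' or false)                  # => 'false' -- truthy, flag is on
	console.log ('2000' > 0)                        # => true, string coerced to number
	console.log Math.round('2000' * Math.random())  # => an integer in [0, 2000]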

test/acceptance/coffee/DeletingAProjectTests.coffee

@@ -147,9 +147,8 @@ describe "Deleting a project", ->
 					@statusCode = res.statusCode
 					# after deleting the project and putting it in the queue, flush the queue
 					setTimeout () ->
-						DocUpdaterClient.flushOldProjects (error, res, body) =>
-							setTimeout done, 1000 # allow time for the flush to complete
-						, 100
-					, 2000
+						DocUpdaterClient.flushOldProjects done
+					, 200
 
 		after ->
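DocUpdaterClient.flushOldProjects is defined in the test helpers, not in this diff; the simplification presumably works because the helper's callback only fires once the flush request has completed, so the extra settling timeouts were redundant. A hypothetical sketch of its shape (endpoint path and port are assumptions):

	# Hypothetical -- the real helper lives in the acceptance-test helpers.
	request = require "request"
	DocUpdaterClient.flushOldProjects = (callback = (error, res, body) ->) ->
		request.get "http://localhost:3003/flush_queued_projects", callback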