add smoothing of delete spikes

Brian Gough 2019-10-01 15:01:53 +01:00
parent 2c22a60052
commit 2845b23b70
3 changed files with 7 additions and 5 deletions

@@ -295,8 +295,9 @@ module.exports = RedisManager =
     multi.exec callback
 
   queueFlushAndDeleteProject: (project_id, callback) ->
-    # store the project id in a sorted set ordered by time
-    rclient.zadd keys.flushAndDeleteQueue(), Date.now(), project_id, callback
+    # store the project id in a sorted set ordered by time with a random offset to smooth out spikes
+    SMOOTHING_OFFSET = if Settings.smoothingOffset > 0 then Math.round(Settings.smoothingOffset * Math.random()) else 0
+    rclient.zadd keys.flushAndDeleteQueue(), Date.now() + SMOOTHING_OFFSET, project_id, callback
 
   getNextProjectToFlushAndDelete: (cutoffTime, callback = (error, key, timestamp)->) ->
     # find the oldest queued flush that is before the cutoff time
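
The effect of the RedisManager change: each queued project's sorted-set score becomes Date.now() plus a uniform random offset of up to Settings.smoothingOffset milliseconds, so a burst of deletes is spread across the smoothing window instead of all becoming eligible for flushing at the same instant. A minimal standalone sketch of the scoring (illustrative, not code from the repository):

    # Five projects queued in the same tick get scores spread over
    # roughly one second instead of five identical timestamps.
    smoothingOffset = 1000 # ms, matching the default added below
    now = Date.now()
    scores = for i in [1..5]
      now + Math.round(smoothingOffset * Math.random())
    console.log scores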

@@ -97,3 +97,5 @@ module.exports =
   publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] or false
 
   continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] or false
+
+  smoothingOffset: process.env['SMOOTHING_OFFSET'] or 1000 # milliseconds
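
One detail worth noting (an observation about the code, not part of the diff): values read from process.env are strings, so when SMOOTHING_OFFSET is set, both the > 0 guard and the multiplication in RedisManager rely on JavaScript's implicit string-to-number coercion:

    # Both expressions coerce the string '5000' to the number 5000.
    console.log '5000' > 0                         # true
    console.log Math.round('5000' * Math.random()) # an integer in 0..5000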

@@ -147,9 +147,8 @@ describe "Deleting a project", ->
         @statusCode = res.statusCode
         # after deleting the project and putting it in the queue, flush the queue
         setTimeout () ->
-          DocUpdaterClient.flushOldProjects (error, res, body) =>
-            setTimeout done, 1000 # allow time for the flush to complete
-        , 100
+          DocUpdaterClient.flushOldProjects done
+        , 2000
       , 200
 
     after ->
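
The wider test timeout follows from the new offset (reasoning inferred from the diff, not stated in the commit): with the default smoothingOffset of 1000 ms, a queued entry's score can sit up to one second in the future, so the test now waits 2000 ms before flushing and passes done directly as the flush callback instead of waiting a further second for the flush to complete:

    # Timing sketch: the 2000 ms wait covers the worst-case offset with margin.
    now = Date.now()
    maxScore = now + 1000 # latest possible queue score under the default offset
    flushAt = now + 2000  # when the test calls flushOldProjects
    console.log flushAt > maxScore # true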