MongoManager = require "./MongoManager"
RedisManager = require "./RedisManager"
UpdateCompressor = require "./UpdateCompressor"
LockManager = require "./LockManager"
WebApiManager = require "./WebApiManager"
UpdateTrimmer = require "./UpdateTrimmer"
logger = require "logger-sharelatex"
async = require "async"

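# UpdatesManager moves raw editor updates from the Redis buffer into the
# compressed, permanent history stored in Mongo, and provides query helpers
# over that history (per doc, per project, and summarized).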
module.exports = UpdatesManager =
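	# Compress a batch of raw updates and append them to the doc's history in
	# Mongo, after checking that they follow on from the last compressed update.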
	compressAndSaveRawUpdates: (project_id, doc_id, rawUpdates, temporary, callback = (error) ->) ->
		length = rawUpdates.length
		if length == 0
			return callback()

		MongoManager.popLastCompressedUpdate doc_id, (error, lastCompressedUpdate) ->
			return callback(error) if error?

			# Ensure that raw updates start where lastCompressedUpdate left off
			if lastCompressedUpdate?
				rawUpdates = rawUpdates.slice(0)
				while rawUpdates[0]? and rawUpdates[0].v <= lastCompressedUpdate.v
					rawUpdates.shift()

				if rawUpdates[0]? and rawUpdates[0].v != lastCompressedUpdate.v + 1
					error = new Error("Tried to apply raw op at version #{rawUpdates[0].v} to last compressed update with version #{lastCompressedUpdate.v}")
					logger.error err: error, doc_id: doc_id, project_id: project_id, "inconsistent doc versions"
					# Push the update back into Mongo - catching errors at this
					# point is useless, we're already bailing
					MongoManager.insertCompressedUpdates project_id, doc_id, [lastCompressedUpdate], temporary, () ->
						return callback error
					return

			compressedUpdates = UpdateCompressor.compressRawUpdates lastCompressedUpdate, rawUpdates
			MongoManager.insertCompressedUpdates project_id, doc_id, compressedUpdates, temporary, (error) ->
				return callback(error) if error?
				logger.log project_id: project_id, doc_id: doc_id, rawUpdatesLength: length, compressedUpdatesLength: compressedUpdates.length, "compressed doc updates"
				callback()

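	# How many raw updates to read from Redis in one batch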
	REDIS_READ_BATCH_SIZE: 100
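	# Read raw updates for a doc from Redis in batches, compress each batch into
	# Mongo, then delete it from Redis; keep going while full batches come back.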
	processUncompressedUpdates: (project_id, doc_id, callback = (error) ->) ->
		UpdateTrimmer.shouldTrimUpdates project_id, (error, temporary) ->
			return callback(error) if error?
			MongoManager.backportProjectId project_id, doc_id, (error) ->
				return callback(error) if error?
				RedisManager.getOldestRawUpdates doc_id, UpdatesManager.REDIS_READ_BATCH_SIZE, (error, rawUpdates) ->
					return callback(error) if error?
					length = rawUpdates.length
					UpdatesManager.compressAndSaveRawUpdates project_id, doc_id, rawUpdates, temporary, (error) ->
						return callback(error) if error?
						logger.log project_id: project_id, doc_id: doc_id, "compressed and saved doc updates"
						RedisManager.deleteOldestRawUpdates project_id, doc_id, length, (error) ->
							return callback(error) if error?
							if length == UpdatesManager.REDIS_READ_BATCH_SIZE
								# There might be more updates
								logger.log project_id: project_id, doc_id: doc_id, "continuing processing updates"
								setTimeout () ->
									UpdatesManager.processUncompressedUpdates project_id, doc_id, callback
								, 0
							else
								logger.log project_id: project_id, doc_id: doc_id, "all raw updates processed"
								callback()

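	# As processUncompressedUpdates, but under a per-doc lock so that only one
	# worker flushes a given doc's history at a time.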
	processUncompressedUpdatesWithLock: (project_id, doc_id, callback = (error) ->) ->
		LockManager.runWithLock(
			"HistoryLock:#{doc_id}",
			(releaseLock) ->
				UpdatesManager.processUncompressedUpdates project_id, doc_id, releaseLock
			callback
		)

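	# Flush the pending updates of every doc in a project that has history ops
	# queued in Redis, with bounded concurrency.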
	processUncompressedUpdatesForProject: (project_id, callback = (error) ->) ->
		RedisManager.getDocIdsWithHistoryOps project_id, (error, doc_ids) ->
			return callback(error) if error?
			jobs = []
			for doc_id in doc_ids
				do (doc_id) ->
					jobs.push (callback) ->
						UpdatesManager.processUncompressedUpdatesWithLock project_id, doc_id, callback
			async.parallelLimit jobs, 5, callback

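	# Flush any pending updates for the doc, then read its compressed history from Mongo.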
	getDocUpdates: (project_id, doc_id, options = {}, callback = (error, updates) ->) ->
		UpdatesManager.processUncompressedUpdatesWithLock project_id, doc_id, (error) ->
			return callback(error) if error?
			MongoManager.getDocUpdates doc_id, options, callback

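	# As getDocUpdates, but with each update's user_id resolved to full user info.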
	getDocUpdatesWithUserInfo: (project_id, doc_id, options = {}, callback = (error, updates) ->) ->
		UpdatesManager.getDocUpdates project_id, doc_id, options, (error, updates) ->
			return callback(error) if error?
			UpdatesManager.fillUserInfo updates, (error, updates) ->
				return callback(error) if error?
				callback null, updates

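	# Flush all pending updates for the project, then read its compressed history from Mongo.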
	getProjectUpdates: (project_id, options = {}, callback = (error, updates) ->) ->
		UpdatesManager.processUncompressedUpdatesForProject project_id, (error) ->
			return callback(error) if error?
			MongoManager.getProjectUpdates project_id, options, callback

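	# As getProjectUpdates, but with each update's user_id resolved to full user info.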
	getProjectUpdatesWithUserInfo: (project_id, options = {}, callback = (error, updates) ->) ->
		UpdatesManager.getProjectUpdates project_id, options, (error, updates) ->
			return callback(error) if error?
			UpdatesManager.fillUserInfo updates, (error, updates) ->
				return callback(error) if error?
				callback null, updates

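	# Build a summarized view of the project's history, fetching batches of
	# updates going back in time until at least options.min_count summaries have
	# been collected or the history is exhausted.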
	getSummarizedProjectUpdates: (project_id, options = {}, callback = (error, updates) ->) ->
		options.min_count ||= 25
		summarizedUpdates = []
		before = options.before
		do fetchNextBatch = () ->
			UpdatesManager._extendBatchOfSummarizedUpdates project_id, summarizedUpdates, before, options.min_count, (error, updates, nextBeforeUpdate) ->
				return callback(error) if error?
				if !nextBeforeUpdate? or updates.length >= options.min_count
					callback null, updates, nextBeforeUpdate
				else
					before = nextBeforeUpdate
					summarizedUpdates = updates
					fetchNextBatch()

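	# Fetch the next batch of updates older than `before` (up to three times
	# desiredLength, since several updates may collapse into one summary) and
	# merge them into the existing summaries.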
	_extendBatchOfSummarizedUpdates: (
		project_id,
		existingSummarizedUpdates,
		before, desiredLength,
		callback = (error, summarizedUpdates, endOfDatabase) ->
	) ->
		UpdatesManager.getProjectUpdatesWithUserInfo project_id, { before: before, limit: 3 * desiredLength }, (error, updates) ->
			return callback(error) if error?

			# Suppose in this request we have fetched the solid updates. In the next request we need
			# to fetch the dotted updates. These are defined by having an end timestamp less than
			# the last update's end timestamp (updates are ordered by descending end_ts). I.e.
			# start_ts--v      v--end_ts
			#   doc1:   |......|  |...|   |-------|
			#   doc2:       |------------------|
			#                                  ^----- Next time, fetch all updates with an
			#                                         end_ts less than this
			#
			if updates? and updates.length > 0
				nextBeforeTimestamp = updates[updates.length - 1].meta.end_ts
				if nextBeforeTimestamp >= before
					error = new Error("history order is broken")
					logger.error err: error, project_id: project_id, nextBeforeTimestamp: nextBeforeTimestamp, before: before, "error in project history"
					return callback(error)
			else
				nextBeforeTimestamp = null

			summarizedUpdates = UpdatesManager._summarizeUpdates(
				updates, existingSummarizedUpdates
			)
			callback null,
				summarizedUpdates,
				nextBeforeTimestamp

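	# Resolve the user_id on each update to a user info object from the web API,
	# replacing update.meta.user_id with update.meta.user.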
	fillUserInfo: (updates, callback = (error, updates) ->) ->
		users = {}
		for update in updates
			if UpdatesManager._validUserId(update.meta.user_id)
				users[update.meta.user_id] = true

		jobs = []
		for user_id, _ of users
			do (user_id) ->
				jobs.push (callback) ->
					WebApiManager.getUserInfo user_id, (error, userInfo) ->
						return callback(error) if error?
						users[user_id] = userInfo
						callback()

		async.series jobs, (error) ->
			return callback(error) if error?
			for update in updates
				user_id = update.meta.user_id
				delete update.meta.user_id
				if UpdatesManager._validUserId(user_id)
					update.meta.user = users[user_id]
			callback null, updates

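	# A valid user id is a 24 character hex string (a Mongo ObjectId).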
	_validUserId: (user_id) ->
		if !user_id?
			return false
		else
			return !!user_id.match(/^[a-f0-9]{24}$/)

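	# Updates less than this far apart in time are merged into one summary entry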
	TIME_BETWEEN_DISTINCT_UPDATES: fiveMinutes = 5 * 60 * 1000
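	# Fold the updates (ordered by descending end_ts) into summary entries,
	# merging an update into the previous summary when it falls within
	# TIME_BETWEEN_DISTINCT_UPDATES of it; each summary tracks its users,
	# per-doc version ranges and overall time span.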
	_summarizeUpdates: (updates, existingSummarizedUpdates = []) ->
		summarizedUpdates = existingSummarizedUpdates.slice()
		for update in updates
			earliestUpdate = summarizedUpdates[summarizedUpdates.length - 1]
			if earliestUpdate and earliestUpdate.meta.start_ts - update.meta.end_ts < @TIME_BETWEEN_DISTINCT_UPDATES
				userExists = false
				for user in earliestUpdate.meta.users
					if (!user and !update.meta.user) or (user?.id == update.meta.user?.id)
						userExists = true
						break
				if !userExists
					earliestUpdate.meta.users.push update.meta.user

				doc_id = update.doc_id.toString()
				doc = earliestUpdate.docs[doc_id]
				if doc?
					doc.fromV = Math.min(doc.fromV, update.v)
					doc.toV = Math.max(doc.toV, update.v)
				else
					earliestUpdate.docs[doc_id] =
						fromV: update.v
						toV: update.v

				earliestUpdate.meta.start_ts = Math.min(earliestUpdate.meta.start_ts, update.meta.start_ts)
				earliestUpdate.meta.end_ts = Math.max(earliestUpdate.meta.end_ts, update.meta.end_ts)
			else
				newUpdate =
					meta:
						users: []
						start_ts: update.meta.start_ts
						end_ts: update.meta.end_ts
					docs: {}

				newUpdate.docs[update.doc_id.toString()] =
					fromV: update.v
					toV: update.v

				newUpdate.meta.users.push update.meta.user
				summarizedUpdates.push newUpdate

		return summarizedUpdates