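# HistoryManager coordinates sending document and project changes to the
# track-changes and project-history services, flushing them over HTTP once
# enough ops have accumulated.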
async = require "async"
logger = require "logger-sharelatex"
request = require "request"
Settings = require "settings-sharelatex"
HistoryRedisManager = require "./HistoryRedisManager"
ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager"
RedisManager = require "./RedisManager"
module.exports = HistoryManager =
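	# Fire-and-forget flush of a single doc to the track-changes api. Errors are
	# logged rather than returned, since callers do not wait for the result.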
	flushDocChangesAsync: (project_id, doc_id) ->
		if !Settings.apis?.trackchanges?
			logger.warn { doc_id }, "track changes API is not configured, so not flushing"
			return
		url = "#{Settings.apis.trackchanges.url}/project/#{project_id}/doc/#{doc_id}/flush"
		logger.log { project_id, doc_id, url }, "flushing doc in track changes api"
		request.post url, (error, res, body) ->
			if error?
				logger.error { error, doc_id, project_id }, "error flushing doc to track changes api"
			else if res.statusCode < 200 or res.statusCode >= 300
				logger.error { doc_id, project_id }, "track changes api returned a failure status code: #{res.statusCode}"
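
	# Fire-and-forget flush of a whole project to the project-history api.
	# Does nothing when project history is not enabled in Settings.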
	flushProjectChangesAsync: (project_id) ->
		return if !Settings.apis?.project_history?.enabled
		url = "#{Settings.apis.project_history.url}/project/#{project_id}/flush"
		logger.log { project_id, url }, "flushing project in project history api"
		request.post url, (error, res, body) ->
			if error?
				logger.error { error, project_id }, "error flushing project to project history api"
			else if res.statusCode < 200 or res.statusCode >= 300
				logger.error { project_id }, "project history api returned a failure status code: #{res.statusCode}"
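
	# Flush thresholds: a doc is flushed to track-changes every 100 of its ops,
	# and a project is flushed to project-history every 500 project ops.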
	FLUSH_DOC_EVERY_N_OPS: 100
	FLUSH_PROJECT_EVERY_N_OPS: 500
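
	# Record in Redis that this doc has history ops to flush, and kick off
	# background flushes when the op counts cross the thresholds above.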
	recordAndFlushHistoryOps: (project_id, doc_id, ops = [], doc_ops_length, project_ops_length, callback = (error) ->) ->
		if ops.length == 0
			return callback()

		if Settings.apis?.project_history?.enabled
			if HistoryManager.shouldFlushHistoryOps(project_ops_length, ops.length, HistoryManager.FLUSH_PROJECT_EVERY_N_OPS)
				# Do this in the background since it uses HTTP and so may be too
				# slow to wait for when processing a doc update.
				logger.log { project_ops_length, project_id }, "flushing project history api"
				HistoryManager.flushProjectChangesAsync project_id

		HistoryRedisManager.recordDocHasHistoryOps project_id, doc_id, ops, (error) ->
			return callback(error) if error?
			if HistoryManager.shouldFlushHistoryOps(doc_ops_length, ops.length, HistoryManager.FLUSH_DOC_EVERY_N_OPS)
				# Do this in the background since it uses HTTP and so may be too
				# slow to wait for when processing a doc update.
				logger.log { doc_ops_length, doc_id, project_id }, "flushing track changes api"
				HistoryManager.flushDocChangesAsync project_id, doc_id
			callback()
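
	# Decide whether the latest batch of ops pushed the total length over a
	# multiple of `threshold`. For example, with threshold = 100, growing from
	# 95 to 105 ops crosses from block 0 into block 1, so we flush; growing
	# from 101 to 105 stays in block 1, so we do not.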
	shouldFlushHistoryOps: (length, ops_length, threshold) ->
		return false if !length # don't flush unless we know the length
		# We want to flush every `threshold` ops, i.e. at 100, 200, 300, etc for docs.
		# Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these
		# ops. If we've changed, then we've gone over a multiple of the threshold and should flush.
		# (Most of the time, we will only hit the threshold exactly, and flushing will put us back to 0)
		previousLength = length - ops_length
		prevBlock = Math.floor(previousLength / threshold)
		newBlock = Math.floor(length / threshold)
		return newBlock != prevBlock
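
	# Cap on how many docs are resynced concurrently via async.eachLimit below.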
	MAX_PARALLEL_REQUESTS: 4
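
	# Queue a project structure resync for project-history, then resync the
	# contents of each doc, at most MAX_PARALLEL_REQUESTS at a time.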
	resyncProjectHistory: (project_id, projectHistoryId, docs, files, callback) ->
		ProjectHistoryRedisManager.queueResyncProjectStructure project_id, projectHistoryId, docs, files, (error) ->
			return callback(error) if error?
			# DocumentManager is required here rather than at the top of the file,
			# presumably to avoid a circular require between the two modules.
			DocumentManager = require "./DocumentManager"
			resyncDoc = (doc, cb) ->
				DocumentManager.resyncDocContentsWithLock project_id, doc.doc, cb
			async.eachLimit docs, HistoryManager.MAX_PARALLEL_REQUESTS, resyncDoc, callback