2020-05-06 10:09:33 +00:00
|
|
|
let HistoryManager
|
|
|
|
const async = require('async')
|
2021-10-06 09:10:28 +00:00
|
|
|
const logger = require('@overleaf/logger')
|
2020-05-06 10:09:33 +00:00
|
|
|
const request = require('request')
|
2021-07-12 16:47:15 +00:00
|
|
|
const Settings = require('@overleaf/settings')
|
2020-05-06 10:09:33 +00:00
|
|
|
const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager')
|
|
|
|
const metrics = require('./Metrics')
|
2016-11-28 10:14:42 +00:00
|
|
|
|
2020-05-06 10:09:33 +00:00
|
|
|
module.exports = HistoryManager = {
|
|
|
|
// flush changes in the background
|
2023-03-21 12:06:13 +00:00
|
|
|
flushProjectChangesAsync(projectId) {
|
2023-06-06 10:19:55 +00:00
|
|
|
HistoryManager.flushProjectChanges(
|
2023-03-21 12:06:13 +00:00
|
|
|
projectId,
|
2020-05-06 10:09:33 +00:00
|
|
|
{ background: true },
|
|
|
|
function () {}
|
|
|
|
)
|
|
|
|
},
|
2016-11-28 10:14:42 +00:00
|
|
|
|
2020-05-06 10:09:33 +00:00
|
|
|
// flush changes and callback (for when we need to know the queue is flushed)
|
2023-03-21 12:06:13 +00:00
|
|
|
flushProjectChanges(projectId, options, callback) {
|
2020-05-06 10:09:33 +00:00
|
|
|
if (callback == null) {
|
2021-10-27 09:49:18 +00:00
|
|
|
callback = function () {}
|
2020-05-06 10:09:33 +00:00
|
|
|
}
|
|
|
|
if (options.skip_history_flush) {
|
2023-03-21 12:06:13 +00:00
|
|
|
logger.debug({ projectId }, 'skipping flush of project history')
|
2020-05-06 10:09:33 +00:00
|
|
|
return callback()
|
|
|
|
}
|
|
|
|
metrics.inc('history-flush', 1, { status: 'project-history' })
|
2023-03-21 12:06:13 +00:00
|
|
|
const url = `${Settings.apis.project_history.url}/project/${projectId}/flush`
|
2020-05-06 10:09:33 +00:00
|
|
|
const qs = {}
|
|
|
|
if (options.background) {
|
|
|
|
qs.background = true
|
|
|
|
} // pass on the background flush option if present
|
2023-03-21 12:06:13 +00:00
|
|
|
logger.debug({ projectId, url, qs }, 'flushing doc in project history api')
|
2023-06-06 10:19:55 +00:00
|
|
|
request.post({ url, qs }, function (error, res, body) {
|
|
|
|
if (error) {
|
|
|
|
logger.error({ error, projectId }, 'project history api request failed')
|
|
|
|
callback(error)
|
2020-05-06 10:09:33 +00:00
|
|
|
} else if (res.statusCode < 200 && res.statusCode >= 300) {
|
|
|
|
logger.error(
|
2023-03-21 12:06:13 +00:00
|
|
|
{ projectId },
|
2020-05-06 10:09:33 +00:00
|
|
|
`project history api returned a failure status code: ${res.statusCode}`
|
|
|
|
)
|
2023-06-06 10:19:55 +00:00
|
|
|
callback(error)
|
2020-05-06 10:09:33 +00:00
|
|
|
} else {
|
2023-06-06 10:19:55 +00:00
|
|
|
callback()
|
2020-05-06 10:09:33 +00:00
|
|
|
}
|
|
|
|
})
|
|
|
|
},
|
2016-11-28 10:14:42 +00:00
|
|
|
|
2020-05-06 10:09:33 +00:00
|
|
|
// Per-doc flush threshold: flush once a doc's op count crosses a multiple
// of this value. NOTE(review): not referenced in this chunk of the file —
// presumably used by doc-level flush logic elsewhere; confirm.
FLUSH_DOC_EVERY_N_OPS: 100,

// Project-level flush threshold: recordAndFlushHistoryOps triggers a
// background project-history flush each time the project's total op count
// crosses a multiple of this value (see shouldFlushHistoryOps).
FLUSH_PROJECT_EVERY_N_OPS: 500,
|
2017-11-13 11:53:39 +00:00
|
|
|
|
2023-06-06 10:19:55 +00:00
|
|
|
recordAndFlushHistoryOps(projectId, ops, projectOpsLength) {
|
2020-05-06 10:09:33 +00:00
|
|
|
if (ops == null) {
|
|
|
|
ops = []
|
|
|
|
}
|
|
|
|
if (ops.length === 0) {
|
2023-06-06 10:19:55 +00:00
|
|
|
return
|
2020-05-06 10:09:33 +00:00
|
|
|
}
|
2017-10-05 12:45:29 +00:00
|
|
|
|
2020-05-06 10:09:33 +00:00
|
|
|
// record updates for project history
|
2023-06-06 10:19:55 +00:00
|
|
|
if (
|
|
|
|
HistoryManager.shouldFlushHistoryOps(
|
|
|
|
projectOpsLength,
|
|
|
|
ops.length,
|
|
|
|
HistoryManager.FLUSH_PROJECT_EVERY_N_OPS
|
|
|
|
)
|
|
|
|
) {
|
|
|
|
// Do this in the background since it uses HTTP and so may be too
|
|
|
|
// slow to wait for when processing a doc update.
|
2020-05-06 10:09:33 +00:00
|
|
|
logger.debug(
|
2023-06-06 10:19:55 +00:00
|
|
|
{ projectOpsLength, projectId },
|
|
|
|
'flushing project history api'
|
2020-05-06 10:09:33 +00:00
|
|
|
)
|
2023-06-06 10:19:55 +00:00
|
|
|
HistoryManager.flushProjectChangesAsync(projectId)
|
2020-05-06 10:09:33 +00:00
|
|
|
}
|
|
|
|
},
|
2017-10-05 12:45:29 +00:00
|
|
|
|
2023-03-21 12:06:13 +00:00
|
|
|
shouldFlushHistoryOps(length, opsLength, threshold) {
|
2020-05-06 10:09:33 +00:00
|
|
|
if (!length) {
|
|
|
|
return false
|
|
|
|
} // don't flush unless we know the length
|
|
|
|
// We want to flush every 100 ops, i.e. 100, 200, 300, etc
|
|
|
|
// Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these
|
|
|
|
// ops. If we've changed, then we've gone over a multiple of 100 and should flush.
|
|
|
|
// (Most of the time, we will only hit 100 and then flushing will put us back to 0)
|
2023-03-21 12:06:13 +00:00
|
|
|
const previousLength = length - opsLength
|
2020-05-06 10:09:33 +00:00
|
|
|
const prevBlock = Math.floor(previousLength / threshold)
|
|
|
|
const newBlock = Math.floor(length / threshold)
|
|
|
|
return newBlock !== prevBlock
|
|
|
|
},
|
2018-02-28 17:31:43 +00:00
|
|
|
|
2020-05-06 10:09:33 +00:00
|
|
|
// Upper bound on concurrent per-doc resync requests issued by
// resyncProjectHistory (used as the limit for async.eachLimit).
MAX_PARALLEL_REQUESTS: 4,
|
2018-08-16 10:13:11 +00:00
|
|
|
|
2023-03-21 12:06:13 +00:00
|
|
|
resyncProjectHistory(projectId, projectHistoryId, docs, files, callback) {
|
2023-06-06 10:19:55 +00:00
|
|
|
ProjectHistoryRedisManager.queueResyncProjectStructure(
|
2023-03-21 12:06:13 +00:00
|
|
|
projectId,
|
2020-05-06 10:09:33 +00:00
|
|
|
projectHistoryId,
|
|
|
|
docs,
|
|
|
|
files,
|
|
|
|
function (error) {
|
2023-06-06 10:19:55 +00:00
|
|
|
if (error) {
|
2020-05-06 10:09:33 +00:00
|
|
|
return callback(error)
|
|
|
|
}
|
|
|
|
const DocumentManager = require('./DocumentManager')
|
2021-11-22 14:15:36 +00:00
|
|
|
const resyncDoc = (doc, cb) => {
|
|
|
|
DocumentManager.resyncDocContentsWithLock(
|
2023-03-21 12:06:13 +00:00
|
|
|
projectId,
|
2021-11-22 14:15:36 +00:00
|
|
|
doc.doc,
|
|
|
|
doc.path,
|
|
|
|
cb
|
|
|
|
)
|
|
|
|
}
|
2023-06-06 10:19:55 +00:00
|
|
|
async.eachLimit(
|
2020-05-06 10:09:33 +00:00
|
|
|
docs,
|
|
|
|
HistoryManager.MAX_PARALLEL_REQUESTS,
|
|
|
|
resyncDoc,
|
|
|
|
callback
|
|
|
|
)
|
|
|
|
}
|
|
|
|
)
|
2021-07-13 11:04:42 +00:00
|
|
|
},
|
2020-05-06 10:09:33 +00:00
|
|
|
}
|