Mirror of https://github.com/overleaf/overleaf.git (synced 2024-11-07 20:31:06 -05:00)

Merge pull request #12206 from overleaf/em-camel-case-docupdater

Camel case variables in document-updater

GitOrigin-RevId: 76ad0921cc059878f21639547fad1bff1913bc8b
parent 03f45c02c3
commit 3831416c2f

36 changed files with 406 additions and 456 deletions
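The change is mechanical but has one recurring subtlety: only local variables and parameters move to camelCase. Externally visible snake_case names stay put — Redis key-schema parameters, persisted fields such as `update.meta.doc_length`, and Mongo document fields — so object shorthand that used to line up with those names must now be spelled out. A minimal sketch of the pattern (hypothetical `flushDoc` helper, wrapping the real `Keys.pendingUpdates` call seen in the diff):

    // Before: the snake_case local lets shorthand match the redis key schema.
    function flushDoc(project_id, doc_id) {
      return Keys.pendingUpdates({ doc_id })
    }

    // After: the local is camelCase, but the key schema still expects doc_id,
    // so the property is written out explicitly.
    function flushDoc(projectId, docId) {
      return Keys.pendingUpdates({ doc_id: docId })
    }
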
@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-unused-vars,
 */
 // TODO: This file was created by bulk-decaffeinate.
@@ -41,16 +40,16 @@ module.exports = DeleteQueueManager = {
       startTime - options.min_delete_age + 100 * (Math.random() - 0.5)
     let count = 0
 
-    const flushProjectIfNotModified = (project_id, flushTimestamp, cb) =>
+    const flushProjectIfNotModified = (projectId, flushTimestamp, cb) =>
       ProjectManager.getProjectDocsTimestamps(
-        project_id,
+        projectId,
         function (err, timestamps) {
           if (err != null) {
             return callback(err)
           }
           if (timestamps.length === 0) {
             logger.debug(
-              { project_id },
+              { projectId },
               'skipping flush of queued project - no timestamps'
             )
             return cb()
@@ -60,22 +59,19 @@ module.exports = DeleteQueueManager = {
           if (timestamp > flushTimestamp) {
             metrics.inc('queued-delete-skipped')
             logger.debug(
-              { project_id, timestamps, flushTimestamp },
+              { projectId, timestamps, flushTimestamp },
               'found newer timestamp, will skip delete'
             )
             return cb()
           }
         }
-        logger.debug(
-          { project_id, flushTimestamp },
-          'flushing queued project'
-        )
+        logger.debug({ projectId, flushTimestamp }, 'flushing queued project')
         return ProjectManager.flushAndDeleteProjectWithLocks(
-          project_id,
+          projectId,
           { skip_history_flush: false },
           function (err) {
             if (err != null) {
-              logger.err({ project_id, err }, 'error flushing queued project')
+              logger.err({ projectId, err }, 'error flushing queued project')
             }
             metrics.inc('queued-delete-completed')
             return cb(null, true)
@@ -96,17 +92,17 @@ module.exports = DeleteQueueManager = {
     }
     return RedisManager.getNextProjectToFlushAndDelete(
       cutoffTime,
-      function (err, project_id, flushTimestamp, queueLength) {
+      function (err, projectId, flushTimestamp, queueLength) {
         if (err != null) {
           return callback(err, count)
         }
-        if (project_id == null) {
+        if (projectId == null) {
           return callback(null, count)
         }
-        logger.debug({ project_id, queueLength }, 'flushing queued project')
+        logger.debug({ projectId, queueLength }, 'flushing queued project')
         metrics.globalGauge('queued-flush-backlog', queueLength)
         return flushProjectIfNotModified(
-          project_id,
+          projectId,
           flushTimestamp,
           function (err, flushed) {
             if (err) {

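The `cutoffTime` computation in the first hunk above deliberately adds jitter: each polling worker smears its cutoff by up to ±50ms, so concurrent workers scanning the same delete queue tend to pick different projects. A standalone sketch, assuming a hypothetical one-minute `min_delete_age`:

    // Entries younger than min_delete_age are left alone; the +/-50ms jitter
    // de-synchronises workers that poll at the same moment.
    const startTime = Date.now()
    const options = { min_delete_age: 60 * 1000 } // assumed value, not from the diff
    const cutoffTime =
      startTime - options.min_delete_age + 100 * (Math.random() - 0.5)
    console.log(startTime - cutoffTime) // roughly 60000, varying by +/-50
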
@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-unused-vars,
 */
 // TODO: This file was created by bulk-decaffeinate.
@@ -51,15 +50,15 @@ module.exports = DispatchManager = {
         if (result == null) {
           return callback()
         }
-        const [list_name, doc_key] = Array.from(result)
-        const [project_id, doc_id] = Array.from(
-          Keys.splitProjectIdAndDocId(doc_key)
+        const [listName, docKey] = Array.from(result)
+        const [projectId, docId] = Array.from(
+          Keys.splitProjectIdAndDocId(docKey)
         )
         // Dispatch this in the background
         const backgroundTask = cb =>
           UpdateManager.processOutstandingUpdatesWithLock(
-            project_id,
-            doc_id,
+            projectId,
+            docId,
             function (error) {
               // log everything except OpRangeNotAvailable errors, these are normal
               if (error != null) {
@@ -69,12 +68,12 @@ module.exports = DispatchManager = {
                 error instanceof Errors.DeleteMismatchError
               if (logAsDebug) {
                 logger.debug(
-                  { err: error, project_id, doc_id },
+                  { err: error, projectId, docId },
                   'error processing update'
                 )
               } else {
                 logger.error(
-                  { err: error, project_id, doc_id },
+                  { err: error, projectId, docId },
                   'error processing update'
                 )
               }

@@ -1,6 +1,3 @@
-/* eslint-disable
-    camelcase,
-*/
 // TODO: This file was created by bulk-decaffeinate.
 // Fix any style issues and re-enable lint.
 /*
@@ -20,25 +17,25 @@ const RedisManager = require('./RedisManager')
 const metrics = require('./Metrics')
 
 module.exports = HistoryManager = {
-  flushDocChangesAsync(project_id, doc_id) {
+  flushDocChangesAsync(projectId, docId) {
     if (
       (Settings.apis != null ? Settings.apis.trackchanges : undefined) == null
     ) {
       logger.warn(
-        { doc_id },
+        { docId },
         'track changes API is not configured, so not flushing'
       )
       return
     }
     if (Settings.disableTrackChanges) {
-      logger.debug({ doc_id }, 'track changes is disabled, so not flushing')
+      logger.debug({ docId }, 'track changes is disabled, so not flushing')
       return
     }
     return RedisManager.getHistoryType(
-      doc_id,
+      docId,
       function (err, projectHistoryType) {
         if (err != null) {
-          logger.warn({ err, doc_id }, 'error getting history type')
+          logger.warn({ err, docId }, 'error getting history type')
         }
         // if there's an error continue and flush to track-changes for safety
         if (
@@ -46,25 +43,25 @@ module.exports = HistoryManager = {
           projectHistoryType === 'project-history'
         ) {
           return logger.debug(
-            { doc_id, projectHistoryType },
+            { docId, projectHistoryType },
             'skipping track-changes flush'
           )
         } else {
           metrics.inc('history-flush', 1, { status: 'track-changes' })
-          const url = `${Settings.apis.trackchanges.url}/project/${project_id}/doc/${doc_id}/flush`
+          const url = `${Settings.apis.trackchanges.url}/project/${projectId}/doc/${docId}/flush`
           logger.debug(
-            { project_id, doc_id, url, projectHistoryType },
+            { projectId, docId, url, projectHistoryType },
            'flushing doc in track changes api'
           )
           return request.post(url, function (error, res, body) {
             if (error != null) {
               return logger.error(
-                { error, doc_id, project_id },
+                { error, docId, projectId },
                 'track changes doc to track changes api'
               )
             } else if (res.statusCode < 200 && res.statusCode >= 300) {
               return logger.error(
-                { doc_id, project_id },
+                { docId, projectId },
                 `track changes api returned a failure status code: ${res.statusCode}`
               )
             }
@@ -75,19 +72,19 @@ module.exports = HistoryManager = {
   },
 
   // flush changes in the background
-  flushProjectChangesAsync(project_id) {
+  flushProjectChangesAsync(projectId) {
     if (!Settings.apis?.project_history?.enabled) {
       return
     }
     return HistoryManager.flushProjectChanges(
-      project_id,
+      projectId,
       { background: true },
       function () {}
     )
   },
 
   // flush changes and callback (for when we need to know the queue is flushed)
-  flushProjectChanges(project_id, options, callback) {
+  flushProjectChanges(projectId, options, callback) {
     if (callback == null) {
       callback = function () {}
     }
@@ -95,26 +92,26 @@ module.exports = HistoryManager = {
       return callback()
     }
     if (options.skip_history_flush) {
-      logger.debug({ project_id }, 'skipping flush of project history')
+      logger.debug({ projectId }, 'skipping flush of project history')
       return callback()
     }
     metrics.inc('history-flush', 1, { status: 'project-history' })
-    const url = `${Settings.apis.project_history.url}/project/${project_id}/flush`
+    const url = `${Settings.apis.project_history.url}/project/${projectId}/flush`
     const qs = {}
     if (options.background) {
       qs.background = true
     } // pass on the background flush option if present
-    logger.debug({ project_id, url, qs }, 'flushing doc in project history api')
+    logger.debug({ projectId, url, qs }, 'flushing doc in project history api')
     return request.post({ url, qs }, function (error, res, body) {
       if (error != null) {
         logger.error(
-          { error, project_id },
+          { error, projectId },
           'project history doc to track changes api'
         )
         return callback(error)
       } else if (res.statusCode < 200 && res.statusCode >= 300) {
         logger.error(
-          { project_id },
+          { projectId },
           `project history api returned a failure status code: ${res.statusCode}`
         )
         return callback(error)
@@ -128,11 +125,11 @@ module.exports = HistoryManager = {
   FLUSH_PROJECT_EVERY_N_OPS: 500,
 
   recordAndFlushHistoryOps(
-    project_id,
-    doc_id,
+    projectId,
+    docId,
     ops,
-    doc_ops_length,
-    project_ops_length,
+    docOpsLength,
+    projectOpsLength,
     callback
   ) {
     if (ops == null) {
@@ -149,7 +146,7 @@ module.exports = HistoryManager = {
     if (Settings.apis?.project_history?.enabled) {
       if (
         HistoryManager.shouldFlushHistoryOps(
-          project_ops_length,
+          projectOpsLength,
           ops.length,
           HistoryManager.FLUSH_PROJECT_EVERY_N_OPS
         )
@@ -157,18 +154,18 @@ module.exports = HistoryManager = {
         // Do this in the background since it uses HTTP and so may be too
         // slow to wait for when processing a doc update.
         logger.debug(
-          { project_ops_length, project_id },
+          { projectOpsLength, projectId },
           'flushing project history api'
         )
-        HistoryManager.flushProjectChangesAsync(project_id)
+        HistoryManager.flushProjectChangesAsync(projectId)
       }
     }
 
     // if the doc_ops_length is undefined it means the project is not using track-changes
     // so we can bail out here
-    if (Settings.disableTrackChanges || typeof doc_ops_length === 'undefined') {
+    if (Settings.disableTrackChanges || typeof docOpsLength === 'undefined') {
       logger.debug(
-        { project_id, doc_id },
+        { projectId, docId },
         'skipping flush to track-changes, only using project-history'
       )
       return callback()
@@ -176,8 +173,8 @@ module.exports = HistoryManager = {
 
     // record updates for track-changes
     return HistoryRedisManager.recordDocHasHistoryOps(
-      project_id,
-      doc_id,
+      projectId,
+      docId,
       ops,
       function (error) {
         if (error != null) {
@@ -185,7 +182,7 @@ module.exports = HistoryManager = {
         }
         if (
           HistoryManager.shouldFlushHistoryOps(
-            doc_ops_length,
+            docOpsLength,
             ops.length,
             HistoryManager.FLUSH_DOC_EVERY_N_OPS
           )
@@ -193,17 +190,17 @@ module.exports = HistoryManager = {
           // Do this in the background since it uses HTTP and so may be too
           // slow to wait for when processing a doc update.
           logger.debug(
-            { doc_ops_length, doc_id, project_id },
+            { docOpsLength, docId, projectId },
            'flushing track changes api'
           )
-          HistoryManager.flushDocChangesAsync(project_id, doc_id)
+          HistoryManager.flushDocChangesAsync(projectId, docId)
         }
         return callback()
       }
     )
   },
 
-  shouldFlushHistoryOps(length, ops_length, threshold) {
+  shouldFlushHistoryOps(length, opsLength, threshold) {
     if (!length) {
       return false
     } // don't flush unless we know the length
@@ -211,7 +208,7 @@ module.exports = HistoryManager = {
     // Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these
     // ops. If we've changed, then we've gone over a multiple of 100 and should flush.
     // (Most of the time, we will only hit 100 and then flushing will put us back to 0)
-    const previousLength = length - ops_length
+    const previousLength = length - opsLength
     const prevBlock = Math.floor(previousLength / threshold)
     const newBlock = Math.floor(length / threshold)
     return newBlock !== prevBlock
@@ -219,9 +216,9 @@ module.exports = HistoryManager = {
 
   MAX_PARALLEL_REQUESTS: 4,
 
-  resyncProjectHistory(project_id, projectHistoryId, docs, files, callback) {
+  resyncProjectHistory(projectId, projectHistoryId, docs, files, callback) {
     return ProjectHistoryRedisManager.queueResyncProjectStructure(
-      project_id,
+      projectId,
       projectHistoryId,
       docs,
       files,
@@ -232,7 +229,7 @@ module.exports = HistoryManager = {
       const DocumentManager = require('./DocumentManager')
       const resyncDoc = (doc, cb) => {
         DocumentManager.resyncDocContentsWithLock(
-          project_id,
+          projectId,
           doc.doc,
           doc.path,
           cb

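`shouldFlushHistoryOps` above is a pure function, renamed but otherwise untouched; extracted verbatim as a runnable sketch of the block-crossing check:

    // Flush when appending opsLength ops pushes the total length across a
    // multiple of the threshold (i.e. the 'block' number changes).
    function shouldFlushHistoryOps(length, opsLength, threshold) {
      if (!length) {
        return false // don't flush unless we know the length
      }
      const previousLength = length - opsLength
      const prevBlock = Math.floor(previousLength / threshold)
      const newBlock = Math.floor(length / threshold)
      return newBlock !== prevBlock
    }

    console.log(shouldFlushHistoryOps(99, 5, 100)) // false - still in block 0
    console.log(shouldFlushHistoryOps(101, 5, 100)) // true - crossed 100
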
@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-unused-vars,
 */
 // TODO: This file was created by bulk-decaffeinate.
@@ -17,7 +16,7 @@ const Keys = Settings.redis.history.key_schema
 const logger = require('@overleaf/logger')
 
 module.exports = HistoryRedisManager = {
-  recordDocHasHistoryOps(project_id, doc_id, ops, callback) {
+  recordDocHasHistoryOps(projectId, docId, ops, callback) {
     if (ops == null) {
       ops = []
     }
@@ -27,13 +26,10 @@ module.exports = HistoryRedisManager = {
     if (ops.length === 0) {
       return callback(new Error('cannot push no ops')) // This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush
     }
-    logger.debug(
-      { project_id, doc_id },
-      'marking doc in project for history ops'
-    )
+    logger.debug({ projectId, docId }, 'marking doc in project for history ops')
     return rclient.sadd(
-      Keys.docsWithHistoryOps({ project_id }),
-      doc_id,
+      Keys.docsWithHistoryOps({ project_id: projectId }),
+      docId,
       function (error) {
         if (error != null) {
           return callback(error)

@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-unused-vars,
 */
 // TODO: This file was created by bulk-decaffeinate.
@@ -92,20 +91,20 @@ const ProjectFlusher = {
     return ProjectFlusher._getKeys(
       docUpdaterKeys.docsInProject({ project_id: '*' }),
       options.limit,
-      function (error, project_keys) {
+      function (error, projectKeys) {
         if (error != null) {
           logger.err({ err: error }, 'error getting keys for flushing')
           return callback(error)
         }
-        const project_ids = ProjectFlusher._extractIds(project_keys)
+        const projectIds = ProjectFlusher._extractIds(projectKeys)
         if (options.dryRun) {
-          return callback(null, project_ids)
+          return callback(null, projectIds)
         }
         const jobs = _.map(
-          project_ids,
-          project_id => cb =>
+          projectIds,
+          projectId => cb =>
             ProjectManager.flushAndDeleteProjectWithLocks(
-              project_id,
+              projectId,
               { background: true },
               cb
             )
@@ -118,9 +117,9 @@ const ProjectFlusher = {
         const failure = []
         _.each(results, function (result, i) {
           if (result.error != null) {
-            return failure.push(project_ids[i])
+            return failure.push(projectIds[i])
           } else {
-            return success.push(project_ids[i])
+            return success.push(projectIds[i])
           }
         })
         logger.info(

@@ -1,6 +1,3 @@
-/* eslint-disable
-    camelcase,
-*/
 // TODO: This file was created by bulk-decaffeinate.
 // Fix any style issues and re-enable lint.
 /*
@@ -22,7 +19,7 @@ module.exports = RangesManager = {
   MAX_COMMENTS: 500,
   MAX_CHANGES: 2000,
 
-  applyUpdate(project_id, doc_id, entries, updates, newDocLines, callback) {
+  applyUpdate(projectId, docId, entries, updates, newDocLines, callback) {
     let error
     if (entries == null) {
       entries = {}
@@ -72,7 +69,7 @@ module.exports = RangesManager = {
     } catch (error2) {
       error = error2
       logger.error(
-        { err: error, project_id, doc_id, newDocLines, updates },
+        { err: error, projectId, docId, newDocLines, updates },
         'error validating ranges'
       )
       return callback(error)
@@ -95,8 +92,8 @@ module.exports = RangesManager = {
     const response = RangesManager._getRanges(rangesTracker)
     logger.debug(
       {
-        project_id,
-        doc_id,
+        projectId,
+        docId,
         changesCount:
           response.changes != null ? response.changes.length : undefined,
         commentsCount:
@@ -108,26 +105,26 @@ module.exports = RangesManager = {
     return callback(null, response, rangesWereCollapsed)
   },
 
-  acceptChanges(change_ids, ranges, callback) {
+  acceptChanges(changeIds, ranges, callback) {
     if (callback == null) {
       callback = function () {}
     }
     const { changes, comments } = ranges
-    logger.debug(`accepting ${change_ids.length} changes in ranges`)
+    logger.debug(`accepting ${changeIds.length} changes in ranges`)
     const rangesTracker = new RangesTracker(changes, comments)
-    rangesTracker.removeChangeIds(change_ids)
+    rangesTracker.removeChangeIds(changeIds)
     const response = RangesManager._getRanges(rangesTracker)
     return callback(null, response)
   },
 
-  deleteComment(comment_id, ranges, callback) {
+  deleteComment(commentId, ranges, callback) {
     if (callback == null) {
       callback = function () {}
     }
     const { changes, comments } = ranges
-    logger.debug({ comment_id }, 'deleting comment in ranges')
+    logger.debug({ commentId }, 'deleting comment in ranges')
     const rangesTracker = new RangesTracker(changes, comments)
-    rangesTracker.removeCommentId(comment_id)
+    rangesTracker.removeCommentId(commentId)
     const response = RangesManager._getRanges(rangesTracker)
     return callback(null, response)
   },

@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-unused-vars,
 */
 // TODO: This file was created by bulk-decaffeinate.
@@ -32,10 +31,18 @@ let COUNT = 0
 const MAX_OPS_PER_ITERATION = 8 // process a limited number of ops for safety
 
 module.exports = RealTimeRedisManager = {
-  getPendingUpdatesForDoc(doc_id, callback) {
+  getPendingUpdatesForDoc(docId, callback) {
     const multi = rclient.multi()
-    multi.lrange(Keys.pendingUpdates({ doc_id }), 0, MAX_OPS_PER_ITERATION - 1)
-    multi.ltrim(Keys.pendingUpdates({ doc_id }), MAX_OPS_PER_ITERATION, -1)
+    multi.lrange(
+      Keys.pendingUpdates({ doc_id: docId }),
+      0,
+      MAX_OPS_PER_ITERATION - 1
+    )
+    multi.ltrim(
+      Keys.pendingUpdates({ doc_id: docId }),
+      MAX_OPS_PER_ITERATION,
+      -1
+    )
     return multi.exec(function (error, replys) {
       let jsonUpdate
       if (error != null) {
@@ -62,15 +69,15 @@ module.exports = RealTimeRedisManager = {
     })
   },
 
-  getUpdatesLength(doc_id, callback) {
-    return rclient.llen(Keys.pendingUpdates({ doc_id }), callback)
+  getUpdatesLength(docId, callback) {
+    return rclient.llen(Keys.pendingUpdates({ doc_id: docId }), callback)
   },
 
   sendData(data) {
     // create a unique message id using a counter
-    const message_id = `doc:${HOST}:${RND}-${COUNT++}`
+    const messageId = `doc:${HOST}:${RND}-${COUNT++}`
     if (data != null) {
-      data._id = message_id
+      data._id = messageId
     }
 
     const blob = JSON.stringify(data)

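`sendData` above stamps every payload with a per-process unique id built from a host name, a random prefix, and an incrementing counter. A standalone sketch; the construction of `HOST` and `RND` here is an assumption for illustration, only the `doc:${HOST}:${RND}-${COUNT++}` shape comes from the diff:

    const os = require('os')
    const crypto = require('crypto')

    const HOST = os.hostname() // assumed source of HOST
    const RND = crypto.randomBytes(4).toString('hex') // assumed source of RND
    let COUNT = 0

    function nextMessageId() {
      // COUNT guarantees uniqueness within the process; HOST/RND across processes
      return `doc:${HOST}:${RND}-${COUNT++}`
    }

    console.log(nextMessageId()) // e.g. doc:host-1:9f1c2d3e-0
    console.log(nextMessageId()) // e.g. doc:host-1:9f1c2d3e-1
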
@@ -443,7 +443,7 @@ module.exports = RedisManager = {
         docId,
         version: newVersion,
         hash: newHash,
-        op_versions: opVersions,
+        opVersions,
       },
       'updating doc in redis'
     )

@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-unused-vars,
 */
 // TODO: This file was created by bulk-decaffeinate.
@@ -17,9 +16,9 @@ const RedisManager = require('./RedisManager')
 const Errors = require('./Errors')
 
 module.exports = ShareJsDB = class ShareJsDB {
-  constructor(project_id, doc_id, lines, version) {
-    this.project_id = project_id
-    this.doc_id = doc_id
+  constructor(projectId, docId, lines, version) {
+    this.project_id = projectId
+    this.doc_id = docId
     this.lines = lines
     this.version = version
     this.appliedOps = {}
@@ -28,7 +27,7 @@ module.exports = ShareJsDB = class ShareJsDB {
     this.writeOp = this._writeOp.bind(this)
   }
 
-  getOps(doc_key, start, end, callback) {
+  getOps(docKey, start, end, callback) {
     if (start === end) {
       return callback(null, [])
     }
@@ -40,27 +39,25 @@ module.exports = ShareJsDB = class ShareJsDB {
       end = -1
     }
 
-    const [project_id, doc_id] = Array.from(
-      Keys.splitProjectIdAndDocId(doc_key)
-    )
-    return RedisManager.getPreviousDocOps(doc_id, start, end, callback)
+    const [projectId, docId] = Array.from(Keys.splitProjectIdAndDocId(docKey))
+    return RedisManager.getPreviousDocOps(docId, start, end, callback)
   }
 
-  _writeOp(doc_key, opData, callback) {
-    if (this.appliedOps[doc_key] == null) {
-      this.appliedOps[doc_key] = []
+  _writeOp(docKey, opData, callback) {
+    if (this.appliedOps[docKey] == null) {
+      this.appliedOps[docKey] = []
     }
-    this.appliedOps[doc_key].push(opData)
+    this.appliedOps[docKey].push(opData)
     return callback()
   }
 
-  getSnapshot(doc_key, callback) {
+  getSnapshot(docKey, callback) {
     if (
-      doc_key !== Keys.combineProjectIdAndDocId(this.project_id, this.doc_id)
+      docKey !== Keys.combineProjectIdAndDocId(this.project_id, this.doc_id)
     ) {
       return callback(
         new Errors.NotFoundError(
-          `unexpected doc_key ${doc_key}, expected ${Keys.combineProjectIdAndDocId(
+          `unexpected doc_key ${docKey}, expected ${Keys.combineProjectIdAndDocId(
            this.project_id,
            this.doc_id
          )}`

@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-unused-vars,
 */
 // TODO: This file was created by bulk-decaffeinate.
@@ -30,8 +29,8 @@ util.inherits(ShareJsModel, EventEmitter)
 const MAX_AGE_OF_OP = 80
 
 module.exports = ShareJsUpdateManager = {
-  getNewShareJsModel(project_id, doc_id, lines, version) {
-    const db = new ShareJsDB(project_id, doc_id, lines, version)
+  getNewShareJsModel(projectId, docId, lines, version) {
+    const db = new ShareJsDB(projectId, docId, lines, version)
     const model = new ShareJsModel(db, {
       maxDocLength: Settings.max_doc_length,
       maximumAge: MAX_AGE_OF_OP,
@@ -40,11 +39,11 @@ module.exports = ShareJsUpdateManager = {
     return model
   },
 
-  applyUpdate(project_id, doc_id, update, lines, version, callback) {
+  applyUpdate(projectId, docId, update, lines, version, callback) {
     if (callback == null) {
       callback = function () {}
     }
-    logger.debug({ project_id, doc_id, update }, 'applying sharejs updates')
+    logger.debug({ projectId, docId, update }, 'applying sharejs updates')
     const jobs = []
     // record the update version before it is modified
    const incomingUpdateVersion = update.v
@@ -53,23 +52,23 @@ module.exports = ShareJsUpdateManager = {
     // getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee)
     // This adds a small but hopefully acceptable overhead (~12ms per 1000 updates on
     // my 2009 MBP).
-    const model = this.getNewShareJsModel(project_id, doc_id, lines, version)
+    const model = this.getNewShareJsModel(projectId, docId, lines, version)
     this._listenForOps(model)
-    const doc_key = Keys.combineProjectIdAndDocId(project_id, doc_id)
-    return model.applyOp(doc_key, update, function (error) {
+    const docKey = Keys.combineProjectIdAndDocId(projectId, docId)
+    return model.applyOp(docKey, update, function (error) {
       if (error != null) {
         if (error === 'Op already submitted') {
           metrics.inc('sharejs.already-submitted')
           logger.debug(
-            { project_id, doc_id, update },
+            { projectId, docId, update },
             'op has already been submitted'
           )
           update.dup = true
-          ShareJsUpdateManager._sendOp(project_id, doc_id, update)
+          ShareJsUpdateManager._sendOp(projectId, docId, update)
         } else if (/^Delete component/.test(error)) {
           metrics.inc('sharejs.delete-mismatch')
           logger.debug(
-            { project_id, doc_id, update, shareJsErr: error },
+            { projectId, docId, update, shareJsErr: error },
             'sharejs delete does not match'
           )
           error = new Errors.DeleteMismatchError(
@@ -81,8 +80,8 @@ module.exports = ShareJsUpdateManager = {
           return callback(error)
         }
       }
-      logger.debug({ project_id, doc_id, error }, 'applied update')
-      return model.getSnapshot(doc_key, (error, data) => {
+      logger.debug({ projectId, docId, error }, 'applied update')
+      return model.getSnapshot(docKey, (error, data) => {
        if (error != null) {
          return callback(error)
        }
@@ -93,7 +92,7 @@ module.exports = ShareJsUpdateManager = {
           'blocking persistence of ShareJs update: doc size exceeds limits'
         )
         logger.error(
-          { project_id, doc_id, err, docSizeBefore, docSizeAfter },
+          { projectId, docId, err, docSizeBefore, docSizeAfter },
           err.message
         )
         metrics.inc('sharejs.other-error')
@@ -115,23 +114,25 @@ module.exports = ShareJsUpdateManager = {
           null,
           docLines,
           data.v,
-          model.db.appliedOps[doc_key] || []
+          model.db.appliedOps[docKey] || []
         )
       })
     })
   },
 
   _listenForOps(model) {
-    return model.on('applyOp', function (doc_key, opData) {
-      const [project_id, doc_id] = Array.from(
-        Keys.splitProjectIdAndDocId(doc_key)
-      )
-      return ShareJsUpdateManager._sendOp(project_id, doc_id, opData)
+    return model.on('applyOp', function (docKey, opData) {
+      const [projectId, docId] = Array.from(Keys.splitProjectIdAndDocId(docKey))
+      return ShareJsUpdateManager._sendOp(projectId, docId, opData)
     })
   },
 
-  _sendOp(project_id, doc_id, op) {
-    return RealTimeRedisManager.sendData({ project_id, doc_id, op })
+  _sendOp(projectId, docId, op) {
+    return RealTimeRedisManager.sendData({
+      project_id: projectId,
+      doc_id: docId,
+      op,
+    })
   },
 
   _computeHash(content) {

@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-return-assign,
 */
 // TODO: This file was created by bulk-decaffeinate.
@@ -15,25 +14,17 @@ let SnapshotManager
 const { db, ObjectId } = require('./mongodb')
 
 module.exports = SnapshotManager = {
-  recordSnapshot(
-    project_id,
-    doc_id,
-    version,
-    pathname,
-    lines,
-    ranges,
-    callback
-  ) {
+  recordSnapshot(projectId, docId, version, pathname, lines, ranges, callback) {
     try {
-      project_id = ObjectId(project_id)
-      doc_id = ObjectId(doc_id)
+      projectId = ObjectId(projectId)
+      docId = ObjectId(docId)
     } catch (error) {
       return callback(error)
     }
     db.docSnapshots.insertOne(
       {
-        project_id,
-        doc_id,
+        project_id: projectId,
+        doc_id: docId,
         version,
         lines,
         pathname,

@@ -1,13 +1,10 @@
-/* eslint-disable
-    camelcase,
-*/
 // TODO: This file was created by bulk-decaffeinate.
 // Fix any style issues and re-enable lint.
 module.exports = {
-  combineProjectIdAndDocId(project_id, doc_id) {
-    return `${project_id}:${doc_id}`
+  combineProjectIdAndDocId(projectId, docId) {
+    return `${projectId}:${docId}`
   },
-  splitProjectIdAndDocId(project_and_doc_id) {
-    return project_and_doc_id.split(':')
+  splitProjectIdAndDocId(projectAndDocId) {
+    return projectAndDocId.split(':')
   },
 }
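These two helpers are pure string functions, and in practice the ids are hex Mongo ObjectIds, which cannot contain ':', so the separator round-trips unambiguously. Extracted verbatim as a runnable sketch:

    function combineProjectIdAndDocId(projectId, docId) {
      return `${projectId}:${docId}`
    }
    function splitProjectIdAndDocId(projectAndDocId) {
      return projectAndDocId.split(':')
    }

    // round trip (hypothetical ids)
    const key = combineProjectIdAndDocId('53c3e1f3', '53c3e1f4') // '53c3e1f3:53c3e1f4'
    const [projectId, docId] = splitProjectIdAndDocId(key)
    console.log(projectId, docId) // 53c3e1f3 53c3e1f4
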
@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-unused-vars,
 */
 // TODO: This file was created by bulk-decaffeinate.
@@ -31,14 +30,14 @@ const SnapshotManager = require('./SnapshotManager')
 const Profiler = require('./Profiler')
 
 module.exports = UpdateManager = {
-  processOutstandingUpdates(project_id, doc_id, callback) {
+  processOutstandingUpdates(projectId, docId, callback) {
     if (callback == null) {
       callback = function () {}
     }
     const timer = new Metrics.Timer('updateManager.processOutstandingUpdates')
     return UpdateManager.fetchAndApplyUpdates(
-      project_id,
-      doc_id,
+      projectId,
+      docId,
       function (error) {
         timer.done()
         if (error != null) {
@@ -49,15 +48,15 @@ module.exports = UpdateManager = {
     )
   },
 
-  processOutstandingUpdatesWithLock(project_id, doc_id, callback) {
+  processOutstandingUpdatesWithLock(projectId, docId, callback) {
     if (callback == null) {
       callback = function () {}
     }
     const profile = new Profiler('processOutstandingUpdatesWithLock', {
-      project_id,
-      doc_id,
+      project_id: projectId,
+      doc_id: docId,
     })
-    return LockManager.tryLock(doc_id, (error, gotLock, lockValue) => {
+    return LockManager.tryLock(docId, (error, gotLock, lockValue) => {
       if (error != null) {
         return callback(error)
       }
@@ -66,26 +65,26 @@ module.exports = UpdateManager = {
       }
       profile.log('tryLock')
       return UpdateManager.processOutstandingUpdates(
-        project_id,
-        doc_id,
+        projectId,
+        docId,
         function (error) {
           if (error != null) {
             return UpdateManager._handleErrorInsideLock(
-              doc_id,
+              docId,
               lockValue,
               error,
               callback
             )
           }
           profile.log('processOutstandingUpdates')
-          return LockManager.releaseLock(doc_id, lockValue, error => {
+          return LockManager.releaseLock(docId, lockValue, error => {
             if (error != null) {
               return callback(error)
             }
             profile.log('releaseLock').end()
             return UpdateManager.continueProcessingUpdatesWithLock(
-              project_id,
-              doc_id,
+              projectId,
+              docId,
               callback
             )
           })
@@ -94,18 +93,18 @@ module.exports = UpdateManager = {
     })
   },
 
-  continueProcessingUpdatesWithLock(project_id, doc_id, callback) {
+  continueProcessingUpdatesWithLock(projectId, docId, callback) {
     if (callback == null) {
       callback = function () {}
     }
-    return RealTimeRedisManager.getUpdatesLength(doc_id, (error, length) => {
+    return RealTimeRedisManager.getUpdatesLength(docId, (error, length) => {
       if (error != null) {
         return callback(error)
       }
       if (length > 0) {
         return UpdateManager.processOutstandingUpdatesWithLock(
-          project_id,
-          doc_id,
+          projectId,
+          docId,
           callback
         )
       } else {
@@ -114,19 +113,22 @@ module.exports = UpdateManager = {
     })
   },
 
-  fetchAndApplyUpdates(project_id, doc_id, callback) {
+  fetchAndApplyUpdates(projectId, docId, callback) {
     if (callback == null) {
       callback = function () {}
     }
-    const profile = new Profiler('fetchAndApplyUpdates', { project_id, doc_id })
+    const profile = new Profiler('fetchAndApplyUpdates', {
+      project_id: projectId,
+      doc_id: docId,
+    })
     return RealTimeRedisManager.getPendingUpdatesForDoc(
-      doc_id,
+      docId,
       (error, updates) => {
         if (error != null) {
           return callback(error)
         }
         logger.debug(
-          { project_id, doc_id, count: updates.length },
+          { projectId, docId, count: updates.length },
           'processing updates'
         )
         if (updates.length === 0) {
@@ -134,7 +136,7 @@ module.exports = UpdateManager = {
         }
         profile.log('getPendingUpdatesForDoc')
         const doUpdate = (update, cb) =>
-          UpdateManager.applyUpdate(project_id, doc_id, update, function (err) {
+          UpdateManager.applyUpdate(projectId, docId, update, function (err) {
             profile.log('applyUpdate')
             return cb(err)
           })
@@ -147,15 +149,15 @@ module.exports = UpdateManager = {
     )
   },
 
-  applyUpdate(project_id, doc_id, update, _callback) {
+  applyUpdate(projectId, docId, update, _callback) {
     if (_callback == null) {
       _callback = function () {}
     }
     const callback = function (error) {
       if (error != null) {
         RealTimeRedisManager.sendData({
-          project_id,
-          doc_id,
+          project_id: projectId,
+          doc_id: docId,
           error: error.message || error,
         })
         profile.log('sendData')
@@ -164,12 +166,15 @@ module.exports = UpdateManager = {
       return _callback(error)
     }
 
-    const profile = new Profiler('applyUpdate', { project_id, doc_id })
+    const profile = new Profiler('applyUpdate', {
+      project_id: projectId,
+      doc_id: docId,
+    })
     UpdateManager._sanitizeUpdate(update)
     profile.log('sanitizeUpdate', { sync: true })
     return DocumentManager.getDoc(
-      project_id,
-      doc_id,
+      projectId,
+      docId,
       function (error, lines, version, ranges, pathname, projectHistoryId) {
         profile.log('getDoc')
         if (error != null) {
@@ -177,14 +182,14 @@ module.exports = UpdateManager = {
         }
         if (lines == null || version == null) {
           return callback(
-            new Errors.NotFoundError(`document not found: ${doc_id}`)
+            new Errors.NotFoundError(`document not found: ${docId}`)
           )
         }
         const previousVersion = version
         const incomingUpdateVersion = update.v
         return ShareJsUpdateManager.applyUpdate(
-          project_id,
-          doc_id,
+          projectId,
+          docId,
           update,
           lines,
           version,
@@ -198,12 +203,12 @@ module.exports = UpdateManager = {
             return callback(error)
           }
           return RangesManager.applyUpdate(
-            project_id,
-            doc_id,
+            projectId,
+            docId,
             ranges,
             appliedOps,
             updatedDocLines,
-            function (error, new_ranges, ranges_were_collapsed) {
+            function (error, newRanges, rangesWereCollapsed) {
              UpdateManager._addProjectHistoryMetadataToOps(
                appliedOps,
                pathname,
@@ -215,35 +220,35 @@ module.exports = UpdateManager = {
                return callback(error)
              }
              return RedisManager.updateDocument(
-                project_id,
-                doc_id,
+                projectId,
+                docId,
                updatedDocLines,
                version,
                appliedOps,
-                new_ranges,
+                newRanges,
                update.meta,
-                function (error, doc_ops_length, project_ops_length) {
+                function (error, docOpsLength, projectOpsLength) {
                  profile.log('RedisManager.updateDocument')
                  if (error != null) {
                    return callback(error)
                  }
                  return HistoryManager.recordAndFlushHistoryOps(
-                    project_id,
-                    doc_id,
+                    projectId,
+                    docId,
                    appliedOps,
-                    doc_ops_length,
-                    project_ops_length,
+                    docOpsLength,
+                    projectOpsLength,
                    function (error) {
                      profile.log('recordAndFlushHistoryOps')
                      if (error != null) {
                        return callback(error)
                      }
-                      if (ranges_were_collapsed) {
+                      if (rangesWereCollapsed) {
                        Metrics.inc('doc-snapshot')
                        logger.debug(
                          {
-                            project_id,
-                            doc_id,
+                            projectId,
+                            docId,
                            previousVersion,
                            lines,
                            ranges,
@@ -254,8 +259,8 @@ module.exports = UpdateManager = {
                        // Do this last, since it's a mongo call, and so potentially longest running
                        // If it overruns the lock, it's ok, since all of our redis work is done
                        return SnapshotManager.recordSnapshot(
-                          project_id,
-                          doc_id,
+                          projectId,
+                          docId,
                          previousVersion,
                          pathname,
                          lines,
@@ -265,8 +270,8 @@ module.exports = UpdateManager = {
                        logger.error(
                          {
                            err: error,
-                            project_id,
-                            doc_id,
+                            projectId,
+                            docId,
                            version,
                            lines,
                            ranges,
@@ -294,23 +299,26 @@ module.exports = UpdateManager = {
     )
   },
 
-  lockUpdatesAndDo(method, project_id, doc_id, ...rest) {
+  lockUpdatesAndDo(method, projectId, docId, ...rest) {
     const adjustedLength = Math.max(rest.length, 1)
     const args = rest.slice(0, adjustedLength - 1)
     const callback = rest[adjustedLength - 1]
-    const profile = new Profiler('lockUpdatesAndDo', { project_id, doc_id })
-    return LockManager.getLock(doc_id, function (error, lockValue) {
+    const profile = new Profiler('lockUpdatesAndDo', {
+      project_id: projectId,
+      doc_id: docId,
+    })
+    return LockManager.getLock(docId, function (error, lockValue) {
       profile.log('getLock')
       if (error != null) {
         return callback(error)
       }
       return UpdateManager.processOutstandingUpdates(
-        project_id,
-        doc_id,
+        projectId,
+        docId,
         function (error) {
           if (error != null) {
             return UpdateManager._handleErrorInsideLock(
-              doc_id,
+              docId,
              lockValue,
              error,
              callback
@@ -318,13 +326,13 @@ module.exports = UpdateManager = {
           }
           profile.log('processOutstandingUpdates')
           return method(
-            project_id,
-            doc_id,
+            projectId,
+            docId,
             ...Array.from(args),
-            function (error, ...response_args) {
+            function (error, ...responseArgs) {
               if (error != null) {
                 return UpdateManager._handleErrorInsideLock(
-                  doc_id,
+                  docId,
                  lockValue,
                  error,
                  callback
@@ -332,18 +340,18 @@ module.exports = UpdateManager = {
               }
               profile.log('method')
               return LockManager.releaseLock(
-                doc_id,
+                docId,
                 lockValue,
                 function (error) {
                   if (error != null) {
                     return callback(error)
                   }
                   profile.log('releaseLock').end()
-                  callback(null, ...Array.from(response_args))
+                  callback(null, ...Array.from(responseArgs))
                   // We held the lock for a while so updates might have queued up
                   return UpdateManager.continueProcessingUpdatesWithLock(
-                    project_id,
-                    doc_id,
+                    projectId,
+                    docId,
                     err => {
                       if (err) {
                         // The processing may fail for invalid user updates.
@@ -351,7 +359,7 @@ module.exports = UpdateManager = {
                         // and record a metric.
                         Metrics.inc('background-processing-updates-error')
                         logger.debug(
-                          { err, project_id, doc_id },
+                          { err, projectId, docId },
                           'error processing updates in background'
                         )
                       }
@@ -366,12 +374,12 @@ module.exports = UpdateManager = {
     })
   },
 
-  _handleErrorInsideLock(doc_id, lockValue, original_error, callback) {
+  _handleErrorInsideLock(docId, lockValue, originalError, callback) {
     if (callback == null) {
       callback = function () {}
     }
-    return LockManager.releaseLock(doc_id, lockValue, lock_error =>
-      callback(original_error)
+    return LockManager.releaseLock(docId, lockValue, lockError =>
+      callback(originalError)
     )
   },
 
@@ -397,15 +405,15 @@ module.exports = UpdateManager = {
   },
 
   _addProjectHistoryMetadataToOps(updates, pathname, projectHistoryId, lines) {
-    let doc_length = _.reduce(lines, (chars, line) => chars + line.length, 0)
-    doc_length += lines.length - 1 // count newline characters
+    let docLength = _.reduce(lines, (chars, line) => chars + line.length, 0)
+    docLength += lines.length - 1 // count newline characters
     return updates.forEach(function (update) {
       update.projectHistoryId = projectHistoryId
       if (!update.meta) {
        update.meta = {}
      }
      update.meta.pathname = pathname
-      update.meta.doc_length = doc_length
+      update.meta.doc_length = docLength
      // Each update may contain multiple ops, i.e.
      // [{
      //   ops: [{i: "foo", p: 4}, {d: "bar", p:8}]
@@ -419,10 +427,10 @@ module.exports = UpdateManager = {
       const result = []
       for (const op of Array.from(update.op)) {
         if (op.i != null) {
-          doc_length += op.i.length
+          docLength += op.i.length
         }
         if (op.d != null) {
-          result.push((doc_length -= op.d.length))
+          result.push((docLength -= op.d.length))
         } else {
           result.push(undefined)
         }

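The `docLength` bookkeeping in `_addProjectHistoryMetadataToOps` above is worth seeing in isolation: it starts from the character count of the snapshot (counting the newlines between lines, with no trailing newline) and then replays each op, so every op is annotated with the document length at the moment it applies. A self-contained sketch:

    // Length of a doc stored as an array of lines.
    function initialDocLength(lines) {
      let docLength = lines.reduce((chars, line) => chars + line.length, 0)
      docLength += lines.length - 1 // newlines between lines
      return docLength
    }

    let docLength = initialDocLength(['hello', 'world']) // 10 chars + 1 newline = 11
    const ops = [
      { i: 'foo', p: 4 }, // an insert grows the doc
      { d: 'bar', p: 8 }, // a delete shrinks it
    ]
    for (const op of ops) {
      if (op.i != null) docLength += op.i.length
      if (op.d != null) docLength -= op.d.length
    }
    console.log(docLength) // 11 + 3 - 3 = 11
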
@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-undef,
 */
 // TODO: This file was created by bulk-decaffeinate.
@@ -231,7 +230,7 @@ json.api = {
           // no change to structure
           continue
         }
-        const to_remove = []
+        const toRemove = []
         for (i = 0; i < this._listeners.length; i++) {
           // Transform a dummy op by the incoming op to work out what
           // should happen to the listener.
@@ -240,7 +239,7 @@ json.api = {
           const xformed = this.type.transformComponent([], dummy, c, 'left')
           if (xformed.length === 0) {
             // The op was transformed to noop, so we should delete the listener.
-            to_remove.push(i)
+            toRemove.push(i)
           } else if (xformed.length === 1) {
             // The op remained, so grab its new path into the listener.
             l.path = xformed[0].p
@@ -250,11 +249,11 @@ json.api = {
             )
           }
         }
-        to_remove.sort((a, b) => b - a)
+        toRemove.sort((a, b) => b - a)
         result.push(
           (() => {
             const result1 = []
-            for (i of Array.from(to_remove)) {
+            for (i of Array.from(toRemove)) {
               result1.push(this._listeners.splice(i, 1))
             }
             return result1
@@ -268,14 +267,14 @@ json.api = {
     return (() => {
       const result = []
       for (const c of Array.from(op)) {
-        const match_path =
+        const matchPath =
           c.na === undefined ? c.p.slice(0, c.p.length - 1) : c.p
         result.push(
           (() => {
             const result1 = []
             for (const { path, event, cb } of Array.from(this._listeners)) {
               let common
-              if (pathEquals(path, match_path)) {
+              if (pathEquals(path, matchPath)) {
                 switch (event) {
                   case 'insert':
                     if (c.li !== undefined && c.ld === undefined) {
@@ -326,19 +325,19 @@ json.api = {
                       result1.push(undefined)
                     }
               } else if (
-                (common = this.type.commonPath(match_path, path)) != null
+                (common = this.type.commonPath(matchPath, path)) != null
               ) {
                 if (event === 'child op') {
                   if (
-                    match_path.length === path.length &&
+                    matchPath.length === path.length &&
                     path.length === common
                   ) {
                     throw new Error(
                       "paths match length and have commonality, but aren't equal?"
                     )
                   }
-                  const child_path = c.p.slice(common + 1)
-                  result1.push(cb(child_path, c))
+                  const childPath = c.p.slice(common + 1)
+                  result1.push(cb(childPath, c))
                 } else {
                   result1.push(undefined)
                 }

@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-return-assign,
     no-undef,
 */
@@ -46,9 +45,9 @@ const checkValidComponent = function (c) {
     throw new Error('component missing position field')
   }
 
-  const i_type = typeof c.i
-  const d_type = typeof c.d
-  if (!((i_type === 'string') ^ (d_type === 'string'))) {
+  const iType = typeof c.i
+  const dType = typeof c.d
+  if (!((iType === 'string') ^ (dType === 'string'))) {
     throw new Error('component needs an i or d field')
   }
 

@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-undef,
 */
 // TODO: This file was created by bulk-decaffeinate.
@@ -231,7 +230,7 @@ json.api = {
           // no change to structure
           continue
         }
-        const to_remove = []
+        const toRemove = []
         for (i = 0; i < this._listeners.length; i++) {
           // Transform a dummy op by the incoming op to work out what
           // should happen to the listener.
@@ -240,7 +239,7 @@ json.api = {
           const xformed = this.type.transformComponent([], dummy, c, 'left')
           if (xformed.length === 0) {
             // The op was transformed to noop, so we should delete the listener.
-            to_remove.push(i)
+            toRemove.push(i)
           } else if (xformed.length === 1) {
             // The op remained, so grab its new path into the listener.
             l.path = xformed[0].p
@@ -250,11 +249,11 @@ json.api = {
             )
           }
         }
-        to_remove.sort((a, b) => b - a)
+        toRemove.sort((a, b) => b - a)
         result.push(
           (() => {
             const result1 = []
-            for (i of Array.from(to_remove)) {
+            for (i of Array.from(toRemove)) {
               result1.push(this._listeners.splice(i, 1))
             }
             return result1
@@ -268,14 +267,14 @@ json.api = {
     return (() => {
       const result = []
       for (const c of Array.from(op)) {
-        const match_path =
+        const matchPath =
           c.na === undefined ? c.p.slice(0, c.p.length - 1) : c.p
         result.push(
           (() => {
             const result1 = []
             for (const { path, event, cb } of Array.from(this._listeners)) {
               let common
-              if (pathEquals(path, match_path)) {
+              if (pathEquals(path, matchPath)) {
                 switch (event) {
                   case 'insert':
                     if (c.li !== undefined && c.ld === undefined) {
@@ -326,19 +325,19 @@ json.api = {
                       result1.push(undefined)
                     }
               } else if (
-                (common = this.type.commonPath(match_path, path)) != null
+                (common = this.type.commonPath(matchPath, path)) != null
               ) {
                 if (event === 'child op') {
                   if (
-                    match_path.length === path.length &&
+                    matchPath.length === path.length &&
                     path.length === common
                   ) {
                     throw new Error(
                       "paths match length and have commonality, but aren't equal?"
                     )
                   }
-                  const child_path = c.p.slice(common + 1)
-                  result1.push(cb(child_path, c))
+                  const childPath = c.p.slice(common + 1)
+                  result1.push(cb(childPath, c))
                 } else {
                   result1.push(undefined)
                 }

@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-return-assign,
     no-undef,
 */
@@ -46,12 +45,10 @@ const checkValidComponent = function (c) {
     throw new Error('component missing position field')
   }
 
-  const i_type = typeof c.i
-  const d_type = typeof c.d
-  const c_type = typeof c.c
-  if (
-    !((i_type === 'string') ^ (d_type === 'string') ^ (c_type === 'string'))
-  ) {
+  const iType = typeof c.i
+  const dType = typeof c.d
+  const cType = typeof c.c
+  if (!((iType === 'string') ^ (dType === 'string') ^ (cType === 'string'))) {
     throw new Error('component needs an i, d or c field')
   }
 
@@ -296,11 +293,11 @@ text._tc = transformComponent = function (dest, c, otherC, side) {
   if (otherC.i != null) {
     if (c.p < otherC.p && otherC.p < c.p + c.c.length) {
       const offset = otherC.p - c.p
-      const new_c =
+      const newC =
         c.c.slice(0, +(offset - 1) + 1 || undefined) +
         otherC.i +
         c.c.slice(offset)
-      append(dest, { c: new_c, p: c.p, t: c.t })
+      append(dest, { c: newC, p: c.p, t: c.t })
     } else {
       append(dest, {
         c: c.c,

@@ -1,6 +1,3 @@
-/* eslint-disable
-    camelcase,
-*/
 // TODO: This file was created by bulk-decaffeinate.
 // Fix any style issues and re-enable lint.
 /*
@@ -14,13 +11,13 @@ const sinon = require('sinon')
 const { expect } = require('chai')
 const async = require('async')
 const Settings = require('@overleaf/settings')
-const rclient_history = require('@overleaf/redis-wrapper').createClient(
+const rclientHistory = require('@overleaf/redis-wrapper').createClient(
   Settings.redis.history
 ) // note: this is track changes, not project-history
-const rclient_project_history = require('@overleaf/redis-wrapper').createClient(
+const rclientProjectHistory = require('@overleaf/redis-wrapper').createClient(
   Settings.redis.project_history
 )
-const rclient_du = require('@overleaf/redis-wrapper').createClient(
+const rclientDU = require('@overleaf/redis-wrapper').createClient(
   Settings.redis.documentupdater
 )
 const Keys = Settings.redis.documentupdater.key_schema
@@ -100,7 +97,7 @@ describe('Applying updates to a doc', function () {
     })
 
     it('should push the applied updates to the track changes api', function (done) {
-      rclient_history.lrange(
+      rclientHistory.lrange(
         HistoryKeys.uncompressedHistoryOps({ doc_id: this.doc_id }),
         0,
         -1,
@@ -109,7 +106,7 @@ describe('Applying updates to a doc', function () {
            throw error
          }
          JSON.parse(updates[0]).op.should.deep.equal(this.update.op)
-          return rclient_history.sismember(
+          return rclientHistory.sismember(
            HistoryKeys.docsWithHistoryOps({ project_id: this.project_id }),
            this.doc_id,
            (error, result) => {
@@ -126,7 +123,7 @@ describe('Applying updates to a doc', function () {
     })
 
     it('should push the applied updates to the project history changes api', function (done) {
-      rclient_project_history.lrange(
+      rclientProjectHistory.lrange(
         ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
         0,
         -1,
@@ -142,7 +139,7 @@ describe('Applying updates to a doc', function () {
     })
 
     it('should set the first op timestamp', function (done) {
-      rclient_project_history.get(
+      rclientProjectHistory.get(
         ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
           project_id: this.project_id,
         }),
@@ -179,7 +176,7 @@ describe('Applying updates to a doc', function () {
     })
 
     return it('should not change the first op timestamp', function (done) {
-      rclient_project_history.get(
+      rclientProjectHistory.get(
         ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
           project_id: this.project_id,
         }),
@@ -250,14 +247,14 @@ describe('Applying updates to a doc', function () {
     })
 
     it('should push the applied updates to the track changes api', function (done) {
-      rclient_history.lrange(
+      rclientHistory.lrange(
         HistoryKeys.uncompressedHistoryOps({ doc_id: this.doc_id }),
         0,
         -1,
         (error, updates) => {
           if (error) return done(error)
           JSON.parse(updates[0]).op.should.deep.equal(this.update.op)
-          return rclient_history.sismember(
+          return rclientHistory.sismember(
             HistoryKeys.docsWithHistoryOps({ project_id: this.project_id }),
             this.doc_id,
             (error, result) => {
@@ -272,7 +269,7 @@ describe('Applying updates to a doc', function () {
     })
 
     return it('should push the applied updates to the project history changes api', function (done) {
-      rclient_project_history.lrange(
+      rclientProjectHistory.lrange(
         ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
         0,
         -1,
@@ -336,7 +333,7 @@ describe('Applying updates to a doc', function () {
     })
 
     it('should not push any applied updates to the track changes api', function (done) {
-      rclient_history.lrange(
+      rclientHistory.lrange(
         HistoryKeys.uncompressedHistoryOps({ doc_id: this.doc_id }),
         0,
         -1,
@@ -350,7 +347,7 @@ describe('Applying updates to a doc', function () {
     })
 
     return it('should push the applied updates to the project history changes api', function (done) {
-      rclient_project_history.lrange(
+      rclientProjectHistory.lrange(
         ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
         0,
         -1,
@@ -442,7 +439,7 @@ describe('Applying updates to a doc', function () {
     })
 
     it('should push the applied updates to the track changes api', function (done) {
-      rclient_history.lrange(
+      rclientHistory.lrange(
         HistoryKeys.uncompressedHistoryOps({ doc_id: this.doc_id }),
         0,
         -1,
@@ -454,7 +451,7 @@ describe('Applying updates to a doc', function () {
            appliedUpdate.op.should.deep.equal(updates[i].op)
          }
 
-          return rclient_history.sismember(
+          return rclientHistory.sismember(
            HistoryKeys.docsWithHistoryOps({ project_id: this.project_id }),
            this.doc_id,
            (error, result) => {
@@ -469,7 +466,7 @@ describe('Applying updates to a doc', function () {
     })
 
     return it('should store the doc ops in the correct order', function (done) {
-      rclient_du.lrange(
+      rclientDU.lrange(
        Keys.docOps({ doc_id: this.doc_id }),
        0,
        -1,

@@ -1,6 +1,3 @@
-/* eslint-disable
-    camelcase,
-*/
 // TODO: This file was created by bulk-decaffeinate.
 // Fix any style issues and re-enable lint.
 /*
@@ -21,14 +18,14 @@ const DocUpdaterApp = require('./helpers/DocUpdaterApp')
 
 describe('Deleting a project', function () {
   before(function (done) {
-    let doc_id0, doc_id1
+    let docId0, docId1
     this.project_id = DocUpdaterClient.randomId()
     this.docs = [
       {
-        id: (doc_id0 = DocUpdaterClient.randomId()),
+        id: (docId0 = DocUpdaterClient.randomId()),
         lines: ['one', 'two', 'three'],
         update: {
-          doc: doc_id0,
+          doc: docId0,
           op: [
             {
               i: 'one and a half\n',
@@ -40,10 +37,10 @@ describe('Deleting a project', function () {
         updatedLines: ['one', 'one and a half', 'two', 'three'],
       },
       {
-        id: (doc_id1 = DocUpdaterClient.randomId()),
+        id: (docId1 = DocUpdaterClient.randomId()),
         lines: ['four', 'five', 'six'],
         update: {
-          doc: doc_id1,
+          doc: docId1,
           op: [
             {
               i: 'four and a half\n',

@@ -1,6 +1,3 @@
-/* eslint-disable
-    camelcase,
-*/
 // TODO: This file was created by bulk-decaffeinate.
 // Fix any style issues and re-enable lint.
 /*
@@ -19,14 +16,14 @@ const DocUpdaterApp = require('./helpers/DocUpdaterApp')
 
 describe('Flushing a project', function () {
   before(function (done) {
-    let doc_id0, doc_id1
+    let docId0, docId1
     this.project_id = DocUpdaterClient.randomId()
     this.docs = [
       {
-        id: (doc_id0 = DocUpdaterClient.randomId()),
+        id: (docId0 = DocUpdaterClient.randomId()),
         lines: ['one', 'two', 'three'],
         update: {
-          doc: doc_id0,
+          doc: docId0,
           op: [
             {
               i: 'one and a half\n',
@@ -38,10 +35,10 @@ describe('Flushing a project', function () {
         updatedLines: ['one', 'one and a half', 'two', 'three'],
       },
      {
-        id: (doc_id1 = DocUpdaterClient.randomId()),
+        id: (docId1 = DocUpdaterClient.randomId()),
         lines: ['four', 'five', 'six'],
         update: {
-          doc: doc_id1,
+          doc: docId1,
           op: [
             {
               i: 'four and a half\n',

@@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-return-assign,
no-unused-vars,
*/
@@ -122,8 +121,8 @@ describe('Flushing a doc to Mongo', function () {
.stub(MockWebApi, 'setDocument')
.callsFake(
(
project_id,
doc_id,
projectId,
docId,
lines,
version,
ranges,

@@ -1,6 +1,3 @@
/* eslint-disable
camelcase,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
@@ -220,7 +217,7 @@ describe('Getting a document', function () {
])
sinon
.stub(MockWebApi, 'getDocument')
.callsFake((project_id, doc_id, callback) => {
.callsFake((projectId, docId, callback) => {
if (callback == null) {
callback = function () {}
}
@@ -255,7 +252,7 @@ describe('Getting a document', function () {
])
sinon
.stub(MockWebApi, 'getDocument')
.callsFake((project_id, doc_id, callback) => {
.callsFake((projectId, docId, callback) => {
if (callback == null) {
callback = function () {}
}

@@ -1,6 +1,3 @@
/* eslint-disable
camelcase,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
@@ -14,7 +11,7 @@ const express = require('express')
const app = express()

module.exports = MockProjectHistoryApi = {
flushProject(doc_id, callback) {
flushProject(docId, callback) {
if (callback == null) {
callback = function () {}
}

@@ -1,6 +1,3 @@
/* eslint-disable
camelcase,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
@@ -14,7 +11,7 @@ const express = require('express')
const app = express()

module.exports = MockTrackChangesApi = {
flushDoc(doc_id, callback) {
flushDoc(docId, callback) {
if (callback == null) {
callback = function () {}
}

@@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-return-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
@@ -23,7 +22,7 @@ module.exports = MockWebApi = {
return (this.docs = {})
},

insertDoc(project_id, doc_id, doc) {
insertDoc(projectId, docId, doc) {
if (doc.version == null) {
doc.version = 0
}
@@ -31,12 +30,12 @@ module.exports = MockWebApi = {
doc.lines = []
}
doc.pathname = '/a/b/c.tex'
return (this.docs[`${project_id}:${doc_id}`] = doc)
return (this.docs[`${projectId}:${docId}`] = doc)
},

setDocument(
project_id,
doc_id,
projectId,
docId,
lines,
version,
ranges,
@@ -48,8 +47,8 @@ module.exports = MockWebApi = {
callback = function () {}
}
const doc =
this.docs[`${project_id}:${doc_id}`] ||
(this.docs[`${project_id}:${doc_id}`] = {})
this.docs[`${projectId}:${docId}`] ||
(this.docs[`${projectId}:${docId}`] = {})
doc.lines = lines
doc.version = version
doc.ranges = ranges
@@ -59,11 +58,11 @@ module.exports = MockWebApi = {
return callback(null)
},

getDocument(project_id, doc_id, callback) {
getDocument(projectId, docId, callback) {
if (callback == null) {
callback = function () {}
}
return callback(null, this.docs[`${project_id}:${doc_id}`])
return callback(null, this.docs[`${projectId}:${docId}`])
},

run() {

@@ -1,7 +1,5 @@
/* eslint-disable
camelcase,
no-return-assign,
no-undef,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
@@ -149,16 +147,16 @@ class StressTestClient {
} else {
assert(update.op.op.length === 1)
this.counts.remote_updates++
let external_op = update.op.op[0]
let externalOp = update.op.op[0]
if (this.inflight_op != null) {
this.counts.conflicts++
this.inflight_op = transform(this.inflight_op, external_op)
external_op = transform(external_op, this.inflight_op)
this.inflight_op = transform(this.inflight_op, externalOp)
externalOp = transform(externalOp, this.inflight_op)
}
if (external_op.p < this.pos) {
this.pos += external_op.i.length
if (externalOp.p < this.pos) {
this.pos += externalOp.i.length
}
return (this.content = insert(this.content, external_op.p, external_op.i))
return (this.content = insert(this.content, externalOp.p, externalOp.i))
}
}

@@ -198,7 +196,7 @@ class StressTestClient {
`[${new Date()}] \t[${this.client_id.slice(
0,
4
)}] ERROR: Invalid response from get doc (${doc_id})`,
)}] ERROR: Invalid response from get doc (${this.doc_id})`,
body
)
}
@@ -272,7 +270,7 @@ class StressTestClient {
}
}

const checkDocument = function (project_id, doc_id, clients, callback) {
const checkDocument = function (projectId, docId, clients, callback) {
if (callback == null) {
callback = function () {}
}
@@ -280,11 +278,11 @@ const checkDocument = function (project_id, doc_id, clients, callback) {
return async.parallel(jobs, callback)
}

const printSummary = function (doc_id, clients) {
const printSummary = function (docId, clients) {
const slot = require('cluster-key-slot')
const now = new Date()
console.log(
`[${now}] [${doc_id.slice(0, 4)} (slot: ${slot(doc_id)})] ${
`[${now}] [${docId.slice(0, 4)} (slot: ${slot(docId)})] ${
clients.length
} clients...`
)
@@ -315,13 +313,13 @@ const CLIENT_COUNT = parseInt(process.argv[2], 10)
const UPDATE_DELAY = parseInt(process.argv[3], 10)
const SAMPLE_INTERVAL = parseInt(process.argv[4], 10)

for (const doc_and_project_id of Array.from(process.argv.slice(5))) {
;(function (doc_and_project_id) {
const [project_id, doc_id] = Array.from(doc_and_project_id.split(':'))
console.log({ project_id, doc_id })
for (const docAndProjectId of Array.from(process.argv.slice(5))) {
;(function (docAndProjectId) {
const [projectId, docId] = Array.from(docAndProjectId.split(':'))
console.log({ projectId, docId })
return DocUpdaterClient.setDocLines(
project_id,
doc_id,
projectId,
docId,
[new Array(CLIENT_COUNT + 2).join('a')],
null,
null,
@@ -329,63 +327,59 @@ for (const doc_and_project_id of Array.from(process.argv.slice(5))) {
if (error != null) {
throw error
}
return DocUpdaterClient.getDoc(
project_id,
doc_id,
(error, res, body) => {
let runBatch
if (error != null) {
throw error
}
if (body.lines == null) {
return console.error(
`[${new Date()}] ERROR: Invalid response from get doc (${doc_id})`,
body
)
}
const content = body.lines.join('\n')
const { version } = body
return DocUpdaterClient.getDoc(projectId, docId, (error, res, body) => {
let runBatch
if (error != null) {
throw error
}
if (body.lines == null) {
return console.error(
`[${new Date()}] ERROR: Invalid response from get doc (${docId})`,
body
)
}
const content = body.lines.join('\n')
const { version } = body

const clients = []
for (
let pos = 1, end = CLIENT_COUNT, asc = end >= 1;
asc ? pos <= end : pos >= end;
asc ? pos++ : pos--
) {
;(function (pos) {
const client = new StressTestClient({
doc_id,
project_id,
content,
pos,
version,
updateDelay: UPDATE_DELAY,
})
return clients.push(client)
})(pos)
}
const clients = []
for (
let pos = 1, end = CLIENT_COUNT, asc = end >= 1;
asc ? pos <= end : pos >= end;
asc ? pos++ : pos--
) {
;(function (pos) {
const client = new StressTestClient({
doc_id: docId,
project_id: projectId,
content,
pos,
version,
updateDelay: UPDATE_DELAY,
})
return clients.push(client)
})(pos)
}

return (runBatch = function () {
const jobs = clients.map(
client => cb =>
client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb)
)
return async.parallel(jobs, error => {
return (runBatch = function () {
const jobs = clients.map(
client => cb =>
client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb)
)
return async.parallel(jobs, error => {
if (error != null) {
throw error
}
printSummary(docId, clients)
return checkDocument(projectId, docId, clients, error => {
if (error != null) {
throw error
}
printSummary(doc_id, clients)
return checkDocument(project_id, doc_id, clients, error => {
if (error != null) {
throw error
}
return runBatch()
})
return runBatch()
})
})()
}
)
})
})()
})
}
)
})(doc_and_project_id)
})(docAndProjectId)
}

@@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-return-assign,
no-unused-vars,
*/
@@ -30,11 +29,11 @@ describe('HistoryRedisManager', function () {
redis: {
history: (this.settings = {
key_schema: {
uncompressedHistoryOps({ doc_id }) {
return `UncompressedHistoryOps:${doc_id}`
uncompressedHistoryOps({ doc_id: docId }) {
return `UncompressedHistoryOps:${docId}`
},
docsWithHistoryOps({ project_id }) {
return `DocsWithHistoryOps:${project_id}`
docsWithHistoryOps({ project_id: projectId }) {
return `DocsWithHistoryOps:${projectId}`
},
},
}),

@@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
@@ -14,9 +13,9 @@ const sinon = require('sinon')
const assert = require('assert')
const path = require('path')
const modulePath = path.join(__dirname, '../../../../app/js/LockManager.js')
const project_id = 1234
const doc_id = 5678
const blockingKey = `Blocking:${doc_id}`
const projectId = 1234
const docId = 5678
const blockingKey = `Blocking:${docId}`
const SandboxedModule = require('sandboxed-module')

describe('LockManager - checking the lock', function () {
@@ -48,7 +47,7 @@ describe('LockManager - checking the lock', function () {

it('should return true if the key does not exists', function (done) {
existsStub.yields(null, '0')
return LockManager.checkLock(doc_id, (err, free) => {
return LockManager.checkLock(docId, (err, free) => {
if (err) return done(err)
free.should.equal(true)
return done()
@@ -57,7 +56,7 @@ describe('LockManager - checking the lock', function () {

return it('should return false if the key does exists', function (done) {
existsStub.yields(null, '1')
return LockManager.checkLock(doc_id, (err, free) => {
return LockManager.checkLock(docId, (err, free) => {
if (err) return done(err)
free.should.equal(false)
return done()

@@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-return-assign,
no-unused-vars,
*/
@@ -15,8 +14,8 @@ const sinon = require('sinon')
const assert = require('assert')
const path = require('path')
const modulePath = path.join(__dirname, '../../../../app/js/LockManager.js')
const project_id = 1234
const doc_id = 5678
const projectId = 1234
const docId = 5678
const SandboxedModule = require('sandboxed-module')

describe('LockManager - releasing the lock', function () {
@@ -34,8 +33,8 @@ describe('LockManager - releasing the lock', function () {
redis: {
lock: {
key_schema: {
blockingKey({ doc_id }) {
return `Blocking:${doc_id}`
blockingKey({ doc_id: docId }) {
return `Blocking:${docId}`
},
},
},
@@ -61,7 +60,7 @@ describe('LockManager - releasing the lock', function () {
describe('when the lock is current', function () {
beforeEach(function () {
this.client.eval = sinon.stub().yields(null, 1)
return this.LockManager.releaseLock(doc_id, this.lockValue, this.callback)
return this.LockManager.releaseLock(docId, this.lockValue, this.callback)
})

it('should clear the data from redis', function () {
@@ -69,7 +68,7 @@ describe('LockManager - releasing the lock', function () {
.calledWith(
this.LockManager.unlockScript,
1,
`Blocking:${doc_id}`,
`Blocking:${docId}`,
this.lockValue
)
.should.equal(true)
@@ -83,7 +82,7 @@ describe('LockManager - releasing the lock', function () {
return describe('when the lock has expired', function () {
beforeEach(function () {
this.client.eval = sinon.stub().yields(null, 0)
return this.LockManager.releaseLock(doc_id, this.lockValue, this.callback)
return this.LockManager.releaseLock(docId, this.lockValue, this.callback)
})

return it('should return an error if the lock has expired', function () {

@@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-return-assign,
no-unused-vars,
*/
@@ -75,7 +74,7 @@ describe('LockManager - getting the lock', function () {
const startTime = Date.now()
let tries = 0
this.LockManager.LOCK_TEST_INTERVAL = 5
this.LockManager.tryLock = (doc_id, callback) => {
this.LockManager.tryLock = (docId, callback) => {
if (callback == null) {
callback = function () {}
}

@@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-return-assign,
no-unused-vars,
*/
@@ -34,8 +33,8 @@ describe('LockManager - trying the lock', function () {
redis: {
lock: {
key_schema: {
blockingKey({ doc_id }) {
return `Blocking:${doc_id}`
blockingKey({ doc_id: docId }) {
return `Blocking:${docId}`
},
},
},

@@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-return-assign,
no-unused-vars,
*/
@@ -68,9 +67,9 @@ describe('ProjectManager - flushAndDeleteProject', function () {
})

it('should delete each doc in the project', function () {
return Array.from(this.doc_ids).map(doc_id =>
return Array.from(this.doc_ids).map(docId =>
this.DocumentManager.flushAndDeleteDocWithLock
.calledWith(this.project_id, doc_id, {})
.calledWith(this.project_id, docId, {})
.should.equal(true)
)
})
@@ -97,8 +96,8 @@ describe('ProjectManager - flushAndDeleteProject', function () {
.stub()
.callsArgWith(1, null, this.doc_ids)
this.DocumentManager.flushAndDeleteDocWithLock = sinon.spy(
(project_id, doc_id, options, callback) => {
if (doc_id === 'doc-id-1') {
(projectId, docId, options, callback) => {
if (docId === 'doc-id-1') {
return callback(
(this.error = new Error('oops, something went wrong'))
)
@@ -118,9 +117,9 @@ describe('ProjectManager - flushAndDeleteProject', function () {
})

it('should still flush each doc in the project', function () {
return Array.from(this.doc_ids).map(doc_id =>
return Array.from(this.doc_ids).map(docId =>
this.DocumentManager.flushAndDeleteDocWithLock
.calledWith(this.project_id, doc_id, {})
.calledWith(this.project_id, docId, {})
.should.equal(true)
)
})

@@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-return-assign,
no-unused-vars,
*/
@@ -66,9 +65,9 @@ describe('ProjectManager - flushProject', function () {
})

it('should flush each doc in the project', function () {
return Array.from(this.doc_ids).map(doc_id =>
return Array.from(this.doc_ids).map(docId =>
this.DocumentManager.flushDocIfLoadedWithLock
.calledWith(this.project_id, doc_id)
.calledWith(this.project_id, docId)
.should.equal(true)
)
})
@@ -89,11 +88,11 @@ describe('ProjectManager - flushProject', function () {
.stub()
.callsArgWith(1, null, this.doc_ids)
this.DocumentManager.flushDocIfLoadedWithLock = sinon.spy(
(project_id, doc_id, callback) => {
(projectId, docId, callback) => {
if (callback == null) {
callback = function () {}
}
if (doc_id === 'doc-id-1') {
if (docId === 'doc-id-1') {
return callback(
(this.error = new Error('oops, something went wrong'))
)
@@ -112,9 +111,9 @@ describe('ProjectManager - flushProject', function () {
})

it('should still flush each doc in the project', function () {
return Array.from(this.doc_ids).map(doc_id =>
return Array.from(this.doc_ids).map(docId =>
this.DocumentManager.flushDocIfLoadedWithLock
.calledWith(this.project_id, doc_id)
.calledWith(this.project_id, docId)
.should.equal(true)
)
})

@@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-return-assign,
no-unused-vars,
*/
@@ -84,11 +83,11 @@ describe('RangesManager', function () {

return it('should return the modified the comments and changes', function () {
this.callback.called.should.equal(true)
const [error, entries, ranges_were_collapsed] = Array.from(
const [error, entries, rangesWereCollapsed] = Array.from(
this.callback.args[0]
)
expect(error).to.be.null
expect(ranges_were_collapsed).to.equal(false)
expect(rangesWereCollapsed).to.equal(false)
entries.comments[0].op.should.deep.equal({
c: 'three ',
p: 8,
@@ -347,10 +346,10 @@ describe('RangesManager', function () {

return it('should return ranges_were_collapsed == true', function () {
this.callback.called.should.equal(true)
const [error, entries, ranges_were_collapsed] = Array.from(
const [error, entries, rangesWereCollapsed] = Array.from(
this.callback.args[0]
)
return expect(ranges_were_collapsed).to.equal(true)
return expect(rangesWereCollapsed).to.equal(true)
})
})

@@ -411,10 +410,10 @@ describe('RangesManager', function () {

return it('should return ranges_were_collapsed == true', function () {
this.callback.called.should.equal(true)
const [error, entries, ranges_were_collapsed] = Array.from(
const [error, entries, rangesWereCollapsed] = Array.from(
this.callback.args[0]
)
return expect(ranges_were_collapsed).to.equal(true)
return expect(rangesWereCollapsed).to.equal(true)
})
})
})

@@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
no-return-assign,
no-unused-vars,
*/
@@ -33,8 +32,8 @@ describe('RealTimeRedisManager', function () {
redis: {
documentupdater: (this.settings = {
key_schema: {
pendingUpdates({ doc_id }) {
return `PendingUpdates:${doc_id}`
pendingUpdates({ doc_id: docId }) {
return `PendingUpdates:${docId}`
},
},
}),

@@ -1,5 +1,4 @@
/* eslint-disable
camelcase,
mocha/no-identical-title,
no-return-assign,
*/
@@ -381,28 +380,35 @@ describe('ShareJS text type', function () {
(() => {
const result1 = []
for (const op2 of Array.from(OPS)) {
const op1_t = transform(op1, op2, 'left')
const op2_t = transform(op2, op1, 'right')
const op1T = transform(op1, op2, 'left')
const op2T = transform(op2, op1, 'right')

const rt12 = new RangesTracker()
const snapshot12 = applySnapshot(
applySnapshot(SNAPSHOT, [op1]),
op2_t
op2T
)
applyRanges(rt12, [op1])
applyRanges(rt12, op2_t)
applyRanges(rt12, op2T)

const rt21 = new RangesTracker()
const snapshot21 = applySnapshot(
applySnapshot(SNAPSHOT, [op2]),
op1_t
op1T
)
applyRanges(rt21, [op2])
applyRanges(rt21, op1_t)
applyRanges(rt21, op1T)

if (snapshot12 !== snapshot21) {
console.error(
{ op1, op2, op1_t, op2_t, snapshot12, snapshot21 },
{
op1,
op2,
op1T,
op2T,
snapshot12,
snapshot21,
},
'Ops are not consistent'
)
throw new Error('OT is inconsistent')
@@ -415,8 +421,8 @@ describe('ShareJS text type', function () {
{
op1,
op2,
op1_t,
op2_t,
op1T,
op2T,
rt12_comments: rt12.comments,
rt21_comments: rt21.comments,
},