[misc] run format_fix and lint:fix

Jakob Ackermann 2021-07-13 12:04:42 +01:00
parent 2a3f264140
commit c532376e21
97 changed files with 2113 additions and 2071 deletions
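
The diff below is purely mechanical reformatting: single-parameter arrow functions lose their parentheses ((error) => becomes error =>), multi-line object and array literals gain trailing commas, and long call expressions are re-wrapped with one argument per line. As a rough illustration, a Prettier configuration along the following lines would produce this style; the option values are inferred from the formatted output and are not taken from the repository itself.

// .prettierrc.js — hypothetical config sketch matching the formatting in this diff
// (values inferred from the resulting code, not from the actual repository settings)
module.exports = {
  semi: false, // no trailing semicolons, as in the code below
  singleQuote: true, // single-quoted strings throughout
  arrowParens: 'avoid', // (error) => { ... } becomes error => { ... }
  trailingComma: 'es5', // adds the trailing commas seen in object/array literals
}

The format_fix and lint:fix scripts named in the commit title presumably wrap commands equivalent to prettier --write . and eslint --fix ., which apply such a configuration across all 97 files.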

View file

@ -114,7 +114,7 @@ const pubsubClient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.pubsub
)
app.get('/health_check/redis', (req, res, next) => {
pubsubClient.healthCheck((error) => {
pubsubClient.healthCheck(error => {
if (error) {
logger.err({ err: error }, 'failed redis health check')
return res.sendStatus(500)
@ -128,7 +128,7 @@ const docUpdaterRedisClient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.documentupdater
)
app.get('/health_check/redis_cluster', (req, res, next) => {
docUpdaterRedisClient.healthCheck((error) => {
docUpdaterRedisClient.healthCheck(error => {
if (error) {
logger.err({ err: error }, 'failed redis cluster health check')
return res.sendStatus(500)
@ -141,32 +141,32 @@ app.get('/health_check/redis_cluster', (req, res, next) => {
app.get('/health_check', (req, res, next) => {
async.series(
[
(cb) => {
pubsubClient.healthCheck((error) => {
cb => {
pubsubClient.healthCheck(error => {
if (error) {
logger.err({ err: error }, 'failed redis health check')
}
cb(error)
})
},
(cb) => {
docUpdaterRedisClient.healthCheck((error) => {
cb => {
docUpdaterRedisClient.healthCheck(error => {
if (error) {
logger.err({ err: error }, 'failed redis cluster health check')
}
cb(error)
})
},
(cb) => {
mongodb.healthCheck((error) => {
cb => {
mongodb.healthCheck(error => {
if (error) {
logger.err({ err: error }, 'failed mongo health check')
}
cb(error)
})
}
},
],
(error) => {
error => {
if (error) {
return res.sendStatus(500)
} else {
@ -189,7 +189,7 @@ app.use((error, req, res, next) => {
}
})
const shutdownCleanly = (signal) => () => {
const shutdownCleanly = signal => () => {
logger.log({ signal }, 'received interrupt, cleaning up')
Settings.shuttingDown = true
setTimeout(() => {
@ -198,8 +198,8 @@ const shutdownCleanly = (signal) => () => {
}, 10000)
}
const watchForEvent = (eventName) => {
docUpdaterRedisClient.on(eventName, (e) => {
const watchForEvent = eventName => {
docUpdaterRedisClient.on(eventName, e => {
console.log(`redis event: ${eventName} ${e}`) // eslint-disable-line no-console
})
}
@ -236,7 +236,7 @@ if (!module.parent) {
}
})
})
.catch((err) => {
.catch(err => {
logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
process.exit(1)
})
@ -251,7 +251,7 @@ for (const signal of [
'SIGUSR1',
'SIGUSR2',
'SIGTERM',
'SIGABRT'
'SIGABRT',
]) {
process.on(signal, shutdownCleanly(signal))
}

View file

@ -43,10 +43,9 @@ module.exports = DeleteQueueManager = {
let count = 0
const flushProjectIfNotModified = (project_id, flushTimestamp, cb) =>
ProjectManager.getProjectDocsTimestamps(project_id, function (
err,
timestamps
) {
ProjectManager.getProjectDocsTimestamps(
project_id,
function (err, timestamps) {
if (err != null) {
return callback(err)
}
@ -80,7 +79,8 @@ module.exports = DeleteQueueManager = {
return cb(null, true)
}
)
})
}
)
var flushNextProject = function () {
const now = Date.now()
@ -92,12 +92,9 @@ module.exports = DeleteQueueManager = {
logger.log('hit count limit on flushing old projects')
return callback(null, count)
}
return RedisManager.getNextProjectToFlushAndDelete(cutoffTime, function (
err,
project_id,
flushTimestamp,
queueLength
) {
return RedisManager.getNextProjectToFlushAndDelete(
cutoffTime,
function (err, project_id, flushTimestamp, queueLength) {
if (err != null) {
return callback(err)
}
@ -106,16 +103,18 @@ module.exports = DeleteQueueManager = {
}
logger.log({ project_id, queueLength }, 'flushing queued project')
metrics.globalGauge('queued-flush-backlog', queueLength)
return flushProjectIfNotModified(project_id, flushTimestamp, function (
err,
flushed
) {
return flushProjectIfNotModified(
project_id,
flushTimestamp,
function (err, flushed) {
if (flushed) {
count++
}
return flushNextProject()
})
})
}
)
}
)
}
return flushNextProject()
@ -133,12 +132,12 @@ module.exports = DeleteQueueManager = {
{
timeout: 1000,
min_delete_age: 3 * 60 * 1000,
limit: 1000 // high value, to ensure we always flush enough projects
limit: 1000, // high value, to ensure we always flush enough projects
},
(err, flushed) =>
setTimeout(doFlush, flushed > 10 ? SHORT_DELAY : LONG_DELAY)
)
}
return doFlush()
}
},
}

View file

@ -21,13 +21,13 @@ module.exports = {
if (type === this.ADDED) {
ops.push({
i: content,
p: position
p: position,
})
position += content.length
} else if (type === this.REMOVED) {
ops.push({
d: content,
p: position
p: position,
})
} else if (type === this.UNCHANGED) {
position += content.length
@ -36,5 +36,5 @@ module.exports = {
}
}
callback(null, ops)
}
},
}

View file

@ -57,7 +57,7 @@ module.exports = DispatchManager = {
Keys.splitProjectIdAndDocId(doc_key)
)
// Dispatch this in the background
const backgroundTask = (cb) =>
const backgroundTask = cb =>
UpdateManager.processOutstandingUpdatesWithLock(
project_id,
doc_id,
@ -91,7 +91,7 @@ module.exports = DispatchManager = {
if (Settings.shuttingDown) {
return
}
return worker._waitForUpdateThenDispatchWorker((error) => {
return worker._waitForUpdateThenDispatchWorker(error => {
if (error != null) {
logger.error({ err: error }, 'Error in worker process')
throw error
@ -99,7 +99,7 @@ module.exports = DispatchManager = {
return worker.run()
}
})
}
},
}
return worker
@ -110,5 +110,5 @@ module.exports = DispatchManager = {
_.times(number, function (shardNumber) {
return DispatchManager.createDispatcher(RateLimiter, shardNumber).run()
})
}
},
}

View file

@ -47,7 +47,10 @@ module.exports = DocumentManager = {
return _callback(...Array.from(args || []))
}
return RedisManager.getDoc(project_id, doc_id, function (
return RedisManager.getDoc(
project_id,
doc_id,
function (
error,
lines,
version,
@ -64,7 +67,10 @@ module.exports = DocumentManager = {
{ project_id, doc_id },
'doc not in redis so getting from persistence API'
)
return PersistenceManager.getDoc(project_id, doc_id, function (
return PersistenceManager.getDoc(
project_id,
doc_id,
function (
error,
lines,
version,
@ -84,7 +90,7 @@ module.exports = DocumentManager = {
version,
pathname,
projectHistoryId,
projectHistoryType
projectHistoryType,
},
'got doc from persistence API'
)
@ -121,7 +127,8 @@ module.exports = DocumentManager = {
)
}
)
})
}
)
} else {
return callback(
null,
@ -134,7 +141,8 @@ module.exports = DocumentManager = {
true
)
}
})
}
)
},
getDocAndRecentOps(project_id, doc_id, fromVersion, _callback) {
@ -155,14 +163,10 @@ module.exports = DocumentManager = {
return _callback(...Array.from(args || []))
}
return DocumentManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId
) {
return DocumentManager.getDoc(
project_id,
doc_id,
function (error, lines, version, ranges, pathname, projectHistoryId) {
if (error != null) {
return callback(error)
}
@ -197,7 +201,8 @@ module.exports = DocumentManager = {
}
)
}
})
}
)
},
setDoc(project_id, doc_id, newLines, source, user_id, undoing, _callback) {
@ -215,7 +220,10 @@ module.exports = DocumentManager = {
}
const UpdateManager = require('./UpdateManager')
return DocumentManager.getDoc(project_id, doc_id, function (
return DocumentManager.getDoc(
project_id,
doc_id,
function (
error,
oldLines,
version,
@ -229,7 +237,11 @@ module.exports = DocumentManager = {
return callback(error)
}
if (oldLines != null && oldLines.length > 0 && oldLines[0].text != null) {
if (
oldLines != null &&
oldLines.length > 0 &&
oldLines[0].text != null
) {
logger.log(
{ doc_id, project_id, oldLines, newLines },
'document is JSON so not updating'
@ -241,10 +253,10 @@ module.exports = DocumentManager = {
{ doc_id, project_id, oldLines, newLines },
'setting a document via http'
)
return DiffCodec.diffAsShareJsOp(oldLines, newLines, function (
error,
op
) {
return DiffCodec.diffAsShareJsOp(
oldLines,
newLines,
function (error, op) {
if (error != null) {
return callback(error)
}
@ -260,12 +272,14 @@ module.exports = DocumentManager = {
meta: {
type: 'external',
source,
user_id
user_id,
},
}
}
return UpdateManager.applyUpdate(project_id, doc_id, update, function (
error
) {
return UpdateManager.applyUpdate(
project_id,
doc_id,
update,
function (error) {
if (error != null) {
return callback(error)
}
@ -301,9 +315,12 @@ module.exports = DocumentManager = {
}
)
}
})
})
})
}
)
}
)
}
)
},
flushDocIfLoaded(project_id, doc_id, _callback) {
@ -315,7 +332,10 @@ module.exports = DocumentManager = {
timer.done()
return _callback(...Array.from(args || []))
}
return RedisManager.getDoc(project_id, doc_id, function (
return RedisManager.getDoc(
project_id,
doc_id,
function (
error,
lines,
version,
@ -330,7 +350,10 @@ module.exports = DocumentManager = {
return callback(error)
}
if (lines == null || version == null) {
logger.log({ project_id, doc_id }, 'doc is not loaded so not flushing')
logger.log(
{ project_id, doc_id },
'doc is not loaded so not flushing'
)
return callback(null) // TODO: return a flag to bail out, as we go on to remove doc from memory?
} else {
logger.log({ project_id, doc_id, version }, 'flushing doc')
@ -350,7 +373,8 @@ module.exports = DocumentManager = {
}
)
}
})
}
)
},
flushAndDeleteDoc(project_id, doc_id, options, _callback) {
@ -360,9 +384,10 @@ module.exports = DocumentManager = {
return _callback(...Array.from(args || []))
}
return DocumentManager.flushDocIfLoaded(project_id, doc_id, function (
error
) {
return DocumentManager.flushDocIfLoaded(
project_id,
doc_id,
function (error) {
if (error != null) {
if (options.ignoreFlushErrors) {
logger.warn(
@ -377,15 +402,18 @@ module.exports = DocumentManager = {
// Flush in the background since it requires a http request
HistoryManager.flushDocChangesAsync(project_id, doc_id)
return RedisManager.removeDocFromMemory(project_id, doc_id, function (
error
) {
return RedisManager.removeDocFromMemory(
project_id,
doc_id,
function (error) {
if (error != null) {
return callback(error)
}
return callback(null)
})
})
}
)
}
)
},
acceptChanges(project_id, doc_id, change_ids, _callback) {
@ -401,12 +429,10 @@ module.exports = DocumentManager = {
return _callback(...Array.from(args || []))
}
return DocumentManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges
) {
return DocumentManager.getDoc(
project_id,
doc_id,
function (error, lines, version, ranges) {
if (error != null) {
return callback(error)
}
@ -415,10 +441,10 @@ module.exports = DocumentManager = {
new Errors.NotFoundError(`document not found: ${doc_id}`)
)
}
return RangesManager.acceptChanges(change_ids, ranges, function (
error,
new_ranges
) {
return RangesManager.acceptChanges(
change_ids,
ranges,
function (error, new_ranges) {
if (error != null) {
return callback(error)
}
@ -437,8 +463,10 @@ module.exports = DocumentManager = {
return callback()
}
)
})
})
}
)
}
)
},
deleteComment(project_id, doc_id, comment_id, _callback) {
@ -451,12 +479,10 @@ module.exports = DocumentManager = {
return _callback(...Array.from(args || []))
}
return DocumentManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges
) {
return DocumentManager.getDoc(
project_id,
doc_id,
function (error, lines, version, ranges) {
if (error != null) {
return callback(error)
}
@ -465,10 +491,10 @@ module.exports = DocumentManager = {
new Errors.NotFoundError(`document not found: ${doc_id}`)
)
}
return RangesManager.deleteComment(comment_id, ranges, function (
error,
new_ranges
) {
return RangesManager.deleteComment(
comment_id,
ranges,
function (error, new_ranges) {
if (error != null) {
return callback(error)
}
@ -487,8 +513,10 @@ module.exports = DocumentManager = {
return callback()
}
)
})
})
}
)
}
)
},
renameDoc(project_id, doc_id, user_id, update, projectHistoryId, _callback) {
@ -515,7 +543,10 @@ module.exports = DocumentManager = {
if (callback == null) {
callback = function (error, doc) {}
}
return DocumentManager.getDoc(project_id, doc_id, function (
return DocumentManager.getDoc(
project_id,
doc_id,
function (
error,
lines,
version,
@ -534,30 +565,29 @@ module.exports = DocumentManager = {
unflushedTime != null &&
Date.now() - unflushedTime > MAX_UNFLUSHED_AGE
) {
return DocumentManager.flushDocIfLoaded(project_id, doc_id, function (
error
) {
return DocumentManager.flushDocIfLoaded(
project_id,
doc_id,
function (error) {
if (error != null) {
return callback(error)
}
return callback(null, lines, version)
})
}
)
} else {
return callback(null, lines, version)
}
})
}
)
},
resyncDocContents(project_id, doc_id, callback) {
logger.log({ project_id, doc_id }, 'start resyncing doc contents')
return RedisManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId
) {
return RedisManager.getDoc(
project_id,
doc_id,
function (error, lines, version, ranges, pathname, projectHistoryId) {
if (error != null) {
return callback(error)
}
@ -567,7 +597,10 @@ module.exports = DocumentManager = {
{ project_id, doc_id },
'resyncing doc contents - not found in redis - retrieving from web'
)
return PersistenceManager.getDoc(project_id, doc_id, function (
return PersistenceManager.getDoc(
project_id,
doc_id,
function (
error,
lines,
version,
@ -591,7 +624,8 @@ module.exports = DocumentManager = {
pathname,
callback
)
})
}
)
} else {
logger.log(
{ project_id, doc_id },
@ -607,7 +641,8 @@ module.exports = DocumentManager = {
callback
)
}
})
}
)
},
getDocWithLock(project_id, doc_id, callback) {
@ -769,5 +804,5 @@ module.exports = DocumentManager = {
doc_id,
callback
)
}
},
}

View file

@ -41,5 +41,5 @@ module.exports = Errors = {
NotFoundError,
OpRangeNotAvailableError,
ProjectStateChangedError,
DeleteMismatchError
DeleteMismatchError,
}

View file

@ -32,10 +32,9 @@ module.exports = HistoryManager = {
)
return
}
return RedisManager.getHistoryType(doc_id, function (
err,
projectHistoryType
) {
return RedisManager.getHistoryType(
doc_id,
function (err, projectHistoryType) {
if (err != null) {
logger.warn({ err, doc_id }, 'error getting history type')
}
@ -69,7 +68,8 @@ module.exports = HistoryManager = {
}
})
}
})
}
)
},
// flush changes in the background
@ -77,7 +77,7 @@ module.exports = HistoryManager = {
if (
!__guard__(
Settings.apis != null ? Settings.apis.project_history : undefined,
(x) => x.enabled
x => x.enabled
)
) {
return
@ -97,7 +97,7 @@ module.exports = HistoryManager = {
if (
!__guard__(
Settings.apis != null ? Settings.apis.project_history : undefined,
(x) => x.enabled
x => x.enabled
)
) {
return callback()
@ -157,7 +157,7 @@ module.exports = HistoryManager = {
if (
__guard__(
Settings.apis != null ? Settings.apis.project_history : undefined,
(x) => x.enabled
x => x.enabled
)
) {
if (
@ -253,7 +253,7 @@ module.exports = HistoryManager = {
)
}
)
}
},
}
function __guard__(value, transform) {

View file

@ -41,5 +41,5 @@ module.exports = HistoryRedisManager = {
return callback()
}
)
}
},
}

View file

@ -24,7 +24,7 @@ module.exports = {
updateProject,
resyncProjectHistory,
flushAllProjects,
flushQueuedProjects
flushQueuedProjects,
}
function getDoc(req, res, next) {
@ -59,7 +59,7 @@ function getDoc(req, res, next) {
version,
ops,
ranges,
pathname
pathname,
})
}
)
@ -104,7 +104,7 @@ function getProjectDocsAndFlushIfOld(req, res, next) {
logger.log(
{
projectId,
result: result.map((doc) => `${doc._id}:${doc.v}`)
result: result.map(doc => `${doc._id}:${doc.v}`),
},
'got docs via http'
)
@ -118,7 +118,7 @@ function clearProjectState(req, res, next) {
const projectId = req.params.project_id
const timer = new Metrics.Timer('http.clearProjectState')
logger.log({ projectId }, 'clearing project state via http')
ProjectManager.clearProjectState(projectId, (error) => {
ProjectManager.clearProjectState(projectId, error => {
timer.done()
if (error) {
next(error)
@ -152,7 +152,7 @@ function setDoc(req, res, next) {
source,
userId,
undoing,
(error) => {
error => {
timer.done()
if (error) {
return next(error)
@ -168,7 +168,7 @@ function flushDocIfLoaded(req, res, next) {
const projectId = req.params.project_id
logger.log({ projectId, docId }, 'flushing doc via http')
const timer = new Metrics.Timer('http.flushDoc')
DocumentManager.flushDocIfLoadedWithLock(projectId, docId, (error) => {
DocumentManager.flushDocIfLoadedWithLock(projectId, docId, error => {
timer.done()
if (error) {
return next(error)
@ -188,7 +188,7 @@ function deleteDoc(req, res, next) {
projectId,
docId,
{ ignoreFlushErrors },
(error) => {
error => {
timer.done()
// There is no harm in flushing project history if the previous call
// failed and sometimes it is required
@ -207,7 +207,7 @@ function flushProject(req, res, next) {
const projectId = req.params.project_id
logger.log({ projectId }, 'flushing project via http')
const timer = new Metrics.Timer('http.flushProject')
ProjectManager.flushProjectWithLocks(projectId, (error) => {
ProjectManager.flushProjectWithLocks(projectId, error => {
timer.done()
if (error) {
return next(error)
@ -228,7 +228,7 @@ function deleteProject(req, res, next) {
options.skip_history_flush = true
} // don't flush history when realtime shuts down
if (req.query.background) {
ProjectManager.queueFlushAndDeleteProject(projectId, (error) => {
ProjectManager.queueFlushAndDeleteProject(projectId, error => {
if (error) {
return next(error)
}
@ -237,18 +237,14 @@ function deleteProject(req, res, next) {
}) // No Content
} else {
const timer = new Metrics.Timer('http.deleteProject')
ProjectManager.flushAndDeleteProjectWithLocks(
projectId,
options,
(error) => {
ProjectManager.flushAndDeleteProjectWithLocks(projectId, options, error => {
timer.done()
if (error) {
return next(error)
}
logger.log({ projectId }, 'deleted project via http')
res.sendStatus(204) // No Content
}
)
})
}
}
@ -261,7 +257,7 @@ function deleteMultipleProjects(req, res, next) {
logger.log({ projectId }, 'queue delete of project via http')
ProjectManager.queueFlushAndDeleteProject(projectId, cb)
},
(error) => {
error => {
if (error) {
return next(error)
}
@ -281,11 +277,7 @@ function acceptChanges(req, res, next) {
`accepting ${changeIds.length} changes via http`
)
const timer = new Metrics.Timer('http.acceptChanges')
DocumentManager.acceptChangesWithLock(
projectId,
docId,
changeIds,
(error) => {
DocumentManager.acceptChangesWithLock(projectId, docId, changeIds, error => {
timer.done()
if (error) {
return next(error)
@ -295,31 +287,25 @@ function acceptChanges(req, res, next) {
`accepted ${changeIds.length} changes via http`
)
res.sendStatus(204) // No Content
}
)
})
}
function deleteComment(req, res, next) {
const {
project_id: projectId,
doc_id: docId,
comment_id: commentId
comment_id: commentId,
} = req.params
logger.log({ projectId, docId, commentId }, 'deleting comment via http')
const timer = new Metrics.Timer('http.deleteComment')
DocumentManager.deleteCommentWithLock(
projectId,
docId,
commentId,
(error) => {
DocumentManager.deleteCommentWithLock(projectId, docId, commentId, error => {
timer.done()
if (error) {
return next(error)
}
logger.log({ projectId, docId, commentId }, 'deleted comment via http')
res.sendStatus(204) // No Content
}
)
})
}
function updateProject(req, res, next) {
@ -333,7 +319,7 @@ function updateProject(req, res, next) {
userId,
updates,
version,
(error) => {
error => {
timer.done()
if (error) {
return next(error)
@ -357,7 +343,7 @@ function resyncProjectHistory(req, res, next) {
projectHistoryId,
docs,
files,
(error) => {
error => {
if (error) {
return next(error)
}
@ -372,7 +358,7 @@ function flushAllProjects(req, res, next) {
const options = {
limit: req.query.limit || 1000,
concurrency: req.query.concurrency || 5,
dryRun: req.query.dryRun || false
dryRun: req.query.dryRun || false,
}
ProjectFlusher.flushAllProjects(options, (err, projectIds) => {
if (err) {
@ -389,7 +375,7 @@ function flushQueuedProjects(req, res, next) {
const options = {
limit: req.query.limit || 1000,
timeout: 5 * 60 * 1000,
min_delete_age: req.query.min_delete_age || 5 * 60 * 1000
min_delete_age: req.query.min_delete_age || 5 * 60 * 1000,
}
DeleteQueueManager.flushAndDeleteOldProjects(options, (err, flushed) => {
if (err) {

View file

@ -54,10 +54,13 @@ module.exports = LockManager = {
const lockValue = LockManager.randomLock()
const key = keys.blockingKey({ doc_id })
const profile = new Profiler('tryLock', { doc_id, key, lockValue })
return rclient.set(key, lockValue, 'EX', this.LOCK_TTL, 'NX', function (
err,
gotLock
) {
return rclient.set(
key,
lockValue,
'EX',
this.LOCK_TTL,
'NX',
function (err, gotLock) {
if (err != null) {
return callback(err)
}
@ -66,15 +69,16 @@ module.exports = LockManager = {
const timeTaken = profile.log('got lock').end()
if (timeTaken > MAX_REDIS_REQUEST_LENGTH) {
// took too long, so try to free the lock
return LockManager.releaseLock(doc_id, lockValue, function (
err,
result
) {
return LockManager.releaseLock(
doc_id,
lockValue,
function (err, result) {
if (err != null) {
return callback(err)
} // error freeing lock
return callback(null, false)
}) // tell caller they didn't get the lock
}
) // tell caller they didn't get the lock
} else {
return callback(null, true, lockValue)
}
@ -83,7 +87,8 @@ module.exports = LockManager = {
profile.log('doc is locked').end()
return callback(null, false)
}
})
}
)
},
getLock(doc_id, callback) {
@ -145,10 +150,12 @@ module.exports = LockManager = {
releaseLock(doc_id, lockValue, callback) {
const key = keys.blockingKey({ doc_id })
const profile = new Profiler('releaseLock', { doc_id, key, lockValue })
return rclient.eval(LockManager.unlockScript, 1, key, lockValue, function (
err,
result
) {
return rclient.eval(
LockManager.unlockScript,
1,
key,
lockValue,
function (err, result) {
if (err != null) {
return callback(err)
} else if (result != null && result !== 1) {
@ -164,6 +171,7 @@ module.exports = LockManager = {
profile.log('unlockScript:ok').end()
return callback(null, result)
}
})
}
)
},
}

View file

@ -25,19 +25,19 @@ const showUpdateLength = function (update) {
const copy = _.cloneDeep(update)
copy.op.forEach(function (element, index) {
if (
__guard__(element != null ? element.i : undefined, (x) => x.length) !=
__guard__(element != null ? element.i : undefined, x => x.length) !=
null
) {
copy.op[index].i = element.i.length
}
if (
__guard__(element != null ? element.d : undefined, (x1) => x1.length) !=
__guard__(element != null ? element.d : undefined, x1 => x1.length) !=
null
) {
copy.op[index].d = element.d.length
}
if (
__guard__(element != null ? element.c : undefined, (x2) => x2.length) !=
__guard__(element != null ? element.c : undefined, x2 => x2.length) !=
null
) {
return (copy.op[index].c = element.c.length)
@ -57,7 +57,7 @@ module.exports = {
docLines: showLength,
newDocLines: showLength,
ranges: showLength,
update: showUpdateLength
update: showUpdateLength,
}
function __guard__(value, transform) {

View file

@ -21,7 +21,7 @@ const Metrics = require('./Metrics')
const logger = require('logger-sharelatex')
const request = require('requestretry').defaults({
maxAttempts: 2,
retryDelay: 10
retryDelay: 10,
})
// We have to be quick with HTTP calls because we're holding a lock that
@ -75,15 +75,15 @@ module.exports = PersistenceManager = {
url: `${Settings.apis.web.url}${urlPath}`,
method: 'GET',
headers: {
accept: 'application/json'
accept: 'application/json',
},
auth: {
user: Settings.apis.web.user,
pass: Settings.apis.web.pass,
sendImmediately: true
sendImmediately: true,
},
jar: false,
timeout: MAX_HTTP_REQUEST_LENGTH
timeout: MAX_HTTP_REQUEST_LENGTH,
},
function (error, res, body) {
updateMetric('getDoc', error, res)
@ -164,15 +164,15 @@ module.exports = PersistenceManager = {
ranges,
version,
lastUpdatedBy,
lastUpdatedAt
lastUpdatedAt,
},
auth: {
user: Settings.apis.web.user,
pass: Settings.apis.web.pass,
sendImmediately: true
sendImmediately: true,
},
jar: false,
timeout: MAX_HTTP_REQUEST_LENGTH
timeout: MAX_HTTP_REQUEST_LENGTH,
},
function (error, res, body) {
updateMetric('setDoc', error, res)
@ -196,5 +196,5 @@ module.exports = PersistenceManager = {
}
}
)
}
},
}

View file

@ -45,10 +45,13 @@ var ProjectFlusher = {
var doIteration = (
cb // avoid hitting redis too hard
) =>
node.scan(cursor, 'MATCH', pattern, 'COUNT', batchSize, function (
error,
reply
) {
node.scan(
cursor,
'MATCH',
pattern,
'COUNT',
batchSize,
function (error, reply) {
let keys
if (error != null) {
return callback(error)
@ -65,7 +68,8 @@ var ProjectFlusher = {
} else {
return setTimeout(doIteration, 10)
}
})
}
)
return doIteration()
},
@ -97,7 +101,9 @@ var ProjectFlusher = {
if (options.dryRun) {
return callback(null, project_ids)
}
const jobs = _.map(project_ids, (project_id) => (cb) =>
const jobs = _.map(
project_ids,
project_id => cb =>
ProjectManager.flushAndDeleteProjectWithLocks(
project_id,
{ background: true },
@ -123,7 +129,7 @@ var ProjectFlusher = {
)
}
)
}
},
}
module.exports = ProjectFlusher

View file

@ -17,7 +17,7 @@ let ProjectHistoryRedisManager
const Settings = require('@overleaf/settings')
const projectHistoryKeys = __guard__(
Settings.redis != null ? Settings.redis.project_history : undefined,
(x) => x.key_schema
x => x.key_schema
)
const rclient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.project_history
@ -70,10 +70,10 @@ module.exports = ProjectHistoryRedisManager = {
new_pathname: projectUpdate.newPathname,
meta: {
user_id,
ts: new Date()
ts: new Date(),
},
version: projectUpdate.version,
projectHistoryId
projectHistoryId,
}
projectUpdate[entity_type] = entity_id
@ -104,10 +104,10 @@ module.exports = ProjectHistoryRedisManager = {
url: projectUpdate.url,
meta: {
user_id,
ts: new Date()
ts: new Date(),
},
version: projectUpdate.version,
projectHistoryId
projectHistoryId,
}
projectUpdate[entity_type] = entitiy_id
@ -132,8 +132,8 @@ module.exports = ProjectHistoryRedisManager = {
resyncProjectStructure: { docs, files },
projectHistoryId,
meta: {
ts: new Date()
}
ts: new Date(),
},
}
const jsonUpdate = JSON.stringify(projectUpdate)
return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback)
@ -155,18 +155,18 @@ module.exports = ProjectHistoryRedisManager = {
const projectUpdate = {
resyncDocContent: {
content: lines.join('\n'),
version
version,
},
projectHistoryId,
path: pathname,
doc: doc_id,
meta: {
ts: new Date()
}
ts: new Date(),
},
}
const jsonUpdate = JSON.stringify(projectUpdate)
return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback)
}
},
}
function __guard__(value, transform) {

View file

@ -14,7 +14,7 @@ module.exports = {
getProjectDocsTimestamps,
getProjectDocsAndFlushIfOld,
clearProjectState,
updateProjectWithLocks
updateProjectWithLocks,
}
function flushProjectWithLocks(projectId, _callback) {
@ -29,8 +29,8 @@ function flushProjectWithLocks(projectId, _callback) {
return callback(error)
}
const errors = []
const jobs = docIds.map((docId) => (callback) => {
DocumentManager.flushDocIfLoadedWithLock(projectId, docId, (error) => {
const jobs = docIds.map(docId => callback => {
DocumentManager.flushDocIfLoadedWithLock(projectId, docId, error => {
if (error instanceof Errors.NotFoundError) {
logger.warn(
{ err: error, projectId, docId },
@ -72,19 +72,14 @@ function flushAndDeleteProjectWithLocks(projectId, options, _callback) {
return callback(error)
}
const errors = []
const jobs = docIds.map((docId) => (callback) => {
DocumentManager.flushAndDeleteDocWithLock(
projectId,
docId,
{},
(error) => {
const jobs = docIds.map(docId => callback => {
DocumentManager.flushAndDeleteDocWithLock(projectId, docId, {}, error => {
if (error) {
logger.error({ err: error, projectId, docId }, 'error deleting doc')
errors.push(error)
}
callback()
}
)
})
})
logger.log({ projectId, docIds }, 'deleting docs')
@ -93,7 +88,7 @@ function flushAndDeleteProjectWithLocks(projectId, options, _callback) {
// history is completely flushed because the project may be
// deleted in web after this call completes, and so further
// attempts to flush would fail after that.
HistoryManager.flushProjectChanges(projectId, options, (error) => {
HistoryManager.flushProjectChanges(projectId, options, error => {
if (errors.length > 0) {
callback(new Error('Errors deleting docs. See log for details'))
} else if (error) {
@ -107,7 +102,7 @@ function flushAndDeleteProjectWithLocks(projectId, options, _callback) {
}
function queueFlushAndDeleteProject(projectId, callback) {
RedisManager.queueFlushAndDeleteProject(projectId, (error) => {
RedisManager.queueFlushAndDeleteProject(projectId, error => {
if (error) {
logger.error(
{ projectId, error },
@ -176,7 +171,7 @@ function getProjectDocsAndFlushIfOld(
return callback(error)
}
// get the doc lines from redis
const jobs = docIds.map((docId) => (cb) => {
const jobs = docIds.map(docId => cb => {
DocumentManager.getDocAndFlushIfOldWithLock(
projectId,
docId,
@ -288,7 +283,7 @@ function updateProjectWithLocks(
}
}
async.eachSeries(updates, handleUpdate, (error) => {
async.eachSeries(updates, handleUpdate, error => {
if (error) {
return callback(error)
}

View file

@ -42,7 +42,7 @@ module.exports = RangesManager = {
for (const op of Array.from(update.op)) {
try {
rangesTracker.applyOp(op, {
user_id: update.meta != null ? update.meta.user_id : undefined
user_id: update.meta != null ? update.meta.user_id : undefined,
})
} catch (error1) {
error = error1
@ -86,7 +86,7 @@ module.exports = RangesManager = {
response.changes != null ? response.changes.length : undefined,
commentsCount:
response.comments != null ? response.comments.length : undefined,
rangesWereCollapsed
rangesWereCollapsed,
},
'applied updates to ranges'
)
@ -159,5 +159,5 @@ module.exports = RangesManager = {
}
}
return count
}
},
}

View file

@ -120,7 +120,7 @@ const load = function () {
if (comment == null) {
return
}
this.comments = this.comments.filter((c) => c.id !== comment_id)
this.comments = this.comments.filter(c => c.id !== comment_id)
return this._markAsDirty(comment, 'comment', 'removed')
}
@ -257,7 +257,7 @@ const load = function () {
if (metadata == null) {
metadata = {}
}
return Array.from(ops).map((op) => this.applyOp(op, metadata))
return Array.from(ops).map(op => this.applyOp(op, metadata))
}
addComment(op, metadata) {
@ -274,9 +274,9 @@ const load = function () {
// Copy because we'll modify in place
c: op.c,
p: op.p,
t: op.t
t: op.t,
},
metadata
metadata,
})
)
this._markAsDirty(comment, 'comment', 'added')
@ -488,9 +488,9 @@ const load = function () {
const after_change = {
op: {
i: after_content,
p: change_start + offset + op_length
p: change_start + offset + op_length,
},
metadata: {}
metadata: {},
}
for (const key in change.metadata) {
const value = change.metadata[key]
@ -606,7 +606,7 @@ const load = function () {
delete_removed_start,
delete_removed_start + delete_removed_length
),
p: delete_removed_start
p: delete_removed_start,
}
if (modification.d.length > 0) {
op_modifications.push(modification)
@ -643,7 +643,7 @@ const load = function () {
// Copy rather than modify because we still need to apply it to comments
op = {
p: op.p,
d: this._applyOpModifications(op.d, op_modifications)
d: this._applyOpModifications(op.d, op_modifications),
}
for (change of Array.from(remove_changes)) {
@ -678,7 +678,7 @@ const load = function () {
moved_changes = moved_changes.concat(results.moved_changes)
for (change of Array.from(results.remove_changes)) {
this._removeChange(change)
moved_changes = moved_changes.filter((c) => c !== change)
moved_changes = moved_changes.filter(c => c !== change)
}
}
@ -695,7 +695,7 @@ const load = function () {
const change = {
id: this.newId(),
op: this._clone(op), // Don't take a reference to the existing op since we'll modify this in place with future changes
metadata: this._clone(metadata)
metadata: this._clone(metadata),
}
this.changes.push(change)
@ -717,7 +717,7 @@ const load = function () {
}
_removeChange(change) {
this.changes = this.changes.filter((c) => c.id !== change.id)
this.changes = this.changes.filter(c => c.id !== change.id)
return this._markAsDirty(change, 'change', 'removed')
}
@ -813,13 +813,13 @@ const load = function () {
comment: {
moved: {},
removed: {},
added: {}
added: {},
},
change: {
moved: {},
removed: {},
added: {}
}
added: {},
},
})
}

View file

@ -47,7 +47,7 @@ module.exports = RateLimiter = class RateLimiter {
}
this.ActiveWorkerCount++
Metrics.gauge('processingUpdates', this.ActiveWorkerCount)
return task((err) => {
return task(err => {
this.ActiveWorkerCount--
Metrics.gauge('processingUpdates', this.ActiveWorkerCount)
return callback(err)
@ -65,11 +65,11 @@ module.exports = RateLimiter = class RateLimiter {
logger.log(
{
active: this.ActiveWorkerCount,
currentLimit: Math.ceil(this.CurrentWorkerLimit)
currentLimit: Math.ceil(this.CurrentWorkerLimit),
},
'hit rate limit'
)
return this._trackAndRun(task, (err) => {
return this._trackAndRun(task, err => {
if (err == null) {
this._adjustLimitUp()
} // don't increment rate limit if there was an error

View file

@ -45,7 +45,7 @@ module.exports = RealTimeRedisManager = {
for (jsonUpdate of Array.from(jsonUpdates)) {
// record metric for each update removed from queue
metrics.summary('redis.pendingUpdates', jsonUpdate.length, {
status: 'pop'
status: 'pop',
})
}
const updates = []
@ -83,5 +83,5 @@ module.exports = RealTimeRedisManager = {
} else {
return pubsubClient.publish('applied-ops', blob)
}
}
},
}

View file

@ -92,7 +92,7 @@ module.exports = RedisManager = {
return callback(error)
}
// update docsInProject set before writing doc contents
rclient.sadd(keys.docsInProject({ project_id }), doc_id, (error) => {
rclient.sadd(keys.docsInProject({ project_id }), doc_id, error => {
if (error) return callback(error)
rclient.mset(
@ -103,7 +103,7 @@ module.exports = RedisManager = {
[keys.docHash({ doc_id })]: docHash,
[keys.ranges({ doc_id })]: ranges,
[keys.pathname({ doc_id })]: pathname,
[keys.projectHistoryId({ doc_id })]: projectHistoryId
[keys.projectHistoryId({ doc_id })]: projectHistoryId,
},
callback
)
@ -203,7 +203,7 @@ module.exports = RedisManager = {
keys.projectHistoryId({ doc_id }),
keys.unflushedTime({ doc_id }),
keys.lastUpdatedAt({ doc_id }),
keys.lastUpdatedBy({ doc_id })
keys.lastUpdatedBy({ doc_id }),
]
rclient.mget(...collectKeys, (error, ...rest) => {
let [
@ -216,7 +216,7 @@ module.exports = RedisManager = {
projectHistoryId,
unflushedTime,
lastUpdatedAt,
lastUpdatedBy
lastUpdatedBy,
] = Array.from(rest[0])
const timeSpan = timer.done()
if (error != null) {
@ -244,7 +244,7 @@ module.exports = RedisManager = {
doc_project_id,
computedHash,
storedHash,
docLines
docLines,
},
'hash mismatch on retrieved document'
)
@ -325,10 +325,9 @@ module.exports = RedisManager = {
if (error != null) {
return callback(error)
}
return rclient.get(keys.docVersion({ doc_id }), function (
error,
version
) {
return rclient.get(
keys.docVersion({ doc_id }),
function (error, version) {
if (error != null) {
return callback(error)
}
@ -360,16 +359,17 @@ module.exports = RedisManager = {
return callback(error)
}
return rclient.lrange(keys.docOps({ doc_id }), start, end, function (
error,
jsonOps
) {
return rclient.lrange(
keys.docOps({ doc_id }),
start,
end,
function (error, jsonOps) {
let ops
if (error != null) {
return callback(error)
}
try {
ops = jsonOps.map((jsonOp) => JSON.parse(jsonOp))
ops = jsonOps.map(jsonOp => JSON.parse(jsonOp))
} catch (e) {
return callback(e)
}
@ -379,8 +379,10 @@ module.exports = RedisManager = {
return callback(error)
}
return callback(null, ops)
})
})
}
)
}
)
})
},
@ -388,15 +390,15 @@ module.exports = RedisManager = {
if (callback == null) {
callback = function (error, projectHistoryType) {}
}
return rclient.get(keys.projectHistoryType({ doc_id }), function (
error,
projectHistoryType
) {
return rclient.get(
keys.projectHistoryType({ doc_id }),
function (error, projectHistoryType) {
if (error != null) {
return callback(error)
}
return callback(null, projectHistoryType)
})
}
)
},
setHistoryType(doc_id, projectHistoryType, callback) {
@ -428,11 +430,9 @@ module.exports = RedisManager = {
if (callback == null) {
callback = function (error) {}
}
return RedisManager.getDocVersion(doc_id, function (
error,
currentVersion,
projectHistoryType
) {
return RedisManager.getDocVersion(
doc_id,
function (error, currentVersion, projectHistoryType) {
if (error != null) {
return callback(error)
}
@ -444,14 +444,14 @@ module.exports = RedisManager = {
doc_id,
currentVersion,
newVersion,
opsLength: appliedOps.length
opsLength: appliedOps.length,
},
'version mismatch'
)
return callback(error)
}
const jsonOps = appliedOps.map((op) => JSON.stringify(op))
const jsonOps = appliedOps.map(op => JSON.stringify(op))
for (const op of Array.from(jsonOps)) {
if (op.indexOf('\u0000') !== -1) {
error = new Error('null bytes found in jsonOps')
@ -477,14 +477,19 @@ module.exports = RedisManager = {
}
const newHash = RedisManager._computeHash(newDocLines)
const opVersions = appliedOps.map((op) => (op != null ? op.v : undefined))
const opVersions = appliedOps.map(op => (op != null ? op.v : undefined))
logger.log(
{ doc_id, version: newVersion, hash: newHash, op_versions: opVersions },
{
doc_id,
version: newVersion,
hash: newHash,
op_versions: opVersions,
},
'updating doc in redis'
)
// record bytes sent to redis in update
metrics.summary('redis.docLines', newDocLines.length, {
status: 'update'
status: 'update',
})
return RedisManager._serializeRanges(ranges, function (error, ranges) {
if (error != null) {
@ -504,7 +509,7 @@ module.exports = RedisManager = {
[keys.docHash({ doc_id })]: newHash,
[keys.ranges({ doc_id })]: ranges,
[keys.lastUpdatedAt({ doc_id })]: Date.now(),
[keys.lastUpdatedBy({ doc_id })]: updateMeta && updateMeta.user_id
[keys.lastUpdatedBy({ doc_id })]: updateMeta && updateMeta.user_id,
})
multi.ltrim(
keys.docOps({ doc_id }),
@ -551,8 +556,10 @@ module.exports = RedisManager = {
if (
jsonOps.length > 0 &&
__guard__(
Settings.apis != null ? Settings.apis.project_history : undefined,
(x) => x.enabled
Settings.apis != null
? Settings.apis.project_history
: undefined,
x => x.enabled
)
) {
metrics.inc('history-queue', 1, { status: 'project-history' })
@ -567,18 +574,18 @@ module.exports = RedisManager = {
}
})
})
})
}
)
},
renameDoc(project_id, doc_id, user_id, update, projectHistoryId, callback) {
if (callback == null) {
callback = function (error) {}
}
return RedisManager.getDoc(project_id, doc_id, function (
error,
lines,
version
) {
return RedisManager.getDoc(
project_id,
doc_id,
function (error, lines, version) {
if (error != null) {
return callback(error)
}
@ -613,7 +620,8 @@ module.exports = RedisManager = {
callback
)
}
})
}
)
},
clearUnflushedTime(doc_id, callback) {
@ -726,7 +734,7 @@ module.exports = RedisManager = {
// note: must specify 'utf8' encoding explicitly, as the default is
// binary in node < v5
return crypto.createHash('sha1').update(docLines, 'utf8').digest('hex')
}
},
}
function __guard__(value, transform) {

View file

@ -70,7 +70,7 @@ module.exports = ShareJsDB = class ShareJsDB {
return callback(null, {
snapshot: this.lines.join('\n'),
v: parseInt(this.version, 10),
type: 'text'
type: 'text',
})
}
}

View file

@ -35,7 +35,7 @@ module.exports = ShareJsUpdateManager = {
const db = new ShareJsDB(project_id, doc_id, lines, version)
const model = new ShareJsModel(db, {
maxDocLength: Settings.max_doc_length,
maximumAge: MAX_AGE_OF_OP
maximumAge: MAX_AGE_OF_OP,
})
model.db = db
return model
@ -141,5 +141,5 @@ module.exports = ShareJsUpdateManager = {
.update('blob ' + content.length + '\x00')
.update(content, 'utf8')
.digest('hex')
}
},
}

View file

@ -38,7 +38,7 @@ module.exports = SnapshotManager = {
lines,
pathname,
ranges: SnapshotManager.jsonRangesToMongo(ranges),
ts: new Date()
ts: new Date(),
},
callback
)
@ -83,5 +83,5 @@ module.exports = SnapshotManager = {
} catch (error) {
return data
}
}
},
}

View file

@ -9,5 +9,5 @@ module.exports = {
},
splitProjectIdAndDocId(project_and_doc_id) {
return project_and_doc_id.split(':')
}
},
}

View file

@ -37,15 +37,17 @@ module.exports = UpdateManager = {
callback = function (error) {}
}
const timer = new Metrics.Timer('updateManager.processOutstandingUpdates')
return UpdateManager.fetchAndApplyUpdates(project_id, doc_id, function (
error
) {
return UpdateManager.fetchAndApplyUpdates(
project_id,
doc_id,
function (error) {
timer.done()
if (error != null) {
return callback(error)
}
return callback()
})
}
)
},
processOutstandingUpdatesWithLock(project_id, doc_id, callback) {
@ -54,7 +56,7 @@ module.exports = UpdateManager = {
}
const profile = new Profiler('processOutstandingUpdatesWithLock', {
project_id,
doc_id
doc_id,
})
return LockManager.tryLock(doc_id, (error, gotLock, lockValue) => {
if (error != null) {
@ -77,7 +79,7 @@ module.exports = UpdateManager = {
)
}
profile.log('processOutstandingUpdates')
return LockManager.releaseLock(doc_id, lockValue, (error) => {
return LockManager.releaseLock(doc_id, lockValue, error => {
if (error != null) {
return callback(error)
}
@ -155,7 +157,7 @@ module.exports = UpdateManager = {
RealTimeRedisManager.sendData({
project_id,
doc_id,
error: error.message || error
error: error.message || error,
})
profile.log('sendData')
}
@ -166,14 +168,10 @@ module.exports = UpdateManager = {
var profile = new Profiler('applyUpdate', { project_id, doc_id })
UpdateManager._sanitizeUpdate(update)
profile.log('sanitizeUpdate')
return DocumentManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId
) {
return DocumentManager.getDoc(
project_id,
doc_id,
function (error, lines, version, ranges, pathname, projectHistoryId) {
profile.log('getDoc')
if (error != null) {
return callback(error)
@ -244,7 +242,7 @@ module.exports = UpdateManager = {
previousVersion,
lines,
ranges,
update
update,
},
'update collapsed some ranges, snapshotting previous content'
)
@ -266,7 +264,7 @@ module.exports = UpdateManager = {
doc_id,
version,
lines,
ranges
ranges,
},
'error recording snapshot'
)
@ -287,7 +285,8 @@ module.exports = UpdateManager = {
)
}
)
})
}
)
},
lockUpdatesAndDo(method, project_id, doc_id, ...rest) {
@ -313,10 +312,11 @@ module.exports = UpdateManager = {
)
}
profile.log('processOutstandingUpdates')
return method(project_id, doc_id, ...Array.from(args), function (
error,
...response_args
) {
return method(
project_id,
doc_id,
...Array.from(args),
function (error, ...response_args) {
if (error != null) {
return UpdateManager._handleErrorInsideLock(
doc_id,
@ -326,7 +326,10 @@ module.exports = UpdateManager = {
)
}
profile.log('method')
return LockManager.releaseLock(doc_id, lockValue, function (error) {
return LockManager.releaseLock(
doc_id,
lockValue,
function (error) {
if (error != null) {
return callback(error)
}
@ -337,8 +340,10 @@ module.exports = UpdateManager = {
project_id,
doc_id
)
})
})
}
)
}
)
}
)
})
@ -348,7 +353,7 @@ module.exports = UpdateManager = {
if (callback == null) {
callback = function (error) {}
}
return LockManager.releaseLock(doc_id, lockValue, (lock_error) =>
return LockManager.releaseLock(doc_id, lockValue, lock_error =>
callback(original_error)
)
},
@ -408,5 +413,5 @@ module.exports = UpdateManager = {
return result
})()
})
}
},
}

View file

@ -33,5 +33,5 @@ module.exports = {
db,
ObjectId,
healthCheck: require('util').callbackify(healthCheck),
waitForDb
waitForDb,
}

View file

@ -34,4 +34,4 @@ exports.compose = function (op1, op2) {
return [op1[0], op1[1] + op2[1]]
}
exports.generateRandomOp = (doc) => [[doc, 1], doc + 1]
exports.generateRandomOp = doc => [[doc, 1], doc + 1]

View file

@ -31,7 +31,10 @@ exports._bt = bootstrapTransform = function (
}
// Transforms rightOp by leftOp. Returns ['rightOp', clientOp']
type.transformX = type.transformX = transformX = function (leftOp, rightOp) {
type.transformX =
type.transformX =
transformX =
function (leftOp, rightOp) {
checkValidOp(leftOp)
checkValidOp(rightOp)
@ -80,7 +83,8 @@ exports._bt = bootstrapTransform = function (
}
// Transforms op with specified type ('left' or 'right') by otherOp.
return (type.transform = type.transform = function (op, otherOp, type) {
return (type.transform = type.transform =
function (op, otherOp, type) {
let _
if (type !== 'left' && type !== 'right') {
throw new Error("type must be 'left' or 'right'")

View file

@ -353,5 +353,5 @@ json.api = {
return result
})()
})
}
},
}

View file

@ -59,12 +59,12 @@ json.invertComponent = function (c) {
return c_
}
json.invert = (op) =>
Array.from(op.slice().reverse()).map((c) => json.invertComponent(c))
json.invert = op =>
Array.from(op.slice().reverse()).map(c => json.invertComponent(c))
json.checkValidOp = function (op) {}
const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]'
const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
json.checkList = function (elem) {
if (!isArray(elem)) {
throw new Error('Referenced element not a list')
@ -264,7 +264,7 @@ json.normalize = function (op) {
// hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming
// we have browser support for JSON.
// http://jsperf.com/cloning-an-object/12
var clone = (o) => JSON.parse(JSON.stringify(o))
var clone = o => JSON.parse(JSON.stringify(o))
json.commonPath = function (p1, p2) {
p1 = p1.slice()

View file

@ -27,7 +27,7 @@ const { EventEmitter } = require('events')
const queue = require('./syncqueue')
const types = require('../types')
const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]'
const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
// This constructor creates a new Model object. There will be one model object
// per server context.
@ -225,7 +225,7 @@ module.exports = Model = function (db, options) {
return callback(error)
}
__guardMethod__(options.stats, 'writeOp', (o) => o.writeOp())
__guardMethod__(options.stats, 'writeOp', o => o.writeOp())
// This is needed when we emit the 'change' event, below.
const oldSnapshot = doc.snapshot
@ -303,7 +303,7 @@ module.exports = Model = function (db, options) {
// Version of the snapshot thats in the database
committedVersion: committedVersion != null ? committedVersion : data.v,
snapshotWriteLock: false,
dbMeta
dbMeta,
}
doc.opQueue = makeOpQueue(docName, doc)
@ -352,9 +352,7 @@ module.exports = Model = function (db, options) {
const load = function (docName, callback) {
if (docs[docName]) {
// The document is already loaded. Return immediately.
__guardMethod__(options.stats, 'cacheHit', (o) =>
o.cacheHit('getSnapshot')
)
__guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot'))
return callback(null, docs[docName])
}
@ -370,7 +368,7 @@ module.exports = Model = function (db, options) {
return callbacks.push(callback)
}
__guardMethod__(options.stats, 'cacheMiss', (o1) =>
__guardMethod__(options.stats, 'cacheMiss', o1 =>
o1.cacheMiss('getSnapshot')
)
@ -447,7 +445,8 @@ module.exports = Model = function (db, options) {
) {
let reapTimer
clearTimeout(doc.reapTimer)
return (doc.reapTimer = reapTimer = setTimeout(
return (doc.reapTimer = reapTimer =
setTimeout(
() =>
tryWriteSnapshot(docName, function () {
// If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
@ -490,7 +489,7 @@ module.exports = Model = function (db, options) {
doc.snapshotWriteLock = true
__guardMethod__(options.stats, 'writeSnapshot', (o) => o.writeSnapshot())
__guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot())
const writeSnapshot =
(db != null ? db.writeSnapshot : undefined) ||
@ -501,7 +500,7 @@ module.exports = Model = function (db, options) {
meta: doc.meta,
snapshot: doc.snapshot,
// The database doesn't know about object types.
type: doc.type.name
type: doc.type.name,
}
// Commit snapshot.
@ -551,7 +550,7 @@ module.exports = Model = function (db, options) {
snapshot: type.create(),
type: type.name,
meta: meta || {},
v: 0
v: 0,
}
const done = function (error, dbMeta) {
@ -864,7 +863,7 @@ module.exports = Model = function (db, options) {
// Close the database connection. This is needed so nodejs can shut down cleanly.
this.closeDb = function () {
__guardMethod__(db, 'close', (o) => o.close())
__guardMethod__(db, 'close', o => o.close())
return (db = null)
}
}

View file

@ -27,7 +27,7 @@ const { EventEmitter } = require('events')
const queue = require('./syncqueue')
const types = require('../types')
const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]'
const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
// This constructor creates a new Model object. There will be one model object
// per server context.
@ -232,7 +232,7 @@ module.exports = Model = function (db, options) {
return callback(error)
}
__guardMethod__(options.stats, 'writeOp', (o) => o.writeOp())
__guardMethod__(options.stats, 'writeOp', o => o.writeOp())
// This is needed when we emit the 'change' event, below.
const oldSnapshot = doc.snapshot
@ -310,7 +310,7 @@ module.exports = Model = function (db, options) {
// Version of the snapshot thats in the database
committedVersion: committedVersion != null ? committedVersion : data.v,
snapshotWriteLock: false,
dbMeta
dbMeta,
}
doc.opQueue = makeOpQueue(docName, doc)
@ -359,9 +359,7 @@ module.exports = Model = function (db, options) {
const load = function (docName, callback) {
if (docs[docName]) {
// The document is already loaded. Return immediately.
__guardMethod__(options.stats, 'cacheHit', (o) =>
o.cacheHit('getSnapshot')
)
__guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot'))
return callback(null, docs[docName])
}
@ -377,7 +375,7 @@ module.exports = Model = function (db, options) {
return callbacks.push(callback)
}
__guardMethod__(options.stats, 'cacheMiss', (o1) =>
__guardMethod__(options.stats, 'cacheMiss', o1 =>
o1.cacheMiss('getSnapshot')
)
@ -454,7 +452,8 @@ module.exports = Model = function (db, options) {
) {
let reapTimer
clearTimeout(doc.reapTimer)
return (doc.reapTimer = reapTimer = setTimeout(
return (doc.reapTimer = reapTimer =
setTimeout(
() =>
tryWriteSnapshot(docName, function () {
// If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
@ -497,7 +496,7 @@ module.exports = Model = function (db, options) {
doc.snapshotWriteLock = true
__guardMethod__(options.stats, 'writeSnapshot', (o) => o.writeSnapshot())
__guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot())
const writeSnapshot =
(db != null ? db.writeSnapshot : undefined) ||
@ -508,7 +507,7 @@ module.exports = Model = function (db, options) {
meta: doc.meta,
snapshot: doc.snapshot,
// The database doesn't know about object types.
type: doc.type.name
type: doc.type.name,
}
// Commit snapshot.
@ -558,7 +557,7 @@ module.exports = Model = function (db, options) {
snapshot: type.create(),
type: type.name,
meta: meta || {},
v: 0
v: 0,
}
const done = function (error, dbMeta) {
@ -871,7 +870,7 @@ module.exports = Model = function (db, options) {
// Close the database connection. This is needed so nodejs can shut down cleanly.
this.closeDb = function () {
__guardMethod__(db, 'close', (o) => o.close())
__guardMethod__(db, 'close', o => o.close())
return (db = null)
}
}

View file

@ -50,5 +50,5 @@ module.exports = {
}
return { position: pos, text: op1.text }
}
},
}

View file

@ -42,11 +42,11 @@ text.api = {
_register() {
return this.on('remoteop', function (op) {
return Array.from(op).map((component) =>
return Array.from(op).map(component =>
component.i !== undefined
? this.emit('insert', component.p, component.i)
: this.emit('delete', component.p, component.d)
)
})
}
},
}

View file

@ -44,7 +44,7 @@ type.api = {
const op = type.normalize([
pos,
{ d: this.snapshot.slice(pos, pos + length) },
this.snapshot.length - pos - length
this.snapshot.length - pos - length,
])
this.submitOp(op, callback)
@ -70,7 +70,7 @@ type.api = {
return result
})()
})
}
},
}
// We don't increment pos, because the position
// specified is after the delete has happened.

View file

@ -74,7 +74,7 @@ const checkOp = function (op) {
// Makes a function for appending components to a given op.
// Exported for the randomOpGenerator.
exports._makeAppend = makeAppend = (op) =>
exports._makeAppend = makeAppend = op =>
function (component) {
if (component === 0 || component.i === '' || component.d === '') {
} else if (op.length === 0) {

View file

@ -49,7 +49,7 @@ type.api = {
// Flatten a document into a string
getText() {
const strings = Array.from(this.snapshot.data).filter(
(elem) => typeof elem === 'string'
elem => typeof elem === 'string'
)
return strings.join('')
},
@ -129,5 +129,5 @@ type.api = {
}
}
})
}
},
}

View file

@ -65,7 +65,7 @@ var type = {
}
return doc
}
},
}
const checkOp = function (op) {
@ -346,7 +346,7 @@ const transformer = function (op, otherOp, goForwards, side) {
// transform - insert skips over inserted parts
if (side === 'left') {
// The left insert should go first.
while (__guard__(peek(), (x) => x.i) !== undefined) {
while (__guard__(peek(), x => x.i) !== undefined) {
append(newOp, take())
}
}

View file

@ -110,7 +110,7 @@ text._append = append = function (newOp, c) {
) {
return (newOp[newOp.length - 1] = {
i: strInject(last.i, c.p - last.p, c.i),
p: last.p
p: last.p,
})
} else if (
last.d != null &&
@ -120,7 +120,7 @@ text._append = append = function (newOp, c) {
) {
return (newOp[newOp.length - 1] = {
d: strInject(c.d, last.p - c.p, last.d),
p: c.p
p: c.p,
})
} else {
return newOp.push(c)
@ -142,7 +142,7 @@ text.compose = function (op1, op2) {
// Attempt to compress the op components together 'as much as possible'.
// This implementation preserves order and preserves create/delete pairs.
text.compress = (op) => text.compose([], op)
text.compress = op => text.compose([], op)
text.normalize = function (op) {
const newOp = []
@ -216,7 +216,7 @@ text._tc = transformComponent = function (dest, c, otherC, side) {
if (c.i != null) {
append(dest, {
i: c.i,
p: transformPosition(c.p, otherC, side === 'right')
p: transformPosition(c.p, otherC, side === 'right'),
})
} else {
// Delete
@ -286,8 +286,8 @@ const invertComponent = function (c) {
// No need to use append for invert, because the components won't be able to
// cancel with one another.
text.invert = (op) =>
Array.from(op.slice().reverse()).map((c) => invertComponent(c))
text.invert = op =>
Array.from(op.slice().reverse()).map(c => invertComponent(c))
if (typeof WEB !== 'undefined' && WEB !== null) {
if (!exports.types) {

View file

@ -34,4 +34,4 @@ exports.compose = function (op1, op2) {
return [op1[0], op1[1] + op2[1]]
}
exports.generateRandomOp = (doc) => [[doc, 1], doc + 1]
exports.generateRandomOp = doc => [[doc, 1], doc + 1]

View file

@ -31,7 +31,10 @@ exports._bt = bootstrapTransform = function (
}
// Transforms rightOp by leftOp. Returns ['rightOp', clientOp']
type.transformX = type.transformX = transformX = function (leftOp, rightOp) {
type.transformX =
type.transformX =
transformX =
function (leftOp, rightOp) {
checkValidOp(leftOp)
checkValidOp(rightOp)
@ -80,7 +83,8 @@ exports._bt = bootstrapTransform = function (
}
// Transforms op with specified type ('left' or 'right') by otherOp.
return (type.transform = type.transform = function (op, otherOp, type) {
return (type.transform = type.transform =
function (op, otherOp, type) {
let _
if (type !== 'left' && type !== 'right') {
throw new Error("type must be 'left' or 'right'")

View file

@ -353,5 +353,5 @@ json.api = {
return result
})()
})
}
},
}

View file

@ -59,12 +59,12 @@ json.invertComponent = function (c) {
return c_
}
json.invert = (op) =>
Array.from(op.slice().reverse()).map((c) => json.invertComponent(c))
json.invert = op =>
Array.from(op.slice().reverse()).map(c => json.invertComponent(c))
json.checkValidOp = function (op) {}
const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]'
const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
json.checkList = function (elem) {
if (!isArray(elem)) {
throw new Error('Referenced element not a list')
@ -264,7 +264,7 @@ json.normalize = function (op) {
// hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming
// we have browser support for JSON.
// http://jsperf.com/cloning-an-object/12
var clone = (o) => JSON.parse(JSON.stringify(o))
var clone = o => JSON.parse(JSON.stringify(o))
json.commonPath = function (p1, p2) {
p1 = p1.slice()

View file

@ -27,7 +27,7 @@ const { EventEmitter } = require('events')
const queue = require('./syncqueue')
const types = require('../types')
const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]'
const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
// This constructor creates a new Model object. There will be one model object
// per server context.
@ -225,7 +225,7 @@ module.exports = Model = function (db, options) {
return callback(error)
}
__guardMethod__(options.stats, 'writeOp', (o) => o.writeOp())
__guardMethod__(options.stats, 'writeOp', o => o.writeOp())
// This is needed when we emit the 'change' event, below.
const oldSnapshot = doc.snapshot
@ -303,7 +303,7 @@ module.exports = Model = function (db, options) {
// Version of the snapshot thats in the database
committedVersion: committedVersion != null ? committedVersion : data.v,
snapshotWriteLock: false,
dbMeta
dbMeta,
}
doc.opQueue = makeOpQueue(docName, doc)
@ -352,9 +352,7 @@ module.exports = Model = function (db, options) {
const load = function (docName, callback) {
if (docs[docName]) {
// The document is already loaded. Return immediately.
__guardMethod__(options.stats, 'cacheHit', (o) =>
o.cacheHit('getSnapshot')
)
__guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot'))
return callback(null, docs[docName])
}
@ -370,7 +368,7 @@ module.exports = Model = function (db, options) {
return callbacks.push(callback)
}
__guardMethod__(options.stats, 'cacheMiss', (o1) =>
__guardMethod__(options.stats, 'cacheMiss', o1 =>
o1.cacheMiss('getSnapshot')
)
@ -447,7 +445,8 @@ module.exports = Model = function (db, options) {
) {
let reapTimer
clearTimeout(doc.reapTimer)
return (doc.reapTimer = reapTimer = setTimeout(
return (doc.reapTimer = reapTimer =
setTimeout(
() =>
tryWriteSnapshot(docName, function () {
// If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
@ -490,7 +489,7 @@ module.exports = Model = function (db, options) {
doc.snapshotWriteLock = true
__guardMethod__(options.stats, 'writeSnapshot', (o) => o.writeSnapshot())
__guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot())
const writeSnapshot =
(db != null ? db.writeSnapshot : undefined) ||
@ -501,7 +500,7 @@ module.exports = Model = function (db, options) {
meta: doc.meta,
snapshot: doc.snapshot,
// The database doesn't know about object types.
type: doc.type.name
type: doc.type.name,
}
// Commit snapshot.
@ -551,7 +550,7 @@ module.exports = Model = function (db, options) {
snapshot: type.create(),
type: type.name,
meta: meta || {},
v: 0
v: 0,
}
const done = function (error, dbMeta) {
@ -864,7 +863,7 @@ module.exports = Model = function (db, options) {
// Close the database connection. This is needed so nodejs can shut down cleanly.
this.closeDb = function () {
__guardMethod__(db, 'close', (o) => o.close())
__guardMethod__(db, 'close', o => o.close())
return (db = null)
}
}


@ -50,5 +50,5 @@ module.exports = {
}
return { position: pos, text: op1.text }
}
},
}


@ -42,11 +42,11 @@ text.api = {
_register() {
return this.on('remoteop', function (op) {
return Array.from(op).map((component) =>
return Array.from(op).map(component =>
component.i !== undefined
? this.emit('insert', component.p, component.i)
: this.emit('delete', component.p, component.d)
)
})
}
},
}


@ -44,7 +44,7 @@ type.api = {
const op = type.normalize([
pos,
{ d: this.snapshot.slice(pos, pos + length) },
this.snapshot.length - pos - length
this.snapshot.length - pos - length,
])
this.submitOp(op, callback)
@ -70,7 +70,7 @@ type.api = {
return result
})()
})
}
},
}
// We don't increment pos, because the position
// specified is after the delete has happened.


@ -75,7 +75,7 @@ const checkOp = function (op) {
// Makes a function for appending components to a given op.
// Exported for the randomOpGenerator.
moduleExport._makeAppend = makeAppend = (op) =>
moduleExport._makeAppend = makeAppend = op =>
function (component) {
if (component === 0 || component.i === '' || component.d === '') {
} else if (op.length === 0) {


@ -49,7 +49,7 @@ type.api = {
// Flatten a document into a string
getText() {
const strings = Array.from(this.snapshot.data).filter(
(elem) => typeof elem === 'string'
elem => typeof elem === 'string'
)
return strings.join('')
},
@ -129,5 +129,5 @@ type.api = {
}
}
})
}
},
}


@ -65,7 +65,7 @@ var type = {
}
return doc
}
},
}
const checkOp = function (op) {
@ -346,7 +346,7 @@ const transformer = function (op, otherOp, goForwards, side) {
// transform - insert skips over inserted parts
if (side === 'left') {
// The left insert should go first.
while (__guard__(peek(), (x) => x.i) !== undefined) {
while (__guard__(peek(), x => x.i) !== undefined) {
append(newOp, take())
}
}


@ -124,7 +124,7 @@ text._append = append = function (newOp, c) {
) {
return (newOp[newOp.length - 1] = {
i: strInject(last.i, c.p - last.p, c.i),
p: last.p
p: last.p,
})
} else if (
last.d != null &&
@ -134,7 +134,7 @@ text._append = append = function (newOp, c) {
) {
return (newOp[newOp.length - 1] = {
d: strInject(c.d, last.p - c.p, last.d),
p: c.p
p: c.p,
})
} else {
return newOp.push(c)
@ -156,7 +156,7 @@ text.compose = function (op1, op2) {
// Attempt to compress the op components together 'as much as possible'.
// This implementation preserves order and preserves create/delete pairs.
text.compress = (op) => text.compose([], op)
text.compress = op => text.compose([], op)
text.normalize = function (op) {
const newOp = []
@ -235,7 +235,7 @@ text._tc = transformComponent = function (dest, c, otherC, side) {
if (c.i != null) {
append(dest, {
i: c.i,
p: transformPosition(c.p, otherC, side === 'right')
p: transformPosition(c.p, otherC, side === 'right'),
})
} else if (c.d != null) {
// Delete
@ -305,7 +305,7 @@ text._tc = transformComponent = function (dest, c, otherC, side) {
append(dest, {
c: c.c,
p: transformPosition(c.p, otherC, true),
t: c.t
t: c.t,
})
}
} else if (otherC.d != null) {
@ -362,8 +362,8 @@ const invertComponent = function (c) {
// No need to use append for invert, because the components won't be able to
// cancel with one another.
text.invert = (op) =>
Array.from(op.slice().reverse()).map((c) => invertComponent(c))
text.invert = op =>
Array.from(op.slice().reverse()).map(c => invertComponent(c))
if (typeof WEB !== 'undefined' && WEB !== null) {
if (!exports.types) {


@ -2,8 +2,8 @@ module.exports = {
internal: {
documentupdater: {
host: process.env.LISTEN_ADDRESS || 'localhost',
port: 3003
}
port: 3003,
},
},
apis: {
@ -12,15 +12,15 @@ module.exports = {
process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
}:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
user: process.env.WEB_API_USER || 'sharelatex',
pass: process.env.WEB_API_PASSWORD || 'password'
pass: process.env.WEB_API_PASSWORD || 'password',
},
trackchanges: {
url: `http://${process.env.TRACK_CHANGES_HOST || 'localhost'}:3015`
url: `http://${process.env.TRACK_CHANGES_HOST || 'localhost'}:3015`,
},
project_history: {
enabled: true,
url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054`
}
url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054`,
},
},
redis: {
@ -32,7 +32,7 @@ module.exports = {
process.env.PUBSUB_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
maxRetriesPerRequest: parseInt(
process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
)
),
},
history: {
@ -50,8 +50,8 @@ module.exports = {
},
docsWithHistoryOps({ project_id: projectId }) {
return `DocsWithHistoryOps:{${projectId}}`
}
}
},
},
},
project_history: {
@ -74,8 +74,8 @@ module.exports = {
},
projectHistoryFirstOpTimestamp({ project_id: projectId }) {
return `ProjectHistory:FirstOpTimestamp:{${projectId}}`
}
}
},
},
},
lock: {
@ -90,8 +90,8 @@ module.exports = {
key_schema: {
blockingKey({ doc_id: docId }) {
return `Blocking:{${docId}}`
}
}
},
},
},
documentupdater: {
@ -159,9 +159,9 @@ module.exports = {
},
flushAndDeleteQueue() {
return 'DocUpdaterFlushAndDeleteQueue'
}
}
}
},
},
},
},
max_doc_length: 2 * 1024 * 1024, // 2mb
@ -173,15 +173,15 @@ module.exports = {
mongo: {
options: {
useUnifiedTopology:
(process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true'
(process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true',
},
url:
process.env.MONGO_CONNECTION_STRING ||
`mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`
`mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`,
},
sentry: {
dsn: process.env.SENTRY_DSN
dsn: process.env.SENTRY_DSN,
},
publishOnIndividualChannels:
@ -191,5 +191,5 @@ module.exports = {
smoothingOffset: process.env.SMOOTHING_OFFSET || 1000, // milliseconds
disableDoubleFlush: process.env.DISABLE_DOUBLE_FLUSH || false // don't flush track-changes for projects using project-history
disableDoubleFlush: process.env.DISABLE_DOUBLE_FLUSH || false, // don't flush track-changes for projects using project-history
}


@ -39,7 +39,7 @@ const getKeys = function (pattern, callback) {
return async.concatSeries(nodes, doKeyLookupForNode, callback)
}
const expireDocOps = (callback) =>
const expireDocOps = callback =>
// eslint-disable-next-line handle-callback-err
getKeys(keys.docOps({ doc_id: '*' }), (error, keys) =>
async.mapSeries(


@ -42,10 +42,10 @@ describe('Applying updates to a doc', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: this.version
v: this.version,
}
this.result = ['one', 'one and a half', 'two', 'three']
return DocUpdaterApp.ensureRunning(done)
@ -55,19 +55,19 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
sinon.spy(MockWebApi, 'getDocument')
this.startTime = Date.now()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -144,7 +144,7 @@ describe('Applying updates to a doc', function () {
it('should set the first op timestamp', function (done) {
rclient_project_history.get(
ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
project_id: this.project_id
project_id: this.project_id,
}),
(error, result) => {
if (error != null) {
@ -167,7 +167,7 @@ describe('Applying updates to a doc', function () {
this.project_id,
this.doc_id,
this.second_update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -180,7 +180,7 @@ describe('Applying updates to a doc', function () {
return it('should not change the first op timestamp', function (done) {
rclient_project_history.get(
ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
project_id: this.project_id
project_id: this.project_id,
}),
(error, result) => {
if (error != null) {
@ -199,14 +199,14 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
if (error != null) {
throw error
}
@ -215,7 +215,7 @@ describe('Applying updates to a doc', function () {
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -284,15 +284,15 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version,
projectHistoryType: 'project-history'
projectHistoryType: 'project-history',
})
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
if (error != null) {
throw error
}
@ -301,7 +301,7 @@ describe('Applying updates to a doc', function () {
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -360,12 +360,12 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
const lines = ['', '', '']
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines,
version: 0
version: 0,
})
this.updates = [
{ doc_id: this.doc_id, v: 0, op: [{ i: 'h', p: 0 }] },
@ -378,7 +378,7 @@ describe('Applying updates to a doc', function () {
{ doc_id: this.doc_id, v: 7, op: [{ i: 'o', p: 7 }] },
{ doc_id: this.doc_id, v: 8, op: [{ i: 'r', p: 8 }] },
{ doc_id: this.doc_id, v: 9, op: [{ i: 'l', p: 9 }] },
{ doc_id: this.doc_id, v: 10, op: [{ i: 'd', p: 10 }] }
{ doc_id: this.doc_id, v: 10, op: [{ i: 'd', p: 10 }] },
]
this.my_result = ['hello world', '', '']
return done()
@ -388,8 +388,8 @@ describe('Applying updates to a doc', function () {
let update
const actions = []
for (update of Array.from(this.updates.slice(0, 6))) {
;((update) => {
return actions.push((callback) =>
;(update => {
return actions.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
@ -399,12 +399,12 @@ describe('Applying updates to a doc', function () {
)
})(update)
}
actions.push((callback) =>
actions.push(callback =>
DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback)
)
for (update of Array.from(this.updates.slice(6))) {
;((update) => {
return actions.push((callback) =>
;(update => {
return actions.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
@ -415,7 +415,7 @@ describe('Applying updates to a doc', function () {
})(update)
}
async.series(actions, (error) => {
async.series(actions, error => {
if (error != null) {
throw error
}
@ -437,7 +437,7 @@ describe('Applying updates to a doc', function () {
0,
-1,
(error, updates) => {
updates = Array.from(updates).map((u) => JSON.parse(u))
updates = Array.from(updates).map(u => JSON.parse(u))
for (let i = 0; i < this.updates.length; i++) {
const appliedUpdate = this.updates[i]
appliedUpdate.op.should.deep.equal(updates[i].op)
@ -462,7 +462,7 @@ describe('Applying updates to a doc', function () {
0,
-1,
(error, updates) => {
updates = Array.from(updates).map((u) => JSON.parse(u))
updates = Array.from(updates).map(u => JSON.parse(u))
for (let i = 0; i < this.updates.length; i++) {
const appliedUpdate = this.updates[i]
appliedUpdate.op.should.deep.equal(updates[i].op)
@ -478,12 +478,12 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
const lines = ['', '', '']
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines,
version: 0
version: 0,
})
this.updates = [
{ doc_id: this.doc_id, v: 0, op: [{ i: 'h', p: 0 }] },
@ -491,7 +491,7 @@ describe('Applying updates to a doc', function () {
{ doc_id: this.doc_id, v: 2, op: [{ i: 'l', p: 2 }] },
{ doc_id: this.doc_id, v: 3, op: [{ i: 'l', p: 3 }] },
{ doc_id: this.doc_id, v: 4, op: [{ i: 'o', p: 4 }] },
{ doc_id: this.doc_id, v: 0, op: [{ i: 'world', p: 1 }] }
{ doc_id: this.doc_id, v: 0, op: [{ i: 'world', p: 1 }] },
]
this.my_result = ['hello', 'world', '']
return done()
@ -501,8 +501,8 @@ describe('Applying updates to a doc', function () {
let update
const actions = []
for (update of Array.from(this.updates.slice(0, 5))) {
;((update) => {
return actions.push((callback) =>
;(update => {
return actions.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
@ -512,12 +512,12 @@ describe('Applying updates to a doc', function () {
)
})(update)
}
actions.push((callback) =>
actions.push(callback =>
DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback)
)
for (update of Array.from(this.updates.slice(5))) {
;((update) => {
return actions.push((callback) =>
;(update => {
return actions.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
@ -528,7 +528,7 @@ describe('Applying updates to a doc', function () {
})(update)
}
async.series(actions, (error) => {
async.series(actions, error => {
if (error != null) {
throw error
}
@ -550,16 +550,16 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
this.broken_update = {
doc_id: this.doc_id,
v: this.version,
op: [{ d: 'not the correct content', p: 0 }]
op: [{ d: 'not the correct content', p: 0 }],
}
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.subscribeToAppliedOps(
@ -570,7 +570,7 @@ describe('Applying updates to a doc', function () {
this.project_id,
this.doc_id,
this.broken_update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -599,7 +599,7 @@ describe('Applying updates to a doc', function () {
return JSON.parse(message).should.deep.include({
project_id: this.project_id,
doc_id: this.doc_id,
error: 'Delete component does not match'
error: 'Delete component does not match',
})
})
})
@ -608,7 +608,7 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
const updates = []
for (let v = 0; v <= 199; v++) {
@ -616,7 +616,7 @@ describe('Applying updates to a doc', function () {
updates.push({
doc_id: this.doc_id,
op: [{ i: v.toString(), p: 0 }],
v
v,
})
}
@ -624,14 +624,14 @@ describe('Applying updates to a doc', function () {
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: 0
version: 0,
})
// Send updates in chunks to causes multiple flushes
const actions = []
for (let i = 0; i <= 19; i++) {
;((i) => {
return actions.push((cb) => {
;(i => {
return actions.push(cb => {
return DocUpdaterClient.sendUpdates(
this.project_id,
this.doc_id,
@ -641,7 +641,7 @@ describe('Applying updates to a doc', function () {
})
})(i)
}
async.series(actions, (error) => {
async.series(actions, error => {
if (error != null) {
throw error
}
@ -663,22 +663,22 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines
lines: this.lines,
})
const update = {
doc: this.doc_id,
op: this.update.op,
v: 0
v: 0,
}
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -705,11 +705,11 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.subscribeToAppliedOps(
@ -725,15 +725,15 @@ describe('Applying updates to a doc', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: this.version,
meta: {
source: 'ikHceq3yfAdQYzBo4-xZ'
}
source: 'ikHceq3yfAdQYzBo4-xZ',
},
(error) => {
},
error => {
if (error != null) {
throw error
}
@ -746,16 +746,16 @@ describe('Applying updates to a doc', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: this.version,
dupIfSource: ['ikHceq3yfAdQYzBo4-xZ'],
meta: {
source: 'ikHceq3yfAdQYzBo4-xZ'
}
source: 'ikHceq3yfAdQYzBo4-xZ',
},
(error) => {
},
error => {
if (error != null) {
throw error
}
@ -795,12 +795,12 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
this.non_existing = {
doc_id: this.doc_id,
v: this.version,
op: [{ d: 'content', p: 0 }]
op: [{ d: 'content', p: 0 }],
}
DocUpdaterClient.subscribeToAppliedOps(
@ -811,7 +811,7 @@ describe('Applying updates to a doc', function () {
this.project_id,
this.doc_id,
this.non_existing,
(error) => {
error => {
if (error != null) {
throw error
}
@ -840,7 +840,7 @@ describe('Applying updates to a doc', function () {
return JSON.parse(message).should.deep.include({
project_id: this.project_id,
doc_id: this.doc_id,
error: `doc not not found: /project/${this.project_id}/doc/${this.doc_id}`
error: `doc not not found: /project/${this.project_id}/doc/${this.doc_id}`,
})
})
})


@ -23,10 +23,10 @@ describe("Applying updates to a project's structure", function () {
type: 'rename-file',
id: DocUpdaterClient.randomId(),
pathname: '/file-path',
newPathname: '/new-file-path'
newPathname: '/new-file-path',
}
this.updates = [this.fileUpdate]
DocUpdaterApp.ensureRunning((error) => {
DocUpdaterApp.ensureRunning(error => {
if (error) {
return done(error)
}
@ -35,7 +35,7 @@ describe("Applying updates to a project's structure", function () {
this.user_id,
this.updates,
this.version,
(error) => {
error => {
if (error) {
return done(error)
}
@ -75,7 +75,7 @@ describe("Applying updates to a project's structure", function () {
type: 'rename-doc',
id: DocUpdaterClient.randomId(),
pathname: '/doc-path',
newPathname: '/new-doc-path'
newPathname: '/new-doc-path',
}
this.updates = [this.update]
})
@ -88,7 +88,7 @@ describe("Applying updates to a project's structure", function () {
this.user_id,
this.updates,
this.version,
(error) => {
error => {
if (error) {
return done(error)
}
@ -125,10 +125,7 @@ describe("Applying updates to a project's structure", function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.update.id, {})
DocUpdaterClient.preloadDoc(
this.project_id,
this.update.id,
(error) => {
DocUpdaterClient.preloadDoc(this.project_id, this.update.id, error => {
if (error) {
return done(error)
}
@ -138,15 +135,14 @@ describe("Applying updates to a project's structure", function () {
this.user_id,
this.updates,
this.version,
(error) => {
error => {
if (error) {
return done(error)
}
setTimeout(done, 200)
}
)
}
)
})
})
after(function () {
@ -198,31 +194,31 @@ describe("Applying updates to a project's structure", function () {
type: 'rename-doc',
id: DocUpdaterClient.randomId(),
pathname: '/doc-path0',
newPathname: '/new-doc-path0'
newPathname: '/new-doc-path0',
}
this.docUpdate1 = {
type: 'rename-doc',
id: DocUpdaterClient.randomId(),
pathname: '/doc-path1',
newPathname: '/new-doc-path1'
newPathname: '/new-doc-path1',
}
this.fileUpdate0 = {
type: 'rename-file',
id: DocUpdaterClient.randomId(),
pathname: '/file-path0',
newPathname: '/new-file-path0'
newPathname: '/new-file-path0',
}
this.fileUpdate1 = {
type: 'rename-file',
id: DocUpdaterClient.randomId(),
pathname: '/file-path1',
newPathname: '/new-file-path1'
newPathname: '/new-file-path1',
}
this.updates = [
this.docUpdate0,
this.docUpdate1,
this.fileUpdate0,
this.fileUpdate1
this.fileUpdate1,
]
})
@ -234,7 +230,7 @@ describe("Applying updates to a project's structure", function () {
this.user_id,
this.updates,
this.version,
(error) => {
error => {
if (error) {
return done(error)
}
@ -299,7 +295,7 @@ describe("Applying updates to a project's structure", function () {
type: 'add-file',
id: DocUpdaterClient.randomId(),
pathname: '/file-path',
url: 'filestore.example.com'
url: 'filestore.example.com',
}
this.updates = [this.fileUpdate]
DocUpdaterClient.sendProjectUpdate(
@ -307,7 +303,7 @@ describe("Applying updates to a project's structure", function () {
this.user_id,
this.updates,
this.version,
(error) => {
error => {
if (error) {
return done(error)
}
@ -347,7 +343,7 @@ describe("Applying updates to a project's structure", function () {
type: 'add-doc',
id: DocUpdaterClient.randomId(),
pathname: '/file-path',
docLines: 'a\nb'
docLines: 'a\nb',
}
this.updates = [this.docUpdate]
DocUpdaterClient.sendProjectUpdate(
@ -355,7 +351,7 @@ describe("Applying updates to a project's structure", function () {
this.user_id,
this.updates,
this.version,
(error) => {
error => {
if (error) {
return done(error)
}
@ -401,7 +397,7 @@ describe("Applying updates to a project's structure", function () {
type: 'add-doc',
id: DocUpdaterClient.randomId(),
pathname: '/file-' + v,
docLines: 'a\nb'
docLines: 'a\nb',
})
}
@ -424,7 +420,7 @@ describe("Applying updates to a project's structure", function () {
userId,
updates.slice(250),
this.version1,
(error) => {
error => {
if (error) {
return done(error)
}
@ -460,7 +456,7 @@ describe("Applying updates to a project's structure", function () {
type: 'add-doc',
id: DocUpdaterClient.randomId(),
pathname: '/file-' + v,
docLines: 'a\nb'
docLines: 'a\nb',
})
}
@ -483,7 +479,7 @@ describe("Applying updates to a project's structure", function () {
userId,
updates.slice(10),
this.version1,
(error) => {
error => {
if (error) {
return done(error)
}


@ -26,10 +26,10 @@ describe('Deleting a document', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: this.version
v: this.version,
}
this.result = ['one', 'one and a half', 'two', 'three']
@ -47,19 +47,19 @@ describe('Deleting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
sinon.spy(MockWebApi, 'setDocument')
sinon.spy(MockWebApi, 'getDocument')
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -67,7 +67,7 @@ describe('Deleting a document', function () {
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -133,10 +133,10 @@ describe('Deleting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines
lines: this.lines,
})
sinon.spy(MockWebApi, 'setDocument')
sinon.spy(MockWebApi, 'getDocument')


@ -33,12 +33,12 @@ describe('Deleting a project', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
],
v: 0
p: 4,
},
updatedLines: ['one', 'one and a half', 'two', 'three']
],
v: 0,
},
updatedLines: ['one', 'one and a half', 'two', 'three'],
},
{
id: (doc_id1 = DocUpdaterClient.randomId()),
@ -48,18 +48,18 @@ describe('Deleting a project', function () {
op: [
{
i: 'four and a half\n',
p: 5
}
],
v: 0
p: 5,
},
],
v: 0,
},
updatedLines: ['four', 'four and a half', 'five', 'six'],
},
updatedLines: ['four', 'four and a half', 'five', 'six']
}
]
for (const doc of Array.from(this.docs)) {
MockWebApi.insertDoc(this.project_id, doc.id, {
lines: doc.lines,
version: doc.update.v
version: doc.update.v,
})
}
@ -73,12 +73,12 @@ describe('Deleting a project', function () {
sinon.spy(MockProjectHistoryApi, 'flushProject')
return async.series(
this.docs.map((doc) => {
return (callback) => {
this.docs.map(doc => {
return callback => {
return DocUpdaterClient.preloadDoc(
this.project_id,
doc.id,
(error) => {
error => {
if (error != null) {
return callback(error)
}
@ -86,7 +86,7 @@ describe('Deleting a project', function () {
this.project_id,
doc.id,
doc.update,
(error) => {
error => {
return callback(error)
}
)
@ -94,7 +94,7 @@ describe('Deleting a project', function () {
)
}
}),
(error) => {
error => {
if (error != null) {
throw error
}
@ -122,7 +122,7 @@ describe('Deleting a project', function () {
})
it('should send each document to the web api', function () {
return Array.from(this.docs).map((doc) =>
return Array.from(this.docs).map(doc =>
MockWebApi.setDocument
.calledWith(this.project_id, doc.id, doc.updatedLines)
.should.equal(true)
@ -132,8 +132,8 @@ describe('Deleting a project', function () {
it('should need to reload the docs if read again', function (done) {
sinon.spy(MockWebApi, 'getDocument')
return async.series(
this.docs.map((doc) => {
return (callback) => {
this.docs.map(doc => {
return callback => {
MockWebApi.getDocument
.calledWith(this.project_id, doc.id)
.should.equal(false)
@ -157,7 +157,7 @@ describe('Deleting a project', function () {
})
it('should flush each doc in track changes', function () {
return Array.from(this.docs).map((doc) =>
return Array.from(this.docs).map(doc =>
MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true)
)
})
@ -176,8 +176,8 @@ describe('Deleting a project', function () {
sinon.spy(MockProjectHistoryApi, 'flushProject')
return async.series(
this.docs.map((doc) => {
return (callback) => {
this.docs.map(doc => {
return callback => {
return DocUpdaterClient.preloadDoc(
this.project_id,
doc.id,
@ -185,7 +185,7 @@ describe('Deleting a project', function () {
)
}
}),
(error) => {
error => {
if (error != null) {
throw error
}
@ -232,8 +232,8 @@ describe('Deleting a project', function () {
sinon.spy(MockProjectHistoryApi, 'flushProject')
return async.series(
this.docs.map((doc) => {
return (callback) => {
this.docs.map(doc => {
return callback => {
return DocUpdaterClient.preloadDoc(
this.project_id,
doc.id,
@ -241,7 +241,7 @@ describe('Deleting a project', function () {
)
}
}),
(error) => {
error => {
if (error != null) {
throw error
}
@ -273,7 +273,7 @@ describe('Deleting a project', function () {
})
it('should send each document to the web api', function () {
return Array.from(this.docs).map((doc) =>
return Array.from(this.docs).map(doc =>
MockWebApi.setDocument
.calledWith(this.project_id, doc.id, doc.updatedLines)
.should.equal(true)
@ -281,7 +281,7 @@ describe('Deleting a project', function () {
})
it('should flush each doc in track changes', function () {
return Array.from(this.docs).map((doc) =>
return Array.from(this.docs).map(doc =>
MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true)
)
})


@ -31,12 +31,12 @@ describe('Flushing a project', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
],
v: 0
p: 4,
},
updatedLines: ['one', 'one and a half', 'two', 'three']
],
v: 0,
},
updatedLines: ['one', 'one and a half', 'two', 'three'],
},
{
id: (doc_id1 = DocUpdaterClient.randomId()),
@ -46,18 +46,18 @@ describe('Flushing a project', function () {
op: [
{
i: 'four and a half\n',
p: 5
}
],
v: 0
p: 5,
},
],
v: 0,
},
updatedLines: ['four', 'four and a half', 'five', 'six'],
},
updatedLines: ['four', 'four and a half', 'five', 'six']
}
]
for (const doc of Array.from(this.docs)) {
MockWebApi.insertDoc(this.project_id, doc.id, {
lines: doc.lines,
version: doc.update.v
version: doc.update.v,
})
}
return DocUpdaterApp.ensureRunning(done)
@ -68,12 +68,12 @@ describe('Flushing a project', function () {
sinon.spy(MockWebApi, 'setDocument')
return async.series(
this.docs.map((doc) => {
return (callback) => {
this.docs.map(doc => {
return callback => {
return DocUpdaterClient.preloadDoc(
this.project_id,
doc.id,
(error) => {
error => {
if (error != null) {
return callback(error)
}
@ -81,7 +81,7 @@ describe('Flushing a project', function () {
this.project_id,
doc.id,
doc.update,
(error) => {
error => {
return callback(error)
}
)
@ -89,7 +89,7 @@ describe('Flushing a project', function () {
)
}
}),
(error) => {
error => {
if (error != null) {
throw error
}
@ -115,7 +115,7 @@ describe('Flushing a project', function () {
})
it('should send each document to the web api', function () {
return Array.from(this.docs).map((doc) =>
return Array.from(this.docs).map(doc =>
MockWebApi.setDocument
.calledWith(this.project_id, doc.id, doc.updatedLines)
.should.equal(true)
@ -124,8 +124,8 @@ describe('Flushing a project', function () {
return it('should update the lines in the doc updater', function (done) {
return async.series(
this.docs.map((doc) => {
return (callback) => {
this.docs.map(doc => {
return callback => {
return DocUpdaterClient.getDoc(
this.project_id,
doc.id,


@ -31,10 +31,10 @@ describe('Flushing a doc to Mongo', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: this.version
v: this.version,
}
this.result = ['one', 'one and a half', 'two', 'three']
return DocUpdaterApp.ensureRunning(done)
@ -44,19 +44,19 @@ describe('Flushing a doc to Mongo', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
sinon.spy(MockWebApi, 'setDocument')
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.sendUpdates(
this.project_id,
this.doc_id,
[this.update],
(error) => {
error => {
if (error != null) {
throw error
}
@ -90,10 +90,10 @@ describe('Flushing a doc to Mongo', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines
lines: this.lines,
})
sinon.spy(MockWebApi, 'setDocument')
return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done)
@ -112,11 +112,11 @@ describe('Flushing a doc to Mongo', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
let t = 30000
sinon


@ -29,13 +29,13 @@ describe('Getting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
sinon.spy(MockWebApi, 'getDocument')
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.getDoc(
@ -71,17 +71,17 @@ describe('Getting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -115,23 +115,23 @@ describe('Getting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: (this.lines = ['one', 'two', 'three'])
lines: (this.lines = ['one', 'two', 'three']),
})
this.updates = __range__(0, 199, true).map((v) => ({
this.updates = __range__(0, 199, true).map(v => ({
doc_id: this.doc_id,
op: [{ i: v.toString(), p: 0 }],
v
v,
}))
return DocUpdaterClient.sendUpdates(
this.project_id,
this.doc_id,
this.updates,
(error) => {
error => {
if (error != null) {
throw error
}
@ -191,7 +191,7 @@ describe('Getting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
return DocUpdaterClient.getDoc(
this.project_id,
@ -212,7 +212,7 @@ describe('Getting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
sinon
.stub(MockWebApi, 'getDocument')
@ -246,7 +246,7 @@ describe('Getting a document', function () {
this.timeout = 10000
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
sinon
.stub(MockWebApi, 'getDocument')


@ -30,17 +30,17 @@ describe('Getting documents for project', function () {
this.projectStateHash = DocUpdaterClient.randomId()
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -67,17 +67,17 @@ describe('Getting documents for project', function () {
this.projectStateHash = DocUpdaterClient.randomId()
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -110,7 +110,7 @@ describe('Getting documents for project', function () {
return it('should return the documents', function () {
return this.returnedDocs.should.deep.equal([
{ _id: this.doc_id, lines: this.lines, v: this.version }
{ _id: this.doc_id, lines: this.lines, v: this.version },
])
})
})
@ -120,17 +120,17 @@ describe('Getting documents for project', function () {
this.projectStateHash = DocUpdaterClient.randomId()
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}


@ -32,36 +32,36 @@ describe('Ranges', function () {
this.id_seed = '587357bd35e64f6157'
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['aaa']
lines: ['aaa'],
}
this.updates = [
{
doc: this.doc.id,
op: [{ i: '123', p: 1 }],
v: 0,
meta: { user_id: this.user_id }
meta: { user_id: this.user_id },
},
{
doc: this.doc.id,
op: [{ i: '456', p: 5 }],
v: 1,
meta: { user_id: this.user_id, tc: this.id_seed }
meta: { user_id: this.user_id, tc: this.id_seed },
},
{
doc: this.doc.id,
op: [{ d: '12', p: 1 }],
v: 2,
meta: { user_id: this.user_id }
}
meta: { user_id: this.user_id },
},
]
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
version: 0
version: 0,
})
const jobs = []
for (const update of Array.from(this.updates)) {
;((update) => {
return jobs.push((callback) =>
;(update => {
return jobs.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc.id,
@ -72,18 +72,18 @@ describe('Ranges', function () {
})(update)
}
return DocUpdaterApp.ensureRunning((error) => {
return DocUpdaterApp.ensureRunning(error => {
if (error != null) {
throw error
}
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
return async.series(jobs, (error) => {
return async.series(jobs, error => {
if (error != null) {
throw error
}
@ -119,25 +119,25 @@ describe('Ranges', function () {
this.user_id = DocUpdaterClient.randomId()
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['foo bar baz']
lines: ['foo bar baz'],
}
this.updates = [
{
doc: this.doc.id,
op: [
{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }
{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) },
],
v: 0
}
v: 0,
},
]
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
version: 0
version: 0,
})
const jobs = []
for (const update of Array.from(this.updates)) {
;((update) => {
return jobs.push((callback) =>
;(update => {
return jobs.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc.id,
@ -150,11 +150,11 @@ describe('Ranges', function () {
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
return async.series(jobs, (error) => {
return async.series(jobs, error => {
if (error != null) {
throw error
}
@ -188,31 +188,31 @@ describe('Ranges', function () {
this.user_id = DocUpdaterClient.randomId()
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['foo bar baz']
lines: ['foo bar baz'],
}
this.updates = [
{
doc: this.doc.id,
op: [{ i: 'ABC', p: 3 }],
v: 0,
meta: { user_id: this.user_id }
meta: { user_id: this.user_id },
},
{
doc: this.doc.id,
op: [
{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }
{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) },
],
v: 0
}
v: 0,
},
]
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
version: 0
version: 0,
})
const jobs = []
for (const update of Array.from(this.updates)) {
;((update) => {
return jobs.push((callback) =>
;(update => {
return jobs.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc.id,
@ -225,11 +225,11 @@ describe('Ranges', function () {
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
return async.series(jobs, (error) => {
return async.series(jobs, error => {
if (error != null) {
throw error
}
@ -265,13 +265,13 @@ describe('Ranges', function () {
this.id_seed = '587357bd35e64f6157'
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['a123aa']
lines: ['a123aa'],
}
this.update = {
doc: this.doc.id,
op: [{ i: '456', p: 5 }],
v: 0,
meta: { user_id: this.user_id, tc: this.id_seed }
meta: { user_id: this.user_id, tc: this.id_seed },
}
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
@ -282,16 +282,16 @@ describe('Ranges', function () {
op: { i: '123', p: 1 },
metadata: {
user_id: this.user_id,
ts: new Date()
}
}
]
}
ts: new Date(),
},
},
],
},
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -299,7 +299,7 @@ describe('Ranges', function () {
this.project_id,
this.doc.id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -327,10 +327,7 @@ describe('Ranges', function () {
})
return it('should flush the ranges to the persistence layer again', function (done) {
return DocUpdaterClient.flushDoc(
this.project_id,
this.doc.id,
(error) => {
return DocUpdaterClient.flushDoc(this.project_id, this.doc.id, error => {
if (error != null) {
throw error
}
@ -344,8 +341,7 @@ describe('Ranges', function () {
return done()
}
)
}
)
})
})
})
@ -356,22 +352,22 @@ describe('Ranges', function () {
this.id_seed = '587357bd35e64f6157'
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['aaa']
lines: ['aaa'],
}
this.update = {
doc: this.doc.id,
op: [{ i: '456', p: 1 }],
v: 0,
meta: { user_id: this.user_id, tc: this.id_seed }
meta: { user_id: this.user_id, tc: this.id_seed },
}
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
version: 0
version: 0,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -379,7 +375,7 @@ describe('Ranges', function () {
this.project_id,
this.doc.id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -411,7 +407,7 @@ describe('Ranges', function () {
this.project_id,
this.doc.id,
this.id_seed + '000001',
(error) => {
error => {
if (error != null) {
throw error
}
@ -437,21 +433,21 @@ describe('Ranges', function () {
this.user_id = DocUpdaterClient.randomId()
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['foo bar']
lines: ['foo bar'],
}
this.update = {
doc: this.doc.id,
op: [{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }],
v: 0
v: 0,
}
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
version: 0
version: 0,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -459,7 +455,7 @@ describe('Ranges', function () {
this.project_id,
this.doc.id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -518,7 +514,7 @@ describe('Ranges', function () {
this.id_seed = DocUpdaterClient.randomId()
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['aaa']
lines: ['aaa'],
}
this.i = new Array(3 * 1024 * 1024).join('a')
this.updates = [
@ -526,17 +522,17 @@ describe('Ranges', function () {
doc: this.doc.id,
op: [{ i: this.i, p: 1 }],
v: 0,
meta: { user_id: this.user_id, tc: this.id_seed }
}
meta: { user_id: this.user_id, tc: this.id_seed },
},
]
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
version: 0
version: 0,
})
const jobs = []
for (const update of Array.from(this.updates)) {
;((update) => {
return jobs.push((callback) =>
;(update => {
return jobs.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc.id,
@ -549,11 +545,11 @@ describe('Ranges', function () {
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
return async.series(jobs, (error) => {
return async.series(jobs, error => {
if (error != null) {
throw error
}
@ -593,34 +589,34 @@ describe('Ranges', function () {
op: {
c: 'a',
p: 5,
tid: (this.tid = DocUpdaterClient.randomId())
tid: (this.tid = DocUpdaterClient.randomId()),
},
metadata: {
user_id: this.user_id,
ts: new Date()
}
}
]
}
ts: new Date(),
},
},
],
},
})
this.updates = [
{
doc: this.doc_id,
op: [{ d: 'foo ', p: 0 }],
v: 0,
meta: { user_id: this.user_id }
meta: { user_id: this.user_id },
},
{
doc: this.doc_id,
op: [{ d: 'bar ', p: 0 }],
v: 1,
meta: { user_id: this.user_id }
}
meta: { user_id: this.user_id },
},
]
const jobs = []
for (const update of Array.from(this.updates)) {
;((update) => {
return jobs.push((callback) =>
;(update => {
return jobs.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
@ -633,7 +629,7 @@ describe('Ranges', function () {
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -669,7 +665,7 @@ describe('Ranges', function () {
db.docSnapshots
.find({
project_id: ObjectId(this.project_id),
doc_id: ObjectId(this.doc_id)
doc_id: ObjectId(this.doc_id),
})
.toArray((error, docSnapshots) => {
if (error != null) {
@ -681,7 +677,7 @@ describe('Ranges', function () {
expect(docSnapshots[0].ranges.comments[0].op).to.deep.equal({
c: 'a',
p: 1,
tid: this.tid
tid: this.tid,
})
return done()
})


@ -21,10 +21,10 @@ describe('Setting a document', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: this.version
v: this.version,
}
this.result = ['one', 'one and a half', 'two', 'three']
this.newLines = ['these', 'are', 'the', 'new', 'lines']
@ -49,9 +49,9 @@ describe('Setting a document', function () {
this.doc_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
if (error) {
throw error
}
@ -59,7 +59,7 @@ describe('Setting a document', function () {
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error) {
throw error
}
@ -149,7 +149,7 @@ describe('Setting a document', function () {
this.doc_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.setDocLines(
this.project_id,
@ -212,23 +212,23 @@ describe('Setting a document', function () {
{
desc: 'when the updated doc is too large for the body parser',
size: Settings.maxJsonRequestSize,
expectedStatusCode: 413
expectedStatusCode: 413,
},
{
desc: 'when the updated doc is larger than the HTTP controller limit',
size: Settings.max_doc_length,
expectedStatusCode: 406
}
expectedStatusCode: 406,
},
]
DOC_TOO_LARGE_TEST_CASES.forEach((testCase) => {
DOC_TOO_LARGE_TEST_CASES.forEach(testCase => {
describe(testCase.desc, function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
this.newLines = []
while (JSON.stringify(this.newLines).length <= testCase.size) {
@ -281,7 +281,7 @@ describe('Setting a document', function () {
this.doc_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
this.newLines = []
@ -333,14 +333,14 @@ describe('Setting a document', function () {
op: [
{
d: 'one and a half\n',
p: 4
}
p: 4,
},
],
meta: {
tc: this.id_seed,
user_id: this.user_id
user_id: this.user_id,
},
v: this.version
v: this.version,
}
})
@ -350,9 +350,9 @@ describe('Setting a document', function () {
this.doc_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
if (error) {
throw error
}
@ -360,7 +360,7 @@ describe('Setting a document', function () {
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error) {
throw error
}
@ -413,9 +413,9 @@ describe('Setting a document', function () {
this.doc_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
if (error) {
throw error
}
@ -423,7 +423,7 @@ describe('Setting a document', function () {
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error) {
throw error
}


@ -16,10 +16,10 @@ describe('SizeChecks', function () {
op: [
{
i: 'insert some more lines that will bring it above the limit\n',
p: 42
}
p: 42,
},
],
v: this.version
v: this.version,
}
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
@ -30,7 +30,7 @@ describe('SizeChecks', function () {
this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 + 1)]
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
v: this.version
v: this.version,
})
})
@ -47,13 +47,13 @@ describe('SizeChecks', function () {
const update = {
doc: this.doc_id,
op: this.update.op,
v: this.version
v: this.version,
}
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -77,7 +77,7 @@ describe('SizeChecks', function () {
this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 - 1)]
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
v: this.version
v: this.version,
})
})
@ -98,13 +98,13 @@ describe('SizeChecks', function () {
const update = {
doc: this.doc_id,
op: this.update.op,
v: this.version
v: this.version,
}
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
update,
(error) => {
error => {
if (error != null) {
throw error
}


@ -31,7 +31,7 @@ module.exports = {
this.initing = true
this.callbacks.push(callback)
waitForDb().then(() => {
return app.listen(3003, 'localhost', (error) => {
return app.listen(3003, 'localhost', error => {
if (error != null) {
throw error
}
@ -45,5 +45,5 @@ module.exports = {
})()
})
})
}
},
}


@ -40,12 +40,12 @@ module.exports = DocUpdaterClient = {
rclient.rpush(
keys.pendingUpdates({ doc_id: docId }),
JSON.stringify(update),
(error) => {
error => {
if (error) {
return callback(error)
}
const docKey = `${projectId}:${docId}`
rclient.sadd('DocsWithPendingUpdates', docKey, (error) => {
rclient.sadd('DocsWithPendingUpdates', docKey, error => {
if (error) {
return callback(error)
}
@ -61,14 +61,14 @@ module.exports = DocUpdaterClient = {
},
sendUpdates(projectId, docId, updates, callback) {
DocUpdaterClient.preloadDoc(projectId, docId, (error) => {
DocUpdaterClient.preloadDoc(projectId, docId, error => {
if (error) {
return callback(error)
}
const jobs = updates.map((update) => (callback) => {
const jobs = updates.map(update => callback => {
DocUpdaterClient.sendUpdate(projectId, docId, update, callback)
})
async.series(jobs, (err) => {
async.series(jobs, err => {
if (err) {
return callback(err)
}
@ -80,7 +80,7 @@ module.exports = DocUpdaterClient = {
waitForPendingUpdates(projectId, docId, callback) {
async.retry(
{ times: 30, interval: 100 },
(cb) =>
cb =>
rclient.llen(keys.pendingUpdates({ doc_id: docId }), (err, length) => {
if (err) {
return cb(err)
@ -138,8 +138,8 @@ module.exports = DocUpdaterClient = {
lines,
source,
user_id: userId,
undoing
}
undoing,
},
},
(error, res, body) => callback(error, res, body)
)
@ -204,9 +204,9 @@ module.exports = DocUpdaterClient = {
request.post(
{
url: `http://localhost:3003/project/${projectId}`,
json: { userId, updates, version }
json: { userId, updates, version },
},
(error, res, body) => callback(error, res, body)
)
}
},
}


@ -24,7 +24,7 @@ module.exports = MockProjectHistoryApi = {
run() {
app.post('/project/:project_id/flush', (req, res, next) => {
return this.flushProject(req.params.project_id, (error) => {
return this.flushProject(req.params.project_id, error => {
if (error != null) {
return res.sendStatus(500)
} else {
@ -33,12 +33,12 @@ module.exports = MockProjectHistoryApi = {
})
})
return app.listen(3054, (error) => {
return app.listen(3054, error => {
if (error != null) {
throw error
}
})
}
},
}
MockProjectHistoryApi.run()


@ -24,7 +24,7 @@ module.exports = MockTrackChangesApi = {
run() {
app.post('/project/:project_id/doc/:doc_id/flush', (req, res, next) => {
return this.flushDoc(req.params.doc_id, (error) => {
return this.flushDoc(req.params.doc_id, error => {
if (error != null) {
return res.sendStatus(500)
} else {
@ -34,16 +34,16 @@ module.exports = MockTrackChangesApi = {
})
return app
.listen(3015, (error) => {
.listen(3015, error => {
if (error != null) {
throw error
}
})
.on('error', (error) => {
.on('error', error => {
console.error('error starting MockTrackChangesApi:', error.message)
return process.exit(1)
})
}
},
}
MockTrackChangesApi.run()


@ -96,7 +96,7 @@ module.exports = MockWebApi = {
req.body.ranges,
req.body.lastUpdatedAt,
req.body.lastUpdatedBy,
(error) => {
error => {
if (error != null) {
return res.sendStatus(500)
} else {
@ -108,16 +108,16 @@ module.exports = MockWebApi = {
)
return app
.listen(3000, (error) => {
.listen(3000, error => {
if (error != null) {
throw error
}
})
.on('error', (error) => {
.on('error', error => {
console.error('error starting MockWebApi:', error.message)
return process.exit(1)
})
}
},
}
MockWebApi.run()


@ -4,18 +4,18 @@ const rclient1 = redis.createClient({
cluster: [
{
port: '7000',
host: 'localhost'
}
]
host: 'localhost',
},
],
})
const rclient2 = redis.createClient({
cluster: [
{
port: '7000',
host: 'localhost'
}
]
host: 'localhost',
},
],
})
let counter = 0
@ -23,7 +23,7 @@ const sendPing = function (cb) {
if (cb == null) {
cb = function () {}
}
return rclient1.rpush('test-blpop', counter, (error) => {
return rclient1.rpush('test-blpop', counter, error => {
if (error != null) {
console.error('[SENDING ERROR]', error.message)
}
@ -35,7 +35,7 @@ const sendPing = function (cb) {
}
let previous = null
const listenForPing = (cb) =>
const listenForPing = cb =>
rclient2.blpop('test-blpop', 200, (error, result) => {
if (error != null) {
return cb(error)
@ -57,7 +57,7 @@ const listenForPing = (cb) =>
const PING_DELAY = 100
;(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))()
;(listenInBackground = () =>
listenForPing((error) => {
listenForPing(error => {
if (error) {
console.error('[RECEIVING ERROR]', error.message)
}


@ -4,18 +4,18 @@ const rclient1 = redis.createClient({
cluster: [
{
port: '7000',
host: 'localhost'
}
]
host: 'localhost',
},
],
})
const rclient2 = redis.createClient({
cluster: [
{
port: '7000',
host: 'localhost'
}
]
host: 'localhost',
},
],
})
let counter = 0
@ -23,7 +23,7 @@ const sendPing = function (cb) {
if (cb == null) {
cb = function () {}
}
return rclient1.publish('test-pubsub', counter, (error) => {
return rclient1.publish('test-pubsub', counter, error => {
if (error) {
console.error('[SENDING ERROR]', error.message)
}


@ -13,16 +13,16 @@ const stubs = {
log: sandbox.stub(),
warn: sandbox.stub(),
err: sandbox.stub(),
error: sandbox.stub()
}
error: sandbox.stub(),
},
}
// SandboxedModule configuration
SandboxedModule.configure({
requires: {
'logger-sharelatex': stubs.logger
'logger-sharelatex': stubs.logger,
},
globals: { Buffer, JSON, Math, console, process }
globals: { Buffer, JSON, Math, console, process },
})
// Mocha hooks
@ -33,5 +33,5 @@ exports.mochaHooks = {
afterEach() {
sandbox.reset()
}
},
}


@ -31,7 +31,7 @@ const transform = function (op1, op2) {
if (op2.p < op1.p) {
return {
p: op1.p + op2.i.length,
i: op1.i
i: op1.i,
}
} else {
return op1
@ -61,7 +61,7 @@ class StressTestClient {
conflicts: 0,
local_updates: 0,
remote_updates: 0,
max_delay: 0
max_delay: 0,
}
DocUpdaterClient.subscribeToAppliedOps((channel, update) => {
@ -81,7 +81,7 @@ class StressTestClient {
this.content = insert(this.content, this.pos, data)
this.inflight_op = {
i: data,
p: this.pos++
p: this.pos++,
}
this.resendUpdate()
return (this.inflight_op_sent = Date.now())
@ -94,9 +94,9 @@ class StressTestClient {
op: [this.inflight_op],
v: this.version,
meta: {
source: this.client_id
source: this.client_id,
},
dupIfSource: [this.client_id]
dupIfSource: [this.client_id],
})
return (this.update_timer = setTimeout(() => {
console.log(
@ -277,7 +277,7 @@ const checkDocument = function (project_id, doc_id, clients, callback) {
if (callback == null) {
callback = function (error) {}
}
const jobs = clients.map((client) => (cb) => client.check(cb))
const jobs = clients.map(client => cb => client.check(cb))
return async.parallel(jobs, callback)
}
@ -304,7 +304,7 @@ const printSummary = function (doc_id, clients) {
local_updates: 0,
remote_updates: 0,
conflicts: 0,
max_delay: 0
max_delay: 0,
})
)
}
@ -326,7 +326,7 @@ for (const doc_and_project_id of Array.from(process.argv.slice(5))) {
[new Array(CLIENT_COUNT + 2).join('a')],
null,
null,
(error) => {
error => {
if (error != null) {
throw error
}
@ -360,22 +360,23 @@ for (const doc_and_project_id of Array.from(process.argv.slice(5))) {
content,
pos,
version,
updateDelay: UPDATE_DELAY
updateDelay: UPDATE_DELAY,
})
return clients.push(client)
})(pos)
}
return (runBatch = function () {
const jobs = clients.map((client) => (cb) =>
const jobs = clients.map(
client => cb =>
client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb)
)
return async.parallel(jobs, (error) => {
return async.parallel(jobs, error => {
if (error != null) {
throw error
}
printSummary(doc_id, clients)
return checkDocument(project_id, doc_id, clients, (error) => {
return checkDocument(project_id, doc_id, clients, error => {
if (error != null) {
throw error
}


@ -32,8 +32,8 @@ describe('DiffCodec', function () {
expect(ops).to.deep.equal([
{
i: 'beautiful ',
p: 6
}
p: 6,
},
])
return done()
}
@ -49,7 +49,7 @@ describe('DiffCodec', function () {
(error, ops) => {
expect(ops).to.deep.equal([
{ i: 'tall ', p: 4 },
{ i: 'red ', p: 29 }
{ i: 'red ', p: 29 },
])
return done()
}
@ -66,8 +66,8 @@ describe('DiffCodec', function () {
expect(ops).to.deep.equal([
{
d: 'beautiful ',
p: 6
}
p: 6,
},
])
return done()
}
@ -83,7 +83,7 @@ describe('DiffCodec', function () {
(error, ops) => {
expect(ops).to.deep.equal([
{ d: 'tall ', p: 4 },
{ d: 'red ', p: 24 }
{ d: 'red ', p: 24 },
])
return done()
}


@ -25,8 +25,8 @@ describe('DispatchManager', function () {
'./UpdateManager': (this.UpdateManager = {}),
'@overleaf/settings': (this.settings = {
redis: {
documentupdater: {}
}
documentupdater: {},
},
}),
'@overleaf/redis-wrapper': (this.redis = {}),
'./RateLimitManager': {},
@ -40,15 +40,15 @@ describe('DispatchManager', function () {
}
Timer.initClass()
return Timer
})())
})
}
})()),
}),
},
})
this.callback = sinon.stub()
return (this.RateLimiter = {
run(task, cb) {
return task(cb)
}
},
})
}) // run task without rate limit
@ -144,7 +144,7 @@ describe('DispatchManager', function () {
beforeEach(function (done) {
this.client = {
auth: sinon.stub(),
blpop: sinon.stub().callsArgWith(2)
blpop: sinon.stub().callsArgWith(2),
}
this.redis.createClient = sinon.stub().returns(this.client)
this.queueShardNumber = 7
@ -166,7 +166,7 @@ describe('DispatchManager', function () {
return describe('run', function () {
return it('should call _waitForUpdateThenDispatchWorker until shutting down', function (done) {
let callCount = 0
this.worker._waitForUpdateThenDispatchWorker = (callback) => {
this.worker._waitForUpdateThenDispatchWorker = callback => {
if (callback == null) {
callback = function (error) {}
}


@ -29,7 +29,7 @@ describe('DocumentManager', function () {
'./PersistenceManager': (this.PersistenceManager = {}),
'./HistoryManager': (this.HistoryManager = {
flushDocChangesAsync: sinon.stub(),
flushProjectChangesAsync: sinon.stub()
flushProjectChangesAsync: sinon.stub(),
}),
'./Metrics': (this.Metrics = {
Timer: (Timer = (function () {
@ -40,14 +40,14 @@ describe('DocumentManager', function () {
}
Timer.initClass()
return Timer
})())
})()),
}),
'./RealTimeRedisManager': (this.RealTimeRedisManager = {}),
'./DiffCodec': (this.DiffCodec = {}),
'./UpdateManager': (this.UpdateManager = {}),
'./RangesManager': (this.RangesManager = {}),
'./Errors': Errors
}
'./Errors': Errors,
},
})
this.project_id = 'project-id-123'
this.projectHistoryId = 'history-id-123'
@ -123,7 +123,7 @@ describe('DocumentManager', function () {
this.project_id,
this.doc_id,
{},
(error) => {
error => {
error.should.exist
this.RedisManager.removeDocFromMemory.called.should.equal(false)
return done()
@ -137,7 +137,7 @@ describe('DocumentManager', function () {
this.project_id,
this.doc_id,
{ ignoreFlushErrors: true },
(error) => {
error => {
if (error != null) {
return done(error)
}
@ -484,7 +484,7 @@ describe('DocumentManager', function () {
this.afterLines = ['after', 'lines']
this.ops = [
{ i: 'foo', p: 4 },
{ d: 'bar', p: 42 }
{ d: 'bar', p: 42 },
]
this.DocumentManager.getDoc = sinon
.stub()
@ -543,8 +543,8 @@ describe('DocumentManager', function () {
meta: {
type: 'external',
source: this.source,
user_id: this.user_id
}
user_id: this.user_id,
},
})
.should.equal(true)
})
@ -636,7 +636,7 @@ describe('DocumentManager', function () {
// Copy ops so we don't interfere with other tests
this.ops = [
{ i: 'foo', p: 4 },
{ d: 'bar', p: 42 }
{ d: 'bar', p: 42 },
]
this.DiffCodec.diffAsShareJsOp = sinon
.stub()
@ -653,7 +653,7 @@ describe('DocumentManager', function () {
})
return it('should set the undo flag on each op', function () {
return Array.from(this.ops).map((op) => op.u.should.equal(true))
return Array.from(this.ops).map(op => op.u.should.equal(true))
})
})
})
@ -666,7 +666,7 @@ describe('DocumentManager', function () {
'mock-change-id-1',
'mock-change-id-2',
'mock-change-id-3',
'mock-change-id-4'
'mock-change-id-4',
]
this.version = 34
this.lines = ['original', 'lines']


@ -25,19 +25,19 @@ describe('HistoryManager', function () {
apis: {
project_history: {
enabled: true,
url: 'http://project_history.example.com'
url: 'http://project_history.example.com',
},
trackchanges: {
url: 'http://trackchanges.example.com'
}
}
url: 'http://trackchanges.example.com',
},
},
}),
'./DocumentManager': (this.DocumentManager = {}),
'./HistoryRedisManager': (this.HistoryRedisManager = {}),
'./RedisManager': (this.RedisManager = {}),
'./ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}),
'./Metrics': (this.metrics = { inc: sinon.stub() })
}
'./Metrics': (this.metrics = { inc: sinon.stub() }),
},
})
this.project_id = 'mock-project-id'
this.doc_id = 'mock-doc-id'
@ -118,7 +118,7 @@ describe('HistoryManager', function () {
return this.request.post
.calledWith({
url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`,
qs: { background: true }
qs: { background: true },
})
.should.equal(true)
})
@ -131,7 +131,7 @@ describe('HistoryManager', function () {
.stub()
.callsArgWith(1, null, { statusCode: 204 })
return this.HistoryManager.flushProjectChanges(this.project_id, {
background: true
background: true,
})
})
@ -139,7 +139,7 @@ describe('HistoryManager', function () {
return this.request.post
.calledWith({
url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`,
qs: { background: true }
qs: { background: true },
})
.should.equal(true)
})
@ -149,7 +149,7 @@ describe('HistoryManager', function () {
beforeEach(function () {
this.request.post = sinon.stub()
return this.HistoryManager.flushProjectChanges(this.project_id, {
skip_history_flush: true
skip_history_flush: true,
})
})
@ -372,15 +372,15 @@ describe('HistoryManager', function () {
this.docs = [
{
doc: this.doc_id,
path: 'main.tex'
}
path: 'main.tex',
},
]
this.files = [
{
file: 'mock-file-id',
path: 'universe.png',
url: `www.filestore.test/${this.project_id}/mock-file-id`
}
url: `www.filestore.test/${this.project_id}/mock-file-id`,
},
]
this.ProjectHistoryRedisManager.queueResyncProjectStructure = sinon
.stub()


@ -20,7 +20,7 @@ describe('HistoryRedisManager', function () {
beforeEach(function () {
this.rclient = {
auth() {},
exec: sinon.stub()
exec: sinon.stub(),
}
this.rclient.multi = () => this.rclient
this.HistoryRedisManager = SandboxedModule.require(modulePath, {
@ -35,12 +35,12 @@ describe('HistoryRedisManager', function () {
},
docsWithHistoryOps({ project_id }) {
return `DocsWithHistoryOps:${project_id}`
}
}
})
}
}
}
},
},
}),
},
},
},
})
this.doc_id = 'doc-id-123'
this.project_id = 'project-id-123'


@ -9,14 +9,14 @@ describe('HttpController', function () {
requires: {
'./DocumentManager': (this.DocumentManager = {}),
'./HistoryManager': (this.HistoryManager = {
flushProjectChangesAsync: sinon.stub()
flushProjectChangesAsync: sinon.stub(),
}),
'./ProjectManager': (this.ProjectManager = {}),
'./ProjectFlusher': { flushAllProjects() {} },
'./DeleteQueueManager': (this.DeleteQueueManager = {}),
'./Metrics': (this.Metrics = {}),
'./Errors': Errors
}
'./Errors': Errors,
},
})
this.Metrics.Timer = class Timer {}
this.Metrics.Timer.prototype.done = sinon.stub()
@ -27,7 +27,7 @@ describe('HttpController', function () {
this.res = {
send: sinon.stub(),
sendStatus: sinon.stub(),
json: sinon.stub()
json: sinon.stub(),
}
})
@ -42,10 +42,10 @@ describe('HttpController', function () {
this.req = {
params: {
project_id: this.project_id,
doc_id: this.doc_id
doc_id: this.doc_id,
},
query: {},
body: {}
body: {},
}
})
@ -79,7 +79,7 @@ describe('HttpController', function () {
version: this.version,
ops: [],
ranges: this.ranges,
pathname: this.pathname
pathname: this.pathname,
})
.should.equal(true)
})
@ -129,7 +129,7 @@ describe('HttpController', function () {
version: this.version,
ops: this.ops,
ranges: this.ranges,
pathname: this.pathname
pathname: this.pathname,
})
.should.equal(true)
})
@ -186,15 +186,15 @@ describe('HttpController', function () {
headers: {},
params: {
project_id: this.project_id,
doc_id: this.doc_id
doc_id: this.doc_id,
},
query: {},
body: {
lines: this.lines,
source: this.source,
user_id: this.user_id,
undoing: (this.undoing = true)
}
undoing: (this.undoing = true),
},
}
})
@ -230,7 +230,7 @@ describe('HttpController', function () {
lines: this.lines,
source: this.source,
userId: this.user_id,
undoing: this.undoing
undoing: this.undoing,
},
'setting doc via http'
)
@ -280,10 +280,10 @@ describe('HttpController', function () {
beforeEach(function () {
this.req = {
params: {
project_id: this.project_id
project_id: this.project_id,
},
query: {},
body: {}
body: {},
}
})
@ -338,10 +338,10 @@ describe('HttpController', function () {
this.req = {
params: {
project_id: this.project_id,
doc_id: this.doc_id
doc_id: this.doc_id,
},
query: {},
body: {}
body: {},
}
})
@ -396,10 +396,10 @@ describe('HttpController', function () {
this.req = {
params: {
project_id: this.project_id,
doc_id: this.doc_id
doc_id: this.doc_id,
},
query: {},
body: {}
body: {},
}
})
@ -414,7 +414,7 @@ describe('HttpController', function () {
it('should flush and delete the doc', function () {
this.DocumentManager.flushAndDeleteDocWithLock
.calledWith(this.project_id, this.doc_id, {
ignoreFlushErrors: false
ignoreFlushErrors: false,
})
.should.equal(true)
})
@ -485,10 +485,10 @@ describe('HttpController', function () {
beforeEach(function () {
this.req = {
params: {
project_id: this.project_id
project_id: this.project_id,
},
query: {},
body: {}
body: {},
}
})
@ -560,10 +560,10 @@ describe('HttpController', function () {
params: {
project_id: this.project_id,
doc_id: this.doc_id,
change_id: (this.change_id = 'mock-change-od-1')
change_id: (this.change_id = 'mock-change-od-1'),
},
query: {},
body: {}
body: {},
}
})
@ -605,7 +605,7 @@ describe('HttpController', function () {
'mock-change-od-1',
'mock-change-od-2',
'mock-change-od-3',
'mock-change-od-4'
'mock-change-od-4',
]
this.req.body = { change_ids: this.change_ids }
this.DocumentManager.acceptChangesWithLock = sinon
@ -650,10 +650,10 @@ describe('HttpController', function () {
params: {
project_id: this.project_id,
doc_id: this.doc_id,
comment_id: (this.comment_id = 'mock-comment-id')
comment_id: (this.comment_id = 'mock-comment-id'),
},
query: {},
body: {}
body: {},
}
})
@ -681,7 +681,7 @@ describe('HttpController', function () {
{
projectId: this.project_id,
docId: this.doc_id,
commentId: this.comment_id
commentId: this.comment_id,
},
'deleting comment via http'
)
@ -712,16 +712,16 @@ describe('HttpController', function () {
this.state = '01234567890abcdef'
this.docs = [
{ _id: '1234', lines: 'hello', v: 23 },
{ _id: '4567', lines: 'world', v: 45 }
{ _id: '4567', lines: 'world', v: 45 },
]
this.req = {
params: {
project_id: this.project_id
project_id: this.project_id,
},
query: {
state: this.state
state: this.state,
},
body: {}
body: {},
}
})
@ -817,16 +817,16 @@ describe('HttpController', function () {
type: 'rename-doc',
id: 1,
pathname: 'thesis.tex',
newPathname: 'book.tex'
newPathname: 'book.tex',
},
{ type: 'add-doc', id: 2, pathname: 'article.tex', docLines: 'hello' },
{
type: 'rename-file',
id: 3,
pathname: 'apple.png',
newPathname: 'banana.png'
newPathname: 'banana.png',
},
{ type: 'add-file', id: 4, url: 'filestore.example.com/4' }
{ type: 'add-file', id: 4, url: 'filestore.example.com/4' },
]
this.version = 1234567
this.req = {
@ -835,11 +835,11 @@ describe('HttpController', function () {
projectHistoryId: this.projectHistoryId,
userId: this.userId,
updates: this.updates,
version: this.version
version: this.version,
},
params: {
project_id: this.project_id
}
project_id: this.project_id,
},
}
})
@ -895,11 +895,11 @@ describe('HttpController', function () {
body: {
projectHistoryId: this.projectHistoryId,
docs: this.docs,
files: this.files
files: this.files,
},
params: {
project_id: this.project_id
}
project_id: this.project_id,
},
}
})


@ -29,10 +29,10 @@ describe('LockManager - checking the lock', function () {
createClient() {
return {
auth() {},
exists: existsStub
}
exists: existsStub,
}
},
},
'./Metrics': { inc() {} },
'./Profiler': (Profiler = (function () {
Profiler = class Profiler {
@ -43,7 +43,7 @@ describe('LockManager - checking the lock', function () {
}
Profiler.initClass()
return Profiler
})())
})()),
}
const LockManager = SandboxedModule.require(modulePath, { requires: mocks })


@ -24,11 +24,11 @@ describe('LockManager - releasing the lock', function () {
let Profiler
this.client = {
auth() {},
eval: sinon.stub()
eval: sinon.stub(),
}
const mocks = {
'@overleaf/redis-wrapper': {
createClient: () => this.client
createClient: () => this.client,
},
'@overleaf/settings': {
redis: {
@ -36,10 +36,10 @@ describe('LockManager - releasing the lock', function () {
key_schema: {
blockingKey({ doc_id }) {
return `Blocking:${doc_id}`
}
}
}
}
},
},
},
},
},
'./Metrics': { inc() {} },
'./Profiler': (Profiler = (function () {
@ -51,7 +51,7 @@ describe('LockManager - releasing the lock', function () {
}
Profiler.initClass()
return Profiler
})())
})()),
}
this.LockManager = SandboxedModule.require(modulePath, { requires: mocks })
this.lockValue = 'lock-value-stub'


@ -26,7 +26,7 @@ describe('LockManager - getting the lock', function () {
'@overleaf/redis-wrapper': {
createClient: () => {
return { auth() {} }
}
},
},
'./Metrics': { inc() {} },
'./Profiler': (Profiler = (function () {
@ -38,8 +38,8 @@ describe('LockManager - getting the lock', function () {
}
Profiler.initClass()
return Profiler
})())
}
})()),
},
})
this.callback = sinon.stub()
return (this.doc_id = 'doc-id-123')


@ -24,10 +24,10 @@ describe('LockManager - trying the lock', function () {
createClient: () => {
return {
auth() {},
set: (this.set = sinon.stub())
}
set: (this.set = sinon.stub()),
}
},
},
'./Metrics': { inc() {} },
'@overleaf/settings': {
redis: {
@ -35,22 +35,26 @@ describe('LockManager - trying the lock', function () {
key_schema: {
blockingKey({ doc_id }) {
return `Blocking:${doc_id}`
}
}
}
}
},
'./Profiler': (this.Profiler = Profiler = (function () {
},
},
},
},
'./Profiler':
(this.Profiler = Profiler =
(function () {
Profiler = class Profiler {
static initClass() {
this.prototype.log = sinon.stub().returns({ end: sinon.stub() })
this.prototype.log = sinon
.stub()
.returns({ end: sinon.stub() })
this.prototype.end = sinon.stub()
}
}
Profiler.initClass()
return Profiler
})())
}
})()),
},
})
this.callback = sinon.stub()


@ -34,10 +34,10 @@ describe('PersistenceManager', function () {
Timer.initClass()
return Timer
})()),
inc: sinon.stub()
inc: sinon.stub(),
}),
'./Errors': Errors
}
'./Errors': Errors,
},
})
this.project_id = 'project-id-123'
this.projectHistoryId = 'history-id-123'
@ -53,8 +53,8 @@ describe('PersistenceManager', function () {
web: {
url: (this.url = 'www.example.com'),
user: (this.user = 'sharelatex'),
pass: (this.pass = 'password')
}
pass: (this.pass = 'password'),
},
})
})
@ -65,7 +65,7 @@ describe('PersistenceManager', function () {
version: this.version,
ranges: this.ranges,
pathname: this.pathname,
projectHistoryId: this.projectHistoryId
projectHistoryId: this.projectHistoryId,
})
})
@ -90,15 +90,15 @@ describe('PersistenceManager', function () {
url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`,
method: 'GET',
headers: {
accept: 'application/json'
accept: 'application/json',
},
auth: {
user: this.user,
pass: this.pass,
sendImmediately: true
sendImmediately: true,
},
jar: false,
timeout: 5000
timeout: 5000,
})
.should.equal(true)
})
@ -309,16 +309,16 @@ describe('PersistenceManager', function () {
version: this.version,
ranges: this.ranges,
lastUpdatedAt: this.lastUpdatedAt,
lastUpdatedBy: this.lastUpdatedBy
lastUpdatedBy: this.lastUpdatedBy,
},
method: 'POST',
auth: {
user: this.user,
pass: this.pass,
sendImmediately: true
sendImmediately: true,
},
jar: false,
timeout: 5000
timeout: 5000,
})
.should.equal(true)
})


@ -37,16 +37,16 @@ describe('ProjectHistoryRedisManager', function () {
},
projectHistoryFirstOpTimestamp({ project_id }) {
return `ProjectHistory:FirstOpTimestamp:${project_id}`
}
}
}
}
},
},
},
},
}),
'@overleaf/redis-wrapper': {
createClient: () => this.rclient
createClient: () => this.rclient,
},
'./Metrics': (this.metrics = { summary: sinon.stub() }),
},
'./Metrics': (this.metrics = { summary: sinon.stub() })
}
}
))
})
@ -97,7 +97,7 @@ describe('ProjectHistoryRedisManager', function () {
this.rawUpdate = {
pathname: (this.pathname = '/old'),
newPathname: (this.newPathname = '/new'),
version: (this.version = 2)
version: (this.version = 2),
}
this.ProjectHistoryRedisManager.queueOps = sinon.stub()
@ -118,11 +118,11 @@ describe('ProjectHistoryRedisManager', function () {
new_pathname: this.newPathname,
meta: {
user_id: this.user_id,
ts: new Date()
ts: new Date(),
},
version: this.version,
projectHistoryId: this.projectHistoryId,
file: this.file_id
file: this.file_id,
}
return this.ProjectHistoryRedisManager.queueOps
@ -144,7 +144,7 @@ describe('ProjectHistoryRedisManager', function () {
pathname: (this.pathname = '/old'),
docLines: (this.docLines = 'a\nb'),
version: (this.version = 2),
url: (this.url = 'filestore.example.com')
url: (this.url = 'filestore.example.com'),
}
this.ProjectHistoryRedisManager.queueOps = sinon.stub()
@ -166,11 +166,11 @@ describe('ProjectHistoryRedisManager', function () {
url: this.url,
meta: {
user_id: this.user_id,
ts: new Date()
ts: new Date(),
},
version: this.version,
projectHistoryId: this.projectHistoryId,
doc: this.doc_id
doc: this.doc_id,
}
return this.ProjectHistoryRedisManager.queueOps


@ -25,7 +25,7 @@ describe('ProjectManager - flushAndDeleteProject', function () {
'./ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}),
'./DocumentManager': (this.DocumentManager = {}),
'./HistoryManager': (this.HistoryManager = {
flushProjectChanges: sinon.stub().callsArg(2)
flushProjectChanges: sinon.stub().callsArg(2),
}),
'./Metrics': (this.Metrics = {
Timer: (Timer = (function () {
@ -36,9 +36,9 @@ describe('ProjectManager - flushAndDeleteProject', function () {
}
Timer.initClass()
return Timer
})())
})
}
})()),
}),
},
})
this.project_id = 'project-id-123'
return (this.callback = sinon.stub())
@ -54,7 +54,7 @@ describe('ProjectManager - flushAndDeleteProject', function () {
return this.ProjectManager.flushAndDeleteProjectWithLocks(
this.project_id,
{},
(error) => {
error => {
this.callback(error)
return done()
}
@ -68,7 +68,7 @@ describe('ProjectManager - flushAndDeleteProject', function () {
})
it('should delete each doc in the project', function () {
return Array.from(this.doc_ids).map((doc_id) =>
return Array.from(this.doc_ids).map(doc_id =>
this.DocumentManager.flushAndDeleteDocWithLock
.calledWith(this.project_id, doc_id, {})
.should.equal(true)
@ -110,7 +110,7 @@ describe('ProjectManager - flushAndDeleteProject', function () {
return this.ProjectManager.flushAndDeleteProjectWithLocks(
this.project_id,
{},
(error) => {
error => {
this.callback(error)
return done()
}
@ -118,7 +118,7 @@ describe('ProjectManager - flushAndDeleteProject', function () {
})
it('should still flush each doc in the project', function () {
return Array.from(this.doc_ids).map((doc_id) =>
return Array.from(this.doc_ids).map(doc_id =>
this.DocumentManager.flushAndDeleteDocWithLock
.calledWith(this.project_id, doc_id, {})
.should.equal(true)


@ -36,9 +36,9 @@ describe('ProjectManager - flushProject', function () {
}
Timer.initClass()
return Timer
})())
})
}
})()),
}),
},
})
this.project_id = 'project-id-123'
return (this.callback = sinon.stub())
@ -53,7 +53,7 @@ describe('ProjectManager - flushProject', function () {
this.DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArg(2)
return this.ProjectManager.flushProjectWithLocks(
this.project_id,
(error) => {
error => {
this.callback(error)
return done()
}
@ -67,7 +67,7 @@ describe('ProjectManager - flushProject', function () {
})
it('should flush each doc in the project', function () {
return Array.from(this.doc_ids).map((doc_id) =>
return Array.from(this.doc_ids).map(doc_id =>
this.DocumentManager.flushDocIfLoadedWithLock
.calledWith(this.project_id, doc_id)
.should.equal(true)
@ -105,7 +105,7 @@ describe('ProjectManager - flushProject', function () {
)
return this.ProjectManager.flushProjectWithLocks(
this.project_id,
(error) => {
error => {
this.callback(error)
return done()
}
@ -113,7 +113,7 @@ describe('ProjectManager - flushProject', function () {
})
it('should still flush each doc in the project', function () {
return Array.from(this.doc_ids).map((doc_id) =>
return Array.from(this.doc_ids).map(doc_id =>
this.DocumentManager.flushDocIfLoadedWithLock
.calledWith(this.project_id, doc_id)
.should.equal(true)


@ -33,10 +33,10 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () {
}
Timer.initClass()
return Timer
})())
})()),
}),
'./Errors': Errors
}
'./Errors': Errors,
},
})
this.project_id = 'project-id-123'
this.callback = sinon.stub()
@ -49,24 +49,24 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () {
this.doc_lines = [
['aaa', 'aaa'],
['bbb', 'bbb'],
['ccc', 'ccc']
['ccc', 'ccc'],
]
this.docs = [
{
_id: this.doc_ids[0],
lines: this.doc_lines[0],
v: this.doc_versions[0]
v: this.doc_versions[0],
},
{
_id: this.doc_ids[1],
lines: this.doc_lines[1],
v: this.doc_versions[1]
v: this.doc_versions[1],
},
{
_id: this.doc_ids[2],
lines: this.doc_lines[2],
v: this.doc_versions[2]
}
v: this.doc_versions[2],
},
]
this.RedisManager.checkOrSetProjectState = sinon
.stub()
@ -200,7 +200,7 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () {
return describe('clearing the project state with clearProjectState', function () {
beforeEach(function (done) {
this.RedisManager.clearProjectState = sinon.stub().callsArg(1)
return this.ProjectManager.clearProjectState(this.project_id, (error) => {
return this.ProjectManager.clearProjectState(this.project_id, error => {
this.callback(error)
return done()
})


@ -8,17 +8,17 @@ describe('ProjectManager', function () {
this.RedisManager = {}
this.ProjectHistoryRedisManager = {
queueRenameEntity: sinon.stub().yields(),
queueAddEntity: sinon.stub().yields()
queueAddEntity: sinon.stub().yields(),
}
this.DocumentManager = {
renameDocWithLock: sinon.stub().yields()
renameDocWithLock: sinon.stub().yields(),
}
this.HistoryManager = {
flushProjectChangesAsync: sinon.stub(),
shouldFlushHistoryOps: sinon.stub().returns(false)
shouldFlushHistoryOps: sinon.stub().returns(false),
}
this.Metrics = {
Timer: class Timer {}
Timer: class Timer {},
}
this.Metrics.Timer.prototype.done = sinon.stub()
@ -28,8 +28,8 @@ describe('ProjectManager', function () {
'./ProjectHistoryRedisManager': this.ProjectHistoryRedisManager,
'./DocumentManager': this.DocumentManager,
'./HistoryManager': this.HistoryManager,
'./Metrics': this.Metrics
}
'./Metrics': this.Metrics,
},
})
this.project_id = 'project-id-123'
@ -46,24 +46,24 @@ describe('ProjectManager', function () {
type: 'rename-doc',
id: 1,
pathname: 'foo',
newPathname: 'foo'
newPathname: 'foo',
}
this.secondDocUpdate = {
type: 'rename-doc',
id: 2,
pathname: 'bar',
newPathname: 'bar2'
newPathname: 'bar2',
}
this.firstFileUpdate = {
type: 'rename-file',
id: 2,
pathname: 'bar',
newPathname: 'bar2'
newPathname: 'bar2',
}
this.updates = [
this.firstDocUpdate,
this.secondDocUpdate,
this.firstFileUpdate
this.firstFileUpdate,
]
})
@ -81,7 +81,7 @@ describe('ProjectManager', function () {
it('should rename the docs in the updates', function () {
const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, {
version: `${this.version}.0`
version: `${this.version}.0`,
})
const secondDocUpdateWithVersion = _.extend(
{},
@ -201,28 +201,28 @@ describe('ProjectManager', function () {
this.firstDocUpdate = {
type: 'add-doc',
id: 1,
docLines: 'a\nb'
docLines: 'a\nb',
}
this.secondDocUpdate = {
type: 'add-doc',
id: 2,
docLines: 'a\nb'
docLines: 'a\nb',
}
this.firstFileUpdate = {
type: 'add-file',
id: 3,
url: 'filestore.example.com/2'
url: 'filestore.example.com/2',
}
this.secondFileUpdate = {
type: 'add-file',
id: 4,
url: 'filestore.example.com/3'
url: 'filestore.example.com/3',
}
this.updates = [
this.firstDocUpdate,
this.secondDocUpdate,
this.firstFileUpdate,
this.secondFileUpdate
this.secondFileUpdate,
]
})
@ -240,7 +240,7 @@ describe('ProjectManager', function () {
it('should add the docs in the updates', function () {
const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, {
version: `${this.version}.0`
version: `${this.version}.0`,
})
const secondDocUpdateWithVersion = _.extend(
{},


@ -32,39 +32,39 @@ describe('RangesManager', function () {
this.updates = [
{
meta: {
user_id: this.user_id
user_id: this.user_id,
},
op: [
{
i: 'two ',
p: 4
}
]
}
p: 4,
},
],
},
]
this.entries = {
comments: [
{
op: {
c: 'three ',
p: 4
p: 4,
},
metadata: {
user_id: this.user_id
}
}
user_id: this.user_id,
},
},
],
changes: [
{
op: {
i: 'five',
p: 15
p: 15,
},
metadata: {
user_id: this.user_id
}
}
]
user_id: this.user_id,
},
},
],
}
return (this.newDocLines = ['one two three four five'])
}) // old is "one three four five"
@ -90,11 +90,11 @@ describe('RangesManager', function () {
expect(ranges_were_collapsed).to.equal(false)
entries.comments[0].op.should.deep.equal({
c: 'three ',
p: 8
p: 8,
})
return entries.changes[0].op.should.deep.equal({
i: 'five',
p: 19
p: 19,
})
})
})
@ -149,16 +149,16 @@ describe('RangesManager', function () {
this.updates = [
{
meta: {
user_id: this.user_id
user_id: this.user_id,
},
op: [
{
c: 'one',
p: 0,
t: 'thread-id-1'
}
]
}
t: 'thread-id-1',
},
],
},
]
this.entries = {
comments: [
@ -166,24 +166,24 @@ describe('RangesManager', function () {
op: {
c: 'three ',
p: 4,
t: 'thread-id-2'
t: 'thread-id-2',
},
metadata: {
user_id: this.user_id
}
user_id: this.user_id,
},
},
{
op: {
c: 'four ',
p: 10,
t: 'thread-id-3'
t: 'thread-id-3',
},
metadata: {
user_id: this.user_id
}
}
user_id: this.user_id,
},
},
],
changes: []
changes: [],
}
return this.RangesManager.applyUpdate(
this.project_id,
@ -212,38 +212,38 @@ describe('RangesManager', function () {
{
meta: {
user_id: this.user_id,
tc: 'track-changes-id-yes'
tc: 'track-changes-id-yes',
},
op: [
{
i: 'one ',
p: 0
}
]
}
p: 0,
},
],
},
]
this.entries = {
changes: [
{
op: {
i: 'three',
p: 4
p: 4,
},
metadata: {
user_id: this.user_id
}
user_id: this.user_id,
},
},
{
op: {
i: 'four',
p: 10
p: 10,
},
metadata: {
user_id: this.user_id
}
}
user_id: this.user_id,
},
},
],
comments: []
comments: [],
}
this.newDocLines = ['one two three four']
return this.RangesManager.applyUpdate(
@ -272,15 +272,15 @@ describe('RangesManager', function () {
this.updates = [
{
meta: {
user_id: this.user_id
user_id: this.user_id,
},
op: [
{
c: "doesn't match",
p: 0
}
]
}
p: 0,
},
],
},
]
return this.RangesManager.applyUpdate(
this.project_id,
@ -308,16 +308,16 @@ describe('RangesManager', function () {
this.updates = [
{
meta: {
user_id: this.user_id
user_id: this.user_id,
},
op: [
{
d: 'one',
p: 0,
t: 'thread-id-1'
}
]
}
t: 'thread-id-1',
},
],
},
]
this.entries = {
comments: [
@ -325,14 +325,14 @@ describe('RangesManager', function () {
op: {
c: 'n',
p: 1,
t: 'thread-id-2'
t: 'thread-id-2',
},
metadata: {
user_id: this.user_id
}
}
user_id: this.user_id,
},
},
],
changes: []
changes: [],
}
return this.RangesManager.applyUpdate(
this.project_id,
@ -360,8 +360,8 @@ describe('RangesManager', function () {
requires: {
'./RangesTracker': (this.RangesTracker = SandboxedModule.require(
'../../../../app/js/RangesTracker.js'
))
}
)),
},
})
this.ranges = {
@ -371,38 +371,38 @@ describe('RangesManager', function () {
id: 'a1',
op: {
i: 'lorem',
p: 0
}
p: 0,
},
},
{
id: 'a2',
op: {
i: 'ipsum',
p: 10
}
p: 10,
},
},
{
id: 'a3',
op: {
i: 'dolor',
p: 20
}
p: 20,
},
},
{
id: 'a4',
op: {
i: 'sit',
p: 30
}
p: 30,
},
},
{
id: 'a5',
op: {
i: 'amet',
p: 40
}
}
]
p: 40,
},
},
],
}
return (this.removeChangeIdsSpy = sinon.spy(
this.RangesTracker.prototype,
@ -438,7 +438,7 @@ describe('RangesManager', function () {
it('should remove the change', function () {
return expect(
this.rangesResponse.changes.find(
(change) => change.id === this.ranges.changes[1].id
change => change.id === this.ranges.changes[1].id
)
).to.be.undefined
})
@ -450,10 +450,10 @@ describe('RangesManager', function () {
})
return it('should not touch other changes', function () {
return [0, 2, 3, 4].map((i) =>
return [0, 2, 3, 4].map(i =>
expect(
this.rangesResponse.changes.find(
(change) => change.id === this.ranges.changes[i].id
change => change.id === this.ranges.changes[i].id
)
).to.deep.equal(this.ranges.changes[i])
)
@ -465,7 +465,7 @@ describe('RangesManager', function () {
this.change_ids = [
this.ranges.changes[1].id,
this.ranges.changes[3].id,
this.ranges.changes[4].id
this.ranges.changes[4].id,
]
return this.RangesManager.acceptChanges(
this.change_ids,
@ -491,10 +491,10 @@ describe('RangesManager', function () {
it('should remove the changes', function () {
return [1, 3, 4].map(
(i) =>
i =>
expect(
this.rangesResponse.changes.find(
(change) => change.id === this.ranges.changes[1].id
change => change.id === this.ranges.changes[1].id
)
).to.be.undefined
)
@ -507,10 +507,10 @@ describe('RangesManager', function () {
})
return it('should not touch other changes', function () {
return [0, 2].map((i) =>
return [0, 2].map(i =>
expect(
this.rangesResponse.changes.find(
(change) => change.id === this.ranges.changes[i].id
change => change.id === this.ranges.changes[i].id
)
).to.deep.equal(this.ranges.changes[i])
)


@ -31,9 +31,9 @@ describe('RateLimitManager', function () {
Timer.initClass()
return Timer
})()),
gauge: sinon.stub()
})
}
gauge: sinon.stub(),
}),
},
})
this.callback = sinon.stub()
return (this.RateLimiter = new this.RateLimitManager(1))
@ -63,18 +63,18 @@ describe('RateLimitManager', function () {
beforeEach(function (done) {
this.task = sinon.stub()
this.finalTask = sinon.stub()
const task = (cb) => {
const task = cb => {
this.task()
return setTimeout(cb, 100)
}
const finalTask = (cb) => {
const finalTask = cb => {
this.finalTask()
return setTimeout(cb, 100)
}
this.RateLimiter.run(task, this.callback)
this.RateLimiter.run(task, this.callback)
this.RateLimiter.run(task, this.callback)
return this.RateLimiter.run(finalTask, (err) => {
return this.RateLimiter.run(finalTask, err => {
this.callback(err)
return done()
})
@ -101,14 +101,14 @@ describe('RateLimitManager', function () {
beforeEach(function (done) {
this.task = sinon.stub()
this.finalTask = sinon.stub()
const finalTask = (cb) => {
const finalTask = cb => {
this.finalTask()
return setTimeout(cb, 100)
}
this.RateLimiter.run(this.task, this.callback)
this.RateLimiter.run(this.task, this.callback)
this.RateLimiter.run(this.task, this.callback)
return this.RateLimiter.run(finalTask, (err) => {
return this.RateLimiter.run(finalTask, err => {
this.callback(err)
return done()
})


@ -19,15 +19,15 @@ describe('RealTimeRedisManager', function () {
beforeEach(function () {
this.rclient = {
auth() {},
exec: sinon.stub()
exec: sinon.stub(),
}
this.rclient.multi = () => this.rclient
this.pubsubClient = { publish: sinon.stub() }
this.RealTimeRedisManager = SandboxedModule.require(modulePath, {
requires: {
'@overleaf/redis-wrapper': {
createClient: (config) =>
config.name === 'pubsub' ? this.pubsubClient : this.rclient
createClient: config =>
config.name === 'pubsub' ? this.pubsubClient : this.rclient,
},
'@overleaf/settings': {
redis: {
@ -35,23 +35,23 @@ describe('RealTimeRedisManager', function () {
key_schema: {
pendingUpdates({ doc_id }) {
return `PendingUpdates:${doc_id}`
}
}
},
},
}),
pubsub: {
name: 'pubsub'
}
}
name: 'pubsub',
},
},
},
crypto: (this.crypto = {
randomBytes: sinon
.stub()
.withArgs(4)
.returns(Buffer.from([0x1, 0x2, 0x3, 0x4]))
.returns(Buffer.from([0x1, 0x2, 0x3, 0x4])),
}),
os: (this.os = { hostname: sinon.stub().returns('somehost') }),
'./Metrics': (this.metrics = { summary: sinon.stub() })
}
'./Metrics': (this.metrics = { summary: sinon.stub() }),
},
})
this.doc_id = 'doc-id-123'
@ -69,9 +69,9 @@ describe('RealTimeRedisManager', function () {
beforeEach(function () {
this.updates = [
{ op: [{ i: 'foo', p: 4 }] },
{ op: [{ i: 'foo', p: 4 }] }
{ op: [{ i: 'foo', p: 4 }] },
]
this.jsonUpdates = this.updates.map((update) => JSON.stringify(update))
this.jsonUpdates = this.updates.map(update => JSON.stringify(update))
this.rclient.exec = sinon
.stub()
.callsArgWith(0, null, [this.jsonUpdates])
@ -102,7 +102,7 @@ describe('RealTimeRedisManager', function () {
beforeEach(function () {
this.jsonUpdates = [
JSON.stringify({ op: [{ i: 'foo', p: 4 }] }),
'broken json'
'broken json',
]
this.rclient.exec = sinon
.stub()


@ -30,7 +30,7 @@ describe('RedisManager', function () {
'@overleaf/settings': (this.settings = {
documentupdater: { logHashErrors: { write: true, read: true } },
apis: {
project_history: { enabled: true }
project_history: { enabled: true },
},
redis: {
documentupdater: {
@ -82,8 +82,8 @@ describe('RedisManager', function () {
},
lastUpdatedAt({ doc_id }) {
return `lastUpdatedAt:${doc_id}`
}
}
},
},
},
history: {
key_schema: {
@ -92,13 +92,13 @@ describe('RedisManager', function () {
},
docsWithHistoryOps({ project_id }) {
return `DocsWithHistoryOps:${project_id}`
}
}
}
}
},
},
},
},
}),
'@overleaf/redis-wrapper': {
createClient: () => this.rclient
createClient: () => this.rclient,
},
'./Metrics': (this.metrics = {
inc: sinon.stub(),
@ -112,10 +112,10 @@ describe('RedisManager', function () {
const timeSpan = new Date() - this.start
return timeSpan
}
})
}),
'./Errors': Errors
}
}),
'./Errors': Errors,
},
})
this.doc_id = 'doc-id-123'
@ -151,7 +151,7 @@ describe('RedisManager', function () {
this.json_ranges,
this.pathname,
this.projectHistoryId.toString(),
this.unflushed_time
this.unflushed_time,
])
})
@ -212,7 +212,7 @@ describe('RedisManager', function () {
this.version,
this.badHash,
this.project_id,
this.json_ranges
this.json_ranges,
])
return this.RedisManager.getDoc(
this.project_id,
@ -244,7 +244,7 @@ describe('RedisManager', function () {
this.another_project_id,
this.json_ranges,
this.pathname,
this.unflushed_time
this.unflushed_time,
])
}
@ -278,7 +278,7 @@ describe('RedisManager', function () {
this.another_project_id,
this.json_ranges,
this.pathname,
this.unflushed_time
this.unflushed_time,
])
return this.RedisManager.getDoc(
this.project_id,
@ -304,7 +304,7 @@ describe('RedisManager', function () {
this.start = 50
this.end = 60
this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }]
this.jsonOps = this.ops.map((op) => JSON.stringify(op))
this.jsonOps = this.ops.map(op => JSON.stringify(op))
this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length)
this.rclient.get = sinon
.stub()
@ -353,7 +353,7 @@ describe('RedisManager', function () {
this.start = 50
this.end = -1
this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }]
this.jsonOps = this.ops.map((op) => JSON.stringify(op))
this.jsonOps = this.ops.map(op => JSON.stringify(op))
this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length)
this.rclient.get = sinon
.stub()
@ -390,7 +390,7 @@ describe('RedisManager', function () {
this.start = 20
this.end = -1
this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }]
this.jsonOps = this.ops.map((op) => JSON.stringify(op))
this.jsonOps = this.ops.map(op => JSON.stringify(op))
this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length)
this.rclient.get = sinon
.stub()
@ -423,7 +423,7 @@ describe('RedisManager', function () {
this.start = 50
this.end = 60
this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }]
this.jsonOps = this.ops.map((op) => JSON.stringify(op))
this.jsonOps = this.ops.map(op => JSON.stringify(op))
this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length)
this.rclient.get = sinon
.stub()
@ -483,7 +483,7 @@ describe('RedisManager', function () {
null,
this.doc_update_list_length,
null,
null
null,
])
return (this.ProjectHistoryRedisManager.queueOps = sinon
.stub()
@ -529,7 +529,7 @@ describe('RedisManager', function () {
[`DocHash:${this.doc_id}`]: this.hash,
[`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges),
[`lastUpdatedAt:${this.doc_id}`]: Date.now(),
[`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id'
[`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id',
})
.should.equal(true)
})
@ -728,7 +728,7 @@ describe('RedisManager', function () {
[`DocHash:${this.doc_id}`]: this.hash,
[`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges),
[`lastUpdatedAt:${this.doc_id}`]: Date.now(),
[`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id'
[`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id',
})
.should.equal(true)
})
@ -759,7 +759,7 @@ describe('RedisManager', function () {
[`DocHash:${this.doc_id}`]: this.hash,
[`Ranges:${this.doc_id}`]: null,
[`lastUpdatedAt:${this.doc_id}`]: Date.now(),
[`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id'
[`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id',
})
.should.equal(true)
})
@ -856,7 +856,7 @@ describe('RedisManager', function () {
[`DocHash:${this.doc_id}`]: this.hash,
[`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges),
[`lastUpdatedAt:${this.doc_id}`]: Date.now(),
[`lastUpdatedBy:${this.doc_id}`]: undefined
[`lastUpdatedBy:${this.doc_id}`]: undefined,
})
.should.equal(true)
})
@ -900,7 +900,7 @@ describe('RedisManager', function () {
[`DocHash:${this.doc_id}`]: this.hash,
[`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges),
[`Pathname:${this.doc_id}`]: this.pathname,
[`ProjectHistoryId:${this.doc_id}`]: this.projectHistoryId
[`ProjectHistoryId:${this.doc_id}`]: this.projectHistoryId,
})
.should.equal(true)
})
@ -939,7 +939,7 @@ describe('RedisManager', function () {
[`DocHash:${this.doc_id}`]: this.hash,
[`Ranges:${this.doc_id}`]: null,
[`Pathname:${this.doc_id}`]: this.pathname,
[`ProjectHistoryId:${this.doc_id}`]: this.projectHistoryId
[`ProjectHistoryId:${this.doc_id}`]: this.projectHistoryId,
})
.should.equal(true)
})
@ -1070,7 +1070,7 @@ describe('RedisManager', function () {
return (this.update = {
id: this.doc_id,
pathname: (this.pathname = 'pathname'),
newPathname: (this.newPathname = 'new-pathname')
newPathname: (this.newPathname = 'new-pathname'),
})
})


@ -105,7 +105,7 @@ describe('ShareJS text type', function () {
text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 4 })
return dest.should.deep.equal([
{ d: 'f', p: 3 },
{ d: 'oo', p: 6 }
{ d: 'oo', p: 6 },
])
})
})
@ -418,7 +418,7 @@ describe('ShareJS text type', function () {
op1_t,
op2_t,
rt12_comments: rt12.comments,
rt21_comments: rt21.comments
rt21_comments: rt21.comments,
},
'Comments are not consistent'
)


@ -25,8 +25,8 @@ describe('ShareJsDB', function () {
this.ShareJsDB = SandboxedModule.require(modulePath, {
requires: {
'./RedisManager': (this.RedisManager = {}),
'./Errors': Errors
}
'./Errors': Errors,
},
})
this.version = 42
@ -130,14 +130,14 @@ describe('ShareJsDB', function () {
this.opData = {
op: { p: 20, t: 'foo' },
meta: { source: 'bar' },
v: this.version
v: this.version,
}
return this.db.writeOp(this.doc_key, this.opData, this.callback)
})
it('should write into appliedOps', function () {
return expect(this.db.appliedOps[this.doc_key]).to.deep.equal([
this.opData
this.opData,
])
})


@ -31,14 +31,14 @@ describe('ShareJsUpdateManager', function () {
'@overleaf/redis-wrapper': {
createClient: () => {
return (this.rclient = { auth() {} })
}
},
},
'./RealTimeRedisManager': (this.RealTimeRedisManager = {}),
'./Metrics': (this.metrics = { inc: sinon.stub() })
'./Metrics': (this.metrics = { inc: sinon.stub() }),
},
globals: {
clearTimeout: (this.clearTimeout = sinon.stub())
}
clearTimeout: (this.clearTimeout = sinon.stub()),
},
}))
})
@ -58,8 +58,8 @@ describe('ShareJsUpdateManager', function () {
applyOp: sinon.stub().callsArg(2),
getSnapshot: sinon.stub(),
db: {
appliedOps: {}
}
appliedOps: {},
},
}
this.ShareJsUpdateManager.getNewShareJsModel = sinon
.stub()
@ -74,11 +74,10 @@ describe('ShareJsUpdateManager', function () {
beforeEach(function (done) {
this.model.getSnapshot.callsArgWith(1, null, {
snapshot: this.updatedDocLines.join('\n'),
v: this.version
v: this.version,
})
this.model.db.appliedOps[
`${this.project_id}:${this.doc_id}`
] = this.appliedOps = ['mock-ops']
this.model.db.appliedOps[`${this.project_id}:${this.doc_id}`] =
this.appliedOps = ['mock-ops']
return this.ShareJsUpdateManager.applyUpdate(
this.project_id,
this.doc_id,
@ -172,11 +171,10 @@ describe('ShareJsUpdateManager', function () {
this.error = new Error('invalid hash')
this.model.getSnapshot.callsArgWith(1, null, {
snapshot: 'unexpected content',
v: this.version
v: this.version,
})
this.model.db.appliedOps[
`${this.project_id}:${this.doc_id}`
] = this.appliedOps = ['mock-ops']
this.model.db.appliedOps[`${this.project_id}:${this.doc_id}`] =
this.appliedOps = ['mock-ops']
return this.ShareJsUpdateManager.applyUpdate(
this.project_id,
this.doc_id,
@ -203,7 +201,7 @@ describe('ShareJsUpdateManager', function () {
this.model = {
on: (event, callback) => {
return (this.callback = callback)
}
},
}
sinon.spy(this.model, 'on')
return this.ShareJsUpdateManager._listenForOps(this.model)
@ -217,7 +215,7 @@ describe('ShareJsUpdateManager', function () {
beforeEach(function () {
this.opData = {
op: { t: 'foo', p: 1 },
meta: { source: 'bar' }
meta: { source: 'bar' },
}
this.RealTimeRedisManager.sendData = sinon.stub()
return this.callback(`${this.project_id}:${this.doc_id}`, this.opData)
@ -228,7 +226,7 @@ describe('ShareJsUpdateManager', function () {
.calledWith({
project_id: this.project_id,
doc_id: this.doc_id,
op: this.opData
op: this.opData,
})
.should.equal(true)
})


@ -38,7 +38,7 @@ describe('UpdateManager', function () {
}
Timer.initClass()
return Timer
})())
})()),
}),
'@overleaf/settings': (this.Settings = {}),
'./DocumentManager': (this.DocumentManager = {}),
@ -53,8 +53,8 @@ describe('UpdateManager', function () {
}
Profiler.initClass()
return Profiler
})())
}
})()),
},
}))
})
@ -272,7 +272,7 @@ describe('UpdateManager', function () {
})
it('should apply the updates', function () {
return Array.from(this.updates).map((update) =>
return Array.from(this.updates).map(update =>
this.UpdateManager.applyUpdate
.calledWith(this.project_id, this.doc_id, update)
.should.equal(true)
@ -320,7 +320,7 @@ describe('UpdateManager', function () {
this.updated_ranges = { entries: 'updated', comments: 'updated' }
this.appliedOps = [
{ v: 42, op: 'mock-op-42' },
{ v: 45, op: 'mock-op-45' }
{ v: 45, op: 'mock-op-45' },
]
this.doc_ops_length = sinon.stub()
this.project_ops_length = sinon.stub()
@ -465,7 +465,7 @@ describe('UpdateManager', function () {
.calledWith({
project_id: this.project_id,
doc_id: this.doc_id,
error: this.error.message
error: this.error.message,
})
.should.equal(true)
})
@ -512,17 +512,17 @@ describe('UpdateManager', function () {
v: 42,
op: [
{ i: 'foo', p: 4 },
{ i: 'bar', p: 6 }
]
{ i: 'bar', p: 6 },
],
},
{
v: 45,
op: [
{ d: 'qux', p: 4 },
{ i: 'bazbaz', p: 14 }
]
{ i: 'bazbaz', p: 14 },
],
},
{ v: 49, op: [{ i: 'penguin', p: 18 }] }
{ v: 49, op: [{ i: 'penguin', p: 18 }] },
]
this.UpdateManager._addProjectHistoryMetadataToOps(
appliedOps,
@ -536,24 +536,24 @@ describe('UpdateManager', function () {
v: 42,
op: [
{ i: 'foo', p: 4 },
{ i: 'bar', p: 6 }
{ i: 'bar', p: 6 },
],
meta: {
pathname: this.pathname,
doc_length: 14
}
doc_length: 14,
},
},
{
projectHistoryId: this.projectHistoryId,
v: 45,
op: [
{ d: 'qux', p: 4 },
{ i: 'bazbaz', p: 14 }
{ i: 'bazbaz', p: 14 },
],
meta: {
pathname: this.pathname,
doc_length: 20
} // 14 + 'foo' + 'bar'
doc_length: 20,
}, // 14 + 'foo' + 'bar'
},
{
projectHistoryId: this.projectHistoryId,
@ -561,9 +561,9 @@ describe('UpdateManager', function () {
op: [{ i: 'penguin', p: 18 }],
meta: {
pathname: this.pathname,
doc_length: 23
} // 14 - 'qux' + 'bazbaz'
}
doc_length: 23,
}, // 14 - 'qux' + 'bazbaz'
},
])
})
})