Merge pull request #179 from overleaf/jpa-bulk-dependency-upgrades

[misc] bulk dependency upgrades
Jakob Ackermann, 2021-07-16 10:07:22 +02:00, committed by GitHub
commit 316078b3b8
107 changed files with 3132 additions and 5160 deletions

.eslintrc

@ -3,9 +3,9 @@
// https://github.com/sharelatex/sharelatex-dev-environment
{
"extends": [
"eslint:recommended",
"standard",
"prettier",
"prettier/standard"
"prettier"
],
"parserOptions": {
"ecmaVersion": 2018
@ -20,6 +20,19 @@
"mocha": true
},
"rules": {
// TODO(das7pad): remove overrides after fixing all the violations manually (https://github.com/overleaf/issues/issues/3882#issuecomment-878999671)
// START of temporary overrides
"array-callback-return": "off",
"no-dupe-else-if": "off",
"no-var": "off",
"no-empty": "off",
"node/handle-callback-err": "off",
"no-loss-of-precision": "off",
"node/no-callback-literal": "off",
"node/no-path-concat": "off",
"prefer-regex-literals": "off",
// END of temporary overrides
// Swap the no-unused-expressions rule with a more chai-friendly one
"no-unused-expressions": 0,
"chai-friendly/no-unused-expressions": "error",

.github/dependabot.yml

@ -20,4 +20,4 @@ updates:
# future if we reorganise teams
labels:
- "dependencies"
- "Team-Magma"
- "type:maintenance"

.nvmrc

@ -1 +1 @@
12.21.0
12.22.3

.prettierrc

@ -2,6 +2,10 @@
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
{
"arrowParens": "avoid",
"semi": false,
"singleQuote": true
"singleQuote": true,
"trailingComma": "es5",
"tabWidth": 2,
"useTabs": false
}
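
These two added options account for most of the reformat-only churn in the JavaScript diffs below; a minimal before/after sketch on hypothetical code:

// Formatted with the previous options:
const double = (x) => x * 2
const opts = {
  retries: 2,
  delay: 10
}

// Formatted with "arrowParens": "avoid" and "trailingComma": "es5":
const double = x => x * 2
const opts = {
  retries: 2,
  delay: 10,
}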

Dockerfile

@ -2,7 +2,7 @@
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
FROM node:12.21.0 as base
FROM node:12.22.3 as base
WORKDIR /app

app.js

@ -2,7 +2,7 @@ const Metrics = require('@overleaf/metrics')
Metrics.initialize('doc-updater')
const express = require('express')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
logger.initialize('document-updater')
@ -114,7 +114,7 @@ const pubsubClient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.pubsub
)
app.get('/health_check/redis', (req, res, next) => {
pubsubClient.healthCheck((error) => {
pubsubClient.healthCheck(error => {
if (error) {
logger.err({ err: error }, 'failed redis health check')
return res.sendStatus(500)
@ -128,7 +128,7 @@ const docUpdaterRedisClient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.documentupdater
)
app.get('/health_check/redis_cluster', (req, res, next) => {
docUpdaterRedisClient.healthCheck((error) => {
docUpdaterRedisClient.healthCheck(error => {
if (error) {
logger.err({ err: error }, 'failed redis cluster health check')
return res.sendStatus(500)
@ -141,32 +141,32 @@ app.get('/health_check/redis_cluster', (req, res, next) => {
app.get('/health_check', (req, res, next) => {
async.series(
[
(cb) => {
pubsubClient.healthCheck((error) => {
cb => {
pubsubClient.healthCheck(error => {
if (error) {
logger.err({ err: error }, 'failed redis health check')
}
cb(error)
})
},
(cb) => {
docUpdaterRedisClient.healthCheck((error) => {
cb => {
docUpdaterRedisClient.healthCheck(error => {
if (error) {
logger.err({ err: error }, 'failed redis cluster health check')
}
cb(error)
})
},
(cb) => {
mongodb.healthCheck((error) => {
cb => {
mongodb.healthCheck(error => {
if (error) {
logger.err({ err: error }, 'failed mongo health check')
}
cb(error)
})
}
},
],
(error) => {
error => {
if (error) {
return res.sendStatus(500)
} else {
@ -189,7 +189,7 @@ app.use((error, req, res, next) => {
}
})
const shutdownCleanly = (signal) => () => {
const shutdownCleanly = signal => () => {
logger.log({ signal }, 'received interrupt, cleaning up')
Settings.shuttingDown = true
setTimeout(() => {
@ -198,8 +198,8 @@ const shutdownCleanly = (signal) => () => {
}, 10000)
}
const watchForEvent = (eventName) => {
docUpdaterRedisClient.on(eventName, (e) => {
const watchForEvent = eventName => {
docUpdaterRedisClient.on(eventName, e => {
console.log(`redis event: ${eventName} ${e}`) // eslint-disable-line no-console
})
}
@ -236,7 +236,7 @@ if (!module.parent) {
}
})
})
.catch((err) => {
.catch(err => {
logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
process.exit(1)
})
@ -251,7 +251,7 @@ for (const signal of [
'SIGUSR1',
'SIGUSR2',
'SIGTERM',
'SIGABRT'
'SIGABRT',
]) {
process.on(signal, shutdownCleanly(signal))
}
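
Note that shutdownCleanly is curried: shutdownCleanly(signal) returns the actual handler, closed over the signal name, so a single definition serves every entry in the signal list above. A hypothetical uncurried equivalent, for illustration only:

// Equivalent uncurried form (illustration, not part of this commit)
function makeShutdownHandler(signal) {
  return function () {
    logger.log({ signal }, 'received interrupt, cleaning up')
    Settings.shuttingDown = true
    // ...same delayed-exit logic as above
  }
}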

DeleteQueueManager.js

@ -13,7 +13,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let DeleteQueueManager
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const RedisManager = require('./RedisManager')
const ProjectManager = require('./ProjectManager')
const logger = require('logger-sharelatex')
@ -43,44 +43,44 @@ module.exports = DeleteQueueManager = {
let count = 0
const flushProjectIfNotModified = (project_id, flushTimestamp, cb) =>
ProjectManager.getProjectDocsTimestamps(project_id, function (
err,
timestamps
) {
if (err != null) {
return callback(err)
}
if (timestamps.length === 0) {
logger.log(
{ project_id },
'skipping flush of queued project - no timestamps'
)
return cb()
}
// are any of the timestamps newer than the time the project was flushed?
for (const timestamp of Array.from(timestamps)) {
if (timestamp > flushTimestamp) {
metrics.inc('queued-delete-skipped')
logger.debug(
{ project_id, timestamps, flushTimestamp },
'found newer timestamp, will skip delete'
ProjectManager.getProjectDocsTimestamps(
project_id,
function (err, timestamps) {
if (err != null) {
return callback(err)
}
if (timestamps.length === 0) {
logger.log(
{ project_id },
'skipping flush of queued project - no timestamps'
)
return cb()
}
}
logger.log({ project_id, flushTimestamp }, 'flushing queued project')
return ProjectManager.flushAndDeleteProjectWithLocks(
project_id,
{ skip_history_flush: false },
function (err) {
if (err != null) {
logger.err({ project_id, err }, 'error flushing queued project')
// are any of the timestamps newer than the time the project was flushed?
for (const timestamp of Array.from(timestamps)) {
if (timestamp > flushTimestamp) {
metrics.inc('queued-delete-skipped')
logger.debug(
{ project_id, timestamps, flushTimestamp },
'found newer timestamp, will skip delete'
)
return cb()
}
metrics.inc('queued-delete-completed')
return cb(null, true)
}
)
})
logger.log({ project_id, flushTimestamp }, 'flushing queued project')
return ProjectManager.flushAndDeleteProjectWithLocks(
project_id,
{ skip_history_flush: false },
function (err) {
if (err != null) {
logger.err({ project_id, err }, 'error flushing queued project')
}
metrics.inc('queued-delete-completed')
return cb(null, true)
}
)
}
)
var flushNextProject = function () {
const now = Date.now()
@ -92,30 +92,29 @@ module.exports = DeleteQueueManager = {
logger.log('hit count limit on flushing old projects')
return callback(null, count)
}
return RedisManager.getNextProjectToFlushAndDelete(cutoffTime, function (
err,
project_id,
flushTimestamp,
queueLength
) {
if (err != null) {
return callback(err)
}
if (project_id == null) {
return callback(null, count)
}
logger.log({ project_id, queueLength }, 'flushing queued project')
metrics.globalGauge('queued-flush-backlog', queueLength)
return flushProjectIfNotModified(project_id, flushTimestamp, function (
err,
flushed
) {
if (flushed) {
count++
return RedisManager.getNextProjectToFlushAndDelete(
cutoffTime,
function (err, project_id, flushTimestamp, queueLength) {
if (err != null) {
return callback(err)
}
return flushNextProject()
})
})
if (project_id == null) {
return callback(null, count)
}
logger.log({ project_id, queueLength }, 'flushing queued project')
metrics.globalGauge('queued-flush-backlog', queueLength)
return flushProjectIfNotModified(
project_id,
flushTimestamp,
function (err, flushed) {
if (flushed) {
count++
}
return flushNextProject()
}
)
}
)
}
return flushNextProject()
@ -133,12 +132,12 @@ module.exports = DeleteQueueManager = {
{
timeout: 1000,
min_delete_age: 3 * 60 * 1000,
limit: 1000 // high value, to ensure we always flush enough projects
limit: 1000, // high value, to ensure we always flush enough projects
},
(err, flushed) =>
setTimeout(doFlush, flushed > 10 ? SHORT_DELAY : LONG_DELAY)
)
}
return doFlush()
}
},
}

DiffCodec.js

@ -21,13 +21,13 @@ module.exports = {
if (type === this.ADDED) {
ops.push({
i: content,
p: position
p: position,
})
position += content.length
} else if (type === this.REMOVED) {
ops.push({
d: content,
p: position
p: position,
})
} else if (type === this.UNCHANGED) {
position += content.length
@ -36,5 +36,5 @@ module.exports = {
}
}
callback(null, ops)
}
},
}
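
The ops assembled above are ShareJS text operations: { i, p } inserts content at character position p, and { d, p } deletes it there. A hypothetical example of the output for a one-word insertion:

// diffAsShareJsOp(['hello world'], ['hello brave world']) would produce:
const ops = [{ i: 'brave ', p: 6 }] // insert "brave " at position 6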

DispatchManager.js

@ -15,7 +15,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let DispatchManager
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const Keys = require('./UpdateKeys')
const redis = require('@overleaf/redis-wrapper')
@ -57,7 +57,7 @@ module.exports = DispatchManager = {
Keys.splitProjectIdAndDocId(doc_key)
)
// Dispatch this in the background
const backgroundTask = (cb) =>
const backgroundTask = cb =>
UpdateManager.processOutstandingUpdatesWithLock(
project_id,
doc_id,
@ -91,7 +91,7 @@ module.exports = DispatchManager = {
if (Settings.shuttingDown) {
return
}
return worker._waitForUpdateThenDispatchWorker((error) => {
return worker._waitForUpdateThenDispatchWorker(error => {
if (error != null) {
logger.error({ err: error }, 'Error in worker process')
throw error
@ -99,7 +99,7 @@ module.exports = DispatchManager = {
return worker.run()
}
})
}
},
}
return worker
@ -110,5 +110,5 @@ module.exports = DispatchManager = {
_.times(number, function (shardNumber) {
return DispatchManager.createDispatcher(RateLimiter, shardNumber).run()
})
}
},
}

DocumentManager.js

@ -47,94 +47,102 @@ module.exports = DocumentManager = {
return _callback(...Array.from(args || []))
}
return RedisManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId,
unflushedTime
) {
if (error != null) {
return callback(error)
}
if (lines == null || version == null) {
logger.log(
{ project_id, doc_id },
'doc not in redis so getting from persistence API'
)
return PersistenceManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId,
projectHistoryType
) {
if (error != null) {
return callback(error)
}
return RedisManager.getDoc(
project_id,
doc_id,
function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId,
unflushedTime
) {
if (error != null) {
return callback(error)
}
if (lines == null || version == null) {
logger.log(
{
project_id,
doc_id,
{ project_id, doc_id },
'doc not in redis so getting from persistence API'
)
return PersistenceManager.getDoc(
project_id,
doc_id,
function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId,
projectHistoryType
},
'got doc from persistence API'
)
return RedisManager.putDocInMemory(
project_id,
doc_id,
lines,
version,
ranges,
pathname,
projectHistoryId,
function (error) {
) {
if (error != null) {
return callback(error)
}
return RedisManager.setHistoryType(
logger.log(
{
project_id,
doc_id,
lines,
version,
pathname,
projectHistoryId,
projectHistoryType,
},
'got doc from persistence API'
)
return RedisManager.putDocInMemory(
project_id,
doc_id,
projectHistoryType,
lines,
version,
ranges,
pathname,
projectHistoryId,
function (error) {
if (error != null) {
return callback(error)
}
return callback(
null,
lines,
version,
ranges || {},
pathname,
projectHistoryId,
null,
false
return RedisManager.setHistoryType(
doc_id,
projectHistoryType,
function (error) {
if (error != null) {
return callback(error)
}
return callback(
null,
lines,
version,
ranges || {},
pathname,
projectHistoryId,
null,
false
)
}
)
}
)
}
)
})
} else {
return callback(
null,
lines,
version,
ranges,
pathname,
projectHistoryId,
unflushedTime,
true
)
} else {
return callback(
null,
lines,
version,
ranges,
pathname,
projectHistoryId,
unflushedTime,
true
)
}
}
})
)
},
getDocAndRecentOps(project_id, doc_id, fromVersion, _callback) {
@ -155,49 +163,46 @@ module.exports = DocumentManager = {
return _callback(...Array.from(args || []))
}
return DocumentManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId
) {
if (error != null) {
return callback(error)
}
if (fromVersion === -1) {
return callback(
null,
lines,
version,
[],
ranges,
pathname,
projectHistoryId
)
} else {
return RedisManager.getPreviousDocOps(
doc_id,
fromVersion,
version,
function (error, ops) {
if (error != null) {
return callback(error)
return DocumentManager.getDoc(
project_id,
doc_id,
function (error, lines, version, ranges, pathname, projectHistoryId) {
if (error != null) {
return callback(error)
}
if (fromVersion === -1) {
return callback(
null,
lines,
version,
[],
ranges,
pathname,
projectHistoryId
)
} else {
return RedisManager.getPreviousDocOps(
doc_id,
fromVersion,
version,
function (error, ops) {
if (error != null) {
return callback(error)
}
return callback(
null,
lines,
version,
ops,
ranges,
pathname,
projectHistoryId
)
}
return callback(
null,
lines,
version,
ops,
ranges,
pathname,
projectHistoryId
)
}
)
)
}
}
})
)
},
setDoc(project_id, doc_id, newLines, source, user_id, undoing, _callback) {
@ -215,95 +220,107 @@ module.exports = DocumentManager = {
}
const UpdateManager = require('./UpdateManager')
return DocumentManager.getDoc(project_id, doc_id, function (
error,
oldLines,
version,
ranges,
pathname,
projectHistoryId,
unflushedTime,
alreadyLoaded
) {
if (error != null) {
return callback(error)
}
if (oldLines != null && oldLines.length > 0 && oldLines[0].text != null) {
logger.log(
{ doc_id, project_id, oldLines, newLines },
'document is JSON so not updating'
)
return callback(null)
}
logger.log(
{ doc_id, project_id, oldLines, newLines },
'setting a document via http'
)
return DiffCodec.diffAsShareJsOp(oldLines, newLines, function (
return DocumentManager.getDoc(
project_id,
doc_id,
function (
error,
op
oldLines,
version,
ranges,
pathname,
projectHistoryId,
unflushedTime,
alreadyLoaded
) {
if (error != null) {
return callback(error)
}
if (undoing) {
for (const o of Array.from(op || [])) {
o.u = true
} // Turn on undo flag for each op for track changes
}
const update = {
doc: doc_id,
op,
v: version,
meta: {
type: 'external',
source,
user_id
}
}
return UpdateManager.applyUpdate(project_id, doc_id, update, function (
error
) {
if (error != null) {
return callback(error)
}
// If the document was loaded already, then someone has it open
// in a project, and the usual flushing mechanism will happen.
// Otherwise we should remove it immediately since nothing else
// is using it.
if (alreadyLoaded) {
return DocumentManager.flushDocIfLoaded(
project_id,
doc_id,
function (error) {
if (error != null) {
return callback(error)
}
return callback(null)
}
)
} else {
return DocumentManager.flushAndDeleteDoc(
project_id,
doc_id,
{},
function (error) {
// There is no harm in flushing project history if the previous
// call failed and sometimes it is required
HistoryManager.flushProjectChangesAsync(project_id)
if (
oldLines != null &&
oldLines.length > 0 &&
oldLines[0].text != null
) {
logger.log(
{ doc_id, project_id, oldLines, newLines },
'document is JSON so not updating'
)
return callback(null)
}
logger.log(
{ doc_id, project_id, oldLines, newLines },
'setting a document via http'
)
return DiffCodec.diffAsShareJsOp(
oldLines,
newLines,
function (error, op) {
if (error != null) {
return callback(error)
}
if (undoing) {
for (const o of Array.from(op || [])) {
o.u = true
} // Turn on undo flag for each op for track changes
}
const update = {
doc: doc_id,
op,
v: version,
meta: {
type: 'external',
source,
user_id,
},
}
return UpdateManager.applyUpdate(
project_id,
doc_id,
update,
function (error) {
if (error != null) {
return callback(error)
}
return callback(null)
// If the document was loaded already, then someone has it open
// in a project, and the usual flushing mechanism will happen.
// Otherwise we should remove it immediately since nothing else
// is using it.
if (alreadyLoaded) {
return DocumentManager.flushDocIfLoaded(
project_id,
doc_id,
function (error) {
if (error != null) {
return callback(error)
}
return callback(null)
}
)
} else {
return DocumentManager.flushAndDeleteDoc(
project_id,
doc_id,
{},
function (error) {
// There is no harm in flushing project history if the previous
// call failed and sometimes it is required
HistoryManager.flushProjectChangesAsync(project_id)
if (error != null) {
return callback(error)
}
return callback(null)
}
)
}
}
)
}
})
})
})
)
}
)
},
flushDocIfLoaded(project_id, doc_id, _callback) {
@ -315,42 +332,49 @@ module.exports = DocumentManager = {
timer.done()
return _callback(...Array.from(args || []))
}
return RedisManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId,
unflushedTime,
lastUpdatedAt,
lastUpdatedBy
) {
if (error != null) {
return callback(error)
}
if (lines == null || version == null) {
logger.log({ project_id, doc_id }, 'doc is not loaded so not flushing')
return callback(null) // TODO: return a flag to bail out, as we go on to remove doc from memory?
} else {
logger.log({ project_id, doc_id, version }, 'flushing doc')
return PersistenceManager.setDoc(
project_id,
doc_id,
lines,
version,
ranges,
lastUpdatedAt,
lastUpdatedBy,
function (error) {
if (error != null) {
return callback(error)
return RedisManager.getDoc(
project_id,
doc_id,
function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId,
unflushedTime,
lastUpdatedAt,
lastUpdatedBy
) {
if (error != null) {
return callback(error)
}
if (lines == null || version == null) {
logger.log(
{ project_id, doc_id },
'doc is not loaded so not flushing'
)
return callback(null) // TODO: return a flag to bail out, as we go on to remove doc from memory?
} else {
logger.log({ project_id, doc_id, version }, 'flushing doc')
return PersistenceManager.setDoc(
project_id,
doc_id,
lines,
version,
ranges,
lastUpdatedAt,
lastUpdatedBy,
function (error) {
if (error != null) {
return callback(error)
}
return RedisManager.clearUnflushedTime(doc_id, callback)
}
return RedisManager.clearUnflushedTime(doc_id, callback)
}
)
)
}
}
})
)
},
flushAndDeleteDoc(project_id, doc_id, options, _callback) {
@ -360,32 +384,36 @@ module.exports = DocumentManager = {
return _callback(...Array.from(args || []))
}
return DocumentManager.flushDocIfLoaded(project_id, doc_id, function (
error
) {
if (error != null) {
if (options.ignoreFlushErrors) {
logger.warn(
{ project_id, doc_id, err: error },
'ignoring flush error while deleting document'
)
} else {
return callback(error)
}
}
// Flush in the background since it requires a http request
HistoryManager.flushDocChangesAsync(project_id, doc_id)
return RedisManager.removeDocFromMemory(project_id, doc_id, function (
error
) {
return DocumentManager.flushDocIfLoaded(
project_id,
doc_id,
function (error) {
if (error != null) {
return callback(error)
if (options.ignoreFlushErrors) {
logger.warn(
{ project_id, doc_id, err: error },
'ignoring flush error while deleting document'
)
} else {
return callback(error)
}
}
return callback(null)
})
})
// Flush in the background since it requires a http request
HistoryManager.flushDocChangesAsync(project_id, doc_id)
return RedisManager.removeDocFromMemory(
project_id,
doc_id,
function (error) {
if (error != null) {
return callback(error)
}
return callback(null)
}
)
}
)
},
acceptChanges(project_id, doc_id, change_ids, _callback) {
@ -401,44 +429,44 @@ module.exports = DocumentManager = {
return _callback(...Array.from(args || []))
}
return DocumentManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges
) {
if (error != null) {
return callback(error)
}
if (lines == null || version == null) {
return callback(
new Errors.NotFoundError(`document not found: ${doc_id}`)
)
}
return RangesManager.acceptChanges(change_ids, ranges, function (
error,
new_ranges
) {
return DocumentManager.getDoc(
project_id,
doc_id,
function (error, lines, version, ranges) {
if (error != null) {
return callback(error)
}
return RedisManager.updateDocument(
project_id,
doc_id,
lines,
version,
[],
new_ranges,
{},
function (error) {
if (lines == null || version == null) {
return callback(
new Errors.NotFoundError(`document not found: ${doc_id}`)
)
}
return RangesManager.acceptChanges(
change_ids,
ranges,
function (error, new_ranges) {
if (error != null) {
return callback(error)
}
return callback()
return RedisManager.updateDocument(
project_id,
doc_id,
lines,
version,
[],
new_ranges,
{},
function (error) {
if (error != null) {
return callback(error)
}
return callback()
}
)
}
)
})
})
}
)
},
deleteComment(project_id, doc_id, comment_id, _callback) {
@ -451,44 +479,44 @@ module.exports = DocumentManager = {
return _callback(...Array.from(args || []))
}
return DocumentManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges
) {
if (error != null) {
return callback(error)
}
if (lines == null || version == null) {
return callback(
new Errors.NotFoundError(`document not found: ${doc_id}`)
)
}
return RangesManager.deleteComment(comment_id, ranges, function (
error,
new_ranges
) {
return DocumentManager.getDoc(
project_id,
doc_id,
function (error, lines, version, ranges) {
if (error != null) {
return callback(error)
}
return RedisManager.updateDocument(
project_id,
doc_id,
lines,
version,
[],
new_ranges,
{},
function (error) {
if (lines == null || version == null) {
return callback(
new Errors.NotFoundError(`document not found: ${doc_id}`)
)
}
return RangesManager.deleteComment(
comment_id,
ranges,
function (error, new_ranges) {
if (error != null) {
return callback(error)
}
return callback()
return RedisManager.updateDocument(
project_id,
doc_id,
lines,
version,
[],
new_ranges,
{},
function (error) {
if (error != null) {
return callback(error)
}
return callback()
}
)
}
)
})
})
}
)
},
renameDoc(project_id, doc_id, user_id, update, projectHistoryId, _callback) {
@ -515,73 +543,94 @@ module.exports = DocumentManager = {
if (callback == null) {
callback = function (error, doc) {}
}
return DocumentManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId,
unflushedTime,
alreadyLoaded
) {
if (error != null) {
return callback(error)
}
// if doc was already loaded see if it needs to be flushed
if (
alreadyLoaded &&
unflushedTime != null &&
Date.now() - unflushedTime > MAX_UNFLUSHED_AGE
return DocumentManager.getDoc(
project_id,
doc_id,
function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId,
unflushedTime,
alreadyLoaded
) {
return DocumentManager.flushDocIfLoaded(project_id, doc_id, function (
error
if (error != null) {
return callback(error)
}
// if doc was already loaded see if it needs to be flushed
if (
alreadyLoaded &&
unflushedTime != null &&
Date.now() - unflushedTime > MAX_UNFLUSHED_AGE
) {
if (error != null) {
return callback(error)
}
return DocumentManager.flushDocIfLoaded(
project_id,
doc_id,
function (error) {
if (error != null) {
return callback(error)
}
return callback(null, lines, version)
}
)
} else {
return callback(null, lines, version)
})
} else {
return callback(null, lines, version)
}
}
})
)
},
resyncDocContents(project_id, doc_id, callback) {
logger.log({ project_id, doc_id }, 'start resyncing doc contents')
return RedisManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId
) {
if (error != null) {
return callback(error)
}
return RedisManager.getDoc(
project_id,
doc_id,
function (error, lines, version, ranges, pathname, projectHistoryId) {
if (error != null) {
return callback(error)
}
if (lines == null || version == null) {
logger.log(
{ project_id, doc_id },
'resyncing doc contents - not found in redis - retrieving from web'
)
return PersistenceManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId
) {
if (error != null) {
logger.error(
{ project_id, doc_id, getDocError: error },
'resyncing doc contents - error retrieving from web'
)
return callback(error)
}
if (lines == null || version == null) {
logger.log(
{ project_id, doc_id },
'resyncing doc contents - not found in redis - retrieving from web'
)
return PersistenceManager.getDoc(
project_id,
doc_id,
function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId
) {
if (error != null) {
logger.error(
{ project_id, doc_id, getDocError: error },
'resyncing doc contents - error retrieving from web'
)
return callback(error)
}
return ProjectHistoryRedisManager.queueResyncDocContent(
project_id,
projectHistoryId,
doc_id,
lines,
version,
pathname,
callback
)
}
)
} else {
logger.log(
{ project_id, doc_id },
'resyncing doc contents - doc in redis - will queue in redis'
)
return ProjectHistoryRedisManager.queueResyncDocContent(
project_id,
projectHistoryId,
@ -591,23 +640,9 @@ module.exports = DocumentManager = {
pathname,
callback
)
})
} else {
logger.log(
{ project_id, doc_id },
'resyncing doc contents - doc in redis - will queue in redis'
)
return ProjectHistoryRedisManager.queueResyncDocContent(
project_id,
projectHistoryId,
doc_id,
lines,
version,
pathname,
callback
)
}
}
})
)
},
getDocWithLock(project_id, doc_id, callback) {
@ -769,5 +804,5 @@ module.exports = DocumentManager = {
doc_id,
callback
)
}
},
}

Errors.js

@ -41,5 +41,5 @@ module.exports = Errors = {
NotFoundError,
OpRangeNotAvailableError,
ProjectStateChangedError,
DeleteMismatchError
DeleteMismatchError,
}

HistoryManager.js

@ -15,7 +15,7 @@ let HistoryManager
const async = require('async')
const logger = require('logger-sharelatex')
const request = require('request')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const HistoryRedisManager = require('./HistoryRedisManager')
const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager')
const RedisManager = require('./RedisManager')
@ -32,44 +32,44 @@ module.exports = HistoryManager = {
)
return
}
return RedisManager.getHistoryType(doc_id, function (
err,
projectHistoryType
) {
if (err != null) {
logger.warn({ err, doc_id }, 'error getting history type')
return RedisManager.getHistoryType(
doc_id,
function (err, projectHistoryType) {
if (err != null) {
logger.warn({ err, doc_id }, 'error getting history type')
}
// if there's an error continue and flush to track-changes for safety
if (
Settings.disableDoubleFlush &&
projectHistoryType === 'project-history'
) {
return logger.debug(
{ doc_id, projectHistoryType },
'skipping track-changes flush'
)
} else {
metrics.inc('history-flush', 1, { status: 'track-changes' })
const url = `${Settings.apis.trackchanges.url}/project/${project_id}/doc/${doc_id}/flush`
logger.log(
{ project_id, doc_id, url, projectHistoryType },
'flushing doc in track changes api'
)
return request.post(url, function (error, res, body) {
if (error != null) {
return logger.error(
{ error, doc_id, project_id },
'track changes doc to track changes api'
)
} else if (res.statusCode < 200 && res.statusCode >= 300) {
return logger.error(
{ doc_id, project_id },
`track changes api returned a failure status code: ${res.statusCode}`
)
}
})
}
}
// if there's an error continue and flush to track-changes for safety
if (
Settings.disableDoubleFlush &&
projectHistoryType === 'project-history'
) {
return logger.debug(
{ doc_id, projectHistoryType },
'skipping track-changes flush'
)
} else {
metrics.inc('history-flush', 1, { status: 'track-changes' })
const url = `${Settings.apis.trackchanges.url}/project/${project_id}/doc/${doc_id}/flush`
logger.log(
{ project_id, doc_id, url, projectHistoryType },
'flushing doc in track changes api'
)
return request.post(url, function (error, res, body) {
if (error != null) {
return logger.error(
{ error, doc_id, project_id },
'track changes doc to track changes api'
)
} else if (res.statusCode < 200 && res.statusCode >= 300) {
return logger.error(
{ doc_id, project_id },
`track changes api returned a failure status code: ${res.statusCode}`
)
}
})
}
})
)
},
// flush changes in the background
@ -77,7 +77,7 @@ module.exports = HistoryManager = {
if (
!__guard__(
Settings.apis != null ? Settings.apis.project_history : undefined,
(x) => x.enabled
x => x.enabled
)
) {
return
@ -97,7 +97,7 @@ module.exports = HistoryManager = {
if (
!__guard__(
Settings.apis != null ? Settings.apis.project_history : undefined,
(x) => x.enabled
x => x.enabled
)
) {
return callback()
@ -157,7 +157,7 @@ module.exports = HistoryManager = {
if (
__guard__(
Settings.apis != null ? Settings.apis.project_history : undefined,
(x) => x.enabled
x => x.enabled
)
) {
if (
@ -253,7 +253,7 @@ module.exports = HistoryManager = {
)
}
)
}
},
}
function __guard__(value, transform) {
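
The __guard__ helper is boilerplate emitted by decaffeinate; its body is cut off by the diff context here, but in decaffeinate output it is the null-safe call wrapper, along these lines (shown as an assumption, not part of this diff):

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}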

HistoryRedisManager.js

@ -12,7 +12,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let HistoryRedisManager
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const rclient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.history
)
@ -41,5 +41,5 @@ module.exports = HistoryRedisManager = {
return callback()
}
)
}
},
}

HttpController.js

@ -3,7 +3,7 @@ const HistoryManager = require('./HistoryManager')
const ProjectManager = require('./ProjectManager')
const Errors = require('./Errors')
const logger = require('logger-sharelatex')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const Metrics = require('./Metrics')
const ProjectFlusher = require('./ProjectFlusher')
const DeleteQueueManager = require('./DeleteQueueManager')
@ -24,7 +24,7 @@ module.exports = {
updateProject,
resyncProjectHistory,
flushAllProjects,
flushQueuedProjects
flushQueuedProjects,
}
function getDoc(req, res, next) {
@ -59,7 +59,7 @@ function getDoc(req, res, next) {
version,
ops,
ranges,
pathname
pathname,
})
}
)
@ -104,7 +104,7 @@ function getProjectDocsAndFlushIfOld(req, res, next) {
logger.log(
{
projectId,
result: result.map((doc) => `${doc._id}:${doc.v}`)
result: result.map(doc => `${doc._id}:${doc.v}`),
},
'got docs via http'
)
@ -118,7 +118,7 @@ function clearProjectState(req, res, next) {
const projectId = req.params.project_id
const timer = new Metrics.Timer('http.clearProjectState')
logger.log({ projectId }, 'clearing project state via http')
ProjectManager.clearProjectState(projectId, (error) => {
ProjectManager.clearProjectState(projectId, error => {
timer.done()
if (error) {
next(error)
@ -152,7 +152,7 @@ function setDoc(req, res, next) {
source,
userId,
undoing,
(error) => {
error => {
timer.done()
if (error) {
return next(error)
@ -168,7 +168,7 @@ function flushDocIfLoaded(req, res, next) {
const projectId = req.params.project_id
logger.log({ projectId, docId }, 'flushing doc via http')
const timer = new Metrics.Timer('http.flushDoc')
DocumentManager.flushDocIfLoadedWithLock(projectId, docId, (error) => {
DocumentManager.flushDocIfLoadedWithLock(projectId, docId, error => {
timer.done()
if (error) {
return next(error)
@ -188,7 +188,7 @@ function deleteDoc(req, res, next) {
projectId,
docId,
{ ignoreFlushErrors },
(error) => {
error => {
timer.done()
// There is no harm in flushing project history if the previous call
// failed and sometimes it is required
@ -207,7 +207,7 @@ function flushProject(req, res, next) {
const projectId = req.params.project_id
logger.log({ projectId }, 'flushing project via http')
const timer = new Metrics.Timer('http.flushProject')
ProjectManager.flushProjectWithLocks(projectId, (error) => {
ProjectManager.flushProjectWithLocks(projectId, error => {
timer.done()
if (error) {
return next(error)
@ -228,7 +228,7 @@ function deleteProject(req, res, next) {
options.skip_history_flush = true
} // don't flush history when realtime shuts down
if (req.query.background) {
ProjectManager.queueFlushAndDeleteProject(projectId, (error) => {
ProjectManager.queueFlushAndDeleteProject(projectId, error => {
if (error) {
return next(error)
}
@ -237,18 +237,14 @@ function deleteProject(req, res, next) {
}) // No Content
} else {
const timer = new Metrics.Timer('http.deleteProject')
ProjectManager.flushAndDeleteProjectWithLocks(
projectId,
options,
(error) => {
timer.done()
if (error) {
return next(error)
}
logger.log({ projectId }, 'deleted project via http')
res.sendStatus(204) // No Content
ProjectManager.flushAndDeleteProjectWithLocks(projectId, options, error => {
timer.done()
if (error) {
return next(error)
}
)
logger.log({ projectId }, 'deleted project via http')
res.sendStatus(204) // No Content
})
}
}
@ -261,7 +257,7 @@ function deleteMultipleProjects(req, res, next) {
logger.log({ projectId }, 'queue delete of project via http')
ProjectManager.queueFlushAndDeleteProject(projectId, cb)
},
(error) => {
error => {
if (error) {
return next(error)
}
@ -281,45 +277,35 @@ function acceptChanges(req, res, next) {
`accepting ${changeIds.length} changes via http`
)
const timer = new Metrics.Timer('http.acceptChanges')
DocumentManager.acceptChangesWithLock(
projectId,
docId,
changeIds,
(error) => {
timer.done()
if (error) {
return next(error)
}
logger.log(
{ projectId, docId },
`accepted ${changeIds.length} changes via http`
)
res.sendStatus(204) // No Content
DocumentManager.acceptChangesWithLock(projectId, docId, changeIds, error => {
timer.done()
if (error) {
return next(error)
}
)
logger.log(
{ projectId, docId },
`accepted ${changeIds.length} changes via http`
)
res.sendStatus(204) // No Content
})
}
function deleteComment(req, res, next) {
const {
project_id: projectId,
doc_id: docId,
comment_id: commentId
comment_id: commentId,
} = req.params
logger.log({ projectId, docId, commentId }, 'deleting comment via http')
const timer = new Metrics.Timer('http.deleteComment')
DocumentManager.deleteCommentWithLock(
projectId,
docId,
commentId,
(error) => {
timer.done()
if (error) {
return next(error)
}
logger.log({ projectId, docId, commentId }, 'deleted comment via http')
res.sendStatus(204) // No Content
DocumentManager.deleteCommentWithLock(projectId, docId, commentId, error => {
timer.done()
if (error) {
return next(error)
}
)
logger.log({ projectId, docId, commentId }, 'deleted comment via http')
res.sendStatus(204) // No Content
})
}
function updateProject(req, res, next) {
@ -333,7 +319,7 @@ function updateProject(req, res, next) {
userId,
updates,
version,
(error) => {
error => {
timer.done()
if (error) {
return next(error)
@ -357,7 +343,7 @@ function resyncProjectHistory(req, res, next) {
projectHistoryId,
docs,
files,
(error) => {
error => {
if (error) {
return next(error)
}
@ -372,7 +358,7 @@ function flushAllProjects(req, res, next) {
const options = {
limit: req.query.limit || 1000,
concurrency: req.query.concurrency || 5,
dryRun: req.query.dryRun || false
dryRun: req.query.dryRun || false,
}
ProjectFlusher.flushAllProjects(options, (err, projectIds) => {
if (err) {
@ -389,7 +375,7 @@ function flushQueuedProjects(req, res, next) {
const options = {
limit: req.query.limit || 1000,
timeout: 5 * 60 * 1000,
min_delete_age: req.query.min_delete_age || 5 * 60 * 1000
min_delete_age: req.query.min_delete_age || 5 * 60 * 1000,
}
DeleteQueueManager.flushAndDeleteOldProjects(options, (err, flushed) => {
if (err) {

LockManager.js

@ -13,7 +13,7 @@
*/
let LockManager
const metrics = require('./Metrics')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const redis = require('@overleaf/redis-wrapper')
const rclient = redis.createClient(Settings.redis.lock)
const keys = Settings.redis.lock.key_schema
@ -54,36 +54,41 @@ module.exports = LockManager = {
const lockValue = LockManager.randomLock()
const key = keys.blockingKey({ doc_id })
const profile = new Profiler('tryLock', { doc_id, key, lockValue })
return rclient.set(key, lockValue, 'EX', this.LOCK_TTL, 'NX', function (
err,
gotLock
) {
if (err != null) {
return callback(err)
}
if (gotLock === 'OK') {
metrics.inc('doc-not-blocking')
const timeTaken = profile.log('got lock').end()
if (timeTaken > MAX_REDIS_REQUEST_LENGTH) {
// took too long, so try to free the lock
return LockManager.releaseLock(doc_id, lockValue, function (
err,
result
) {
if (err != null) {
return callback(err)
} // error freeing lock
return callback(null, false)
}) // tell caller they didn't get the lock
} else {
return callback(null, true, lockValue)
return rclient.set(
key,
lockValue,
'EX',
this.LOCK_TTL,
'NX',
function (err, gotLock) {
if (err != null) {
return callback(err)
}
if (gotLock === 'OK') {
metrics.inc('doc-not-blocking')
const timeTaken = profile.log('got lock').end()
if (timeTaken > MAX_REDIS_REQUEST_LENGTH) {
// took too long, so try to free the lock
return LockManager.releaseLock(
doc_id,
lockValue,
function (err, result) {
if (err != null) {
return callback(err)
} // error freeing lock
return callback(null, false)
}
) // tell caller they didn't get the lock
} else {
return callback(null, true, lockValue)
}
} else {
metrics.inc('doc-blocking')
profile.log('doc is locked').end()
return callback(null, false)
}
} else {
metrics.inc('doc-blocking')
profile.log('doc is locked').end()
return callback(null, false)
}
})
)
},
getLock(doc_id, callback) {
@ -145,25 +150,28 @@ module.exports = LockManager = {
releaseLock(doc_id, lockValue, callback) {
const key = keys.blockingKey({ doc_id })
const profile = new Profiler('releaseLock', { doc_id, key, lockValue })
return rclient.eval(LockManager.unlockScript, 1, key, lockValue, function (
err,
result
) {
if (err != null) {
return callback(err)
} else if (result != null && result !== 1) {
// successful unlock should release exactly one key
profile.log('unlockScript:expired-lock').end()
logger.error(
{ doc_id, key, lockValue, redis_err: err, redis_result: result },
'unlocking error'
)
metrics.inc('unlock-error')
return callback(new Error('tried to release timed out lock'))
} else {
profile.log('unlockScript:ok').end()
return callback(null, result)
return rclient.eval(
LockManager.unlockScript,
1,
key,
lockValue,
function (err, result) {
if (err != null) {
return callback(err)
} else if (result != null && result !== 1) {
// successful unlock should release exactly one key
profile.log('unlockScript:expired-lock').end()
logger.error(
{ doc_id, key, lockValue, redis_err: err, redis_result: result },
'unlocking error'
)
metrics.inc('unlock-error')
return callback(new Error('tried to release timed out lock'))
} else {
profile.log('unlockScript:ok').end()
return callback(null, result)
}
}
})
}
)
},
}
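
The rclient.eval call above runs LockManager.unlockScript with one key and the lock value, the standard compare-and-delete pattern for Redis locks: the key is deleted only if it still holds our lockValue, so a lock that timed out and was re-acquired by another worker is left alone (hence the result !== 1 branch). The script body itself is not part of this diff; a typical definition, shown as an assumption:

// Typical unlock script: atomic compare-and-delete on the Redis server
LockManager.unlockScript = `
  if redis.call("get", KEYS[1]) == ARGV[1] then
    return redis.call("del", KEYS[1])
  else
    return 0
  end`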

LoggerSerializers.js

@ -25,19 +25,19 @@ const showUpdateLength = function (update) {
const copy = _.cloneDeep(update)
copy.op.forEach(function (element, index) {
if (
__guard__(element != null ? element.i : undefined, (x) => x.length) !=
__guard__(element != null ? element.i : undefined, x => x.length) !=
null
) {
copy.op[index].i = element.i.length
}
if (
__guard__(element != null ? element.d : undefined, (x1) => x1.length) !=
__guard__(element != null ? element.d : undefined, x1 => x1.length) !=
null
) {
copy.op[index].d = element.d.length
}
if (
__guard__(element != null ? element.c : undefined, (x2) => x2.length) !=
__guard__(element != null ? element.c : undefined, x2 => x2.length) !=
null
) {
return (copy.op[index].c = element.c.length)
@ -57,7 +57,7 @@ module.exports = {
docLines: showLength,
newDocLines: showLength,
ranges: showLength,
update: showUpdateLength
update: showUpdateLength,
}
function __guard__(value, transform) {

PersistenceManager.js

@ -15,13 +15,13 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let PersistenceManager
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const Errors = require('./Errors')
const Metrics = require('./Metrics')
const logger = require('logger-sharelatex')
const request = require('requestretry').defaults({
maxAttempts: 2,
retryDelay: 10
retryDelay: 10,
})
// We have to be quick with HTTP calls because we're holding a lock that
@ -75,15 +75,15 @@ module.exports = PersistenceManager = {
url: `${Settings.apis.web.url}${urlPath}`,
method: 'GET',
headers: {
accept: 'application/json'
accept: 'application/json',
},
auth: {
user: Settings.apis.web.user,
pass: Settings.apis.web.pass,
sendImmediately: true
sendImmediately: true,
},
jar: false,
timeout: MAX_HTTP_REQUEST_LENGTH
timeout: MAX_HTTP_REQUEST_LENGTH,
},
function (error, res, body) {
updateMetric('getDoc', error, res)
@ -164,15 +164,15 @@ module.exports = PersistenceManager = {
ranges,
version,
lastUpdatedBy,
lastUpdatedAt
lastUpdatedAt,
},
auth: {
user: Settings.apis.web.user,
pass: Settings.apis.web.pass,
sendImmediately: true
sendImmediately: true,
},
jar: false,
timeout: MAX_HTTP_REQUEST_LENGTH
timeout: MAX_HTTP_REQUEST_LENGTH,
},
function (error, res, body) {
updateMetric('setDoc', error, res)
@ -196,5 +196,5 @@ module.exports = PersistenceManager = {
}
}
)
}
},
}

Profiler.js

@ -9,7 +9,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let Profiler
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const deltaMs = function (ta, tb) {

ProjectFlusher.js

@ -13,7 +13,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const request = require('request')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const RedisManager = require('./RedisManager')
const { rclient } = RedisManager
const docUpdaterKeys = Settings.redis.documentupdater.key_schema
@ -45,27 +45,31 @@ var ProjectFlusher = {
var doIteration = (
cb // avoid hitting redis too hard
) =>
node.scan(cursor, 'MATCH', pattern, 'COUNT', batchSize, function (
error,
reply
) {
let keys
if (error != null) {
return callback(error)
node.scan(
cursor,
'MATCH',
pattern,
'COUNT',
batchSize,
function (error, reply) {
let keys
if (error != null) {
return callback(error)
}
;[cursor, keys] = Array.from(reply)
for (const key of Array.from(keys)) {
keySet[key] = true
}
keys = Object.keys(keySet)
const noResults = cursor === '0' // redis returns string results not numeric
const limitReached = limit != null && keys.length >= limit
if (noResults || limitReached) {
return callback(null, keys)
} else {
return setTimeout(doIteration, 10)
}
}
;[cursor, keys] = Array.from(reply)
for (const key of Array.from(keys)) {
keySet[key] = true
}
keys = Object.keys(keySet)
const noResults = cursor === '0' // redis returns string results not numeric
const limitReached = limit != null && keys.length >= limit
if (noResults || limitReached) {
return callback(null, keys)
} else {
return setTimeout(doIteration, 10)
}
})
)
return doIteration()
},
@ -97,12 +101,14 @@ var ProjectFlusher = {
if (options.dryRun) {
return callback(null, project_ids)
}
const jobs = _.map(project_ids, (project_id) => (cb) =>
ProjectManager.flushAndDeleteProjectWithLocks(
project_id,
{ background: true },
cb
)
const jobs = _.map(
project_ids,
project_id => cb =>
ProjectManager.flushAndDeleteProjectWithLocks(
project_id,
{ background: true },
cb
)
)
return async.parallelLimit(
async.reflectAll(jobs),
@ -123,7 +129,7 @@ var ProjectFlusher = {
)
}
)
}
},
}
module.exports = ProjectFlusher

ProjectHistoryRedisManager.js

@ -14,10 +14,10 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ProjectHistoryRedisManager
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const projectHistoryKeys = __guard__(
Settings.redis != null ? Settings.redis.project_history : undefined,
(x) => x.key_schema
x => x.key_schema
)
const rclient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.project_history
@ -70,10 +70,10 @@ module.exports = ProjectHistoryRedisManager = {
new_pathname: projectUpdate.newPathname,
meta: {
user_id,
ts: new Date()
ts: new Date(),
},
version: projectUpdate.version,
projectHistoryId
projectHistoryId,
}
projectUpdate[entity_type] = entity_id
@ -104,10 +104,10 @@ module.exports = ProjectHistoryRedisManager = {
url: projectUpdate.url,
meta: {
user_id,
ts: new Date()
ts: new Date(),
},
version: projectUpdate.version,
projectHistoryId
projectHistoryId,
}
projectUpdate[entity_type] = entitiy_id
@ -132,8 +132,8 @@ module.exports = ProjectHistoryRedisManager = {
resyncProjectStructure: { docs, files },
projectHistoryId,
meta: {
ts: new Date()
}
ts: new Date(),
},
}
const jsonUpdate = JSON.stringify(projectUpdate)
return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback)
@ -155,18 +155,18 @@ module.exports = ProjectHistoryRedisManager = {
const projectUpdate = {
resyncDocContent: {
content: lines.join('\n'),
version
version,
},
projectHistoryId,
path: pathname,
doc: doc_id,
meta: {
ts: new Date()
}
ts: new Date(),
},
}
const jsonUpdate = JSON.stringify(projectUpdate)
return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback)
}
},
}
function __guard__(value, transform) {

ProjectManager.js

@ -14,7 +14,7 @@ module.exports = {
getProjectDocsTimestamps,
getProjectDocsAndFlushIfOld,
clearProjectState,
updateProjectWithLocks
updateProjectWithLocks,
}
function flushProjectWithLocks(projectId, _callback) {
@ -29,8 +29,8 @@ function flushProjectWithLocks(projectId, _callback) {
return callback(error)
}
const errors = []
const jobs = docIds.map((docId) => (callback) => {
DocumentManager.flushDocIfLoadedWithLock(projectId, docId, (error) => {
const jobs = docIds.map(docId => callback => {
DocumentManager.flushDocIfLoadedWithLock(projectId, docId, error => {
if (error instanceof Errors.NotFoundError) {
logger.warn(
{ err: error, projectId, docId },
@ -72,19 +72,14 @@ function flushAndDeleteProjectWithLocks(projectId, options, _callback) {
return callback(error)
}
const errors = []
const jobs = docIds.map((docId) => (callback) => {
DocumentManager.flushAndDeleteDocWithLock(
projectId,
docId,
{},
(error) => {
if (error) {
logger.error({ err: error, projectId, docId }, 'error deleting doc')
errors.push(error)
}
callback()
const jobs = docIds.map(docId => callback => {
DocumentManager.flushAndDeleteDocWithLock(projectId, docId, {}, error => {
if (error) {
logger.error({ err: error, projectId, docId }, 'error deleting doc')
errors.push(error)
}
)
callback()
})
})
logger.log({ projectId, docIds }, 'deleting docs')
@ -93,7 +88,7 @@ function flushAndDeleteProjectWithLocks(projectId, options, _callback) {
// history is completely flushed because the project may be
// deleted in web after this call completes, and so further
// attempts to flush would fail after that.
HistoryManager.flushProjectChanges(projectId, options, (error) => {
HistoryManager.flushProjectChanges(projectId, options, error => {
if (errors.length > 0) {
callback(new Error('Errors deleting docs. See log for details'))
} else if (error) {
@ -107,7 +102,7 @@ function flushAndDeleteProjectWithLocks(projectId, options, _callback) {
}
function queueFlushAndDeleteProject(projectId, callback) {
RedisManager.queueFlushAndDeleteProject(projectId, (error) => {
RedisManager.queueFlushAndDeleteProject(projectId, error => {
if (error) {
logger.error(
{ projectId, error },
@ -176,7 +171,7 @@ function getProjectDocsAndFlushIfOld(
return callback(error)
}
// get the doc lines from redis
const jobs = docIds.map((docId) => (cb) => {
const jobs = docIds.map(docId => cb => {
DocumentManager.getDocAndFlushIfOldWithLock(
projectId,
docId,
@ -288,7 +283,7 @@ function updateProjectWithLocks(
}
}
async.eachSeries(updates, handleUpdate, (error) => {
async.eachSeries(updates, handleUpdate, error => {
if (error) {
return callback(error)
}

RangesManager.js

@ -42,7 +42,7 @@ module.exports = RangesManager = {
for (const op of Array.from(update.op)) {
try {
rangesTracker.applyOp(op, {
user_id: update.meta != null ? update.meta.user_id : undefined
user_id: update.meta != null ? update.meta.user_id : undefined,
})
} catch (error1) {
error = error1
@ -86,7 +86,7 @@ module.exports = RangesManager = {
response.changes != null ? response.changes.length : undefined,
commentsCount:
response.comments != null ? response.comments.length : undefined,
rangesWereCollapsed
rangesWereCollapsed,
},
'applied updates to ranges'
)
@ -159,5 +159,5 @@ module.exports = RangesManager = {
}
}
return count
}
},
}

RangesTracker.js

@ -120,7 +120,7 @@ const load = function () {
if (comment == null) {
return
}
this.comments = this.comments.filter((c) => c.id !== comment_id)
this.comments = this.comments.filter(c => c.id !== comment_id)
return this._markAsDirty(comment, 'comment', 'removed')
}
@ -257,7 +257,7 @@ const load = function () {
if (metadata == null) {
metadata = {}
}
return Array.from(ops).map((op) => this.applyOp(op, metadata))
return Array.from(ops).map(op => this.applyOp(op, metadata))
}
addComment(op, metadata) {
@ -274,9 +274,9 @@ const load = function () {
// Copy because we'll modify in place
c: op.c,
p: op.p,
t: op.t
t: op.t,
},
metadata
metadata,
})
)
this._markAsDirty(comment, 'comment', 'added')
@ -488,9 +488,9 @@ const load = function () {
const after_change = {
op: {
i: after_content,
p: change_start + offset + op_length
p: change_start + offset + op_length,
},
metadata: {}
metadata: {},
}
for (const key in change.metadata) {
const value = change.metadata[key]
@ -606,7 +606,7 @@ const load = function () {
delete_removed_start,
delete_removed_start + delete_removed_length
),
p: delete_removed_start
p: delete_removed_start,
}
if (modification.d.length > 0) {
op_modifications.push(modification)
@ -643,7 +643,7 @@ const load = function () {
// Copy rather than modify because we still need to apply it to comments
op = {
p: op.p,
d: this._applyOpModifications(op.d, op_modifications)
d: this._applyOpModifications(op.d, op_modifications),
}
for (change of Array.from(remove_changes)) {
@ -678,7 +678,7 @@ const load = function () {
moved_changes = moved_changes.concat(results.moved_changes)
for (change of Array.from(results.remove_changes)) {
this._removeChange(change)
moved_changes = moved_changes.filter((c) => c !== change)
moved_changes = moved_changes.filter(c => c !== change)
}
}
@ -695,7 +695,7 @@ const load = function () {
const change = {
id: this.newId(),
op: this._clone(op), // Don't take a reference to the existing op since we'll modify this in place with future changes
metadata: this._clone(metadata)
metadata: this._clone(metadata),
}
this.changes.push(change)
@ -717,7 +717,7 @@ const load = function () {
}
_removeChange(change) {
this.changes = this.changes.filter((c) => c.id !== change.id)
this.changes = this.changes.filter(c => c.id !== change.id)
return this._markAsDirty(change, 'change', 'removed')
}
@ -813,13 +813,13 @@ const load = function () {
comment: {
moved: {},
removed: {},
added: {}
added: {},
},
change: {
moved: {},
removed: {},
added: {}
}
added: {},
},
})
}

RateLimiter.js

@ -10,7 +10,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let RateLimiter
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
@ -47,7 +47,7 @@ module.exports = RateLimiter = class RateLimiter {
}
this.ActiveWorkerCount++
Metrics.gauge('processingUpdates', this.ActiveWorkerCount)
return task((err) => {
return task(err => {
this.ActiveWorkerCount--
Metrics.gauge('processingUpdates', this.ActiveWorkerCount)
return callback(err)
@ -65,11 +65,11 @@ module.exports = RateLimiter = class RateLimiter {
logger.log(
{
active: this.ActiveWorkerCount,
currentLimit: Math.ceil(this.CurrentWorkerLimit)
currentLimit: Math.ceil(this.CurrentWorkerLimit),
},
'hit rate limit'
)
return this._trackAndRun(task, (err) => {
return this._trackAndRun(task, err => {
if (err == null) {
this._adjustLimitUp()
} // don't increment rate limit if there was an error

RealTimeRedisManager.js

@ -12,7 +12,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let RealTimeRedisManager
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const rclient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.documentupdater
)
@ -45,7 +45,7 @@ module.exports = RealTimeRedisManager = {
for (jsonUpdate of Array.from(jsonUpdates)) {
// record metric for each update removed from queue
metrics.summary('redis.pendingUpdates', jsonUpdate.length, {
status: 'pop'
status: 'pop',
})
}
const updates = []
@ -83,5 +83,5 @@ module.exports = RealTimeRedisManager = {
} else {
return pubsubClient.publish('applied-ops', blob)
}
}
},
}

RedisManager.js

@ -14,7 +14,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let RedisManager
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const rclient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.documentupdater
)
@ -92,7 +92,7 @@ module.exports = RedisManager = {
return callback(error)
}
// update docsInProject set before writing doc contents
rclient.sadd(keys.docsInProject({ project_id }), doc_id, (error) => {
rclient.sadd(keys.docsInProject({ project_id }), doc_id, error => {
if (error) return callback(error)
rclient.mset(
@ -103,7 +103,7 @@ module.exports = RedisManager = {
[keys.docHash({ doc_id })]: docHash,
[keys.ranges({ doc_id })]: ranges,
[keys.pathname({ doc_id })]: pathname,
[keys.projectHistoryId({ doc_id })]: projectHistoryId
[keys.projectHistoryId({ doc_id })]: projectHistoryId,
},
callback
)
@ -203,7 +203,7 @@ module.exports = RedisManager = {
keys.projectHistoryId({ doc_id }),
keys.unflushedTime({ doc_id }),
keys.lastUpdatedAt({ doc_id }),
keys.lastUpdatedBy({ doc_id })
keys.lastUpdatedBy({ doc_id }),
]
rclient.mget(...collectKeys, (error, ...rest) => {
let [
@ -216,7 +216,7 @@ module.exports = RedisManager = {
projectHistoryId,
unflushedTime,
lastUpdatedAt,
lastUpdatedBy
lastUpdatedBy,
] = Array.from(rest[0])
const timeSpan = timer.done()
if (error != null) {
@ -244,7 +244,7 @@ module.exports = RedisManager = {
doc_project_id,
computedHash,
storedHash,
docLines
docLines,
},
'hash mismatch on retrieved document'
)
@ -325,62 +325,64 @@ module.exports = RedisManager = {
if (error != null) {
return callback(error)
}
return rclient.get(keys.docVersion({ doc_id }), function (
error,
version
) {
if (error != null) {
return callback(error)
}
version = parseInt(version, 10)
const first_version_in_redis = version - length
if (start < first_version_in_redis || end > version) {
error = new Errors.OpRangeNotAvailableError(
'doc ops range is not loaded in redis'
)
logger.warn(
{ err: error, doc_id, length, version, start, end },
'doc ops range is not loaded in redis'
)
return callback(error)
}
start = start - first_version_in_redis
if (end > -1) {
end = end - first_version_in_redis
}
if (isNaN(start) || isNaN(end)) {
error = new Error('inconsistent version or lengths')
logger.error(
{ err: error, doc_id, length, version, start, end },
'inconsistent version or length'
)
return callback(error)
}
return rclient.lrange(keys.docOps({ doc_id }), start, end, function (
error,
jsonOps
) {
let ops
return rclient.get(
keys.docVersion({ doc_id }),
function (error, version) {
if (error != null) {
return callback(error)
}
try {
ops = jsonOps.map((jsonOp) => JSON.parse(jsonOp))
} catch (e) {
return callback(e)
}
const timeSpan = timer.done()
if (timeSpan > MAX_REDIS_REQUEST_LENGTH) {
error = new Error('redis getPreviousDocOps exceeded timeout')
version = parseInt(version, 10)
const first_version_in_redis = version - length
if (start < first_version_in_redis || end > version) {
error = new Errors.OpRangeNotAvailableError(
'doc ops range is not loaded in redis'
)
logger.warn(
{ err: error, doc_id, length, version, start, end },
'doc ops range is not loaded in redis'
)
return callback(error)
}
return callback(null, ops)
})
})
start = start - first_version_in_redis
if (end > -1) {
end = end - first_version_in_redis
}
if (isNaN(start) || isNaN(end)) {
error = new Error('inconsistent version or lengths')
logger.error(
{ err: error, doc_id, length, version, start, end },
'inconsistent version or length'
)
return callback(error)
}
return rclient.lrange(
keys.docOps({ doc_id }),
start,
end,
function (error, jsonOps) {
let ops
if (error != null) {
return callback(error)
}
try {
ops = jsonOps.map(jsonOp => JSON.parse(jsonOp))
} catch (e) {
return callback(e)
}
const timeSpan = timer.done()
if (timeSpan > MAX_REDIS_REQUEST_LENGTH) {
error = new Error('redis getPreviousDocOps exceeded timeout')
return callback(error)
}
return callback(null, ops)
}
)
}
)
})
},
@ -388,15 +390,15 @@ module.exports = RedisManager = {
if (callback == null) {
callback = function (error, projectHistoryType) {}
}
return rclient.get(keys.projectHistoryType({ doc_id }), function (
error,
projectHistoryType
) {
if (error != null) {
return callback(error)
return rclient.get(
keys.projectHistoryType({ doc_id }),
function (error, projectHistoryType) {
if (error != null) {
return callback(error)
}
return callback(null, projectHistoryType)
}
return callback(null, projectHistoryType)
})
)
},
setHistoryType(doc_id, projectHistoryType, callback) {
@ -428,192 +430,198 @@ module.exports = RedisManager = {
if (callback == null) {
callback = function (error) {}
}
return RedisManager.getDocVersion(doc_id, function (
error,
currentVersion,
projectHistoryType
) {
if (error != null) {
return callback(error)
}
if (currentVersion + appliedOps.length !== newVersion) {
error = new Error(`Version mismatch. '${doc_id}' is corrupted.`)
logger.error(
{
err: error,
doc_id,
currentVersion,
newVersion,
opsLength: appliedOps.length
},
'version mismatch'
)
return callback(error)
}
const jsonOps = appliedOps.map((op) => JSON.stringify(op))
for (const op of Array.from(jsonOps)) {
if (op.indexOf('\u0000') !== -1) {
error = new Error('null bytes found in jsonOps')
// this check was added to catch memory corruption in JSON.stringify
logger.error({ err: error, doc_id, jsonOps }, error.message)
return callback(error)
}
}
const newDocLines = JSON.stringify(docLines)
if (newDocLines.indexOf('\u0000') !== -1) {
error = new Error('null bytes found in doc lines')
// this check was added to catch memory corruption in JSON.stringify
logger.error({ err: error, doc_id, newDocLines }, error.message)
return callback(error)
}
// Do a cheap size check on the serialized blob.
if (newDocLines.length > Settings.max_doc_length) {
const err = new Error('blocking doc update: doc is too large')
const docSize = newDocLines.length
logger.error({ project_id, doc_id, err, docSize }, err.message)
return callback(err)
}
const newHash = RedisManager._computeHash(newDocLines)
const opVersions = appliedOps.map((op) => (op != null ? op.v : undefined))
logger.log(
{ doc_id, version: newVersion, hash: newHash, op_versions: opVersions },
'updating doc in redis'
)
// record bytes sent to redis in update
metrics.summary('redis.docLines', newDocLines.length, {
status: 'update'
})
return RedisManager._serializeRanges(ranges, function (error, ranges) {
return RedisManager.getDocVersion(
doc_id,
function (error, currentVersion, projectHistoryType) {
if (error != null) {
logger.error({ err: error, doc_id }, error.message)
return callback(error)
}
if (ranges != null && ranges.indexOf('\u0000') !== -1) {
error = new Error('null bytes found in ranges')
// this check was added to catch memory corruption in JSON.stringify
logger.error({ err: error, doc_id, ranges }, error.message)
if (currentVersion + appliedOps.length !== newVersion) {
error = new Error(`Version mismatch. '${doc_id}' is corrupted.`)
logger.error(
{
err: error,
doc_id,
currentVersion,
newVersion,
opsLength: appliedOps.length,
},
'version mismatch'
)
return callback(error)
}
const multi = rclient.multi()
multi.mset({
[keys.docLines({ doc_id })]: newDocLines,
[keys.docVersion({ doc_id })]: newVersion,
[keys.docHash({ doc_id })]: newHash,
[keys.ranges({ doc_id })]: ranges,
[keys.lastUpdatedAt({ doc_id })]: Date.now(),
[keys.lastUpdatedBy({ doc_id })]: updateMeta && updateMeta.user_id
})
multi.ltrim(
keys.docOps({ doc_id }),
-RedisManager.DOC_OPS_MAX_LENGTH,
-1
) // index 3
// push the ops last so we can get the lengths at fixed index position 7
if (jsonOps.length > 0) {
multi.rpush(keys.docOps({ doc_id }), ...Array.from(jsonOps)) // index 5
// expire must come after rpush, since expire would be a no-op while the list is still empty
multi.expire(keys.docOps({ doc_id }), RedisManager.DOC_OPS_TTL) // index 6
if (projectHistoryType === 'project-history') {
metrics.inc('history-queue', 1, { status: 'skip-track-changes' })
logger.log(
{ doc_id },
'skipping push of uncompressed ops for project using project-history'
)
} else {
// project is using old track-changes history service
metrics.inc('history-queue', 1, { status: 'track-changes' })
multi.rpush(
historyKeys.uncompressedHistoryOps({ doc_id }),
...Array.from(jsonOps)
) // index 7
}
// Set the unflushed timestamp to the current time if the doc
// hasn't been modified before (the content in mongo has been
// valid up to this point). Otherwise leave it alone ("NX" flag).
multi.set(keys.unflushedTime({ doc_id }), Date.now(), 'NX')
}
return multi.exec(function (error, result) {
let docUpdateCount
if (error != null) {
const jsonOps = appliedOps.map(op => JSON.stringify(op))
for (const op of Array.from(jsonOps)) {
if (op.indexOf('\u0000') !== -1) {
error = new Error('null bytes found in jsonOps')
// this check was added to catch memory corruption in JSON.stringify
logger.error({ err: error, doc_id, jsonOps }, error.message)
return callback(error)
}
}
if (projectHistoryType === 'project-history') {
docUpdateCount = undefined // only using project history, don't bother with track-changes
} else {
// project is using old track-changes history service
docUpdateCount = result[4]
}
const newDocLines = JSON.stringify(docLines)
if (newDocLines.indexOf('\u0000') !== -1) {
error = new Error('null bytes found in doc lines')
// this check was added to catch memory corruption in JSON.stringify
logger.error({ err: error, doc_id, newDocLines }, error.message)
return callback(error)
}
// Do a cheap size check on the serialized blob.
if (newDocLines.length > Settings.max_doc_length) {
const err = new Error('blocking doc update: doc is too large')
const docSize = newDocLines.length
logger.error({ project_id, doc_id, err, docSize }, err.message)
return callback(err)
}
const newHash = RedisManager._computeHash(newDocLines)
if (
jsonOps.length > 0 &&
__guard__(
Settings.apis != null ? Settings.apis.project_history : undefined,
(x) => x.enabled
)
) {
metrics.inc('history-queue', 1, { status: 'project-history' })
return ProjectHistoryRedisManager.queueOps(
project_id,
...Array.from(jsonOps),
(error, projectUpdateCount) =>
callback(null, docUpdateCount, projectUpdateCount)
)
} else {
return callback(null, docUpdateCount)
}
const opVersions = appliedOps.map(op => (op != null ? op.v : undefined))
logger.log(
{
doc_id,
version: newVersion,
hash: newHash,
op_versions: opVersions,
},
'updating doc in redis'
)
// record bytes sent to redis in update
metrics.summary('redis.docLines', newDocLines.length, {
status: 'update',
})
})
})
return RedisManager._serializeRanges(ranges, function (error, ranges) {
if (error != null) {
logger.error({ err: error, doc_id }, error.message)
return callback(error)
}
if (ranges != null && ranges.indexOf('\u0000') !== -1) {
error = new Error('null bytes found in ranges')
// this check was added to catch memory corruption in JSON.stringify
logger.error({ err: error, doc_id, ranges }, error.message)
return callback(error)
}
const multi = rclient.multi()
multi.mset({
[keys.docLines({ doc_id })]: newDocLines,
[keys.docVersion({ doc_id })]: newVersion,
[keys.docHash({ doc_id })]: newHash,
[keys.ranges({ doc_id })]: ranges,
[keys.lastUpdatedAt({ doc_id })]: Date.now(),
[keys.lastUpdatedBy({ doc_id })]: updateMeta && updateMeta.user_id,
})
multi.ltrim(
keys.docOps({ doc_id }),
-RedisManager.DOC_OPS_MAX_LENGTH,
-1
) // index 3
// push the ops last so we can get the lengths at fixed index position 7
if (jsonOps.length > 0) {
multi.rpush(keys.docOps({ doc_id }), ...Array.from(jsonOps)) // index 5
// expire must come after rpush, since expire would be a no-op while the list is still empty
multi.expire(keys.docOps({ doc_id }), RedisManager.DOC_OPS_TTL) // index 6
if (projectHistoryType === 'project-history') {
metrics.inc('history-queue', 1, { status: 'skip-track-changes' })
logger.log(
{ doc_id },
'skipping push of uncompressed ops for project using project-history'
)
} else {
// project is using old track-changes history service
metrics.inc('history-queue', 1, { status: 'track-changes' })
multi.rpush(
historyKeys.uncompressedHistoryOps({ doc_id }),
...Array.from(jsonOps)
) // index 7
}
// Set the unflushed timestamp to the current time if the doc
// hasn't been modified before (the content in mongo has been
// valid up to this point). Otherwise leave it alone ("NX" flag).
multi.set(keys.unflushedTime({ doc_id }), Date.now(), 'NX')
}
return multi.exec(function (error, result) {
let docUpdateCount
if (error != null) {
return callback(error)
}
if (projectHistoryType === 'project-history') {
docUpdateCount = undefined // only using project history, don't bother with track-changes
} else {
// project is using old track-changes history service
docUpdateCount = result[4]
}
if (
jsonOps.length > 0 &&
__guard__(
Settings.apis != null
? Settings.apis.project_history
: undefined,
x => x.enabled
)
) {
metrics.inc('history-queue', 1, { status: 'project-history' })
return ProjectHistoryRedisManager.queueOps(
project_id,
...Array.from(jsonOps),
(error, projectUpdateCount) =>
callback(null, docUpdateCount, projectUpdateCount)
)
} else {
return callback(null, docUpdateCount)
}
})
})
}
)
},
renameDoc(project_id, doc_id, user_id, update, projectHistoryId, callback) {
if (callback == null) {
callback = function (error) {}
}
return RedisManager.getDoc(project_id, doc_id, function (
error,
lines,
version
) {
if (error != null) {
return callback(error)
}
return RedisManager.getDoc(
project_id,
doc_id,
function (error, lines, version) {
if (error != null) {
return callback(error)
}
if (lines != null && version != null) {
return rclient.set(
keys.pathname({ doc_id }),
update.newPathname,
function (error) {
if (error != null) {
return callback(error)
if (lines != null && version != null) {
return rclient.set(
keys.pathname({ doc_id }),
update.newPathname,
function (error) {
if (error != null) {
return callback(error)
}
return ProjectHistoryRedisManager.queueRenameEntity(
project_id,
projectHistoryId,
'doc',
doc_id,
user_id,
update,
callback
)
}
return ProjectHistoryRedisManager.queueRenameEntity(
project_id,
projectHistoryId,
'doc',
doc_id,
user_id,
update,
callback
)
}
)
} else {
return ProjectHistoryRedisManager.queueRenameEntity(
project_id,
projectHistoryId,
'doc',
doc_id,
user_id,
update,
callback
)
)
} else {
return ProjectHistoryRedisManager.queueRenameEntity(
project_id,
projectHistoryId,
'doc',
doc_id,
user_id,
update,
callback
)
}
}
})
)
},
clearUnflushedTime(doc_id, callback) {
@ -726,7 +734,7 @@ module.exports = RedisManager = {
// note: must specify 'utf8' encoding explicitly, as the default is
// binary in node < v5
return crypto.createHash('sha1').update(docLines, 'utf8').digest('hex')
}
},
}
function __guard__(value, transform) {
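updateDocument above leans on a MULTI property that is easy to miss: replies come back in exactly the order the commands were queued, which is why the RPUSH reply (the new docOps length) can be read at a fixed index, and why EXPIRE must be queued after RPUSH. A minimal sketch with hypothetical keys, assuming a node_redis-style client whose exec callback yields one reply per queued command:

function updateSketch(rclient, docId, jsonOps, callback) {
  const multi = rclient.multi()
  multi.mset({ [`docLines:{${docId}}`]: '["hello"]' }) // reply index 0
  multi.ltrim(`DocOps:{${docId}}`, -100, -1) // reply index 1
  multi.rpush(`DocOps:{${docId}}`, ...jsonOps) // reply index 2: new list length
  // queued after rpush: EXPIRE is a no-op while the list does not exist yet
  multi.expire(`DocOps:{${docId}}`, 3600) // reply index 3
  multi.exec(function (error, replies) {
    if (error != null) {
      return callback(error)
    }
    return callback(null, replies[2]) // analogous to result[4] in the code above
  })
}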

View file

@ -70,7 +70,7 @@ module.exports = ShareJsDB = class ShareJsDB {
return callback(null, {
snapshot: this.lines.join('\n'),
v: parseInt(this.version, 10),
type: 'text'
type: 'text',
})
}
}

View file

@ -16,7 +16,7 @@ let ShareJsUpdateManager
const ShareJsModel = require('./sharejs/server/model')
const ShareJsDB = require('./ShareJsDB')
const logger = require('logger-sharelatex')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const Keys = require('./UpdateKeys')
const { EventEmitter } = require('events')
const util = require('util')
@ -35,7 +35,7 @@ module.exports = ShareJsUpdateManager = {
const db = new ShareJsDB(project_id, doc_id, lines, version)
const model = new ShareJsModel(db, {
maxDocLength: Settings.max_doc_length,
maximumAge: MAX_AGE_OF_OP
maximumAge: MAX_AGE_OF_OP,
})
model.db = db
return model
@ -141,5 +141,5 @@ module.exports = ShareJsUpdateManager = {
.update('blob ' + content.length + '\x00')
.update(content, 'utf8')
.digest('hex')
}
},
}
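The digest built above follows git's blob-object framing ('blob <length>\x00' + content). One caveat worth knowing: content.length counts UTF-16 code units, so the result only matches git hash-object for pure-ASCII content. A self-contained sketch:

const crypto = require('crypto')

function computeContentHash(content) {
  // git-style blob framing: 'blob <length>\x00<content>'
  return crypto
    .createHash('sha1')
    .update('blob ' + content.length + '\x00')
    .update(content, 'utf8')
    .digest('hex')
}

console.log(computeContentHash('hello world\n'))
// matches `echo 'hello world' | git hash-object --stdin` for ASCII input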

View file

@ -38,7 +38,7 @@ module.exports = SnapshotManager = {
lines,
pathname,
ranges: SnapshotManager.jsonRangesToMongo(ranges),
ts: new Date()
ts: new Date(),
},
callback
)
@ -83,5 +83,5 @@ module.exports = SnapshotManager = {
} catch (error) {
return data
}
}
},
}

View file

@ -9,5 +9,5 @@ module.exports = {
},
splitProjectIdAndDocId(project_and_doc_id) {
return project_and_doc_id.split(':')
}
},
}
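For illustration, the combined-key format this helper undoes (ids are hypothetical):

// Hypothetical ids showing the `${project_id}:${doc_id}` format that
// splitProjectIdAndDocId splits back apart.
const projectAndDocId = '5f1d2c3b0000000000000001:607aa10000000000000002'
const [projectId, docId] = projectAndDocId.split(':')
console.log(projectId, docId)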

View file

@ -20,7 +20,7 @@ const RedisManager = require('./RedisManager')
const RealTimeRedisManager = require('./RealTimeRedisManager')
const ShareJsUpdateManager = require('./ShareJsUpdateManager')
const HistoryManager = require('./HistoryManager')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const _ = require('lodash')
const async = require('async')
const logger = require('logger-sharelatex')
@ -37,15 +37,17 @@ module.exports = UpdateManager = {
callback = function (error) {}
}
const timer = new Metrics.Timer('updateManager.processOutstandingUpdates')
return UpdateManager.fetchAndApplyUpdates(project_id, doc_id, function (
error
) {
timer.done()
if (error != null) {
return callback(error)
return UpdateManager.fetchAndApplyUpdates(
project_id,
doc_id,
function (error) {
timer.done()
if (error != null) {
return callback(error)
}
return callback()
}
return callback()
})
)
},
processOutstandingUpdatesWithLock(project_id, doc_id, callback) {
@ -54,7 +56,7 @@ module.exports = UpdateManager = {
}
const profile = new Profiler('processOutstandingUpdatesWithLock', {
project_id,
doc_id
doc_id,
})
return LockManager.tryLock(doc_id, (error, gotLock, lockValue) => {
if (error != null) {
@ -77,7 +79,7 @@ module.exports = UpdateManager = {
)
}
profile.log('processOutstandingUpdates')
return LockManager.releaseLock(doc_id, lockValue, (error) => {
return LockManager.releaseLock(doc_id, lockValue, error => {
if (error != null) {
return callback(error)
}
@ -155,7 +157,7 @@ module.exports = UpdateManager = {
RealTimeRedisManager.sendData({
project_id,
doc_id,
error: error.message || error
error: error.message || error,
})
profile.log('sendData')
}
@ -166,128 +168,125 @@ module.exports = UpdateManager = {
var profile = new Profiler('applyUpdate', { project_id, doc_id })
UpdateManager._sanitizeUpdate(update)
profile.log('sanitizeUpdate')
return DocumentManager.getDoc(project_id, doc_id, function (
error,
lines,
version,
ranges,
pathname,
projectHistoryId
) {
profile.log('getDoc')
if (error != null) {
return callback(error)
}
if (lines == null || version == null) {
return callback(
new Errors.NotFoundError(`document not found: ${doc_id}`)
)
}
const previousVersion = version
return ShareJsUpdateManager.applyUpdate(
project_id,
doc_id,
update,
lines,
version,
function (error, updatedDocLines, version, appliedOps) {
profile.log('sharejs.applyUpdate')
if (error != null) {
return callback(error)
}
return RangesManager.applyUpdate(
project_id,
doc_id,
ranges,
appliedOps,
updatedDocLines,
function (error, new_ranges, ranges_were_collapsed) {
UpdateManager._addProjectHistoryMetadataToOps(
appliedOps,
pathname,
projectHistoryId,
lines
)
profile.log('RangesManager.applyUpdate')
if (error != null) {
return callback(error)
}
return RedisManager.updateDocument(
project_id,
doc_id,
updatedDocLines,
version,
appliedOps,
new_ranges,
update.meta,
function (error, doc_ops_length, project_ops_length) {
profile.log('RedisManager.updateDocument')
if (error != null) {
return callback(error)
}
return HistoryManager.recordAndFlushHistoryOps(
project_id,
doc_id,
appliedOps,
doc_ops_length,
project_ops_length,
function (error) {
profile.log('recordAndFlushHistoryOps')
if (error != null) {
return callback(error)
}
if (ranges_were_collapsed) {
logger.log(
{
return DocumentManager.getDoc(
project_id,
doc_id,
function (error, lines, version, ranges, pathname, projectHistoryId) {
profile.log('getDoc')
if (error != null) {
return callback(error)
}
if (lines == null || version == null) {
return callback(
new Errors.NotFoundError(`document not found: ${doc_id}`)
)
}
const previousVersion = version
return ShareJsUpdateManager.applyUpdate(
project_id,
doc_id,
update,
lines,
version,
function (error, updatedDocLines, version, appliedOps) {
profile.log('sharejs.applyUpdate')
if (error != null) {
return callback(error)
}
return RangesManager.applyUpdate(
project_id,
doc_id,
ranges,
appliedOps,
updatedDocLines,
function (error, new_ranges, ranges_were_collapsed) {
UpdateManager._addProjectHistoryMetadataToOps(
appliedOps,
pathname,
projectHistoryId,
lines
)
profile.log('RangesManager.applyUpdate')
if (error != null) {
return callback(error)
}
return RedisManager.updateDocument(
project_id,
doc_id,
updatedDocLines,
version,
appliedOps,
new_ranges,
update.meta,
function (error, doc_ops_length, project_ops_length) {
profile.log('RedisManager.updateDocument')
if (error != null) {
return callback(error)
}
return HistoryManager.recordAndFlushHistoryOps(
project_id,
doc_id,
appliedOps,
doc_ops_length,
project_ops_length,
function (error) {
profile.log('recordAndFlushHistoryOps')
if (error != null) {
return callback(error)
}
if (ranges_were_collapsed) {
logger.log(
{
project_id,
doc_id,
previousVersion,
lines,
ranges,
update,
},
'update collapsed some ranges, snapshotting previous content'
)
// Do this last, since it's a mongo call, and so potentially longest running
// If it overruns the lock, it's ok, since all of our redis work is done
return SnapshotManager.recordSnapshot(
project_id,
doc_id,
previousVersion,
pathname,
lines,
ranges,
update
},
'update collapsed some ranges, snapshotting previous content'
)
// Do this last, since it's a mongo call, and so potentially longest running
// If it overruns the lock, it's ok, since all of our redis work is done
return SnapshotManager.recordSnapshot(
project_id,
doc_id,
previousVersion,
pathname,
lines,
ranges,
function (error) {
if (error != null) {
logger.error(
{
err: error,
project_id,
doc_id,
version,
lines,
ranges
},
'error recording snapshot'
)
return callback(error)
} else {
return callback()
function (error) {
if (error != null) {
logger.error(
{
err: error,
project_id,
doc_id,
version,
lines,
ranges,
},
'error recording snapshot'
)
return callback(error)
} else {
return callback()
}
}
}
)
} else {
return callback()
)
} else {
return callback()
}
}
}
)
}
)
}
)
}
)
})
)
}
)
}
)
}
)
}
)
},
lockUpdatesAndDo(method, project_id, doc_id, ...rest) {
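The applyUpdate chain above is a strictly linear pipeline buried in callback nesting. As a reading aid only (the shipped code stays callback-based), an async/await paraphrase with stub managers standing in for the real ones:

const getDoc = async () => ({
  lines: ['a'],
  version: 1,
  ranges: {},
  pathname: '/main.tex',
})
const shareJsApply = async () => ({
  updatedDocLines: ['ab'],
  newVersion: 2,
  appliedOps: [],
})
const applyRanges = async () => ({ newRanges: {}, rangesWereCollapsed: false })
const writeToRedis = async () => ({ docOpsLength: 1, projectOpsLength: 1 })
const flushHistory = async () => {}
const recordSnapshot = async () => {}

async function applyUpdateSketch(projectId, docId, update) {
  const { lines, version, ranges, pathname } = await getDoc(projectId, docId)
  const { updatedDocLines, newVersion, appliedOps } = await shareJsApply(
    update,
    lines,
    version
  )
  const { newRanges, rangesWereCollapsed } = await applyRanges(
    ranges,
    appliedOps,
    updatedDocLines
  )
  const counts = await writeToRedis(updatedDocLines, newVersion, appliedOps, newRanges)
  await flushHistory(appliedOps, counts)
  if (rangesWereCollapsed) {
    // mongo write last: if it overruns the lock, the redis state is already consistent
    await recordSnapshot(version, pathname, lines, ranges)
  }
  return updatedDocLines
}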
@ -313,32 +312,38 @@ module.exports = UpdateManager = {
)
}
profile.log('processOutstandingUpdates')
return method(project_id, doc_id, ...Array.from(args), function (
error,
...response_args
) {
if (error != null) {
return UpdateManager._handleErrorInsideLock(
return method(
project_id,
doc_id,
...Array.from(args),
function (error, ...response_args) {
if (error != null) {
return UpdateManager._handleErrorInsideLock(
doc_id,
lockValue,
error,
callback
)
}
profile.log('method')
return LockManager.releaseLock(
doc_id,
lockValue,
error,
callback
function (error) {
if (error != null) {
return callback(error)
}
profile.log('releaseLock').end()
callback(null, ...Array.from(response_args))
// We held the lock for a while so updates might have queued up
return UpdateManager.continueProcessingUpdatesWithLock(
project_id,
doc_id
)
}
)
}
profile.log('method')
return LockManager.releaseLock(doc_id, lockValue, function (error) {
if (error != null) {
return callback(error)
}
profile.log('releaseLock').end()
callback(null, ...Array.from(response_args))
// We held the lock for a while so updates might have queued up
return UpdateManager.continueProcessingUpdatesWithLock(
project_id,
doc_id
)
})
})
)
}
)
})
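lockUpdatesAndDo applies one discipline throughout: acquire, run the method, release, and always release before reporting a method error so the lock can never leak. A minimal sketch of that pattern against a hypothetical lock API:

// Minimal sketch of the lock discipline above (hypothetical LockManager shape).
function withLock(lockManager, docId, method, callback) {
  lockManager.getLock(docId, function (error, lockValue) {
    if (error != null) {
      return callback(error)
    }
    method(function (methodError, ...results) {
      if (methodError != null) {
        // mirror _handleErrorInsideLock: release first, then surface the error
        return lockManager.releaseLock(docId, lockValue, () =>
          callback(methodError)
        )
      }
      lockManager.releaseLock(docId, lockValue, function (releaseError) {
        if (releaseError != null) {
          return callback(releaseError)
        }
        return callback(null, ...results)
      })
    })
  })
}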
@ -348,7 +353,7 @@ module.exports = UpdateManager = {
if (callback == null) {
callback = function (error) {}
}
return LockManager.releaseLock(doc_id, lockValue, (lock_error) =>
return LockManager.releaseLock(doc_id, lockValue, lock_error =>
callback(original_error)
)
},
@ -408,5 +413,5 @@ module.exports = UpdateManager = {
return result
})()
})
}
},
}

View file

@ -1,4 +1,4 @@
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const { MongoClient, ObjectId } = require('mongodb')
const clientPromise = MongoClient.connect(
@ -33,5 +33,5 @@ module.exports = {
db,
ObjectId,
healthCheck: require('util').callbackify(healthCheck),
waitForDb
waitForDb,
}

View file

@ -34,4 +34,4 @@ exports.compose = function (op1, op2) {
return [op1[0], op1[1] + op2[1]]
}
exports.generateRandomOp = (doc) => [[doc, 1], doc + 1]
exports.generateRandomOp = doc => [[doc, 1], doc + 1]

View file

@ -31,80 +31,84 @@ exports._bt = bootstrapTransform = function (
}
// Transforms rightOp by leftOp. Returns [leftOp', rightOp']
type.transformX = type.transformX = transformX = function (leftOp, rightOp) {
checkValidOp(leftOp)
checkValidOp(rightOp)
type.transformX =
type.transformX =
transformX =
function (leftOp, rightOp) {
checkValidOp(leftOp)
checkValidOp(rightOp)
const newRightOp = []
const newRightOp = []
for (let rightComponent of Array.from(rightOp)) {
// Generate newLeftOp by composing leftOp by rightComponent
const newLeftOp = []
for (let rightComponent of Array.from(rightOp)) {
// Generate newLeftOp by composing leftOp by rightComponent
const newLeftOp = []
let k = 0
while (k < leftOp.length) {
var l
const nextC = []
transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC)
k++
let k = 0
while (k < leftOp.length) {
var l
const nextC = []
transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC)
k++
if (nextC.length === 1) {
rightComponent = nextC[0]
} else if (nextC.length === 0) {
for (l of Array.from(leftOp.slice(k))) {
append(newLeftOp, l)
if (nextC.length === 1) {
rightComponent = nextC[0]
} else if (nextC.length === 0) {
for (l of Array.from(leftOp.slice(k))) {
append(newLeftOp, l)
}
rightComponent = null
break
} else {
// Recurse.
const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC))
for (l of Array.from(l_)) {
append(newLeftOp, l)
}
for (const r of Array.from(r_)) {
append(newRightOp, r)
}
rightComponent = null
break
}
}
rightComponent = null
break
} else {
// Recurse.
const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC))
for (l of Array.from(l_)) {
append(newLeftOp, l)
if (rightComponent != null) {
append(newRightOp, rightComponent)
}
for (const r of Array.from(r_)) {
append(newRightOp, r)
}
rightComponent = null
break
leftOp = newLeftOp
}
}
if (rightComponent != null) {
append(newRightOp, rightComponent)
return [leftOp, newRightOp]
}
leftOp = newLeftOp
}
return [leftOp, newRightOp]
}
// Transforms op with specified type ('left' or 'right') by otherOp.
return (type.transform = type.transform = function (op, otherOp, type) {
let _
if (type !== 'left' && type !== 'right') {
throw new Error("type must be 'left' or 'right'")
}
return (type.transform = type.transform =
function (op, otherOp, type) {
let _
if (type !== 'left' && type !== 'right') {
throw new Error("type must be 'left' or 'right'")
}
if (otherOp.length === 0) {
return op
}
if (otherOp.length === 0) {
return op
}
// TODO: Benchmark with and without this line. I _think_ it'll make a big difference...?
if (op.length === 1 && otherOp.length === 1) {
return transformComponent([], op[0], otherOp[0], type)
}
// TODO: Benchmark with and without this line. I _think_ it'll make a big difference...?
if (op.length === 1 && otherOp.length === 1) {
return transformComponent([], op[0], otherOp[0], type)
}
if (type === 'left') {
let left
;[left, _] = Array.from(transformX(op, otherOp))
return left
} else {
let right
;[_, right] = Array.from(transformX(otherOp, op))
return right
}
})
if (type === 'left') {
let left
;[left, _] = Array.from(transformX(op, otherOp))
return left
} else {
let right
;[_, right] = Array.from(transformX(otherOp, op))
return right
}
})
}
if (typeof WEB === 'undefined') {
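The guarantee transformX provides (both application orders converge on the same document) can be demonstrated with a toy insert-only transform; this is a self-contained illustration, not the library code:

// Apply a position-based insert op to a string.
const apply = (doc, op) =>
  op.reduce((d, c) => d.slice(0, c.p) + c.i + d.slice(c.p), doc)

// Transform insert `c` against a concurrently applied insert `otherC`;
// `side` breaks the tie when both target the same position.
const transformInsert = (c, otherC, side) =>
  c.p < otherC.p || (c.p === otherC.p && side === 'left')
    ? c
    : { i: c.i, p: c.p + otherC.i.length }

const left = [{ i: 'X', p: 1 }] // client A's op
const right = [{ i: 'Y', p: 2 }] // client B's concurrent op
const right_ = right.map(c => transformInsert(c, left[0], 'right'))
const left_ = left.map(c => transformInsert(c, right[0], 'left'))
console.log(apply(apply('abc', left), right_)) // aXbYc
console.log(apply(apply('abc', right), left_)) // aXbYc (same result)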

View file

@ -353,5 +353,5 @@ json.api = {
return result
})()
})
}
},
}

View file

@ -59,12 +59,12 @@ json.invertComponent = function (c) {
return c_
}
json.invert = (op) =>
Array.from(op.slice().reverse()).map((c) => json.invertComponent(c))
json.invert = op =>
Array.from(op.slice().reverse()).map(c => json.invertComponent(c))
json.checkValidOp = function (op) {}
const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]'
const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
json.checkList = function (elem) {
if (!isArray(elem)) {
throw new Error('Referenced element not a list')
@ -264,7 +264,7 @@ json.normalize = function (op) {
// hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming
// we have browser support for JSON.
// http://jsperf.com/cloning-an-object/12
var clone = (o) => JSON.parse(JSON.stringify(o))
var clone = o => JSON.parse(JSON.stringify(o))
json.commonPath = function (p1, p2) {
p1 = p1.slice()

View file

@ -1,7 +1,6 @@
/* eslint-disable
no-console,
no-return-assign,
standard/no-callback-literal,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
@ -27,7 +26,7 @@ const { EventEmitter } = require('events')
const queue = require('./syncqueue')
const types = require('../types')
const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]'
const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
// This constructor creates a new Model object. There will be one model object
// per server context.
@ -225,7 +224,7 @@ module.exports = Model = function (db, options) {
return callback(error)
}
__guardMethod__(options.stats, 'writeOp', (o) => o.writeOp())
__guardMethod__(options.stats, 'writeOp', o => o.writeOp())
// This is needed when we emit the 'change' event, below.
const oldSnapshot = doc.snapshot
@ -303,7 +302,7 @@ module.exports = Model = function (db, options) {
// Version of the snapshot that's in the database
committedVersion: committedVersion != null ? committedVersion : data.v,
snapshotWriteLock: false,
dbMeta
dbMeta,
}
doc.opQueue = makeOpQueue(docName, doc)
@ -352,9 +351,7 @@ module.exports = Model = function (db, options) {
const load = function (docName, callback) {
if (docs[docName]) {
// The document is already loaded. Return immediately.
__guardMethod__(options.stats, 'cacheHit', (o) =>
o.cacheHit('getSnapshot')
)
__guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot'))
return callback(null, docs[docName])
}
@ -370,7 +367,7 @@ module.exports = Model = function (db, options) {
return callbacks.push(callback)
}
__guardMethod__(options.stats, 'cacheMiss', (o1) =>
__guardMethod__(options.stats, 'cacheMiss', o1 =>
o1.cacheMiss('getSnapshot')
)
@ -447,20 +444,21 @@ module.exports = Model = function (db, options) {
) {
let reapTimer
clearTimeout(doc.reapTimer)
return (doc.reapTimer = reapTimer = setTimeout(
() =>
tryWriteSnapshot(docName, function () {
// If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
// in the middle of applying an operation, don't reap.
if (
docs[docName].reapTimer === reapTimer &&
doc.opQueue.busy === false
) {
return delete docs[docName]
}
}),
options.reapTime
))
return (doc.reapTimer = reapTimer =
setTimeout(
() =>
tryWriteSnapshot(docName, function () {
// If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
// in the middle of applying an operation, don't reap.
if (
docs[docName].reapTimer === reapTimer &&
doc.opQueue.busy === false
) {
return delete docs[docName]
}
}),
options.reapTime
))
}
})
}
@ -490,7 +488,7 @@ module.exports = Model = function (db, options) {
doc.snapshotWriteLock = true
__guardMethod__(options.stats, 'writeSnapshot', (o) => o.writeSnapshot())
__guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot())
const writeSnapshot =
(db != null ? db.writeSnapshot : undefined) ||
@ -501,7 +499,7 @@ module.exports = Model = function (db, options) {
meta: doc.meta,
snapshot: doc.snapshot,
// The database doesn't know about object types.
type: doc.type.name
type: doc.type.name,
}
// Commit snapshot.
@ -551,7 +549,7 @@ module.exports = Model = function (db, options) {
snapshot: type.create(),
type: type.name,
meta: meta || {},
v: 0
v: 0,
}
const done = function (error, dbMeta) {
@ -864,7 +862,7 @@ module.exports = Model = function (db, options) {
// Close the database connection. This is needed so nodejs can shut down cleanly.
this.closeDb = function () {
__guardMethod__(db, 'close', (o) => o.close())
__guardMethod__(db, 'close', o => o.close())
return (db = null)
}
}
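The reap timer in this model is a small idle-eviction pattern worth isolating: a refresh cancels the pending timer, and the handle comparison stops a stale callback (one whose snapshot write was still in flight when the doc was touched again) from evicting a live doc. A minimal sketch:

// Minimal sketch of the reap-timer pattern: evict only if our handle is
// still the active one, i.e. the doc was not touched while we were busy.
const docs = {}

function touch(docName, reapTime) {
  const doc = docs[docName] || (docs[docName] = {})
  clearTimeout(doc.reapTimer)
  const reapTimer = setTimeout(() => {
    // in the real code this check runs after an async snapshot write; a
    // refresh during that write swaps the handle and vetoes the eviction
    if (docs[docName] && docs[docName].reapTimer === reapTimer) {
      delete docs[docName]
    }
  }, reapTime)
  doc.reapTimer = reapTimer
}

touch('doc-1', 10) // arms eviction
touch('doc-1', 50) // refresh: clears the first timer and arms a new one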

View file

@ -1,7 +1,6 @@
/* eslint-disable
no-console,
no-return-assign,
standard/no-callback-literal,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
@ -27,7 +26,7 @@ const { EventEmitter } = require('events')
const queue = require('./syncqueue')
const types = require('../types')
const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]'
const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
// This constructor creates a new Model object. There will be one model object
// per server context.
@ -232,7 +231,7 @@ module.exports = Model = function (db, options) {
return callback(error)
}
__guardMethod__(options.stats, 'writeOp', (o) => o.writeOp())
__guardMethod__(options.stats, 'writeOp', o => o.writeOp())
// This is needed when we emit the 'change' event, below.
const oldSnapshot = doc.snapshot
@ -310,7 +309,7 @@ module.exports = Model = function (db, options) {
// Version of the snapshot that's in the database
committedVersion: committedVersion != null ? committedVersion : data.v,
snapshotWriteLock: false,
dbMeta
dbMeta,
}
doc.opQueue = makeOpQueue(docName, doc)
@ -359,9 +358,7 @@ module.exports = Model = function (db, options) {
const load = function (docName, callback) {
if (docs[docName]) {
// The document is already loaded. Return immediately.
__guardMethod__(options.stats, 'cacheHit', (o) =>
o.cacheHit('getSnapshot')
)
__guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot'))
return callback(null, docs[docName])
}
@ -377,7 +374,7 @@ module.exports = Model = function (db, options) {
return callbacks.push(callback)
}
__guardMethod__(options.stats, 'cacheMiss', (o1) =>
__guardMethod__(options.stats, 'cacheMiss', o1 =>
o1.cacheMiss('getSnapshot')
)
@ -454,20 +451,21 @@ module.exports = Model = function (db, options) {
) {
let reapTimer
clearTimeout(doc.reapTimer)
return (doc.reapTimer = reapTimer = setTimeout(
() =>
tryWriteSnapshot(docName, function () {
// If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
// in the middle of applying an operation, don't reap.
if (
docs[docName].reapTimer === reapTimer &&
doc.opQueue.busy === false
) {
return delete docs[docName]
}
}),
options.reapTime
))
return (doc.reapTimer = reapTimer =
setTimeout(
() =>
tryWriteSnapshot(docName, function () {
// If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
// in the middle of applying an operation, don't reap.
if (
docs[docName].reapTimer === reapTimer &&
doc.opQueue.busy === false
) {
return delete docs[docName]
}
}),
options.reapTime
))
}
})
}
@ -497,7 +495,7 @@ module.exports = Model = function (db, options) {
doc.snapshotWriteLock = true
__guardMethod__(options.stats, 'writeSnapshot', (o) => o.writeSnapshot())
__guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot())
const writeSnapshot =
(db != null ? db.writeSnapshot : undefined) ||
@ -508,7 +506,7 @@ module.exports = Model = function (db, options) {
meta: doc.meta,
snapshot: doc.snapshot,
// The database doesn't know about object types.
type: doc.type.name
type: doc.type.name,
}
// Commit snapshot.
@ -558,7 +556,7 @@ module.exports = Model = function (db, options) {
snapshot: type.create(),
type: type.name,
meta: meta || {},
v: 0
v: 0,
}
const done = function (error, dbMeta) {
@ -871,7 +869,7 @@ module.exports = Model = function (db, options) {
// Close the database connection. This is needed so nodejs can shut down cleanly.
this.closeDb = function () {
__guardMethod__(db, 'close', (o) => o.close())
__guardMethod__(db, 'close', o => o.close())
return (db = null)
}
}

View file

@ -50,5 +50,5 @@ module.exports = {
}
return { position: pos, text: op1.text }
}
},
}

View file

@ -42,11 +42,11 @@ text.api = {
_register() {
return this.on('remoteop', function (op) {
return Array.from(op).map((component) =>
return Array.from(op).map(component =>
component.i !== undefined
? this.emit('insert', component.p, component.i)
: this.emit('delete', component.p, component.d)
)
})
}
},
}

View file

@ -44,7 +44,7 @@ type.api = {
const op = type.normalize([
pos,
{ d: this.snapshot.slice(pos, pos + length) },
this.snapshot.length - pos - length
this.snapshot.length - pos - length,
])
this.submitOp(op, callback)
@ -70,7 +70,7 @@ type.api = {
return result
})()
})
}
},
}
// We don't increment pos, because the position
// specified is after the delete has happened.

View file

@ -74,7 +74,7 @@ const checkOp = function (op) {
// Makes a function for appending components to a given op.
// Exported for the randomOpGenerator.
exports._makeAppend = makeAppend = (op) =>
exports._makeAppend = makeAppend = op =>
function (component) {
if (component === 0 || component.i === '' || component.d === '') {
} else if (op.length === 0) {

View file

@ -49,7 +49,7 @@ type.api = {
// Flatten a document into a string
getText() {
const strings = Array.from(this.snapshot.data).filter(
(elem) => typeof elem === 'string'
elem => typeof elem === 'string'
)
return strings.join('')
},
@ -129,5 +129,5 @@ type.api = {
}
}
})
}
},
}

View file

@ -65,7 +65,7 @@ var type = {
}
return doc
}
},
}
const checkOp = function (op) {
@ -346,7 +346,7 @@ const transformer = function (op, otherOp, goForwards, side) {
// transform - insert skips over inserted parts
if (side === 'left') {
// The left insert should go first.
while (__guard__(peek(), (x) => x.i) !== undefined) {
while (__guard__(peek(), x => x.i) !== undefined) {
append(newOp, take())
}
}

View file

@ -110,7 +110,7 @@ text._append = append = function (newOp, c) {
) {
return (newOp[newOp.length - 1] = {
i: strInject(last.i, c.p - last.p, c.i),
p: last.p
p: last.p,
})
} else if (
last.d != null &&
@ -120,7 +120,7 @@ text._append = append = function (newOp, c) {
) {
return (newOp[newOp.length - 1] = {
d: strInject(c.d, last.p - c.p, last.d),
p: c.p
p: c.p,
})
} else {
return newOp.push(c)
@ -142,7 +142,7 @@ text.compose = function (op1, op2) {
// Attempt to compress the op components together 'as much as possible'.
// This implementation preserves order and preserves create/delete pairs.
text.compress = (op) => text.compose([], op)
text.compress = op => text.compose([], op)
text.normalize = function (op) {
const newOp = []
@ -216,7 +216,7 @@ text._tc = transformComponent = function (dest, c, otherC, side) {
if (c.i != null) {
append(dest, {
i: c.i,
p: transformPosition(c.p, otherC, side === 'right')
p: transformPosition(c.p, otherC, side === 'right'),
})
} else {
// Delete
@ -286,8 +286,8 @@ const invertComponent = function (c) {
// No need to use append for invert, because the components won't be able to
// cancel with one another.
text.invert = (op) =>
Array.from(op.slice().reverse()).map((c) => invertComponent(c))
text.invert = op =>
Array.from(op.slice().reverse()).map(c => invertComponent(c))
if (typeof WEB !== 'undefined' && WEB !== null) {
if (!exports.types) {
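The first branch of _append above merges an insert that lands inside the previous insert's span into a single component. A toy demonstration, with strInject reimplemented inline:

// Toy demo of the insert-merging rule in text._append.
const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos)

const op = [{ i: 'hello', p: 4 }]
const next = { i: ' world', p: 9 } // 4 <= 9 <= 4 + 'hello'.length
const last = op[op.length - 1]
if (last.i != null && last.p <= next.p && next.p <= last.p + last.i.length) {
  op[op.length - 1] = { i: strInject(last.i, next.p - last.p, next.i), p: last.p }
}
console.log(op) // [ { i: 'hello world', p: 4 } ]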

View file

@ -34,4 +34,4 @@ exports.compose = function (op1, op2) {
return [op1[0], op1[1] + op2[1]]
}
exports.generateRandomOp = (doc) => [[doc, 1], doc + 1]
exports.generateRandomOp = doc => [[doc, 1], doc + 1]

View file

@ -31,80 +31,84 @@ exports._bt = bootstrapTransform = function (
}
// Transforms rightOp by leftOp. Returns [leftOp', rightOp']
type.transformX = type.transformX = transformX = function (leftOp, rightOp) {
checkValidOp(leftOp)
checkValidOp(rightOp)
type.transformX =
type.transformX =
transformX =
function (leftOp, rightOp) {
checkValidOp(leftOp)
checkValidOp(rightOp)
const newRightOp = []
const newRightOp = []
for (let rightComponent of Array.from(rightOp)) {
// Generate newLeftOp by composing leftOp by rightComponent
const newLeftOp = []
for (let rightComponent of Array.from(rightOp)) {
// Generate newLeftOp by composing leftOp by rightComponent
const newLeftOp = []
let k = 0
while (k < leftOp.length) {
var l
const nextC = []
transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC)
k++
let k = 0
while (k < leftOp.length) {
var l
const nextC = []
transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC)
k++
if (nextC.length === 1) {
rightComponent = nextC[0]
} else if (nextC.length === 0) {
for (l of Array.from(leftOp.slice(k))) {
append(newLeftOp, l)
if (nextC.length === 1) {
rightComponent = nextC[0]
} else if (nextC.length === 0) {
for (l of Array.from(leftOp.slice(k))) {
append(newLeftOp, l)
}
rightComponent = null
break
} else {
// Recurse.
const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC))
for (l of Array.from(l_)) {
append(newLeftOp, l)
}
for (const r of Array.from(r_)) {
append(newRightOp, r)
}
rightComponent = null
break
}
}
rightComponent = null
break
} else {
// Recurse.
const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC))
for (l of Array.from(l_)) {
append(newLeftOp, l)
if (rightComponent != null) {
append(newRightOp, rightComponent)
}
for (const r of Array.from(r_)) {
append(newRightOp, r)
}
rightComponent = null
break
leftOp = newLeftOp
}
}
if (rightComponent != null) {
append(newRightOp, rightComponent)
return [leftOp, newRightOp]
}
leftOp = newLeftOp
}
return [leftOp, newRightOp]
}
// Transforms op with specified type ('left' or 'right') by otherOp.
return (type.transform = type.transform = function (op, otherOp, type) {
let _
if (type !== 'left' && type !== 'right') {
throw new Error("type must be 'left' or 'right'")
}
return (type.transform = type.transform =
function (op, otherOp, type) {
let _
if (type !== 'left' && type !== 'right') {
throw new Error("type must be 'left' or 'right'")
}
if (otherOp.length === 0) {
return op
}
if (otherOp.length === 0) {
return op
}
// TODO: Benchmark with and without this line. I _think_ it'll make a big difference...?
if (op.length === 1 && otherOp.length === 1) {
return transformComponent([], op[0], otherOp[0], type)
}
// TODO: Benchmark with and without this line. I _think_ it'll make a big difference...?
if (op.length === 1 && otherOp.length === 1) {
return transformComponent([], op[0], otherOp[0], type)
}
if (type === 'left') {
let left
;[left, _] = Array.from(transformX(op, otherOp))
return left
} else {
let right
;[_, right] = Array.from(transformX(otherOp, op))
return right
}
})
if (type === 'left') {
let left
;[left, _] = Array.from(transformX(op, otherOp))
return left
} else {
let right
;[_, right] = Array.from(transformX(otherOp, op))
return right
}
})
}
if (typeof WEB === 'undefined') {

View file

@ -353,5 +353,5 @@ json.api = {
return result
})()
})
}
},
}

View file

@ -59,12 +59,12 @@ json.invertComponent = function (c) {
return c_
}
json.invert = (op) =>
Array.from(op.slice().reverse()).map((c) => json.invertComponent(c))
json.invert = op =>
Array.from(op.slice().reverse()).map(c => json.invertComponent(c))
json.checkValidOp = function (op) {}
const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]'
const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
json.checkList = function (elem) {
if (!isArray(elem)) {
throw new Error('Referenced element not a list')
@ -264,7 +264,7 @@ json.normalize = function (op) {
// hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming
// we have browser support for JSON.
// http://jsperf.com/cloning-an-object/12
var clone = (o) => JSON.parse(JSON.stringify(o))
var clone = o => JSON.parse(JSON.stringify(o))
json.commonPath = function (p1, p2) {
p1 = p1.slice()

View file

@ -1,7 +1,6 @@
/* eslint-disable
no-console,
no-return-assign,
standard/no-callback-literal,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
@ -27,7 +26,7 @@ const { EventEmitter } = require('events')
const queue = require('./syncqueue')
const types = require('../types')
const isArray = (o) => Object.prototype.toString.call(o) === '[object Array]'
const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
// This constructor creates a new Model object. There will be one model object
// per server context.
@ -225,7 +224,7 @@ module.exports = Model = function (db, options) {
return callback(error)
}
__guardMethod__(options.stats, 'writeOp', (o) => o.writeOp())
__guardMethod__(options.stats, 'writeOp', o => o.writeOp())
// This is needed when we emit the 'change' event, below.
const oldSnapshot = doc.snapshot
@ -303,7 +302,7 @@ module.exports = Model = function (db, options) {
// Version of the snapshot that's in the database
committedVersion: committedVersion != null ? committedVersion : data.v,
snapshotWriteLock: false,
dbMeta
dbMeta,
}
doc.opQueue = makeOpQueue(docName, doc)
@ -352,9 +351,7 @@ module.exports = Model = function (db, options) {
const load = function (docName, callback) {
if (docs[docName]) {
// The document is already loaded. Return immediately.
__guardMethod__(options.stats, 'cacheHit', (o) =>
o.cacheHit('getSnapshot')
)
__guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot'))
return callback(null, docs[docName])
}
@ -370,7 +367,7 @@ module.exports = Model = function (db, options) {
return callbacks.push(callback)
}
__guardMethod__(options.stats, 'cacheMiss', (o1) =>
__guardMethod__(options.stats, 'cacheMiss', o1 =>
o1.cacheMiss('getSnapshot')
)
@ -447,20 +444,21 @@ module.exports = Model = function (db, options) {
) {
let reapTimer
clearTimeout(doc.reapTimer)
return (doc.reapTimer = reapTimer = setTimeout(
() =>
tryWriteSnapshot(docName, function () {
// If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
// in the middle of applying an operation, don't reap.
if (
docs[docName].reapTimer === reapTimer &&
doc.opQueue.busy === false
) {
return delete docs[docName]
}
}),
options.reapTime
))
return (doc.reapTimer = reapTimer =
setTimeout(
() =>
tryWriteSnapshot(docName, function () {
// If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
// in the middle of applying an operation, don't reap.
if (
docs[docName].reapTimer === reapTimer &&
doc.opQueue.busy === false
) {
return delete docs[docName]
}
}),
options.reapTime
))
}
})
}
@ -490,7 +488,7 @@ module.exports = Model = function (db, options) {
doc.snapshotWriteLock = true
__guardMethod__(options.stats, 'writeSnapshot', (o) => o.writeSnapshot())
__guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot())
const writeSnapshot =
(db != null ? db.writeSnapshot : undefined) ||
@ -501,7 +499,7 @@ module.exports = Model = function (db, options) {
meta: doc.meta,
snapshot: doc.snapshot,
// The database doesn't know about object types.
type: doc.type.name
type: doc.type.name,
}
// Commit snapshot.
@ -551,7 +549,7 @@ module.exports = Model = function (db, options) {
snapshot: type.create(),
type: type.name,
meta: meta || {},
v: 0
v: 0,
}
const done = function (error, dbMeta) {
@ -864,7 +862,7 @@ module.exports = Model = function (db, options) {
// Close the database connection. This is needed so nodejs can shut down cleanly.
this.closeDb = function () {
__guardMethod__(db, 'close', (o) => o.close())
__guardMethod__(db, 'close', o => o.close())
return (db = null)
}
}

View file

@ -50,5 +50,5 @@ module.exports = {
}
return { position: pos, text: op1.text }
}
},
}

View file

@ -42,11 +42,11 @@ text.api = {
_register() {
return this.on('remoteop', function (op) {
return Array.from(op).map((component) =>
return Array.from(op).map(component =>
component.i !== undefined
? this.emit('insert', component.p, component.i)
: this.emit('delete', component.p, component.d)
)
})
}
},
}

View file

@ -44,7 +44,7 @@ type.api = {
const op = type.normalize([
pos,
{ d: this.snapshot.slice(pos, pos + length) },
this.snapshot.length - pos - length
this.snapshot.length - pos - length,
])
this.submitOp(op, callback)
@ -70,7 +70,7 @@ type.api = {
return result
})()
})
}
},
}
// We don't increment pos, because the position
// specified is after the delete has happened.

View file

@ -75,7 +75,7 @@ const checkOp = function (op) {
// Makes a function for appending components to a given op.
// Exported for the randomOpGenerator.
moduleExport._makeAppend = makeAppend = (op) =>
moduleExport._makeAppend = makeAppend = op =>
function (component) {
if (component === 0 || component.i === '' || component.d === '') {
} else if (op.length === 0) {

View file

@ -49,7 +49,7 @@ type.api = {
// Flatten a document into a string
getText() {
const strings = Array.from(this.snapshot.data).filter(
(elem) => typeof elem === 'string'
elem => typeof elem === 'string'
)
return strings.join('')
},
@ -129,5 +129,5 @@ type.api = {
}
}
})
}
},
}

View file

@ -65,7 +65,7 @@ var type = {
}
return doc
}
},
}
const checkOp = function (op) {
@ -346,7 +346,7 @@ const transformer = function (op, otherOp, goForwards, side) {
// transform - insert skips over inserted parts
if (side === 'left') {
// The left insert should go first.
while (__guard__(peek(), (x) => x.i) !== undefined) {
while (__guard__(peek(), x => x.i) !== undefined) {
append(newOp, take())
}
}

View file

@ -124,7 +124,7 @@ text._append = append = function (newOp, c) {
) {
return (newOp[newOp.length - 1] = {
i: strInject(last.i, c.p - last.p, c.i),
p: last.p
p: last.p,
})
} else if (
last.d != null &&
@ -134,7 +134,7 @@ text._append = append = function (newOp, c) {
) {
return (newOp[newOp.length - 1] = {
d: strInject(c.d, last.p - c.p, last.d),
p: c.p
p: c.p,
})
} else {
return newOp.push(c)
@ -156,7 +156,7 @@ text.compose = function (op1, op2) {
// Attempt to compress the op components together 'as much as possible'.
// This implementation preserves order and preserves create/delete pairs.
text.compress = (op) => text.compose([], op)
text.compress = op => text.compose([], op)
text.normalize = function (op) {
const newOp = []
@ -235,7 +235,7 @@ text._tc = transformComponent = function (dest, c, otherC, side) {
if (c.i != null) {
append(dest, {
i: c.i,
p: transformPosition(c.p, otherC, side === 'right')
p: transformPosition(c.p, otherC, side === 'right'),
})
} else if (c.d != null) {
// Delete
@ -305,7 +305,7 @@ text._tc = transformComponent = function (dest, c, otherC, side) {
append(dest, {
c: c.c,
p: transformPosition(c.p, otherC, true),
t: c.t
t: c.t,
})
}
} else if (otherC.d != null) {
@ -362,8 +362,8 @@ const invertComponent = function (c) {
// No need to use append for invert, because the components won't be able to
// cancel with one another.
text.invert = (op) =>
Array.from(op.slice().reverse()).map((c) => invertComponent(c))
text.invert = op =>
Array.from(op.slice().reverse()).map(c => invertComponent(c))
if (typeof WEB !== 'undefined' && WEB !== null) {
if (!exports.types) {

View file

@ -3,6 +3,6 @@ document-updater
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--node-version=12.21.0
--node-version=12.22.3
--public-repo=True
--script-version=3.8.0
--script-version=3.11.0

View file

@ -2,8 +2,8 @@ module.exports = {
internal: {
documentupdater: {
host: process.env.LISTEN_ADDRESS || 'localhost',
port: 3003
}
port: 3003,
},
},
apis: {
@ -12,15 +12,15 @@ module.exports = {
process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
}:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
user: process.env.WEB_API_USER || 'sharelatex',
pass: process.env.WEB_API_PASSWORD || 'password'
pass: process.env.WEB_API_PASSWORD || 'password',
},
trackchanges: {
url: `http://${process.env.TRACK_CHANGES_HOST || 'localhost'}:3015`
url: `http://${process.env.TRACK_CHANGES_HOST || 'localhost'}:3015`,
},
project_history: {
enabled: true,
url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054`
}
url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054`,
},
},
redis: {
@ -32,7 +32,7 @@ module.exports = {
process.env.PUBSUB_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
maxRetriesPerRequest: parseInt(
process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
)
),
},
history: {
@ -50,8 +50,8 @@ module.exports = {
},
docsWithHistoryOps({ project_id: projectId }) {
return `DocsWithHistoryOps:{${projectId}}`
}
}
},
},
},
project_history: {
@ -74,8 +74,8 @@ module.exports = {
},
projectHistoryFirstOpTimestamp({ project_id: projectId }) {
return `ProjectHistory:FirstOpTimestamp:{${projectId}}`
}
}
},
},
},
lock: {
@ -90,8 +90,8 @@ module.exports = {
key_schema: {
blockingKey({ doc_id: docId }) {
return `Blocking:{${docId}}`
}
}
},
},
},
documentupdater: {
@ -159,9 +159,9 @@ module.exports = {
},
flushAndDeleteQueue() {
return 'DocUpdaterFlushAndDeleteQueue'
}
}
}
},
},
},
},
max_doc_length: 2 * 1024 * 1024, // 2mb
@ -173,15 +173,15 @@ module.exports = {
mongo: {
options: {
useUnifiedTopology:
(process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true'
(process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true',
},
url:
process.env.MONGO_CONNECTION_STRING ||
`mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`
`mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`,
},
sentry: {
dsn: process.env.SENTRY_DSN
dsn: process.env.SENTRY_DSN,
},
publishOnIndividualChannels:
@ -191,5 +191,5 @@ module.exports = {
smoothingOffset: process.env.SMOOTHING_OFFSET || 1000, // milliseconds
disableDoubleFlush: process.env.DISABLE_DOUBLE_FLUSH || false // don't flush track-changes for projects using project-history
disableDoubleFlush: process.env.DISABLE_DOUBLE_FLUSH || false, // don't flush track-changes for projects using project-history
}
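A note on the braced ids in the key_schema functions above: {...} is a Redis Cluster hash tag, so only the id is hashed for slot assignment and every key for a given doc or project lands on the same node, keeping MULTI blocks usable. This can be checked with cluster-key-slot, which is already a devDependency of this package:

// The {...} hash tag pins related keys to one cluster slot.
const keySlot = require('cluster-key-slot')

const docId = '607aa10000000000000002' // hypothetical
const a = keySlot(`docLines:{${docId}}`)
const b = keySlot(`DocVersion:{${docId}}`)
console.log(a === b) // true: same slot despite different prefixes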

View file

@ -6,7 +6,7 @@ version: "2.3"
services:
test_unit:
image: node:12.21.0
image: node:12.22.3
volumes:
- .:/app
working_dir: /app
@ -18,7 +18,7 @@ services:
user: node
test_acceptance:
image: node:12.21.0
image: node:12.22.3
volumes:
- .:/app
working_dir: /app

View file

@ -1,4 +1,4 @@
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const rclient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.documentupdater
)
@ -39,7 +39,7 @@ const getKeys = function (pattern, callback) {
return async.concatSeries(nodes, doKeyLookupForNode, callback)
}
const expireDocOps = (callback) =>
const expireDocOps = callback =>
// eslint-disable-next-line handle-callback-err
getKeys(keys.docOps({ doc_id: '*' }), (error, keys) =>
async.mapSeries(

File diff suppressed because it is too large

View file

@ -13,49 +13,43 @@
"test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
"test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
"nodemon": "nodemon --config nodemon.json",
"lint": "node_modules/.bin/eslint --max-warnings 0 .",
"format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different",
"format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write"
"lint": "eslint --max-warnings 0 --format unix .",
"format": "prettier --list-different $PWD/'**/*.js'",
"format:fix": "prettier --write $PWD/'**/*.js'",
"lint:fix": "eslint --fix ."
},
"dependencies": {
"@overleaf/metrics": "^3.5.1",
"@overleaf/o-error": "^3.3.1",
"@overleaf/redis-wrapper": "^2.0.1",
"@overleaf/settings": "^2.1.1",
"async": "^2.5.0",
"body-parser": "^1.19.0",
"bunyan": "~0.22.1",
"bunyan": "^1.8.15",
"diff-match-patch": "https://github.com/overleaf/diff-match-patch/archive/89805f9c671a77a263fc53461acd62aa7498f688.tar.gz",
"express": "4.17.1",
"lodash": "^4.17.21",
"logger-sharelatex": "^2.2.0",
"mongodb": "^3.6.6",
"request": "^2.88.2",
"requestretry": "^4.1.2",
"settings-sharelatex": "^1.3.0"
"requestretry": "^4.1.2"
},
"devDependencies": {
"babel-eslint": "^10.1.0",
"chai": "^3.5.0",
"chai": "^4.2.0",
"chai-as-promised": "^7.1.1",
"cluster-key-slot": "^1.0.5",
"coffee-script": "^1.12.7",
"eslint": "^6.8.0",
"eslint-config-prettier": "^6.10.0",
"eslint-config-standard": "^14.1.0",
"eslint-config-standard-jsx": "^8.1.0",
"eslint-config-standard-react": "^9.2.0",
"eslint-plugin-chai-expect": "^2.1.0",
"eslint-plugin-chai-friendly": "^0.5.0",
"eslint-plugin-import": "^2.20.1",
"eslint-plugin-jsx-a11y": "^6.2.3",
"eslint-plugin-mocha": "^6.3.0",
"eslint-plugin-node": "^11.0.0",
"eslint": "^7.21.0",
"eslint-config-prettier": "^8.1.0",
"eslint-config-standard": "^16.0.2",
"eslint-plugin-chai-expect": "^2.2.0",
"eslint-plugin-chai-friendly": "^0.6.0",
"eslint-plugin-import": "^2.22.1",
"eslint-plugin-mocha": "^8.0.0",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "^3.1.2",
"eslint-plugin-promise": "^4.2.1",
"eslint-plugin-react": "^7.19.0",
"eslint-plugin-standard": "^4.0.1",
"mocha": "^8.3.2",
"prettier": "^2.0.0",
"prettier-eslint-cli": "^5.0.0",
"prettier": "^2.2.1",
"sandboxed-module": "^2.0.4",
"sinon": "^9.0.2",
"timekeeper": "^2.0.0"

View file

@ -14,7 +14,7 @@
const sinon = require('sinon')
const { expect } = require('chai')
const async = require('async')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const rclient_history = require('@overleaf/redis-wrapper').createClient(
Settings.redis.history
) // note: this is track changes, not project-history
@ -42,10 +42,10 @@ describe('Applying updates to a doc', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: this.version
v: this.version,
}
this.result = ['one', 'one and a half', 'two', 'three']
return DocUpdaterApp.ensureRunning(done)
@ -55,19 +55,19 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
sinon.spy(MockWebApi, 'getDocument')
this.startTime = Date.now()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -144,12 +144,13 @@ describe('Applying updates to a doc', function () {
it('should set the first op timestamp', function (done) {
rclient_project_history.get(
ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
project_id: this.project_id
project_id: this.project_id,
}),
(error, result) => {
if (error != null) {
throw error
}
result = parseInt(result, 10)
result.should.be.within(this.startTime, Date.now())
this.firstOpTimestamp = result
return done()
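The parseInt added here (and in the matching assertion below) accounts for Redis returning bulk strings: the strict equality behind chai's assertions treats '123' and 123 as different values. A one-line illustration:

// redis GET hands back a string; without parseInt the assertions compare
// string to number and fail under strict equality.
const raw = '1626429600000' // hypothetical reply from rclient.get
console.log(raw === 1626429600000) // false
console.log(parseInt(raw, 10) === 1626429600000) // true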
@ -167,7 +168,7 @@ describe('Applying updates to a doc', function () {
this.project_id,
this.doc_id,
this.second_update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -180,12 +181,13 @@ describe('Applying updates to a doc', function () {
return it('should not change the first op timestamp', function (done) {
rclient_project_history.get(
ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
project_id: this.project_id
project_id: this.project_id,
}),
(error, result) => {
if (error != null) {
throw error
}
result = parseInt(result, 10)
result.should.equal(this.firstOpTimestamp)
return done()
}
@ -199,14 +201,14 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
if (error != null) {
throw error
}
@ -215,7 +217,7 @@ describe('Applying updates to a doc', function () {
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -284,15 +286,15 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version,
projectHistoryType: 'project-history'
projectHistoryType: 'project-history',
})
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
if (error != null) {
throw error
}
@ -301,7 +303,7 @@ describe('Applying updates to a doc', function () {
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -360,12 +362,12 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
const lines = ['', '', '']
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines,
version: 0
version: 0,
})
this.updates = [
{ doc_id: this.doc_id, v: 0, op: [{ i: 'h', p: 0 }] },
@ -378,7 +380,7 @@ describe('Applying updates to a doc', function () {
{ doc_id: this.doc_id, v: 7, op: [{ i: 'o', p: 7 }] },
{ doc_id: this.doc_id, v: 8, op: [{ i: 'r', p: 8 }] },
{ doc_id: this.doc_id, v: 9, op: [{ i: 'l', p: 9 }] },
{ doc_id: this.doc_id, v: 10, op: [{ i: 'd', p: 10 }] }
{ doc_id: this.doc_id, v: 10, op: [{ i: 'd', p: 10 }] },
]
this.my_result = ['hello world', '', '']
return done()
@ -388,8 +390,8 @@ describe('Applying updates to a doc', function () {
let update
const actions = []
for (update of Array.from(this.updates.slice(0, 6))) {
;((update) => {
return actions.push((callback) =>
;(update => {
return actions.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
@ -399,12 +401,12 @@ describe('Applying updates to a doc', function () {
)
})(update)
}
actions.push((callback) =>
actions.push(callback =>
DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback)
)
for (update of Array.from(this.updates.slice(6))) {
;((update) => {
return actions.push((callback) =>
;(update => {
return actions.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
@ -415,7 +417,7 @@ describe('Applying updates to a doc', function () {
})(update)
}
async.series(actions, (error) => {
async.series(actions, error => {
if (error != null) {
throw error
}
@ -437,7 +439,7 @@ describe('Applying updates to a doc', function () {
0,
-1,
(error, updates) => {
updates = Array.from(updates).map((u) => JSON.parse(u))
updates = Array.from(updates).map(u => JSON.parse(u))
for (let i = 0; i < this.updates.length; i++) {
const appliedUpdate = this.updates[i]
appliedUpdate.op.should.deep.equal(updates[i].op)
@ -462,7 +464,7 @@ describe('Applying updates to a doc', function () {
0,
-1,
(error, updates) => {
updates = Array.from(updates).map((u) => JSON.parse(u))
updates = Array.from(updates).map(u => JSON.parse(u))
for (let i = 0; i < this.updates.length; i++) {
const appliedUpdate = this.updates[i]
appliedUpdate.op.should.deep.equal(updates[i].op)
@ -478,12 +480,12 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
const lines = ['', '', '']
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines,
version: 0
version: 0,
})
this.updates = [
{ doc_id: this.doc_id, v: 0, op: [{ i: 'h', p: 0 }] },
@ -491,7 +493,7 @@ describe('Applying updates to a doc', function () {
{ doc_id: this.doc_id, v: 2, op: [{ i: 'l', p: 2 }] },
{ doc_id: this.doc_id, v: 3, op: [{ i: 'l', p: 3 }] },
{ doc_id: this.doc_id, v: 4, op: [{ i: 'o', p: 4 }] },
{ doc_id: this.doc_id, v: 0, op: [{ i: 'world', p: 1 }] }
{ doc_id: this.doc_id, v: 0, op: [{ i: 'world', p: 1 }] },
]
this.my_result = ['hello', 'world', '']
return done()
@ -501,8 +503,8 @@ describe('Applying updates to a doc', function () {
let update
const actions = []
for (update of Array.from(this.updates.slice(0, 5))) {
;((update) => {
return actions.push((callback) =>
;(update => {
return actions.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
@ -512,12 +514,12 @@ describe('Applying updates to a doc', function () {
)
})(update)
}
actions.push((callback) =>
actions.push(callback =>
DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback)
)
for (update of Array.from(this.updates.slice(5))) {
;((update) => {
return actions.push((callback) =>
;(update => {
return actions.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
@ -528,7 +530,7 @@ describe('Applying updates to a doc', function () {
})(update)
}
async.series(actions, (error) => {
async.series(actions, error => {
if (error != null) {
throw error
}
@ -550,16 +552,16 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
this.broken_update = {
doc_id: this.doc_id,
v: this.version,
op: [{ d: 'not the correct content', p: 0 }]
op: [{ d: 'not the correct content', p: 0 }],
}
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.subscribeToAppliedOps(
@ -570,7 +572,7 @@ describe('Applying updates to a doc', function () {
this.project_id,
this.doc_id,
this.broken_update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -599,7 +601,7 @@ describe('Applying updates to a doc', function () {
return JSON.parse(message).should.deep.include({
project_id: this.project_id,
doc_id: this.doc_id,
error: 'Delete component does not match'
error: 'Delete component does not match',
})
})
})
@ -608,7 +610,7 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
const updates = []
for (let v = 0; v <= 199; v++) {
@ -616,7 +618,7 @@ describe('Applying updates to a doc', function () {
updates.push({
doc_id: this.doc_id,
op: [{ i: v.toString(), p: 0 }],
v
v,
})
}
@ -624,14 +626,14 @@ describe('Applying updates to a doc', function () {
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: 0
version: 0,
})
// Send updates in chunks to cause multiple flushes
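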
const actions = []
for (let i = 0; i <= 19; i++) {
;((i) => {
return actions.push((cb) => {
;(i => {
return actions.push(cb => {
return DocUpdaterClient.sendUpdates(
this.project_id,
this.doc_id,
@ -641,7 +643,7 @@ describe('Applying updates to a doc', function () {
})
})(i)
}
async.series(actions, (error) => {
async.series(actions, error => {
if (error != null) {
throw error
}
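
Editor's note: the loop above queues twenty sendUpdates jobs of ten ops each, so the updater is pushed through several flush cycles instead of one. The same chunking pattern as a compact sketch, assuming a client exposing sendUpdates(projectId, docId, updates, cb):

const async = require('async')

// Hypothetical helper mirroring the loop above: send `updates` in
// fixed-size chunks, one chunk at a time, stopping at the first error.
function sendInChunks(client, projectId, docId, updates, chunkSize, callback) {
  const jobs = []
  for (let i = 0; i < updates.length; i += chunkSize) {
    const chunk = updates.slice(i, i + chunkSize)
    jobs.push(cb => client.sendUpdates(projectId, docId, chunk, cb))
  }
  async.series(jobs, callback)
}
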
@ -663,22 +665,22 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines
lines: this.lines,
})
const update = {
doc: this.doc_id,
op: this.update.op,
v: 0
v: 0,
}
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -705,11 +707,11 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.subscribeToAppliedOps(
@ -725,15 +727,15 @@ describe('Applying updates to a doc', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: this.version,
meta: {
source: 'ikHceq3yfAdQYzBo4-xZ'
}
source: 'ikHceq3yfAdQYzBo4-xZ',
},
},
(error) => {
error => {
if (error != null) {
throw error
}
@ -746,16 +748,16 @@ describe('Applying updates to a doc', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: this.version,
dupIfSource: ['ikHceq3yfAdQYzBo4-xZ'],
meta: {
source: 'ikHceq3yfAdQYzBo4-xZ'
}
source: 'ikHceq3yfAdQYzBo4-xZ',
},
},
(error) => {
error => {
if (error != null) {
throw error
}
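
Editor's note: this test sends the same op twice with an identical meta.source, and the second copy lists that source in dupIfSource; the updater is expected to acknowledge it as a duplicate rather than apply it again. A hedged sketch of the decision being exercised (the real check lives in the ShareJS model, not in this diff):

// Hypothetical sketch: an update at an already-applied version is only
// acknowledged as a duplicate when its source appears in dupIfSource;
// otherwise it is treated as a genuine version conflict.
function classifyStaleUpdate(update) {
  const dupSources = update.dupIfSource || []
  return dupSources.includes(update.meta.source) ? 'duplicate' : 'conflict'
}
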
@ -795,12 +797,12 @@ describe('Applying updates to a doc', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
this.non_existing = {
doc_id: this.doc_id,
v: this.version,
op: [{ d: 'content', p: 0 }]
op: [{ d: 'content', p: 0 }],
}
DocUpdaterClient.subscribeToAppliedOps(
@ -811,7 +813,7 @@ describe('Applying updates to a doc', function () {
this.project_id,
this.doc_id,
this.non_existing,
(error) => {
error => {
if (error != null) {
throw error
}
@ -840,7 +842,7 @@ describe('Applying updates to a doc', function () {
return JSON.parse(message).should.deep.include({
project_id: this.project_id,
doc_id: this.doc_id,
error: `doc not not found: /project/${this.project_id}/doc/${this.doc_id}`
error: `doc not not found: /project/${this.project_id}/doc/${this.doc_id}`,
})
})
})
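
Editor's note: both failure cases in this file subscribe to the applied-ops channel and assert that the broadcast payload names the project, the doc and the error. A minimal subscriber of the same shape, assuming the pubsub settings used in app.js and a hypothetical channel name:

const Settings = require('@overleaf/settings')
const rclient = require('@overleaf/redis-wrapper').createClient(
  Settings.redis.pubsub
)

// Hypothetical listener ('applied-ops' channel name is an assumption):
// error broadcasts carry project_id, doc_id and a readable error string.
rclient.subscribe('applied-ops')
rclient.on('message', (channel, message) => {
  const payload = JSON.parse(message)
  if (payload.error) {
    console.error('rejected update', payload.project_id, payload.doc_id, payload.error)
  }
})
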

View file

@ -1,5 +1,5 @@
const sinon = require('sinon')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const rclientProjectHistory = require('@overleaf/redis-wrapper').createClient(
Settings.redis.project_history
)
@ -23,10 +23,10 @@ describe("Applying updates to a project's structure", function () {
type: 'rename-file',
id: DocUpdaterClient.randomId(),
pathname: '/file-path',
newPathname: '/new-file-path'
newPathname: '/new-file-path',
}
this.updates = [this.fileUpdate]
DocUpdaterApp.ensureRunning((error) => {
DocUpdaterApp.ensureRunning(error => {
if (error) {
return done(error)
}
@ -35,7 +35,7 @@ describe("Applying updates to a project's structure", function () {
this.user_id,
this.updates,
this.version,
(error) => {
error => {
if (error) {
return done(error)
}
@ -75,7 +75,7 @@ describe("Applying updates to a project's structure", function () {
type: 'rename-doc',
id: DocUpdaterClient.randomId(),
pathname: '/doc-path',
newPathname: '/new-doc-path'
newPathname: '/new-doc-path',
}
this.updates = [this.update]
})
@ -88,7 +88,7 @@ describe("Applying updates to a project's structure", function () {
this.user_id,
this.updates,
this.version,
(error) => {
error => {
if (error) {
return done(error)
}
@ -125,28 +125,24 @@ describe("Applying updates to a project's structure", function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.update.id, {})
DocUpdaterClient.preloadDoc(
this.project_id,
this.update.id,
(error) => {
if (error) {
return done(error)
}
sinon.spy(MockWebApi, 'getDocument')
DocUpdaterClient.sendProjectUpdate(
this.project_id,
this.user_id,
this.updates,
this.version,
(error) => {
if (error) {
return done(error)
}
setTimeout(done, 200)
}
)
DocUpdaterClient.preloadDoc(this.project_id, this.update.id, error => {
if (error) {
return done(error)
}
)
sinon.spy(MockWebApi, 'getDocument')
DocUpdaterClient.sendProjectUpdate(
this.project_id,
this.user_id,
this.updates,
this.version,
error => {
if (error) {
return done(error)
}
setTimeout(done, 200)
}
)
})
})
after(function () {
@ -198,31 +194,31 @@ describe("Applying updates to a project's structure", function () {
type: 'rename-doc',
id: DocUpdaterClient.randomId(),
pathname: '/doc-path0',
newPathname: '/new-doc-path0'
newPathname: '/new-doc-path0',
}
this.docUpdate1 = {
type: 'rename-doc',
id: DocUpdaterClient.randomId(),
pathname: '/doc-path1',
newPathname: '/new-doc-path1'
newPathname: '/new-doc-path1',
}
this.fileUpdate0 = {
type: 'rename-file',
id: DocUpdaterClient.randomId(),
pathname: '/file-path0',
newPathname: '/new-file-path0'
newPathname: '/new-file-path0',
}
this.fileUpdate1 = {
type: 'rename-file',
id: DocUpdaterClient.randomId(),
pathname: '/file-path1',
newPathname: '/new-file-path1'
newPathname: '/new-file-path1',
}
this.updates = [
this.docUpdate0,
this.docUpdate1,
this.fileUpdate0,
this.fileUpdate1
this.fileUpdate1,
]
})
@ -234,7 +230,7 @@ describe("Applying updates to a project's structure", function () {
this.user_id,
this.updates,
this.version,
(error) => {
error => {
if (error) {
return done(error)
}
@ -299,7 +295,7 @@ describe("Applying updates to a project's structure", function () {
type: 'add-file',
id: DocUpdaterClient.randomId(),
pathname: '/file-path',
url: 'filestore.example.com'
url: 'filestore.example.com',
}
this.updates = [this.fileUpdate]
DocUpdaterClient.sendProjectUpdate(
@ -307,7 +303,7 @@ describe("Applying updates to a project's structure", function () {
this.user_id,
this.updates,
this.version,
(error) => {
error => {
if (error) {
return done(error)
}
@ -347,7 +343,7 @@ describe("Applying updates to a project's structure", function () {
type: 'add-doc',
id: DocUpdaterClient.randomId(),
pathname: '/file-path',
docLines: 'a\nb'
docLines: 'a\nb',
}
this.updates = [this.docUpdate]
DocUpdaterClient.sendProjectUpdate(
@ -355,7 +351,7 @@ describe("Applying updates to a project's structure", function () {
this.user_id,
this.updates,
this.version,
(error) => {
error => {
if (error) {
return done(error)
}
@ -401,7 +397,7 @@ describe("Applying updates to a project's structure", function () {
type: 'add-doc',
id: DocUpdaterClient.randomId(),
pathname: '/file-' + v,
docLines: 'a\nb'
docLines: 'a\nb',
})
}
@ -424,7 +420,7 @@ describe("Applying updates to a project's structure", function () {
userId,
updates.slice(250),
this.version1,
(error) => {
error => {
if (error) {
return done(error)
}
@ -460,7 +456,7 @@ describe("Applying updates to a project's structure", function () {
type: 'add-doc',
id: DocUpdaterClient.randomId(),
pathname: '/file-' + v,
docLines: 'a\nb'
docLines: 'a\nb',
})
}
@ -483,7 +479,7 @@ describe("Applying updates to a project's structure", function () {
userId,
updates.slice(10),
this.version1,
(error) => {
error => {
if (error) {
return done(error)
}

View file

@ -26,10 +26,10 @@ describe('Deleting a document', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: this.version
v: this.version,
}
this.result = ['one', 'one and a half', 'two', 'three']
@ -47,19 +47,19 @@ describe('Deleting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
sinon.spy(MockWebApi, 'setDocument')
sinon.spy(MockWebApi, 'getDocument')
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -67,7 +67,7 @@ describe('Deleting a document', function () {
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -103,7 +103,8 @@ describe('Deleting a document', function () {
})
it('should need to reload the doc if read again', function (done) {
MockWebApi.getDocument.called.should.equal.false
MockWebApi.getDocument.resetHistory()
MockWebApi.getDocument.called.should.equals(false)
return DocUpdaterClient.getDoc(
this.project_id,
this.doc_id,
@ -133,10 +134,10 @@ describe('Deleting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines
lines: this.lines,
})
sinon.spy(MockWebApi, 'setDocument')
sinon.spy(MockWebApi, 'getDocument')
@ -164,7 +165,7 @@ describe('Deleting a document', function () {
})
it('should need to reload the doc if read again', function (done) {
MockWebApi.getDocument.called.should.equal.false
MockWebApi.getDocument.called.should.equals(false)
return DocUpdaterClient.getDoc(
this.project_id,
this.doc_id,

View file

@ -33,12 +33,12 @@ describe('Deleting a project', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: 0
v: 0,
},
updatedLines: ['one', 'one and a half', 'two', 'three']
updatedLines: ['one', 'one and a half', 'two', 'three'],
},
{
id: (doc_id1 = DocUpdaterClient.randomId()),
@ -48,18 +48,18 @@ describe('Deleting a project', function () {
op: [
{
i: 'four and a half\n',
p: 5
}
p: 5,
},
],
v: 0
v: 0,
},
updatedLines: ['four', 'four and a half', 'five', 'six']
}
updatedLines: ['four', 'four and a half', 'five', 'six'],
},
]
for (const doc of Array.from(this.docs)) {
MockWebApi.insertDoc(this.project_id, doc.id, {
lines: doc.lines,
version: doc.update.v
version: doc.update.v,
})
}
@ -73,12 +73,12 @@ describe('Deleting a project', function () {
sinon.spy(MockProjectHistoryApi, 'flushProject')
return async.series(
this.docs.map((doc) => {
return (callback) => {
this.docs.map(doc => {
return callback => {
return DocUpdaterClient.preloadDoc(
this.project_id,
doc.id,
(error) => {
error => {
if (error != null) {
return callback(error)
}
@ -86,7 +86,7 @@ describe('Deleting a project', function () {
this.project_id,
doc.id,
doc.update,
(error) => {
error => {
return callback(error)
}
)
@ -94,7 +94,7 @@ describe('Deleting a project', function () {
)
}
}),
(error) => {
error => {
if (error != null) {
throw error
}
@ -122,7 +122,7 @@ describe('Deleting a project', function () {
})
it('should send each document to the web api', function () {
return Array.from(this.docs).map((doc) =>
return Array.from(this.docs).map(doc =>
MockWebApi.setDocument
.calledWith(this.project_id, doc.id, doc.updatedLines)
.should.equal(true)
@ -132,8 +132,8 @@ describe('Deleting a project', function () {
it('should need to reload the docs if read again', function (done) {
sinon.spy(MockWebApi, 'getDocument')
return async.series(
this.docs.map((doc) => {
return (callback) => {
this.docs.map(doc => {
return callback => {
MockWebApi.getDocument
.calledWith(this.project_id, doc.id)
.should.equal(false)
@ -157,7 +157,7 @@ describe('Deleting a project', function () {
})
it('should flush each doc in track changes', function () {
return Array.from(this.docs).map((doc) =>
return Array.from(this.docs).map(doc =>
MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true)
)
})
@ -176,8 +176,8 @@ describe('Deleting a project', function () {
sinon.spy(MockProjectHistoryApi, 'flushProject')
return async.series(
this.docs.map((doc) => {
return (callback) => {
this.docs.map(doc => {
return callback => {
return DocUpdaterClient.preloadDoc(
this.project_id,
doc.id,
@ -185,7 +185,7 @@ describe('Deleting a project', function () {
)
}
}),
(error) => {
error => {
if (error != null) {
throw error
}
@ -232,8 +232,8 @@ describe('Deleting a project', function () {
sinon.spy(MockProjectHistoryApi, 'flushProject')
return async.series(
this.docs.map((doc) => {
return (callback) => {
this.docs.map(doc => {
return callback => {
return DocUpdaterClient.preloadDoc(
this.project_id,
doc.id,
@ -241,7 +241,7 @@ describe('Deleting a project', function () {
)
}
}),
(error) => {
error => {
if (error != null) {
throw error
}
@ -273,7 +273,7 @@ describe('Deleting a project', function () {
})
it('should send each document to the web api', function () {
return Array.from(this.docs).map((doc) =>
return Array.from(this.docs).map(doc =>
MockWebApi.setDocument
.calledWith(this.project_id, doc.id, doc.updatedLines)
.should.equal(true)
@ -281,7 +281,7 @@ describe('Deleting a project', function () {
})
it('should flush each doc in track changes', function () {
return Array.from(this.docs).map((doc) =>
return Array.from(this.docs).map(doc =>
MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true)
)
})

View file

@ -31,12 +31,12 @@ describe('Flushing a project', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: 0
v: 0,
},
updatedLines: ['one', 'one and a half', 'two', 'three']
updatedLines: ['one', 'one and a half', 'two', 'three'],
},
{
id: (doc_id1 = DocUpdaterClient.randomId()),
@ -46,18 +46,18 @@ describe('Flushing a project', function () {
op: [
{
i: 'four and a half\n',
p: 5
}
p: 5,
},
],
v: 0
v: 0,
},
updatedLines: ['four', 'four and a half', 'five', 'six']
}
updatedLines: ['four', 'four and a half', 'five', 'six'],
},
]
for (const doc of Array.from(this.docs)) {
MockWebApi.insertDoc(this.project_id, doc.id, {
lines: doc.lines,
version: doc.update.v
version: doc.update.v,
})
}
return DocUpdaterApp.ensureRunning(done)
@ -68,12 +68,12 @@ describe('Flushing a project', function () {
sinon.spy(MockWebApi, 'setDocument')
return async.series(
this.docs.map((doc) => {
return (callback) => {
this.docs.map(doc => {
return callback => {
return DocUpdaterClient.preloadDoc(
this.project_id,
doc.id,
(error) => {
error => {
if (error != null) {
return callback(error)
}
@ -81,7 +81,7 @@ describe('Flushing a project', function () {
this.project_id,
doc.id,
doc.update,
(error) => {
error => {
return callback(error)
}
)
@ -89,7 +89,7 @@ describe('Flushing a project', function () {
)
}
}),
(error) => {
error => {
if (error != null) {
throw error
}
@ -115,7 +115,7 @@ describe('Flushing a project', function () {
})
it('should send each document to the web api', function () {
return Array.from(this.docs).map((doc) =>
return Array.from(this.docs).map(doc =>
MockWebApi.setDocument
.calledWith(this.project_id, doc.id, doc.updatedLines)
.should.equal(true)
@ -124,8 +124,8 @@ describe('Flushing a project', function () {
return it('should update the lines in the doc updater', function (done) {
return async.series(
this.docs.map((doc) => {
return (callback) => {
this.docs.map(doc => {
return callback => {
return DocUpdaterClient.getDoc(
this.project_id,
doc.id,

View file

@ -31,10 +31,10 @@ describe('Flushing a doc to Mongo', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: this.version
v: this.version,
}
this.result = ['one', 'one and a half', 'two', 'three']
return DocUpdaterApp.ensureRunning(done)
@ -44,19 +44,19 @@ describe('Flushing a doc to Mongo', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
sinon.spy(MockWebApi, 'setDocument')
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.sendUpdates(
this.project_id,
this.doc_id,
[this.update],
(error) => {
error => {
if (error != null) {
throw error
}
@ -90,10 +90,10 @@ describe('Flushing a doc to Mongo', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines
lines: this.lines,
})
sinon.spy(MockWebApi, 'setDocument')
return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done)
@ -112,11 +112,11 @@ describe('Flushing a doc to Mongo', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
let t = 30000
sinon

View file

@ -29,13 +29,13 @@ describe('Getting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
sinon.spy(MockWebApi, 'getDocument')
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.getDoc(
@ -71,17 +71,17 @@ describe('Getting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -115,23 +115,23 @@ describe('Getting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: (this.lines = ['one', 'two', 'three'])
lines: (this.lines = ['one', 'two', 'three']),
})
this.updates = __range__(0, 199, true).map((v) => ({
this.updates = __range__(0, 199, true).map(v => ({
doc_id: this.doc_id,
op: [{ i: v.toString(), p: 0 }],
v
v,
}))
return DocUpdaterClient.sendUpdates(
this.project_id,
this.doc_id,
this.updates,
(error) => {
error => {
if (error != null) {
throw error
}
@ -191,7 +191,7 @@ describe('Getting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
return DocUpdaterClient.getDoc(
this.project_id,
@ -212,7 +212,7 @@ describe('Getting a document', function () {
before(function (done) {
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
sinon
.stub(MockWebApi, 'getDocument')
@ -246,7 +246,7 @@ describe('Getting a document', function () {
this.timeout = 10000
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
sinon
.stub(MockWebApi, 'getDocument')

View file

@ -30,17 +30,17 @@ describe('Getting documents for project', function () {
this.projectStateHash = DocUpdaterClient.randomId()
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -67,17 +67,17 @@ describe('Getting documents for project', function () {
this.projectStateHash = DocUpdaterClient.randomId()
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -110,7 +110,7 @@ describe('Getting documents for project', function () {
return it('should return the documents', function () {
return this.returnedDocs.should.deep.equal([
{ _id: this.doc_id, lines: this.lines, v: this.version }
{ _id: this.doc_id, lines: this.lines, v: this.version },
])
})
})
@ -120,17 +120,17 @@ describe('Getting documents for project', function () {
this.projectStateHash = DocUpdaterClient.randomId()
;[this.project_id, this.doc_id] = Array.from([
DocUpdaterClient.randomId(),
DocUpdaterClient.randomId()
DocUpdaterClient.randomId(),
])
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}

View file

@ -32,36 +32,36 @@ describe('Ranges', function () {
this.id_seed = '587357bd35e64f6157'
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['aaa']
lines: ['aaa'],
}
this.updates = [
{
doc: this.doc.id,
op: [{ i: '123', p: 1 }],
v: 0,
meta: { user_id: this.user_id }
meta: { user_id: this.user_id },
},
{
doc: this.doc.id,
op: [{ i: '456', p: 5 }],
v: 1,
meta: { user_id: this.user_id, tc: this.id_seed }
meta: { user_id: this.user_id, tc: this.id_seed },
},
{
doc: this.doc.id,
op: [{ d: '12', p: 1 }],
v: 2,
meta: { user_id: this.user_id }
}
meta: { user_id: this.user_id },
},
]
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
version: 0
version: 0,
})
const jobs = []
for (const update of Array.from(this.updates)) {
;((update) => {
return jobs.push((callback) =>
;(update => {
return jobs.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc.id,
@ -72,18 +72,18 @@ describe('Ranges', function () {
})(update)
}
return DocUpdaterApp.ensureRunning((error) => {
return DocUpdaterApp.ensureRunning(error => {
if (error != null) {
throw error
}
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
return async.series(jobs, (error) => {
return async.series(jobs, error => {
if (error != null) {
throw error
}
@ -119,25 +119,25 @@ describe('Ranges', function () {
this.user_id = DocUpdaterClient.randomId()
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['foo bar baz']
lines: ['foo bar baz'],
}
this.updates = [
{
doc: this.doc.id,
op: [
{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }
{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) },
],
v: 0
}
v: 0,
},
]
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
version: 0
version: 0,
})
const jobs = []
for (const update of Array.from(this.updates)) {
;((update) => {
return jobs.push((callback) =>
;(update => {
return jobs.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc.id,
@ -150,11 +150,11 @@ describe('Ranges', function () {
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
return async.series(jobs, (error) => {
return async.series(jobs, error => {
if (error != null) {
throw error
}
@ -188,31 +188,31 @@ describe('Ranges', function () {
this.user_id = DocUpdaterClient.randomId()
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['foo bar baz']
lines: ['foo bar baz'],
}
this.updates = [
{
doc: this.doc.id,
op: [{ i: 'ABC', p: 3 }],
v: 0,
meta: { user_id: this.user_id }
meta: { user_id: this.user_id },
},
{
doc: this.doc.id,
op: [
{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }
{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) },
],
v: 0
}
v: 0,
},
]
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
version: 0
version: 0,
})
const jobs = []
for (const update of Array.from(this.updates)) {
;((update) => {
return jobs.push((callback) =>
;(update => {
return jobs.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc.id,
@ -225,11 +225,11 @@ describe('Ranges', function () {
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
return async.series(jobs, (error) => {
return async.series(jobs, error => {
if (error != null) {
throw error
}
@ -265,13 +265,13 @@ describe('Ranges', function () {
this.id_seed = '587357bd35e64f6157'
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['a123aa']
lines: ['a123aa'],
}
this.update = {
doc: this.doc.id,
op: [{ i: '456', p: 5 }],
v: 0,
meta: { user_id: this.user_id, tc: this.id_seed }
meta: { user_id: this.user_id, tc: this.id_seed },
}
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
@ -282,16 +282,16 @@ describe('Ranges', function () {
op: { i: '123', p: 1 },
metadata: {
user_id: this.user_id,
ts: new Date()
}
}
]
}
ts: new Date(),
},
},
],
},
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -299,7 +299,7 @@ describe('Ranges', function () {
this.project_id,
this.doc.id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -327,25 +327,21 @@ describe('Ranges', function () {
})
return it('should flush the ranges to the persistence layer again', function (done) {
return DocUpdaterClient.flushDoc(
this.project_id,
this.doc.id,
(error) => {
if (error != null) {
throw error
}
return MockWebApi.getDocument(
this.project_id,
this.doc.id,
(error, doc) => {
const { changes } = doc.ranges
changes[0].op.should.deep.equal({ i: '123', p: 1 })
changes[1].op.should.deep.equal({ i: '456', p: 5 })
return done()
}
)
return DocUpdaterClient.flushDoc(this.project_id, this.doc.id, error => {
if (error != null) {
throw error
}
)
return MockWebApi.getDocument(
this.project_id,
this.doc.id,
(error, doc) => {
const { changes } = doc.ranges
changes[0].op.should.deep.equal({ i: '123', p: 1 })
changes[1].op.should.deep.equal({ i: '456', p: 5 })
return done()
}
)
})
})
})
@ -356,22 +352,22 @@ describe('Ranges', function () {
this.id_seed = '587357bd35e64f6157'
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['aaa']
lines: ['aaa'],
}
this.update = {
doc: this.doc.id,
op: [{ i: '456', p: 1 }],
v: 0,
meta: { user_id: this.user_id, tc: this.id_seed }
meta: { user_id: this.user_id, tc: this.id_seed },
}
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
version: 0
version: 0,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -379,7 +375,7 @@ describe('Ranges', function () {
this.project_id,
this.doc.id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -411,7 +407,7 @@ describe('Ranges', function () {
this.project_id,
this.doc.id,
this.id_seed + '000001',
(error) => {
error => {
if (error != null) {
throw error
}
@ -437,21 +433,21 @@ describe('Ranges', function () {
this.user_id = DocUpdaterClient.randomId()
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['foo bar']
lines: ['foo bar'],
}
this.update = {
doc: this.doc.id,
op: [{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }],
v: 0
v: 0,
}
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
version: 0
version: 0,
})
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -459,7 +455,7 @@ describe('Ranges', function () {
this.project_id,
this.doc.id,
this.update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -518,7 +514,7 @@ describe('Ranges', function () {
this.id_seed = DocUpdaterClient.randomId()
this.doc = {
id: DocUpdaterClient.randomId(),
lines: ['aaa']
lines: ['aaa'],
}
this.i = new Array(3 * 1024 * 1024).join('a')
this.updates = [
@ -526,17 +522,17 @@ describe('Ranges', function () {
doc: this.doc.id,
op: [{ i: this.i, p: 1 }],
v: 0,
meta: { user_id: this.user_id, tc: this.id_seed }
}
meta: { user_id: this.user_id, tc: this.id_seed },
},
]
MockWebApi.insertDoc(this.project_id, this.doc.id, {
lines: this.doc.lines,
version: 0
version: 0,
})
const jobs = []
for (const update of Array.from(this.updates)) {
;((update) => {
return jobs.push((callback) =>
;(update => {
return jobs.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc.id,
@ -549,11 +545,11 @@ describe('Ranges', function () {
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc.id,
(error) => {
error => {
if (error != null) {
throw error
}
return async.series(jobs, (error) => {
return async.series(jobs, error => {
if (error != null) {
throw error
}
@ -593,34 +589,34 @@ describe('Ranges', function () {
op: {
c: 'a',
p: 5,
tid: (this.tid = DocUpdaterClient.randomId())
tid: (this.tid = DocUpdaterClient.randomId()),
},
metadata: {
user_id: this.user_id,
ts: new Date()
}
}
]
}
ts: new Date(),
},
},
],
},
})
this.updates = [
{
doc: this.doc_id,
op: [{ d: 'foo ', p: 0 }],
v: 0,
meta: { user_id: this.user_id }
meta: { user_id: this.user_id },
},
{
doc: this.doc_id,
op: [{ d: 'bar ', p: 0 }],
v: 1,
meta: { user_id: this.user_id }
}
meta: { user_id: this.user_id },
},
]
const jobs = []
for (const update of Array.from(this.updates)) {
;((update) => {
return jobs.push((callback) =>
;(update => {
return jobs.push(callback =>
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
@ -633,7 +629,7 @@ describe('Ranges', function () {
return DocUpdaterClient.preloadDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -669,7 +665,7 @@ describe('Ranges', function () {
db.docSnapshots
.find({
project_id: ObjectId(this.project_id),
doc_id: ObjectId(this.doc_id)
doc_id: ObjectId(this.doc_id),
})
.toArray((error, docSnapshots) => {
if (error != null) {
@ -681,7 +677,7 @@ describe('Ranges', function () {
expect(docSnapshots[0].ranges.comments[0].op).to.deep.equal({
c: 'a',
p: 1,
tid: this.tid
tid: this.tid,
})
return done()
})

View file

@ -1,6 +1,6 @@
const sinon = require('sinon')
const { expect } = require('chai')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const docUpdaterRedis = require('@overleaf/redis-wrapper').createClient(
Settings.redis.documentupdater
)
@ -21,10 +21,10 @@ describe('Setting a document', function () {
op: [
{
i: 'one and a half\n',
p: 4
}
p: 4,
},
],
v: this.version
v: this.version,
}
this.result = ['one', 'one and a half', 'two', 'three']
this.newLines = ['these', 'are', 'the', 'new', 'lines']
@ -49,9 +49,9 @@ describe('Setting a document', function () {
this.doc_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
if (error) {
throw error
}
@ -59,7 +59,7 @@ describe('Setting a document', function () {
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error) {
throw error
}
@ -149,7 +149,7 @@ describe('Setting a document', function () {
this.doc_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.setDocLines(
this.project_id,
@ -212,23 +212,23 @@ describe('Setting a document', function () {
{
desc: 'when the updated doc is too large for the body parser',
size: Settings.maxJsonRequestSize,
expectedStatusCode: 413
expectedStatusCode: 413,
},
{
desc: 'when the updated doc is larger than the HTTP controller limit',
size: Settings.max_doc_length,
expectedStatusCode: 406
}
expectedStatusCode: 406,
},
]
DOC_TOO_LARGE_TEST_CASES.forEach((testCase) => {
DOC_TOO_LARGE_TEST_CASES.forEach(testCase => {
describe(testCase.desc, function () {
before(function (done) {
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
this.newLines = []
while (JSON.stringify(this.newLines).length <= testCase.size) {
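
Editor's note: the while loop grows newLines until its JSON encoding crosses the limit under test, so each table-driven case sends a payload just over its threshold. The same idea as a standalone sketch:

// Hypothetical helper mirroring the loop above: grow a lines array until
// its JSON encoding exceeds `limit` bytes, yielding a just-over-limit body.
function buildOversizedLines(limit) {
  const lines = []
  while (JSON.stringify(lines).length <= limit) {
    lines.push('0123456789'.repeat(100000))
  }
  return lines
}
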
@ -281,7 +281,7 @@ describe('Setting a document', function () {
this.doc_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
this.newLines = []
@ -333,14 +333,14 @@ describe('Setting a document', function () {
op: [
{
d: 'one and a half\n',
p: 4
}
p: 4,
},
],
meta: {
tc: this.id_seed,
user_id: this.user_id
user_id: this.user_id,
},
v: this.version
v: this.version,
}
})
@ -350,9 +350,9 @@ describe('Setting a document', function () {
this.doc_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
if (error) {
throw error
}
@ -360,7 +360,7 @@ describe('Setting a document', function () {
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error) {
throw error
}
@ -413,9 +413,9 @@ describe('Setting a document', function () {
this.doc_id = DocUpdaterClient.randomId()
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
version: this.version
version: this.version,
})
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
if (error) {
throw error
}
@ -423,7 +423,7 @@ describe('Setting a document', function () {
this.project_id,
this.doc_id,
this.update,
(error) => {
error => {
if (error) {
throw error
}

View file

@ -1,5 +1,5 @@
const { expect } = require('chai')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const MockWebApi = require('./helpers/MockWebApi')
const DocUpdaterClient = require('./helpers/DocUpdaterClient')
@ -16,10 +16,10 @@ describe('SizeChecks', function () {
op: [
{
i: 'insert some more lines that will bring it above the limit\n',
p: 42
}
p: 42,
},
],
v: this.version
v: this.version,
}
this.project_id = DocUpdaterClient.randomId()
this.doc_id = DocUpdaterClient.randomId()
@ -30,7 +30,7 @@ describe('SizeChecks', function () {
this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 + 1)]
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
v: this.version
v: this.version,
})
})
@ -47,13 +47,13 @@ describe('SizeChecks', function () {
const update = {
doc: this.doc_id,
op: this.update.op,
v: this.version
v: this.version,
}
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
update,
(error) => {
error => {
if (error != null) {
throw error
}
@ -77,7 +77,7 @@ describe('SizeChecks', function () {
this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 - 1)]
MockWebApi.insertDoc(this.project_id, this.doc_id, {
lines: this.lines,
v: this.version
v: this.version,
})
})
@ -98,13 +98,13 @@ describe('SizeChecks', function () {
const update = {
doc: this.doc_id,
op: this.update.op,
v: this.version
v: this.version,
}
DocUpdaterClient.sendUpdate(
this.project_id,
this.doc_id,
update,
(error) => {
error => {
if (error != null) {
throw error
}

View file

@ -31,7 +31,7 @@ module.exports = {
this.initing = true
this.callbacks.push(callback)
waitForDb().then(() => {
return app.listen(3003, 'localhost', (error) => {
return app.listen(3003, 'localhost', error => {
if (error != null) {
throw error
}
@ -45,5 +45,5 @@ module.exports = {
})()
})
})
}
},
}

View file

@ -1,5 +1,5 @@
let DocUpdaterClient
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const _ = require('lodash')
const rclient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.documentupdater
@ -40,12 +40,12 @@ module.exports = DocUpdaterClient = {
rclient.rpush(
keys.pendingUpdates({ doc_id: docId }),
JSON.stringify(update),
(error) => {
error => {
if (error) {
return callback(error)
}
const docKey = `${projectId}:${docId}`
rclient.sadd('DocsWithPendingUpdates', docKey, (error) => {
rclient.sadd('DocsWithPendingUpdates', docKey, error => {
if (error) {
return callback(error)
}
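
Editor's note: sendUpdate above pushes the serialized op onto a per-doc pending list and registers the doc in the DocsWithPendingUpdates set. A hedged sketch of the matching consumer side, with an assumed PendingUpdates key schema:

// Hypothetical consumer for the queue written above: pick one doc with
// pending work, then read its queued ops in arrival order.
function takeNextDocWithUpdates(rclient, callback) {
  rclient.spop('DocsWithPendingUpdates', (error, docKey) => {
    if (error || docKey == null) return callback(error, null)
    const [projectId, docId] = docKey.split(':')
    rclient.lrange(`PendingUpdates:${docId}`, 0, -1, (error, entries) => {
      if (error) return callback(error)
      callback(null, { projectId, docId, updates: entries.map(e => JSON.parse(e)) })
    })
  })
}
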
@ -61,14 +61,14 @@ module.exports = DocUpdaterClient = {
},
sendUpdates(projectId, docId, updates, callback) {
DocUpdaterClient.preloadDoc(projectId, docId, (error) => {
DocUpdaterClient.preloadDoc(projectId, docId, error => {
if (error) {
return callback(error)
}
const jobs = updates.map((update) => (callback) => {
const jobs = updates.map(update => callback => {
DocUpdaterClient.sendUpdate(projectId, docId, update, callback)
})
async.series(jobs, (err) => {
async.series(jobs, err => {
if (err) {
return callback(err)
}
@ -80,7 +80,7 @@ module.exports = DocUpdaterClient = {
waitForPendingUpdates(projectId, docId, callback) {
async.retry(
{ times: 30, interval: 100 },
(cb) =>
cb =>
rclient.llen(keys.pendingUpdates({ doc_id: docId }), (err, length) => {
if (err) {
return cb(err)
@ -138,8 +138,8 @@ module.exports = DocUpdaterClient = {
lines,
source,
user_id: userId,
undoing
}
undoing,
},
},
(error, res, body) => callback(error, res, body)
)
@ -204,9 +204,9 @@ module.exports = DocUpdaterClient = {
request.post(
{
url: `http://localhost:3003/project/${projectId}`,
json: { userId, updates, version }
json: { userId, updates, version },
},
(error, res, body) => callback(error, res, body)
)
}
},
}
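
Editor's note: waitForPendingUpdates in this file polls the pending-updates list length with async.retry, treating a non-empty queue as a retryable failure. The pattern in isolation, as a sketch:

const async = require('async')

// Sketch of the polling used by waitForPendingUpdates above: check the
// queue length up to 30 times, 100ms apart, succeeding once it drains.
function waitUntilQueueEmpty(rclient, key, callback) {
  async.retry(
    { times: 30, interval: 100 },
    cb =>
      rclient.llen(key, (err, length) => {
        if (err) return cb(err)
        if (length > 0) return cb(new Error('updates still pending'))
        cb()
      }),
    callback
  )
}
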

View file

@ -24,7 +24,7 @@ module.exports = MockProjectHistoryApi = {
run() {
app.post('/project/:project_id/flush', (req, res, next) => {
return this.flushProject(req.params.project_id, (error) => {
return this.flushProject(req.params.project_id, error => {
if (error != null) {
return res.sendStatus(500)
} else {
@ -33,12 +33,12 @@ module.exports = MockProjectHistoryApi = {
})
})
return app.listen(3054, (error) => {
return app.listen(3054, error => {
if (error != null) {
throw error
}
})
}
},
}
MockProjectHistoryApi.run()

View file

@ -24,7 +24,7 @@ module.exports = MockTrackChangesApi = {
run() {
app.post('/project/:project_id/doc/:doc_id/flush', (req, res, next) => {
return this.flushDoc(req.params.doc_id, (error) => {
return this.flushDoc(req.params.doc_id, error => {
if (error != null) {
return res.sendStatus(500)
} else {
@ -34,16 +34,16 @@ module.exports = MockTrackChangesApi = {
})
return app
.listen(3015, (error) => {
.listen(3015, error => {
if (error != null) {
throw error
}
})
.on('error', (error) => {
.on('error', error => {
console.error('error starting MockTrackChangesApi:', error.message)
return process.exit(1)
})
}
},
}
MockTrackChangesApi.run()

View file

@ -96,7 +96,7 @@ module.exports = MockWebApi = {
req.body.ranges,
req.body.lastUpdatedAt,
req.body.lastUpdatedBy,
(error) => {
error => {
if (error != null) {
return res.sendStatus(500)
} else {
@ -108,16 +108,16 @@ module.exports = MockWebApi = {
)
return app
.listen(3000, (error) => {
.listen(3000, error => {
if (error != null) {
throw error
}
})
.on('error', (error) => {
.on('error', error => {
console.error('error starting MockWebApi:', error.message)
return process.exit(1)
})
}
},
}
MockWebApi.run()

View file

@ -4,18 +4,18 @@ const rclient1 = redis.createClient({
cluster: [
{
port: '7000',
host: 'localhost'
}
]
host: 'localhost',
},
],
})
const rclient2 = redis.createClient({
cluster: [
{
port: '7000',
host: 'localhost'
}
]
host: 'localhost',
},
],
})
let counter = 0
@ -23,7 +23,7 @@ const sendPing = function (cb) {
if (cb == null) {
cb = function () {}
}
return rclient1.rpush('test-blpop', counter, (error) => {
return rclient1.rpush('test-blpop', counter, error => {
if (error != null) {
console.error('[SENDING ERROR]', error.message)
}
@ -35,7 +35,7 @@ const sendPing = function (cb) {
}
let previous = null
const listenForPing = (cb) =>
const listenForPing = cb =>
rclient2.blpop('test-blpop', 200, (error, result) => {
if (error != null) {
return cb(error)
@ -57,7 +57,7 @@ const listenForPing = (cb) =>
const PING_DELAY = 100
;(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))()
;(listenInBackground = () =>
listenForPing((error) => {
listenForPing(error => {
if (error) {
console.error('[RECEIVING ERROR]', error.message)
}
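
Editor's note: this script measures end-to-end queue latency by RPUSHing an incrementing counter from one client while another blocks on BLPOP and checks the sequence. The round trip as a single self-contained sketch, with hypothetical producer/consumer clients:

// Hypothetical one-shot round trip: push a timestamped ping, block until
// it arrives, and report the delivery delay. blpop yields [key, value].
function timeOneRoundTrip(producer, consumer, callback) {
  const sentAt = Date.now()
  producer.rpush('test-blpop', sentAt, error => {
    if (error) return callback(error)
    consumer.blpop('test-blpop', 200, (error, result) => {
      if (error) return callback(error)
      if (result == null) return callback(new Error('timed out'))
      callback(null, Date.now() - Number(result[1]))
    })
  })
}
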

View file

@ -4,18 +4,18 @@ const rclient1 = redis.createClient({
cluster: [
{
port: '7000',
host: 'localhost'
}
]
host: 'localhost',
},
],
})
const rclient2 = redis.createClient({
cluster: [
{
port: '7000',
host: 'localhost'
}
]
host: 'localhost',
},
],
})
let counter = 0
@ -23,7 +23,7 @@ const sendPing = function (cb) {
if (cb == null) {
cb = function () {}
}
return rclient1.publish('test-pubsub', counter, (error) => {
return rclient1.publish('test-pubsub', counter, error => {
if (error) {
console.error('[SENDING ERROR]', error.message)
}

View file

@ -13,16 +13,16 @@ const stubs = {
log: sandbox.stub(),
warn: sandbox.stub(),
err: sandbox.stub(),
error: sandbox.stub()
}
error: sandbox.stub(),
},
}
// SandboxedModule configuration
SandboxedModule.configure({
requires: {
'logger-sharelatex': stubs.logger
'logger-sharelatex': stubs.logger,
},
globals: { Buffer, JSON, Math, console, process }
globals: { Buffer, JSON, Math, console, process },
})
// Mocha hooks
@ -33,5 +33,5 @@ exports.mochaHooks = {
afterEach() {
sandbox.reset()
}
},
}
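
Editor's note: the configure call above injects the stubbed logger into every module loaded through SandboxedModule, and the restricted globals list keeps tests honest about implicit dependencies. A typical per-test use of that setup, with a hypothetical module path and stub:

const SandboxedModule = require('sandboxed-module')

// Hypothetical test require: the global logger stub is injected
// automatically; only module-specific collaborators are stubbed here.
const DocStore = SandboxedModule.require('../../../app/js/DocStore', {
  requires: {
    './RedisManager': { getDoc: (docId, cb) => cb(null, ['line one'], 1) },
  },
})
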

View file

@ -31,7 +31,7 @@ const transform = function (op1, op2) {
if (op2.p < op1.p) {
return {
p: op1.p + op2.i.length,
i: op1.i
i: op1.i,
}
} else {
return op1
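
Editor's note: this hunk touches the stress test's one-sided insert/insert transform: when a concurrent op inserted earlier in the document, the local op's position shifts right by the other insert's length. Self-contained, with a worked example on hypothetical values:

// Copy of the transform above, plus a worked example.
function transform(op1, op2) {
  if (op2.p < op1.p) {
    return { p: op1.p + op2.i.length, i: op1.i } // shift past op2's text
  }
  return op1
}

// 'hey ' (4 chars) landed at position 0 first, so the insert at 2 moves to 6.
console.log(transform({ i: 'world', p: 2 }, { i: 'hey ', p: 0 }))
// => { p: 6, i: 'world' }
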
@ -61,7 +61,7 @@ class StressTestClient {
conflicts: 0,
local_updates: 0,
remote_updates: 0,
max_delay: 0
max_delay: 0,
}
DocUpdaterClient.subscribeToAppliedOps((channel, update) => {
@ -81,7 +81,7 @@ class StressTestClient {
this.content = insert(this.content, this.pos, data)
this.inflight_op = {
i: data,
p: this.pos++
p: this.pos++,
}
this.resendUpdate()
return (this.inflight_op_sent = Date.now())
@ -94,9 +94,9 @@ class StressTestClient {
op: [this.inflight_op],
v: this.version,
meta: {
source: this.client_id
source: this.client_id,
},
dupIfSource: [this.client_id]
dupIfSource: [this.client_id],
})
return (this.update_timer = setTimeout(() => {
console.log(
@ -277,7 +277,7 @@ const checkDocument = function (project_id, doc_id, clients, callback) {
if (callback == null) {
callback = function (error) {}
}
const jobs = clients.map((client) => (cb) => client.check(cb))
const jobs = clients.map(client => cb => client.check(cb))
return async.parallel(jobs, callback)
}
@ -304,7 +304,7 @@ const printSummary = function (doc_id, clients) {
local_updates: 0,
remote_updates: 0,
conflicts: 0,
max_delay: 0
max_delay: 0,
})
)
}
@ -326,7 +326,7 @@ for (const doc_and_project_id of Array.from(process.argv.slice(5))) {
[new Array(CLIENT_COUNT + 2).join('a')],
null,
null,
(error) => {
error => {
if (error != null) {
throw error
}
@ -360,22 +360,23 @@ for (const doc_and_project_id of Array.from(process.argv.slice(5))) {
content,
pos,
version,
updateDelay: UPDATE_DELAY
updateDelay: UPDATE_DELAY,
})
return clients.push(client)
})(pos)
}
return (runBatch = function () {
const jobs = clients.map((client) => (cb) =>
client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb)
const jobs = clients.map(
client => cb =>
client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb)
)
return async.parallel(jobs, (error) => {
return async.parallel(jobs, error => {
if (error != null) {
throw error
}
printSummary(doc_id, clients)
return checkDocument(project_id, doc_id, clients, (error) => {
return checkDocument(project_id, doc_id, clients, error => {
if (error != null) {
throw error
}

View file

@ -32,8 +32,8 @@ describe('DiffCodec', function () {
expect(ops).to.deep.equal([
{
i: 'beautiful ',
p: 6
}
p: 6,
},
])
return done()
}
@ -49,7 +49,7 @@ describe('DiffCodec', function () {
(error, ops) => {
expect(ops).to.deep.equal([
{ i: 'tall ', p: 4 },
{ i: 'red ', p: 29 }
{ i: 'red ', p: 29 },
])
return done()
}
@ -66,8 +66,8 @@ describe('DiffCodec', function () {
expect(ops).to.deep.equal([
{
d: 'beautiful ',
p: 6
}
p: 6,
},
])
return done()
}
@ -83,7 +83,7 @@ describe('DiffCodec', function () {
(error, ops) => {
expect(ops).to.deep.equal([
{ d: 'tall ', p: 4 },
{ d: 'red ', p: 24 }
{ d: 'red ', p: 24 },
])
return done()
}

View file

@ -23,10 +23,10 @@ describe('DispatchManager', function () {
this.DispatchManager = SandboxedModule.require(modulePath, {
requires: {
'./UpdateManager': (this.UpdateManager = {}),
'settings-sharelatex': (this.settings = {
'@overleaf/settings': (this.settings = {
redis: {
documentupdater: {}
}
documentupdater: {},
},
}),
'@overleaf/redis-wrapper': (this.redis = {}),
'./RateLimitManager': {},
@ -40,15 +40,15 @@ describe('DispatchManager', function () {
}
Timer.initClass()
return Timer
})())
})
}
})()),
}),
},
})
this.callback = sinon.stub()
return (this.RateLimiter = {
run(task, cb) {
return task(cb)
}
},
})
}) // run task without rate limit
@ -144,7 +144,7 @@ describe('DispatchManager', function () {
beforeEach(function (done) {
this.client = {
auth: sinon.stub(),
blpop: sinon.stub().callsArgWith(2)
blpop: sinon.stub().callsArgWith(2),
}
this.redis.createClient = sinon.stub().returns(this.client)
this.queueShardNumber = 7
@ -166,7 +166,7 @@ describe('DispatchManager', function () {
return describe('run', function () {
return it('should call _waitForUpdateThenDispatchWorker until shutting down', function (done) {
let callCount = 0
this.worker._waitForUpdateThenDispatchWorker = (callback) => {
this.worker._waitForUpdateThenDispatchWorker = callback => {
if (callback == null) {
callback = function (error) {}
}

View file

@ -29,7 +29,7 @@ describe('DocumentManager', function () {
'./PersistenceManager': (this.PersistenceManager = {}),
'./HistoryManager': (this.HistoryManager = {
flushDocChangesAsync: sinon.stub(),
flushProjectChangesAsync: sinon.stub()
flushProjectChangesAsync: sinon.stub(),
}),
'./Metrics': (this.Metrics = {
Timer: (Timer = (function () {
@ -40,14 +40,14 @@ describe('DocumentManager', function () {
}
Timer.initClass()
return Timer
})())
})()),
}),
'./RealTimeRedisManager': (this.RealTimeRedisManager = {}),
'./DiffCodec': (this.DiffCodec = {}),
'./UpdateManager': (this.UpdateManager = {}),
'./RangesManager': (this.RangesManager = {}),
'./Errors': Errors
}
'./Errors': Errors,
},
})
this.project_id = 'project-id-123'
this.projectHistoryId = 'history-id-123'
@ -123,7 +123,7 @@ describe('DocumentManager', function () {
this.project_id,
this.doc_id,
{},
(error) => {
error => {
error.should.exist
this.RedisManager.removeDocFromMemory.called.should.equal(false)
return done()
@ -137,7 +137,7 @@ describe('DocumentManager', function () {
this.project_id,
this.doc_id,
{ ignoreFlushErrors: true },
(error) => {
error => {
if (error != null) {
return done(error)
}
@ -484,7 +484,7 @@ describe('DocumentManager', function () {
this.afterLines = ['after', 'lines']
this.ops = [
{ i: 'foo', p: 4 },
{ d: 'bar', p: 42 }
{ d: 'bar', p: 42 },
]
this.DocumentManager.getDoc = sinon
.stub()
@ -543,8 +543,8 @@ describe('DocumentManager', function () {
meta: {
type: 'external',
source: this.source,
user_id: this.user_id
}
user_id: this.user_id,
},
})
.should.equal(true)
})
@ -636,7 +636,7 @@ describe('DocumentManager', function () {
// Copy ops so we don't interfere with other tests
this.ops = [
{ i: 'foo', p: 4 },
{ d: 'bar', p: 42 }
{ d: 'bar', p: 42 },
]
this.DiffCodec.diffAsShareJsOp = sinon
.stub()
@ -653,7 +653,7 @@ describe('DocumentManager', function () {
})
return it('should set the undo flag on each op', function () {
return Array.from(this.ops).map((op) => op.u.should.equal(true))
return Array.from(this.ops).map(op => op.u.should.equal(true))
})
})
})
@ -666,7 +666,7 @@ describe('DocumentManager', function () {
'mock-change-id-1',
'mock-change-id-2',
'mock-change-id-3',
'mock-change-id-4'
'mock-change-id-4',
]
this.version = 34
this.lines = ['original', 'lines']

View file

@ -21,23 +21,23 @@ describe('HistoryManager', function () {
this.HistoryManager = SandboxedModule.require(modulePath, {
requires: {
request: (this.request = {}),
'settings-sharelatex': (this.Settings = {
'@overleaf/settings': (this.Settings = {
apis: {
project_history: {
enabled: true,
url: 'http://project_history.example.com'
url: 'http://project_history.example.com',
},
trackchanges: {
url: 'http://trackchanges.example.com'
}
}
url: 'http://trackchanges.example.com',
},
},
}),
'./DocumentManager': (this.DocumentManager = {}),
'./HistoryRedisManager': (this.HistoryRedisManager = {}),
'./RedisManager': (this.RedisManager = {}),
'./ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}),
'./Metrics': (this.metrics = { inc: sinon.stub() })
}
'./Metrics': (this.metrics = { inc: sinon.stub() }),
},
})
this.project_id = 'mock-project-id'
this.doc_id = 'mock-doc-id'
@ -118,7 +118,7 @@ describe('HistoryManager', function () {
return this.request.post
.calledWith({
url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`,
qs: { background: true }
qs: { background: true },
})
.should.equal(true)
})
@ -131,7 +131,7 @@ describe('HistoryManager', function () {
.stub()
.callsArgWith(1, null, { statusCode: 204 })
return this.HistoryManager.flushProjectChanges(this.project_id, {
background: true
background: true,
})
})
@ -139,7 +139,7 @@ describe('HistoryManager', function () {
return this.request.post
.calledWith({
url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`,
qs: { background: true }
qs: { background: true },
})
.should.equal(true)
})
@ -149,7 +149,7 @@ describe('HistoryManager', function () {
beforeEach(function () {
this.request.post = sinon.stub()
return this.HistoryManager.flushProjectChanges(this.project_id, {
skip_history_flush: true
skip_history_flush: true,
})
})
@ -372,15 +372,15 @@ describe('HistoryManager', function () {
this.docs = [
{
doc: this.doc_id,
path: 'main.tex'
}
path: 'main.tex',
},
]
this.files = [
{
file: 'mock-file-id',
path: 'universe.png',
url: `www.filestore.test/${this.project_id}/mock-file-id`
}
url: `www.filestore.test/${this.project_id}/mock-file-id`,
},
]
this.ProjectHistoryRedisManager.queueResyncProjectStructure = sinon
.stub()

View file

@ -20,13 +20,13 @@ describe('HistoryRedisManager', function () {
beforeEach(function () {
this.rclient = {
auth() {},
exec: sinon.stub()
exec: sinon.stub(),
}
this.rclient.multi = () => this.rclient
this.HistoryRedisManager = SandboxedModule.require(modulePath, {
requires: {
'@overleaf/redis-wrapper': { createClient: () => this.rclient },
'settings-sharelatex': {
'@overleaf/settings': {
redis: {
history: (this.settings = {
key_schema: {
@ -35,12 +35,12 @@ describe('HistoryRedisManager', function () {
},
docsWithHistoryOps({ project_id }) {
return `DocsWithHistoryOps:${project_id}`
}
}
})
}
}
}
},
},
}),
},
},
},
})
this.doc_id = 'doc-id-123'
this.project_id = 'project-id-123'

View file

@ -9,14 +9,14 @@ describe('HttpController', function () {
requires: {
'./DocumentManager': (this.DocumentManager = {}),
'./HistoryManager': (this.HistoryManager = {
flushProjectChangesAsync: sinon.stub()
flushProjectChangesAsync: sinon.stub(),
}),
'./ProjectManager': (this.ProjectManager = {}),
'./ProjectFlusher': { flushAllProjects() {} },
'./DeleteQueueManager': (this.DeleteQueueManager = {}),
'./Metrics': (this.Metrics = {}),
'./Errors': Errors
}
'./Errors': Errors,
},
})
this.Metrics.Timer = class Timer {}
this.Metrics.Timer.prototype.done = sinon.stub()
@ -27,7 +27,7 @@ describe('HttpController', function () {
this.res = {
send: sinon.stub(),
sendStatus: sinon.stub(),
json: sinon.stub()
json: sinon.stub(),
}
})
@ -42,10 +42,10 @@ describe('HttpController', function () {
this.req = {
params: {
project_id: this.project_id,
doc_id: this.doc_id
doc_id: this.doc_id,
},
query: {},
body: {}
body: {},
}
})
@ -79,7 +79,7 @@ describe('HttpController', function () {
version: this.version,
ops: [],
ranges: this.ranges,
pathname: this.pathname
pathname: this.pathname,
})
.should.equal(true)
})
@ -129,7 +129,7 @@ describe('HttpController', function () {
version: this.version,
ops: this.ops,
ranges: this.ranges,
pathname: this.pathname
pathname: this.pathname,
})
.should.equal(true)
})
@ -186,15 +186,15 @@ describe('HttpController', function () {
headers: {},
params: {
project_id: this.project_id,
doc_id: this.doc_id
doc_id: this.doc_id,
},
query: {},
body: {
lines: this.lines,
source: this.source,
user_id: this.user_id,
undoing: (this.undoing = true)
}
undoing: (this.undoing = true),
},
}
})
@ -230,7 +230,7 @@ describe('HttpController', function () {
lines: this.lines,
source: this.source,
userId: this.user_id,
undoing: this.undoing
undoing: this.undoing,
},
'setting doc via http'
)
@ -280,10 +280,10 @@ describe('HttpController', function () {
beforeEach(function () {
this.req = {
params: {
project_id: this.project_id
project_id: this.project_id,
},
query: {},
body: {}
body: {},
}
})
@ -338,10 +338,10 @@ describe('HttpController', function () {
this.req = {
params: {
project_id: this.project_id,
doc_id: this.doc_id
doc_id: this.doc_id,
},
query: {},
body: {}
body: {},
}
})
@ -396,10 +396,10 @@ describe('HttpController', function () {
this.req = {
params: {
project_id: this.project_id,
doc_id: this.doc_id
doc_id: this.doc_id,
},
query: {},
body: {}
body: {},
}
})
@ -414,7 +414,7 @@ describe('HttpController', function () {
it('should flush and delete the doc', function () {
this.DocumentManager.flushAndDeleteDocWithLock
.calledWith(this.project_id, this.doc_id, {
ignoreFlushErrors: false
ignoreFlushErrors: false,
})
.should.equal(true)
})
@ -485,10 +485,10 @@ describe('HttpController', function () {
beforeEach(function () {
this.req = {
params: {
project_id: this.project_id
project_id: this.project_id,
},
query: {},
body: {}
body: {},
}
})
@ -560,10 +560,10 @@ describe('HttpController', function () {
params: {
project_id: this.project_id,
doc_id: this.doc_id,
change_id: (this.change_id = 'mock-change-od-1')
change_id: (this.change_id = 'mock-change-od-1'),
},
query: {},
body: {}
body: {},
}
})
@ -605,7 +605,7 @@ describe('HttpController', function () {
'mock-change-od-1',
'mock-change-od-2',
'mock-change-od-3',
'mock-change-od-4'
'mock-change-od-4',
]
this.req.body = { change_ids: this.change_ids }
this.DocumentManager.acceptChangesWithLock = sinon
@ -650,10 +650,10 @@ describe('HttpController', function () {
params: {
project_id: this.project_id,
doc_id: this.doc_id,
comment_id: (this.comment_id = 'mock-comment-id')
comment_id: (this.comment_id = 'mock-comment-id'),
},
query: {},
body: {}
body: {},
}
})
@ -681,7 +681,7 @@ describe('HttpController', function () {
{
projectId: this.project_id,
docId: this.doc_id,
commentId: this.comment_id
commentId: this.comment_id,
},
'deleting comment via http'
)
@ -712,16 +712,16 @@ describe('HttpController', function () {
this.state = '01234567890abcdef'
this.docs = [
{ _id: '1234', lines: 'hello', v: 23 },
{ _id: '4567', lines: 'world', v: 45 }
{ _id: '4567', lines: 'world', v: 45 },
]
this.req = {
params: {
project_id: this.project_id
project_id: this.project_id,
},
query: {
state: this.state
state: this.state,
},
body: {}
body: {},
}
})
@ -817,16 +817,16 @@ describe('HttpController', function () {
type: 'rename-doc',
id: 1,
pathname: 'thesis.tex',
newPathname: 'book.tex'
newPathname: 'book.tex',
},
{ type: 'add-doc', id: 2, pathname: 'article.tex', docLines: 'hello' },
{
type: 'rename-file',
id: 3,
pathname: 'apple.png',
newPathname: 'banana.png'
newPathname: 'banana.png',
},
{ type: 'add-file', id: 4, url: 'filestore.example.com/4' }
{ type: 'add-file', id: 4, url: 'filestore.example.com/4' },
]
this.version = 1234567
this.req = {
@ -835,11 +835,11 @@ describe('HttpController', function () {
projectHistoryId: this.projectHistoryId,
userId: this.userId,
updates: this.updates,
version: this.version
version: this.version,
},
params: {
project_id: this.project_id
}
project_id: this.project_id,
},
}
})
@ -895,11 +895,11 @@ describe('HttpController', function () {
body: {
projectHistoryId: this.projectHistoryId,
docs: this.docs,
files: this.files
files: this.files,
},
params: {
project_id: this.project_id
}
project_id: this.project_id,
},
}
})

View file

@ -29,9 +29,9 @@ describe('LockManager - checking the lock', function () {
createClient() {
return {
auth() {},
exists: existsStub
exists: existsStub,
}
}
},
},
'./Metrics': { inc() {} },
'./Profiler': (Profiler = (function () {
@ -43,7 +43,7 @@ describe('LockManager - checking the lock', function () {
}
Profiler.initClass()
return Profiler
})())
})()),
}
const LockManager = SandboxedModule.require(modulePath, { requires: mocks })

View file

@ -24,22 +24,22 @@ describe('LockManager - releasing the lock', function () {
let Profiler
this.client = {
auth() {},
eval: sinon.stub()
eval: sinon.stub(),
}
const mocks = {
'@overleaf/redis-wrapper': {
createClient: () => this.client
createClient: () => this.client,
},
'settings-sharelatex': {
'@overleaf/settings': {
redis: {
lock: {
key_schema: {
blockingKey({ doc_id }) {
return `Blocking:${doc_id}`
}
}
}
}
},
},
},
},
},
'./Metrics': { inc() {} },
'./Profiler': (Profiler = (function () {
@ -51,7 +51,7 @@ describe('LockManager - releasing the lock', function () {
}
Profiler.initClass()
return Profiler
})())
})()),
}
this.LockManager = SandboxedModule.require(modulePath, { requires: mocks })
this.lockValue = 'lock-value-stub'

View file

@ -26,7 +26,7 @@ describe('LockManager - getting the lock', function () {
'@overleaf/redis-wrapper': {
createClient: () => {
return { auth() {} }
}
},
},
'./Metrics': { inc() {} },
'./Profiler': (Profiler = (function () {
@ -38,8 +38,8 @@ describe('LockManager - getting the lock', function () {
}
Profiler.initClass()
return Profiler
})())
}
})()),
},
})
this.callback = sinon.stub()
return (this.doc_id = 'doc-id-123')

View file

@ -24,33 +24,37 @@ describe('LockManager - trying the lock', function () {
createClient: () => {
return {
auth() {},
set: (this.set = sinon.stub())
set: (this.set = sinon.stub()),
}
}
},
},
'./Metrics': { inc() {} },
'settings-sharelatex': {
'@overleaf/settings': {
redis: {
lock: {
key_schema: {
blockingKey({ doc_id }) {
return `Blocking:${doc_id}`
},
},
},
},
},
'./Profiler':
(this.Profiler = Profiler =
(function () {
Profiler = class Profiler {
static initClass() {
this.prototype.log = sinon
.stub()
.returns({ end: sinon.stub() })
this.prototype.end = sinon.stub()
}
}
}
}
},
'./Profiler': (this.Profiler = Profiler = (function () {
Profiler = class Profiler {
static initClass() {
this.prototype.log = sinon.stub().returns({ end: sinon.stub() })
this.prototype.end = sinon.stub()
}
}
Profiler.initClass()
return Profiler
})())
}
Profiler.initClass()
return Profiler
})()),
},
})
this.callback = sinon.stub()

View file

@ -23,7 +23,7 @@ describe('PersistenceManager', function () {
this.PersistenceManager = SandboxedModule.require(modulePath, {
requires: {
requestretry: this.request,
'settings-sharelatex': (this.Settings = {}),
'@overleaf/settings': (this.Settings = {}),
'./Metrics': (this.Metrics = {
Timer: (Timer = (function () {
Timer = class Timer {
@ -34,10 +34,10 @@ describe('PersistenceManager', function () {
Timer.initClass()
return Timer
})()),
inc: sinon.stub()
inc: sinon.stub(),
}),
'./Errors': Errors
}
'./Errors': Errors,
},
})
this.project_id = 'project-id-123'
this.projectHistoryId = 'history-id-123'
@ -53,8 +53,8 @@ describe('PersistenceManager', function () {
web: {
url: (this.url = 'www.example.com'),
user: (this.user = 'sharelatex'),
pass: (this.pass = 'password')
}
pass: (this.pass = 'password'),
},
})
})
@ -65,7 +65,7 @@ describe('PersistenceManager', function () {
version: this.version,
ranges: this.ranges,
pathname: this.pathname,
projectHistoryId: this.projectHistoryId
projectHistoryId: this.projectHistoryId,
})
})
@ -90,15 +90,15 @@ describe('PersistenceManager', function () {
url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`,
method: 'GET',
headers: {
accept: 'application/json'
accept: 'application/json',
},
auth: {
user: this.user,
pass: this.pass,
sendImmediately: true
sendImmediately: true,
},
jar: false,
timeout: 5000
timeout: 5000,
})
.should.equal(true)
})
@ -309,16 +309,16 @@ describe('PersistenceManager', function () {
version: this.version,
ranges: this.ranges,
lastUpdatedAt: this.lastUpdatedAt,
lastUpdatedBy: this.lastUpdatedBy
lastUpdatedBy: this.lastUpdatedBy,
},
method: 'POST',
auth: {
user: this.user,
pass: this.pass,
sendImmediately: true
sendImmediately: true,
},
jar: false,
timeout: 5000
timeout: 5000,
})
.should.equal(true)
})

View file

@ -28,7 +28,7 @@ describe('ProjectHistoryRedisManager', function () {
modulePath,
{
requires: {
'settings-sharelatex': (this.settings = {
'@overleaf/settings': (this.settings = {
redis: {
project_history: {
key_schema: {
@ -37,16 +37,16 @@ describe('ProjectHistoryRedisManager', function () {
},
projectHistoryFirstOpTimestamp({ project_id }) {
return `ProjectHistory:FirstOpTimestamp:${project_id}`
}
}
}
}
},
},
},
},
}),
'@overleaf/redis-wrapper': {
createClient: () => this.rclient
createClient: () => this.rclient,
},
'./Metrics': (this.metrics = { summary: sinon.stub() })
}
'./Metrics': (this.metrics = { summary: sinon.stub() }),
},
}
))
})
@ -97,7 +97,7 @@ describe('ProjectHistoryRedisManager', function () {
this.rawUpdate = {
pathname: (this.pathname = '/old'),
newPathname: (this.newPathname = '/new'),
version: (this.version = 2)
version: (this.version = 2),
}
this.ProjectHistoryRedisManager.queueOps = sinon.stub()
@ -118,11 +118,11 @@ describe('ProjectHistoryRedisManager', function () {
new_pathname: this.newPathname,
meta: {
user_id: this.user_id,
ts: new Date()
ts: new Date(),
},
version: this.version,
projectHistoryId: this.projectHistoryId,
file: this.file_id
file: this.file_id,
}
return this.ProjectHistoryRedisManager.queueOps
@ -144,7 +144,7 @@ describe('ProjectHistoryRedisManager', function () {
pathname: (this.pathname = '/old'),
docLines: (this.docLines = 'a\nb'),
version: (this.version = 2),
url: (this.url = 'filestore.example.com')
url: (this.url = 'filestore.example.com'),
}
this.ProjectHistoryRedisManager.queueOps = sinon.stub()
@ -166,11 +166,11 @@ describe('ProjectHistoryRedisManager', function () {
url: this.url,
meta: {
user_id: this.user_id,
ts: new Date()
ts: new Date(),
},
version: this.version,
projectHistoryId: this.projectHistoryId,
doc: this.doc_id
doc: this.doc_id,
}
return this.ProjectHistoryRedisManager.queueOps

View file

@ -25,7 +25,7 @@ describe('ProjectManager - flushAndDeleteProject', function () {
'./ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}),
'./DocumentManager': (this.DocumentManager = {}),
'./HistoryManager': (this.HistoryManager = {
flushProjectChanges: sinon.stub().callsArg(2)
flushProjectChanges: sinon.stub().callsArg(2),
}),
'./Metrics': (this.Metrics = {
Timer: (Timer = (function () {
@ -36,9 +36,9 @@ describe('ProjectManager - flushAndDeleteProject', function () {
}
Timer.initClass()
return Timer
})())
})
}
})()),
}),
},
})
this.project_id = 'project-id-123'
return (this.callback = sinon.stub())
@ -54,7 +54,7 @@ describe('ProjectManager - flushAndDeleteProject', function () {
return this.ProjectManager.flushAndDeleteProjectWithLocks(
this.project_id,
{},
(error) => {
error => {
this.callback(error)
return done()
}
@ -68,7 +68,7 @@ describe('ProjectManager - flushAndDeleteProject', function () {
})
it('should delete each doc in the project', function () {
return Array.from(this.doc_ids).map((doc_id) =>
return Array.from(this.doc_ids).map(doc_id =>
this.DocumentManager.flushAndDeleteDocWithLock
.calledWith(this.project_id, doc_id, {})
.should.equal(true)
@ -110,7 +110,7 @@ describe('ProjectManager - flushAndDeleteProject', function () {
return this.ProjectManager.flushAndDeleteProjectWithLocks(
this.project_id,
{},
(error) => {
error => {
this.callback(error)
return done()
}
@ -118,7 +118,7 @@ describe('ProjectManager - flushAndDeleteProject', function () {
})
it('should still flush each doc in the project', function () {
return Array.from(this.doc_ids).map((doc_id) =>
return Array.from(this.doc_ids).map(doc_id =>
this.DocumentManager.flushAndDeleteDocWithLock
.calledWith(this.project_id, doc_id, {})
.should.equal(true)

View file

@ -36,9 +36,9 @@ describe('ProjectManager - flushProject', function () {
}
Timer.initClass()
return Timer
})())
})
}
})()),
}),
},
})
this.project_id = 'project-id-123'
return (this.callback = sinon.stub())
@ -53,7 +53,7 @@ describe('ProjectManager - flushProject', function () {
this.DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArg(2)
return this.ProjectManager.flushProjectWithLocks(
this.project_id,
(error) => {
error => {
this.callback(error)
return done()
}
@ -67,7 +67,7 @@ describe('ProjectManager - flushProject', function () {
})
it('should flush each doc in the project', function () {
return Array.from(this.doc_ids).map((doc_id) =>
return Array.from(this.doc_ids).map(doc_id =>
this.DocumentManager.flushDocIfLoadedWithLock
.calledWith(this.project_id, doc_id)
.should.equal(true)
@ -105,7 +105,7 @@ describe('ProjectManager - flushProject', function () {
)
return this.ProjectManager.flushProjectWithLocks(
this.project_id,
(error) => {
error => {
this.callback(error)
return done()
}
@ -113,7 +113,7 @@ describe('ProjectManager - flushProject', function () {
})
it('should still flush each doc in the project', function () {
return Array.from(this.doc_ids).map((doc_id) =>
return Array.from(this.doc_ids).map(doc_id =>
this.DocumentManager.flushDocIfLoadedWithLock
.calledWith(this.project_id, doc_id)
.should.equal(true)

View file

@ -33,10 +33,10 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () {
}
Timer.initClass()
return Timer
})())
})()),
}),
'./Errors': Errors
}
'./Errors': Errors,
},
})
this.project_id = 'project-id-123'
this.callback = sinon.stub()
@ -49,24 +49,24 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () {
this.doc_lines = [
['aaa', 'aaa'],
['bbb', 'bbb'],
['ccc', 'ccc']
['ccc', 'ccc'],
]
this.docs = [
{
_id: this.doc_ids[0],
lines: this.doc_lines[0],
v: this.doc_versions[0]
v: this.doc_versions[0],
},
{
_id: this.doc_ids[1],
lines: this.doc_lines[1],
v: this.doc_versions[1]
v: this.doc_versions[1],
},
{
_id: this.doc_ids[2],
lines: this.doc_lines[2],
v: this.doc_versions[2]
}
v: this.doc_versions[2],
},
]
this.RedisManager.checkOrSetProjectState = sinon
.stub()
@ -200,7 +200,7 @@ describe('ProjectManager - getProjectDocsAndFlushIfOld', function () {
return describe('clearing the project state with clearProjectState', function () {
beforeEach(function (done) {
this.RedisManager.clearProjectState = sinon.stub().callsArg(1)
return this.ProjectManager.clearProjectState(this.project_id, (error) => {
return this.ProjectManager.clearProjectState(this.project_id, error => {
this.callback(error)
return done()
})

View file

@ -8,17 +8,17 @@ describe('ProjectManager', function () {
this.RedisManager = {}
this.ProjectHistoryRedisManager = {
queueRenameEntity: sinon.stub().yields(),
queueAddEntity: sinon.stub().yields()
queueAddEntity: sinon.stub().yields(),
}
this.DocumentManager = {
renameDocWithLock: sinon.stub().yields()
renameDocWithLock: sinon.stub().yields(),
}
this.HistoryManager = {
flushProjectChangesAsync: sinon.stub(),
shouldFlushHistoryOps: sinon.stub().returns(false)
shouldFlushHistoryOps: sinon.stub().returns(false),
}
this.Metrics = {
Timer: class Timer {}
Timer: class Timer {},
}
this.Metrics.Timer.prototype.done = sinon.stub()
@ -28,8 +28,8 @@ describe('ProjectManager', function () {
'./ProjectHistoryRedisManager': this.ProjectHistoryRedisManager,
'./DocumentManager': this.DocumentManager,
'./HistoryManager': this.HistoryManager,
'./Metrics': this.Metrics
}
'./Metrics': this.Metrics,
},
})
this.project_id = 'project-id-123'
@ -46,24 +46,24 @@ describe('ProjectManager', function () {
type: 'rename-doc',
id: 1,
pathname: 'foo',
newPathname: 'foo'
newPathname: 'foo',
}
this.secondDocUpdate = {
type: 'rename-doc',
id: 2,
pathname: 'bar',
newPathname: 'bar2'
newPathname: 'bar2',
}
this.firstFileUpdate = {
type: 'rename-file',
id: 2,
pathname: 'bar',
newPathname: 'bar2'
newPathname: 'bar2',
}
this.updates = [
this.firstDocUpdate,
this.secondDocUpdate,
this.firstFileUpdate
this.firstFileUpdate,
]
})
@ -81,7 +81,7 @@ describe('ProjectManager', function () {
it('should rename the docs in the updates', function () {
const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, {
version: `${this.version}.0`
version: `${this.version}.0`,
})
const secondDocUpdateWithVersion = _.extend(
{},
@ -201,28 +201,28 @@ describe('ProjectManager', function () {
this.firstDocUpdate = {
type: 'add-doc',
id: 1,
docLines: 'a\nb'
docLines: 'a\nb',
}
this.secondDocUpdate = {
type: 'add-doc',
id: 2,
docLines: 'a\nb'
docLines: 'a\nb',
}
this.firstFileUpdate = {
type: 'add-file',
id: 3,
url: 'filestore.example.com/2'
url: 'filestore.example.com/2',
}
this.secondFileUpdate = {
type: 'add-file',
id: 4,
url: 'filestore.example.com/3'
url: 'filestore.example.com/3',
}
this.updates = [
this.firstDocUpdate,
this.secondDocUpdate,
this.firstFileUpdate,
this.secondFileUpdate
this.secondFileUpdate,
]
})
@ -240,7 +240,7 @@ describe('ProjectManager', function () {
it('should add the docs in the updates', function () {
const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, {
version: `${this.version}.0`
version: `${this.version}.0`,
})
const secondDocUpdateWithVersion = _.extend(
{},

View file

@ -32,39 +32,39 @@ describe('RangesManager', function () {
this.updates = [
{
meta: {
user_id: this.user_id
user_id: this.user_id,
},
op: [
{
i: 'two ',
p: 4
}
]
}
p: 4,
},
],
},
]
this.entries = {
comments: [
{
op: {
c: 'three ',
p: 4
p: 4,
},
metadata: {
user_id: this.user_id
}
}
user_id: this.user_id,
},
},
],
changes: [
{
op: {
i: 'five',
p: 15
p: 15,
},
metadata: {
user_id: this.user_id
}
}
]
user_id: this.user_id,
},
},
],
}
return (this.newDocLines = ['one two three four five'])
}) // old is "one three four five"
@ -90,11 +90,11 @@ describe('RangesManager', function () {
expect(ranges_were_collapsed).to.equal(false)
entries.comments[0].op.should.deep.equal({
c: 'three ',
p: 8
p: 8,
})
return entries.changes[0].op.should.deep.equal({
i: 'five',
p: 19
p: 19,
})
})
})
@ -149,16 +149,16 @@ describe('RangesManager', function () {
this.updates = [
{
meta: {
user_id: this.user_id
user_id: this.user_id,
},
op: [
{
c: 'one',
p: 0,
t: 'thread-id-1'
}
]
}
t: 'thread-id-1',
},
],
},
]
this.entries = {
comments: [
@ -166,24 +166,24 @@ describe('RangesManager', function () {
op: {
c: 'three ',
p: 4,
t: 'thread-id-2'
t: 'thread-id-2',
},
metadata: {
user_id: this.user_id
}
user_id: this.user_id,
},
},
{
op: {
c: 'four ',
p: 10,
t: 'thread-id-3'
t: 'thread-id-3',
},
metadata: {
user_id: this.user_id
}
}
user_id: this.user_id,
},
},
],
changes: []
changes: [],
}
return this.RangesManager.applyUpdate(
this.project_id,
@ -212,38 +212,38 @@ describe('RangesManager', function () {
{
meta: {
user_id: this.user_id,
tc: 'track-changes-id-yes'
tc: 'track-changes-id-yes',
},
op: [
{
i: 'one ',
p: 0
}
]
}
p: 0,
},
],
},
]
this.entries = {
changes: [
{
op: {
i: 'three',
p: 4
p: 4,
},
metadata: {
user_id: this.user_id
}
user_id: this.user_id,
},
},
{
op: {
i: 'four',
p: 10
p: 10,
},
metadata: {
user_id: this.user_id
}
}
user_id: this.user_id,
},
},
],
comments: []
comments: [],
}
this.newDocLines = ['one two three four']
return this.RangesManager.applyUpdate(
@ -272,15 +272,15 @@ describe('RangesManager', function () {
this.updates = [
{
meta: {
user_id: this.user_id
user_id: this.user_id,
},
op: [
{
c: "doesn't match",
p: 0
}
]
}
p: 0,
},
],
},
]
return this.RangesManager.applyUpdate(
this.project_id,
@ -308,16 +308,16 @@ describe('RangesManager', function () {
this.updates = [
{
meta: {
user_id: this.user_id
user_id: this.user_id,
},
op: [
{
d: 'one',
p: 0,
t: 'thread-id-1'
}
]
}
t: 'thread-id-1',
},
],
},
]
this.entries = {
comments: [
@ -325,14 +325,14 @@ describe('RangesManager', function () {
op: {
c: 'n',
p: 1,
t: 'thread-id-2'
t: 'thread-id-2',
},
metadata: {
user_id: this.user_id
}
}
user_id: this.user_id,
},
},
],
changes: []
changes: [],
}
return this.RangesManager.applyUpdate(
this.project_id,
@ -360,8 +360,8 @@ describe('RangesManager', function () {
requires: {
'./RangesTracker': (this.RangesTracker = SandboxedModule.require(
'../../../../app/js/RangesTracker.js'
))
}
)),
},
})
this.ranges = {
@ -371,38 +371,38 @@ describe('RangesManager', function () {
id: 'a1',
op: {
i: 'lorem',
p: 0
}
p: 0,
},
},
{
id: 'a2',
op: {
i: 'ipsum',
p: 10
}
p: 10,
},
},
{
id: 'a3',
op: {
i: 'dolor',
p: 20
}
p: 20,
},
},
{
id: 'a4',
op: {
i: 'sit',
p: 30
}
p: 30,
},
},
{
id: 'a5',
op: {
i: 'amet',
p: 40
}
}
]
p: 40,
},
},
],
}
return (this.removeChangeIdsSpy = sinon.spy(
this.RangesTracker.prototype,
@ -438,7 +438,7 @@ describe('RangesManager', function () {
it('should remove the change', function () {
return expect(
this.rangesResponse.changes.find(
(change) => change.id === this.ranges.changes[1].id
change => change.id === this.ranges.changes[1].id
)
).to.be.undefined
})
@ -450,10 +450,10 @@ describe('RangesManager', function () {
})
return it('should not touch other changes', function () {
return [0, 2, 3, 4].map((i) =>
return [0, 2, 3, 4].map(i =>
expect(
this.rangesResponse.changes.find(
(change) => change.id === this.ranges.changes[i].id
change => change.id === this.ranges.changes[i].id
)
).to.deep.equal(this.ranges.changes[i])
)
@ -465,7 +465,7 @@ describe('RangesManager', function () {
this.change_ids = [
this.ranges.changes[1].id,
this.ranges.changes[3].id,
this.ranges.changes[4].id
this.ranges.changes[4].id,
]
return this.RangesManager.acceptChanges(
this.change_ids,
@ -491,10 +491,10 @@ describe('RangesManager', function () {
it('should remove the changes', function () {
return [1, 3, 4].map(
(i) =>
i =>
expect(
this.rangesResponse.changes.find(
(change) => change.id === this.ranges.changes[1].id
change => change.id === this.ranges.changes[1].id
)
).to.be.undefined
)
@ -507,10 +507,10 @@ describe('RangesManager', function () {
})
return it('should not touch other changes', function () {
return [0, 2].map((i) =>
return [0, 2].map(i =>
expect(
this.rangesResponse.changes.find(
(change) => change.id === this.ranges.changes[i].id
change => change.id === this.ranges.changes[i].id
)
).to.deep.equal(this.ranges.changes[i])
)

Some files were not shown because too many files have changed in this diff