Merge pull request #116 from overleaf/jpa-bulk-dependency-upgrades

[misc] bulk dependency upgrades
Jakob Ackermann 2021-07-16 14:27:28 +02:00 committed by GitHub
commit 4fb29b4486
57 changed files with 2810 additions and 4458 deletions

View file

@ -3,9 +3,9 @@
// https://github.com/sharelatex/sharelatex-dev-environment
{
"extends": [
"eslint:recommended",
"standard",
"prettier",
"prettier/standard"
"prettier"
],
"parserOptions": {
"ecmaVersion": 2018
@ -20,6 +20,19 @@
"mocha": true
},
"rules": {
// TODO(das7pad): remove overrides after fixing all the violations manually (https://github.com/overleaf/issues/issues/3882#issuecomment-878999671)
// START of temporary overrides
"array-callback-return": "off",
"no-dupe-else-if": "off",
"no-var": "off",
"no-empty": "off",
"node/handle-callback-err": "off",
"no-loss-of-precision": "off",
"node/no-callback-literal": "off",
"node/no-path-concat": "off",
"prefer-regex-literals": "off",
// END of temporary overrides
// Swap the no-unused-expressions rule with a more chai-friendly one
"no-unused-expressions": 0,
"chai-friendly/no-unused-expressions": "error",

View file

@ -20,4 +20,4 @@ updates:
# future if we reorganise teams
labels:
- "dependencies"
- "Team-Magma"
- "type:maintenance"

View file

@ -1 +1 @@
12.21.0
12.22.3

View file

@ -2,6 +2,10 @@
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
{
"arrowParens": "avoid",
"semi": false,
"singleQuote": true
"singleQuote": true,
"trailingComma": "es5",
"tabWidth": 2,
"useTabs": false
}
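The two new options account for most of the churn in the rest of this diff; a before/after sketch of their effect:

// Output under the old config:
const idOld = (x) => ({
  value: x
})
// With "arrowParens": "avoid" and "trailingComma": "es5":
const idNew = x => ({
  value: x,
})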

View file

@ -2,7 +2,7 @@
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
FROM node:12.21.0 as base
FROM node:12.22.3 as base
WORKDIR /app

View file

@ -7,7 +7,7 @@
*/
const Metrics = require('@overleaf/metrics')
Metrics.initialize('track-changes')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const TrackChangesLogger = logger.initialize('track-changes').logger
@ -16,7 +16,7 @@ if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
}
// log updates as truncated strings
const truncateFn = (updates) =>
const truncateFn = updates =>
JSON.parse(
JSON.stringify(updates, function (key, value) {
let len
@ -35,7 +35,7 @@ TrackChangesLogger.addSerializers({
rawUpdate: truncateFn,
rawUpdates: truncateFn,
newUpdates: truncateFn,
lastUpdate: truncateFn
lastUpdate: truncateFn,
})
const Path = require('path')
@ -91,7 +91,7 @@ app.post('/pack', function (req, res, next) {
[
req.query.limit || 1000,
req.query.delay || 1000,
req.query.timeout || 30 * 60 * 1000
req.query.timeout || 30 * 60 * 1000,
]
)
packWorker.on('exit', function (code, signal) {
@ -120,12 +120,12 @@ app.use(function (error, req, res, next) {
const port =
__guard__(
Settings.internal != null ? Settings.internal.trackchanges : undefined,
(x) => x.port
x => x.port
) || 3015
const host =
__guard__(
Settings.internal != null ? Settings.internal.trackchanges : undefined,
(x1) => x1.host
x1 => x1.host
) || 'localhost'
if (!module.parent) {
@ -146,7 +146,7 @@ if (!module.parent) {
}
})
})
.catch((err) => {
.catch(err => {
logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
process.exit(1)
})
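The replacer body of truncateFn is cut off by the hunk above; a minimal sketch of the idea, with an assumed 80-character cap (not taken from this diff):

const MAX_LEN = 80 // assumption for illustration
const truncate = updates =>
  JSON.parse(
    JSON.stringify(updates, (key, value) =>
      // shorten long strings so rawUpdate/rawUpdates log lines stay readable
      typeof value === 'string' && value.length > MAX_LEN
        ? value.slice(0, MAX_LEN) + '...'
        : value
    )
  )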

View file

@ -162,13 +162,11 @@ module.exports = DiffGenerator = {
if (op.i != null) {
newDiff.push({
i: op.i,
meta
meta,
})
} else if (op.d != null) {
;({
consumedDiff,
remainingDiff
} = DiffGenerator._consumeDiffAffectedByDeleteOp(remainingDiff, op, meta))
;({ consumedDiff, remainingDiff } =
DiffGenerator._consumeDiffAffectedByDeleteOp(remainingDiff, op, meta))
newDiff.push(...Array.from(consumedDiff || []))
}
@ -211,7 +209,7 @@ module.exports = DiffGenerator = {
return {
consumedDiff,
remainingDiff
remainingDiff,
}
},
@ -220,18 +218,15 @@ module.exports = DiffGenerator = {
let remainingOp = deleteOp
while (remainingOp && remainingDiff.length > 0) {
let newPart
;({
newPart,
remainingDiff,
remainingOp
} = DiffGenerator._consumeDeletedPart(remainingDiff, remainingOp, meta))
;({ newPart, remainingDiff, remainingOp } =
DiffGenerator._consumeDeletedPart(remainingDiff, remainingOp, meta))
if (newPart != null) {
consumedDiff.push(newPart)
}
}
return {
consumedDiff,
remainingDiff
remainingDiff,
}
},
@ -262,7 +257,7 @@ module.exports = DiffGenerator = {
if (part.u != null) {
newPart = {
d: op.d,
meta
meta,
}
} else if (part.i != null) {
newPart = null
@ -282,7 +277,7 @@ module.exports = DiffGenerator = {
if (part.u != null) {
newPart = {
d: op.d,
meta
meta,
}
} else if (part.i != null) {
newPart = null
@ -303,7 +298,7 @@ module.exports = DiffGenerator = {
if (part.u) {
newPart = {
d: part.u,
meta
meta,
}
} else if (part.i != null) {
newPart = null
@ -311,14 +306,14 @@ module.exports = DiffGenerator = {
remainingOp = {
p: op.p,
d: op.d.slice(DiffGenerator._getLengthOfDiffPart(part))
d: op.d.slice(DiffGenerator._getLengthOfDiffPart(part)),
}
}
return {
newPart,
remainingDiff,
remainingOp
remainingOp,
}
},
@ -341,5 +336,5 @@ module.exports = DiffGenerator = {
_getContentOfPart(part) {
return part.u || part.d || part.i || ''
}
},
}
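For orientation, the diffs manipulated above are arrays of parts carrying a u (unchanged), i (inserted), or d (deleted) string plus the originating update's meta; a hypothetical example (field values illustrative):

const diff = [
  { u: 'hello ' },
  { i: 'brave ', meta: { start_ts: 1, end_ts: 2, user_id: 'u1' } },
  { u: 'world' },
] // 'hello world' with 'brave ' inserted by user u1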

View file

@ -24,30 +24,30 @@ module.exports = DiffManager = {
if (callback == null) {
callback = function (error, content, version, updates) {}
}
return DocumentUpdaterManager.getDocument(project_id, doc_id, function (
error,
content,
version
) {
if (error != null) {
return callback(error)
}
if (fromVersion == null) {
// If we haven't been given a version, just return latest doc and no updates
return callback(null, content, version, [])
}
return UpdatesManager.getDocUpdatesWithUserInfo(
project_id,
doc_id,
{ from: fromVersion },
function (error, updates) {
if (error != null) {
return callback(error)
}
return callback(null, content, version, updates)
return DocumentUpdaterManager.getDocument(
project_id,
doc_id,
function (error, content, version) {
if (error != null) {
return callback(error)
}
)
})
if (fromVersion == null) {
// If we haven't been given a version, just return latest doc and no updates
return callback(null, content, version, [])
}
return UpdatesManager.getDocUpdatesWithUserInfo(
project_id,
doc_id,
{ from: fromVersion },
function (error, updates) {
if (error != null) {
return callback(error)
}
return callback(null, content, version, updates)
}
)
}
)
},
getDiff(project_id, doc_id, fromVersion, toVersion, callback) {
@ -167,7 +167,7 @@ module.exports = DiffManager = {
{
docVersion: version,
lastUpdateVersion: lastUpdate != null ? lastUpdate.v : undefined,
updateCount: updates.length
updateCount: updates.length,
},
'rewinding updates'
)
@ -184,5 +184,5 @@ module.exports = DiffManager = {
return callback(null, startingContent, tryUpdates)
}
)
}
},
}

View file

@ -14,7 +14,7 @@
let DocumentUpdaterManager
const request = require('request')
const logger = require('logger-sharelatex')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
module.exports = DocumentUpdaterManager = {
getDocument(project_id, doc_id, callback) {
@ -65,8 +65,8 @@ module.exports = DocumentUpdaterManager = {
lines: content.split('\n'),
source: 'restore',
user_id,
undoing: true
}
undoing: true,
},
},
function (error, res, body) {
if (error != null) {
@ -86,5 +86,5 @@ module.exports = DocumentUpdaterManager = {
}
}
)
}
},
}

View file

@ -1,6 +1,5 @@
/* eslint-disable
camelcase,
standard/no-callback-literal,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
@ -13,7 +12,7 @@
const { ObjectId } = require('./mongodb')
const request = require('request')
const async = require('async')
const settings = require('settings-sharelatex')
const settings = require('@overleaf/settings')
const { port } = settings.internal.trackchanges
const logger = require('logger-sharelatex')
const LockManager = require('./LockManager')
@ -24,7 +23,7 @@ module.exports = {
const url = `http://localhost:${port}/project/${project_id}`
logger.log({ project_id }, 'running health check')
const jobs = [
(cb) =>
cb =>
request.get(
{ url: `http://localhost:${port}/check_lock`, timeout: 3000 },
function (err, res, body) {
@ -41,44 +40,42 @@ module.exports = {
}
}
),
(cb) =>
request.post({ url: `${url}/flush`, timeout: 10000 }, function (
err,
res,
body
) {
if (err != null) {
logger.err({ err, project_id }, 'error flushing for health check')
return cb(err)
} else if ((res != null ? res.statusCode : undefined) !== 204) {
return cb(`status code not 204, it's ${res.statusCode}`)
} else {
return cb()
cb =>
request.post(
{ url: `${url}/flush`, timeout: 10000 },
function (err, res, body) {
if (err != null) {
logger.err({ err, project_id }, 'error flushing for health check')
return cb(err)
} else if ((res != null ? res.statusCode : undefined) !== 204) {
return cb(`status code not 204, it's ${res.statusCode}`)
} else {
return cb()
}
}
}),
(cb) =>
request.get({ url: `${url}/updates`, timeout: 10000 }, function (
err,
res,
body
) {
if (err != null) {
logger.err(
{ err, project_id },
'error getting updates for health check'
)
return cb(err)
} else if ((res != null ? res.statusCode : undefined) !== 200) {
return cb(`status code not 200, it's ${res.statusCode}`)
} else {
return cb()
),
cb =>
request.get(
{ url: `${url}/updates`, timeout: 10000 },
function (err, res, body) {
if (err != null) {
logger.err(
{ err, project_id },
'error getting updates for health check'
)
return cb(err)
} else if ((res != null ? res.statusCode : undefined) !== 200) {
return cb(`status code not 200, it's ${res.statusCode}`)
} else {
return cb()
}
}
})
),
]
return async.series(jobs, callback)
},
checkLock(callback) {
return LockManager.healthCheck(callback)
}
},
}

View file

@ -160,15 +160,18 @@ module.exports = HttpController = {
}
logger.log({ project_id, doc_id, from, to }, 'getting diff')
return DiffManager.getDiff(project_id, doc_id, from, to, function (
error,
diff
) {
if (error != null) {
return next(error)
return DiffManager.getDiff(
project_id,
doc_id,
from,
to,
function (error, diff) {
if (error != null) {
return next(error)
}
return res.json({ diff })
}
return res.json({ diff })
})
)
},
getUpdates(req, res, next) {
@ -194,7 +197,7 @@ module.exports = HttpController = {
}
return res.json({
updates,
nextBeforeTimestamp
nextBeforeTimestamp,
})
}
)
@ -207,60 +210,59 @@ module.exports = HttpController = {
// Flush updates per pack onto the wire.
const { project_id } = req.params
logger.log({ project_id }, 'exporting project history')
UpdatesManager.exportProject(project_id, function (
err,
{ updates, userIds },
confirmWrite
) {
const abortStreaming = req.aborted || res.finished || res.destroyed
if (abortStreaming) {
// Tell the producer to stop emitting data
if (confirmWrite) confirmWrite(new Error('stop'))
return
}
const hasStartedStreamingResponse = res.headersSent
if (err) {
logger.error({ project_id, err }, 'export failed')
if (!hasStartedStreamingResponse) {
// Generate a nice 500
return next(err)
} else {
// Stop streaming
return res.destroy()
UpdatesManager.exportProject(
project_id,
function (err, { updates, userIds }, confirmWrite) {
const abortStreaming = req.aborted || res.finished || res.destroyed
if (abortStreaming) {
// Tell the producer to stop emitting data
if (confirmWrite) confirmWrite(new Error('stop'))
return
}
const hasStartedStreamingResponse = res.headersSent
if (err) {
logger.error({ project_id, err }, 'export failed')
if (!hasStartedStreamingResponse) {
// Generate a nice 500
return next(err)
} else {
// Stop streaming
return res.destroy()
}
}
// Compose the response incrementally
const isFirstWrite = !hasStartedStreamingResponse
const isLastWrite = updates.length === 0
if (isFirstWrite) {
// The first write will emit the 200 status, headers and start of the
// response payload (open array)
res.setHeader('Content-Type', 'application/json')
res.setHeader('Trailer', 'X-User-Ids')
res.writeHead(200)
res.write('[')
}
if (!isFirstWrite && !isLastWrite) {
// Starting from the 2nd non-empty write, emit a continuing comma.
// write 1: [updates1
// write 2: ,updates2
// write 3: ,updates3
// write N: ]
res.write(',')
}
// Every write will emit a blob onto the response stream:
// '[update1,update2,...]'
// ^^^^^^^^^^^^^^^^^^^
res.write(JSON.stringify(updates).slice(1, -1), confirmWrite)
if (isLastWrite) {
// The last write will have no updates and will finish the response
// payload (close array) and emit the userIds as trailer.
res.addTrailers({ 'X-User-Ids': JSON.stringify(userIds) })
res.end(']')
}
}
// Compose the response incrementally
const isFirstWrite = !hasStartedStreamingResponse
const isLastWrite = updates.length === 0
if (isFirstWrite) {
// The first write will emit the 200 status, headers and start of the
// response payload (open array)
res.setHeader('Content-Type', 'application/json')
res.setHeader('Trailer', 'X-User-Ids')
res.writeHead(200)
res.write('[')
}
if (!isFirstWrite && !isLastWrite) {
// Starting from the 2nd non-empty write, emit a continuing comma.
// write 1: [updates1
// write 2: ,updates2
// write 3: ,updates3
// write N: ]
res.write(',')
}
// Every write will emit a blob onto the response stream:
// '[update1,update2,...]'
// ^^^^^^^^^^^^^^^^^^^
res.write(JSON.stringify(updates).slice(1, -1), confirmWrite)
if (isLastWrite) {
// The last write will have no updates and will finish the response
// payload (close array) and emit the userIds as trailer.
res.addTrailers({ 'X-User-Ids': JSON.stringify(userIds) })
res.end(']')
}
})
)
},
restore(req, res, next) {
@ -334,5 +336,5 @@ module.exports = HttpController = {
return res.sendStatus(200)
}
})
}
},
}
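The exportProject handler above streams one JSON array across many writes and ships userIds as an HTTP trailer; the pattern in isolation (helper names here are illustrative, not from the diff):

function startExport(res) {
  res.setHeader('Content-Type', 'application/json')
  res.setHeader('Trailer', 'X-User-Ids') // announce the trailer up front
  res.writeHead(200)
  res.write('[') // open the array
}

function writeBatch(res, updates, isFirstNonEmptyWrite) {
  if (!isFirstNonEmptyWrite) res.write(',') // comma between batches
  // strip the outer brackets so consecutive batches form one array
  res.write(JSON.stringify(updates).slice(1, -1))
}

function endExport(res, userIds) {
  res.addTrailers({ 'X-User-Ids': JSON.stringify(userIds) })
  res.end(']') // close the array
}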

View file

@ -10,7 +10,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let LockManager
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const redis = require('@overleaf/redis-wrapper')
const rclient = redis.createClient(Settings.redis.lock)
const os = require('os')
@ -43,19 +43,23 @@ module.exports = LockManager = {
callback = function (err, gotLock) {}
}
const lockValue = LockManager.randomLock()
return rclient.set(key, lockValue, 'EX', this.LOCK_TTL, 'NX', function (
err,
gotLock
) {
if (err != null) {
return callback(err)
return rclient.set(
key,
lockValue,
'EX',
this.LOCK_TTL,
'NX',
function (err, gotLock) {
if (err != null) {
return callback(err)
}
if (gotLock === 'OK') {
return callback(err, true, lockValue)
} else {
return callback(err, false)
}
}
if (gotLock === 'OK') {
return callback(err, true, lockValue)
} else {
return callback(err, false)
}
})
)
},
getLock(key, callback) {
@ -102,23 +106,26 @@ module.exports = LockManager = {
},
releaseLock(key, lockValue, callback) {
return rclient.eval(LockManager.unlockScript, 1, key, lockValue, function (
err,
result
) {
if (err != null) {
return callback(err)
return rclient.eval(
LockManager.unlockScript,
1,
key,
lockValue,
function (err, result) {
if (err != null) {
return callback(err)
}
if (result != null && result !== 1) {
// successful unlock should release exactly one key
logger.error(
{ key, lockValue, redis_err: err, redis_result: result },
'unlocking error'
)
return callback(new Error('tried to release timed out lock'))
}
return callback(err, result)
}
if (result != null && result !== 1) {
// successful unlock should release exactly one key
logger.error(
{ key, lockValue, redis_err: err, redis_result: result },
'unlocking error'
)
return callback(new Error('tried to release timed out lock'))
}
return callback(err, result)
})
)
},
runWithLock(key, runner, callback) {
@ -129,7 +136,7 @@ module.exports = LockManager = {
if (error != null) {
return callback(error)
}
return runner((error1) =>
return runner(error1 =>
LockManager.releaseLock(key, lockValue, function (error2) {
error = error1 || error2
if (error != null) {
@ -142,7 +149,7 @@ module.exports = LockManager = {
},
healthCheck(callback) {
const action = (releaseLock) => releaseLock()
const action = releaseLock => releaseLock()
return LockManager.runWithLock(
`HistoryLock:HealthCheck:host=${HOST}:pid=${PID}:random=${RND}`,
action,
@ -153,5 +160,5 @@ module.exports = LockManager = {
close(callback) {
rclient.quit()
return rclient.once('end', callback)
}
},
}
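The locking scheme above is the standard single-instance Redis lock: SET key value EX ttl NX acquires atomically, and release must compare the stored random value before deleting, so one holder cannot free a lock that timed out and was re-acquired. The body of LockManager.unlockScript is not shown in this hunk; the canonical compare-and-delete it checks against (result === 1) would be:

// Assumed shape of LockManager.unlockScript (not shown in this diff):
const unlockScript = `
  if redis.call("get", KEYS[1]) == ARGV[1] then
    return redis.call("del", KEYS[1])
  else
    return 0
  end`
// rclient.eval(unlockScript, 1, key, lockValue, cb) returns 1 only when
// exactly one key was released, which is what releaseLock verifies above.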

View file

@ -14,7 +14,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let MongoAWS
const settings = require('settings-sharelatex')
const settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const AWS = require('aws-sdk')
const S3S = require('s3-streams')
@ -31,12 +31,12 @@ const createStream = function (streamConstructor, project_id, doc_id, pack_id) {
accessKeyId: settings.trackchanges.s3.key,
secretAccessKey: settings.trackchanges.s3.secret,
endpoint: settings.trackchanges.s3.endpoint,
s3ForcePathStyle: settings.trackchanges.s3.pathStyle
s3ForcePathStyle: settings.trackchanges.s3.pathStyle,
}
return streamConstructor(new AWS.S3(AWS_CONFIG), {
Bucket: settings.trackchanges.stores.doc_history,
Key: project_id + '/changes-' + doc_id + '/pack-' + pack_id
Key: project_id + '/changes-' + doc_id + '/pack-' + pack_id,
})
}
@ -52,7 +52,7 @@ module.exports = MongoAWS = {
const query = {
_id: ObjectId(pack_id),
doc_id: ObjectId(doc_id)
doc_id: ObjectId(doc_id),
}
if (project_id == null) {
@ -92,14 +92,14 @@ module.exports = MongoAWS = {
doc_id,
pack_id,
origSize: uncompressedData.length,
newSize: buf.length
newSize: buf.length,
},
'compressed pack'
)
if (err != null) {
return callback(err)
}
upload.on('error', (err) => callback(err))
upload.on('error', err => callback(err))
upload.on('finish', function () {
Metrics.inc('archive-pack')
logger.log({ project_id, doc_id, pack_id }, 'upload to s3 completed')
@ -135,8 +135,8 @@ module.exports = MongoAWS = {
const download = createStream(S3S.ReadStream, project_id, doc_id, pack_id)
const inputStream = download
.on('open', (obj) => 1)
.on('error', (err) => callback(err))
.on('open', obj => 1)
.on('error', err => callback(err))
const gunzip = zlib.createGunzip()
gunzip.setEncoding('utf8')
@ -150,7 +150,7 @@ module.exports = MongoAWS = {
const outputStream = inputStream.pipe(gunzip)
const parts = []
outputStream.on('error', (err) => callback(err))
outputStream.on('error', err => callback(err))
outputStream.on('end', function () {
let object
logger.log({ project_id, doc_id, pack_id }, 'download from s3 completed')
@ -169,29 +169,31 @@ module.exports = MongoAWS = {
}
return callback(null, object)
})
return outputStream.on('data', (data) => parts.push(data))
return outputStream.on('data', data => parts.push(data))
},
unArchivePack(project_id, doc_id, pack_id, callback) {
if (callback == null) {
callback = function (error) {}
}
return MongoAWS.readArchivedPack(project_id, doc_id, pack_id, function (
err,
object
) {
if (err != null) {
return callback(err)
return MongoAWS.readArchivedPack(
project_id,
doc_id,
pack_id,
function (err, object) {
if (err != null) {
return callback(err)
}
Metrics.inc('unarchive-pack')
// allow the object to expire, we can always retrieve it again
object.expiresAt = new Date(Date.now() + 7 * DAYS)
logger.log({ project_id, doc_id, pack_id }, 'inserting object from s3')
return db.docHistory.insertOne(object, (err, confirmation) => {
if (err) return callback(err)
object._id = confirmation.insertedId
callback(null, object)
})
}
Metrics.inc('unarchive-pack')
// allow the object to expire, we can always retrieve it again
object.expiresAt = new Date(Date.now() + 7 * DAYS)
logger.log({ project_id, doc_id, pack_id }, 'inserting object from s3')
return db.docHistory.insertOne(object, (err, confirmation) => {
if (err) return callback(err)
object._id = confirmation.insertedId
callback(null, object)
})
})
}
)
},
}
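The unarchive path above reads an S3 object through a gunzip stream and re-parses the JSON pack; condensed, the plumbing is (wiring assumed from the fragments in this hunk):

const zlib = require('zlib')

function readPackStream(download, callback) {
  const gunzip = zlib.createGunzip()
  gunzip.setEncoding('utf8') // emit strings, not buffers
  const parts = []
  download
    .pipe(gunzip)
    .on('error', err => callback(err))
    .on('data', data => parts.push(data))
    .on('end', () => callback(null, JSON.parse(parts.join(''))))
}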

View file

@ -50,50 +50,53 @@ module.exports = MongoManager = {
if (callback == null) {
callback = function (error, update, version) {}
}
return MongoManager.getLastCompressedUpdate(doc_id, function (
error,
update
) {
if (error != null) {
return callback(error)
}
if (update != null) {
if (update.broken) {
// marked as broken so we will force a new op
return callback(null, null)
} else if (update.pack != null) {
if (update.finalised) {
// no more ops can be appended
return callback(
null,
null,
update.pack[0] != null ? update.pack[0].v : undefined
)
return MongoManager.getLastCompressedUpdate(
doc_id,
function (error, update) {
if (error != null) {
return callback(error)
}
if (update != null) {
if (update.broken) {
// marked as broken so we will force a new op
return callback(null, null)
} else if (update.pack != null) {
if (update.finalised) {
// no more ops can be appended
return callback(
null,
null,
update.pack[0] != null ? update.pack[0].v : undefined
)
} else {
return callback(
null,
update,
update.pack[0] != null ? update.pack[0].v : undefined
)
}
} else {
return callback(
null,
update,
update.pack[0] != null ? update.pack[0].v : undefined
)
return callback(null, update, update.v)
}
} else {
return callback(null, update, update.v)
return PackManager.getLastPackFromIndex(
doc_id,
function (error, pack) {
if (error != null) {
return callback(error)
}
if (
(pack != null ? pack.inS3 : undefined) != null &&
(pack != null ? pack.v_end : undefined) != null
) {
return callback(null, null, pack.v_end)
}
return callback(null, null)
}
)
}
} else {
return PackManager.getLastPackFromIndex(doc_id, function (error, pack) {
if (error != null) {
return callback(error)
}
if (
(pack != null ? pack.inS3 : undefined) != null &&
(pack != null ? pack.v_end : undefined) != null
) {
return callback(null, null, pack.v_end)
}
return callback(null, null)
})
}
})
)
},
backportProjectId(project_id, doc_id, callback) {
@ -103,10 +106,10 @@ module.exports = MongoManager = {
return db.docHistory.updateMany(
{
doc_id: ObjectId(doc_id.toString()),
project_id: { $exists: false }
project_id: { $exists: false },
},
{
$set: { project_id: ObjectId(project_id.toString()) }
$set: { project_id: ObjectId(project_id.toString()) },
},
callback
)
@ -118,7 +121,7 @@ module.exports = MongoManager = {
}
return db.projectHistoryMetaData.findOne(
{
project_id: ObjectId(project_id.toString())
project_id: ObjectId(project_id.toString()),
},
callback
)
@ -130,13 +133,13 @@ module.exports = MongoManager = {
}
return db.projectHistoryMetaData.updateOne(
{
project_id: ObjectId(project_id)
project_id: ObjectId(project_id),
},
{
$set: metadata
$set: metadata,
},
{
upsert: true
upsert: true,
},
callback
)
@ -151,11 +154,11 @@ module.exports = MongoManager = {
{
project_id: ObjectId(project_id),
temporary: true,
expiresAt: { $exists: true }
expiresAt: { $exists: true },
},
{
$set: { temporary: false },
$unset: { expiresAt: '' }
$unset: { expiresAt: '' },
},
callback
)
@ -191,12 +194,9 @@ module.exports = MongoManager = {
{ project_id: 1 },
{ background: true }
)
}
},
}
;[
'getLastCompressedUpdate',
'getProjectMetaData',
'setProjectMetaData'
].map((method) =>
metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger)
;['getLastCompressedUpdate', 'getProjectMetaData', 'setProjectMetaData'].map(
method =>
metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger)
)
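timeAsyncMethod comes from @overleaf/metrics; roughly (an assumed sketch of its behaviour, not the real implementation), it wraps a callback-style method so every call is timed:

function timeAsyncMethod(obj, methodName, prefix, logger) {
  const original = obj[methodName]
  obj[methodName] = function (...args) {
    const callback = args.pop() // assumes the callback is the last argument
    const start = Date.now()
    original.call(this, ...args, (...results) => {
      logger.log(
        { elapsed: Date.now() - start },
        `${prefix}.${methodName} completed`
      )
      callback(...results)
    })
  }
}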

View file

@ -24,7 +24,7 @@ const LockManager = require('./LockManager')
const MongoAWS = require('./MongoAWS')
const Metrics = require('@overleaf/metrics')
const ProjectIterator = require('./ProjectIterator')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const keys = Settings.redis.lock.key_schema
// Sharejs operations are stored in a 'pack' object
@ -206,11 +206,11 @@ module.exports = PackManager = {
sz,
meta: {
start_ts: first.meta.start_ts,
end_ts: last.meta.end_ts
end_ts: last.meta.end_ts,
},
v: first.v,
v_end: last.v,
temporary
temporary,
}
if (temporary) {
newPack.expiresAt = new Date(Date.now() + 7 * DAYS)
@ -252,20 +252,20 @@ module.exports = PackManager = {
_id: lastUpdate._id,
project_id: ObjectId(project_id.toString()),
doc_id: ObjectId(doc_id.toString()),
pack: { $exists: true }
pack: { $exists: true },
}
const update = {
$push: {
pack: { $each: newUpdates }
pack: { $each: newUpdates },
},
$inc: {
n: n,
sz: sz
sz: sz,
},
$set: {
'meta.end_ts': last.meta.end_ts,
v_end: last.v
}
v_end: last.v,
},
}
if (lastUpdate.expiresAt && temporary) {
update.$set.expiresAt = new Date(Date.now() + 7 * DAYS)
@ -396,7 +396,7 @@ module.exports = PackManager = {
}
return result1
})()
const loadedPackIds = Array.from(loadedPacks).map((pack) =>
const loadedPackIds = Array.from(loadedPacks).map(pack =>
pack._id.toString()
)
const packIdsToFetch = _.difference(allPackIds, loadedPackIds)
@ -494,7 +494,7 @@ module.exports = PackManager = {
return db.docHistory.updateOne(
{ _id: pack._id },
{ $set: { expiresAt: new Date(Date.now() + 7 * DAYS) } },
(err) => callback(err, pack)
err => callback(err, pack)
)
} else {
return callback(null, pack)
@ -552,60 +552,62 @@ module.exports = PackManager = {
},
initialiseIndex(project_id, doc_id, callback) {
return PackManager.findCompletedPacks(project_id, doc_id, function (
err,
packs
) {
// console.log 'err', err, 'packs', packs, packs?.length
if (err != null) {
return callback(err)
return PackManager.findCompletedPacks(
project_id,
doc_id,
function (err, packs) {
// console.log 'err', err, 'packs', packs, packs?.length
if (err != null) {
return callback(err)
}
if (packs == null) {
return callback()
}
return PackManager.insertPacksIntoIndexWithLock(
project_id,
doc_id,
packs,
callback
)
}
if (packs == null) {
return callback()
}
return PackManager.insertPacksIntoIndexWithLock(
project_id,
doc_id,
packs,
callback
)
})
)
},
updateIndex(project_id, doc_id, callback) {
// find all packs prior to current pack
return PackManager.findUnindexedPacks(project_id, doc_id, function (
err,
newPacks
) {
if (err != null) {
return callback(err)
}
if (newPacks == null || newPacks.length === 0) {
return callback()
}
return PackManager.insertPacksIntoIndexWithLock(
project_id,
doc_id,
newPacks,
function (err) {
if (err != null) {
return callback(err)
}
logger.log(
{ project_id, doc_id, newPacks },
'added new packs to index'
)
return PackManager.findUnindexedPacks(
project_id,
doc_id,
function (err, newPacks) {
if (err != null) {
return callback(err)
}
if (newPacks == null || newPacks.length === 0) {
return callback()
}
)
})
return PackManager.insertPacksIntoIndexWithLock(
project_id,
doc_id,
newPacks,
function (err) {
if (err != null) {
return callback(err)
}
logger.log(
{ project_id, doc_id, newPacks },
'added new packs to index'
)
return callback()
}
)
}
)
},
findCompletedPacks(project_id, doc_id, callback) {
const query = {
doc_id: ObjectId(doc_id.toString()),
expiresAt: { $exists: false }
expiresAt: { $exists: false },
}
return db.docHistory
.find(query, { projection: { pack: false } })
@ -631,7 +633,7 @@ module.exports = PackManager = {
findPacks(project_id, doc_id, callback) {
const query = {
doc_id: ObjectId(doc_id.toString()),
expiresAt: { $exists: false }
expiresAt: { $exists: false },
}
return db.docHistory
.find(query, { projection: { pack: false } })
@ -655,61 +657,63 @@ module.exports = PackManager = {
if (err != null) {
return callback(err)
}
return PackManager.findCompletedPacks(project_id, doc_id, function (
err,
historyPacks
) {
let pack
if (err != null) {
return callback(err)
}
if (historyPacks == null) {
return callback()
}
// select only the new packs not already in the index
let newPacks = (() => {
const result = []
for (pack of Array.from(historyPacks)) {
if (
(indexResult != null ? indexResult[pack._id] : undefined) == null
) {
result.push(pack)
}
return PackManager.findCompletedPacks(
project_id,
doc_id,
function (err, historyPacks) {
let pack
if (err != null) {
return callback(err)
}
return result
})()
newPacks = (() => {
const result1 = []
for (pack of Array.from(newPacks)) {
result1.push(
_.omit(
pack,
'doc_id',
'project_id',
'n',
'sz',
'last_checked',
'finalised'
if (historyPacks == null) {
return callback()
}
// select only the new packs not already in the index
let newPacks = (() => {
const result = []
for (pack of Array.from(historyPacks)) {
if (
(indexResult != null ? indexResult[pack._id] : undefined) ==
null
) {
result.push(pack)
}
}
return result
})()
newPacks = (() => {
const result1 = []
for (pack of Array.from(newPacks)) {
result1.push(
_.omit(
pack,
'doc_id',
'project_id',
'n',
'sz',
'last_checked',
'finalised'
)
)
}
return result1
})()
if (newPacks.length) {
logger.log(
{ project_id, doc_id, n: newPacks.length },
'found new packs'
)
}
return result1
})()
if (newPacks.length) {
logger.log(
{ project_id, doc_id, n: newPacks.length },
'found new packs'
)
return callback(null, newPacks)
}
return callback(null, newPacks)
})
)
})
},
insertPacksIntoIndexWithLock(project_id, doc_id, newPacks, callback) {
return LockManager.runWithLock(
keys.historyIndexLock({ doc_id }),
(releaseLock) =>
releaseLock =>
PackManager._insertPacksIntoIndex(
project_id,
doc_id,
@ -726,11 +730,11 @@ module.exports = PackManager = {
{
$setOnInsert: { project_id: ObjectId(project_id.toString()) },
$push: {
packs: { $each: newPacks, $sort: { v: 1 } }
}
packs: { $each: newPacks, $sort: { v: 1 } },
},
},
{
upsert: true
upsert: true,
},
callback
)
@ -759,36 +763,36 @@ module.exports = PackManager = {
}
return async.series(
[
(cb) =>
cb =>
PackManager.checkArchiveNotInProgress(
project_id,
doc_id,
pack_id,
cb
),
(cb) =>
cb =>
PackManager.markPackAsArchiveInProgress(
project_id,
doc_id,
pack_id,
cb
),
(cb) =>
MongoAWS.archivePack(project_id, doc_id, pack_id, (err) =>
cb =>
MongoAWS.archivePack(project_id, doc_id, pack_id, err =>
clearFlagOnError(err, cb)
),
(cb) =>
PackManager.checkArchivedPack(project_id, doc_id, pack_id, (err) =>
cb =>
PackManager.checkArchivedPack(project_id, doc_id, pack_id, err =>
clearFlagOnError(err, cb)
),
(cb) => PackManager.markPackAsArchived(project_id, doc_id, pack_id, cb),
(cb) =>
cb => PackManager.markPackAsArchived(project_id, doc_id, pack_id, cb),
cb =>
PackManager.setTTLOnArchivedPack(
project_id,
doc_id,
pack_id,
callback
)
),
],
callback
)
@ -802,40 +806,42 @@ module.exports = PackManager = {
if (pack == null) {
return callback(new Error('pack not found'))
}
return MongoAWS.readArchivedPack(project_id, doc_id, pack_id, function (
err,
result
) {
delete result.last_checked
delete pack.last_checked
// need to compare ids as ObjectIds with .equals()
for (const key of ['_id', 'project_id', 'doc_id']) {
if (result[key].equals(pack[key])) {
result[key] = pack[key]
return MongoAWS.readArchivedPack(
project_id,
doc_id,
pack_id,
function (err, result) {
delete result.last_checked
delete pack.last_checked
// need to compare ids as ObjectIds with .equals()
for (const key of ['_id', 'project_id', 'doc_id']) {
if (result[key].equals(pack[key])) {
result[key] = pack[key]
}
}
for (let i = 0; i < result.pack.length; i++) {
const op = result.pack[i]
if (op._id != null && op._id.equals(pack.pack[i]._id)) {
op._id = pack.pack[i]._id
}
}
if (_.isEqual(pack, result)) {
return callback()
} else {
logger.err(
{
pack,
result,
jsondiff: JSON.stringify(pack) === JSON.stringify(result),
},
'difference when comparing packs'
)
return callback(
new Error('pack retrieved from s3 does not match pack in mongo')
)
}
}
for (let i = 0; i < result.pack.length; i++) {
const op = result.pack[i]
if (op._id != null && op._id.equals(pack.pack[i]._id)) {
op._id = pack.pack[i]._id
}
}
if (_.isEqual(pack, result)) {
return callback()
} else {
logger.err(
{
pack,
result,
jsondiff: JSON.stringify(pack) === JSON.stringify(result)
},
'difference when comparing packs'
)
return callback(
new Error('pack retrieved from s3 does not match pack in mongo')
)
}
})
)
})
},
// Extra methods to test archive/unarchive for a doc_id
@ -870,15 +876,18 @@ module.exports = PackManager = {
// Processing old packs via worker
processOldPack(project_id, doc_id, pack_id, callback) {
const markAsChecked = (err) =>
PackManager.markPackAsChecked(project_id, doc_id, pack_id, function (
err2
) {
if (err2 != null) {
return callback(err2)
const markAsChecked = err =>
PackManager.markPackAsChecked(
project_id,
doc_id,
pack_id,
function (err2) {
if (err2 != null) {
return callback(err2)
}
return callback(err)
}
return callback(err)
})
)
logger.log({ project_id, doc_id }, 'processing old packs')
return db.docHistory.findOne({ _id: pack_id }, function (err, pack) {
if (err != null) {
@ -899,42 +908,47 @@ module.exports = PackManager = {
if (err != null) {
return markAsChecked(err)
}
return PackManager.updateIndexIfNeeded(project_id, doc_id, function (
err
) {
if (err != null) {
return markAsChecked(err)
}
return PackManager.findUnarchivedPacks(
project_id,
doc_id,
function (err, unarchivedPacks) {
if (err != null) {
return markAsChecked(err)
}
if (
!(unarchivedPacks != null
? unarchivedPacks.length
: undefined)
) {
logger.log({ project_id, doc_id }, 'no packs need archiving')
return markAsChecked()
}
return async.eachSeries(
unarchivedPacks,
(pack, cb) =>
PackManager.archivePack(project_id, doc_id, pack._id, cb),
function (err) {
if (err != null) {
return markAsChecked(err)
}
logger.log({ project_id, doc_id }, 'done processing')
return PackManager.updateIndexIfNeeded(
project_id,
doc_id,
function (err) {
if (err != null) {
return markAsChecked(err)
}
return PackManager.findUnarchivedPacks(
project_id,
doc_id,
function (err, unarchivedPacks) {
if (err != null) {
return markAsChecked(err)
}
if (
!(unarchivedPacks != null
? unarchivedPacks.length
: undefined)
) {
logger.log(
{ project_id, doc_id },
'no packs need archiving'
)
return markAsChecked()
}
)
}
)
})
return async.eachSeries(
unarchivedPacks,
(pack, cb) =>
PackManager.archivePack(project_id, doc_id, pack._id, cb),
function (err) {
if (err != null) {
return markAsChecked(err)
}
logger.log({ project_id, doc_id }, 'done processing')
return markAsChecked()
}
)
}
)
}
)
}
)
})
@ -974,7 +988,7 @@ module.exports = PackManager = {
markPackAsFinalisedWithLock(project_id, doc_id, pack_id, callback) {
return LockManager.runWithLock(
keys.historyLock({ doc_id }),
(releaseLock) =>
releaseLock =>
PackManager._markPackAsFinalised(
project_id,
doc_id,
@ -1050,24 +1064,25 @@ module.exports = PackManager = {
{ project_id, doc_id, pack_id },
'checking if archive in progress'
)
return PackManager.getPackFromIndex(doc_id, pack_id, function (
err,
result
) {
if (err != null) {
return callback(err)
return PackManager.getPackFromIndex(
doc_id,
pack_id,
function (err, result) {
if (err != null) {
return callback(err)
}
if (result == null) {
return callback(new Error('pack not found in index'))
}
if (result.inS3) {
return callback(new Error('pack archiving already done'))
} else if (result.inS3 != null) {
return callback(new Error('pack archiving already in progress'))
} else {
return callback()
}
}
if (result == null) {
return callback(new Error('pack not found in index'))
}
if (result.inS3) {
return callback(new Error('pack archiving already done'))
} else if (result.inS3 != null) {
return callback(new Error('pack archiving already in progress'))
} else {
return callback()
}
})
)
},
markPackAsArchiveInProgress(project_id, doc_id, pack_id, callback) {
@ -1078,7 +1093,7 @@ module.exports = PackManager = {
return db.docHistoryIndex.findOneAndUpdate(
{
_id: ObjectId(doc_id.toString()),
packs: { $elemMatch: { _id: pack_id, inS3: { $exists: false } } }
packs: { $elemMatch: { _id: pack_id, inS3: { $exists: false } } },
},
{ $set: { 'packs.$.inS3': false } },
{ projection: { 'packs.$': 1 } },
@ -1106,7 +1121,7 @@ module.exports = PackManager = {
return db.docHistoryIndex.updateOne(
{
_id: ObjectId(doc_id.toString()),
packs: { $elemMatch: { _id: pack_id, inS3: false } }
packs: { $elemMatch: { _id: pack_id, inS3: false } },
},
{ $unset: { 'packs.$.inS3': true } },
callback
@ -1118,7 +1133,7 @@ module.exports = PackManager = {
return db.docHistoryIndex.findOneAndUpdate(
{
_id: ObjectId(doc_id.toString()),
packs: { $elemMatch: { _id: pack_id, inS3: false } }
packs: { $elemMatch: { _id: pack_id, inS3: false } },
},
{ $set: { 'packs.$.inS3': true } },
{ projection: { 'packs.$': 1 } },
@ -1147,7 +1162,7 @@ module.exports = PackManager = {
return callback()
}
)
}
},
}
// _getOneDayInFutureWithRandomDelay: ->
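From the fields touched in this file, a pack document is roughly shaped as below (a reconstruction for orientation, not an authoritative schema):

const { ObjectId } = require('mongodb')

const examplePack = {
  _id: ObjectId(), // pack id
  project_id: ObjectId(),
  doc_id: ObjectId(),
  pack: [], // compressed sharejs updates, each { op, meta, v }
  n: 42, // number of updates in the pack
  sz: 12345, // total size in bytes
  meta: { start_ts: 0, end_ts: 0 }, // timestamps of first/last update
  v: 1, // version of the first update
  v_end: 42, // version of the last update
  temporary: false, // temporary packs carry expiresAt (7 days above)
  // last_checked, finalised and inS3 appear during the archiving flow
}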

View file

@ -16,7 +16,7 @@
let LIMIT, pending
let project_id, doc_id
const { callbackify } = require('util')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const async = require('async')
const _ = require('underscore')
const { db, ObjectId, waitForDb, closeDb } = require('./mongodb')
@ -53,8 +53,8 @@ if (!source.match(/^[0-9]+$/)) {
}
return result1
})()
pending = _.filter(result, (row) =>
__guard__(row != null ? row.doc_id : undefined, (x) =>
pending = _.filter(result, row =>
__guard__(row != null ? row.doc_id : undefined, x =>
x.match(/^[a-f0-9]{24}$/)
)
)
@ -101,9 +101,9 @@ const finish = function () {
})
}
process.on('exit', (code) => logger.log({ code }, 'pack archive worker exited'))
process.on('exit', code => logger.log({ code }, 'pack archive worker exited'))
const processUpdates = (pending) =>
const processUpdates = pending =>
async.eachSeries(
pending,
function (result, callback) {
@ -170,7 +170,7 @@ waitForDb()
processFromOneWeekAgo()
}
})
.catch((err) => {
.catch(err => {
logger.fatal({ err }, 'cannot connect to mongo, exiting')
process.exit(1)
})
@ -184,12 +184,12 @@ function processFromOneWeekAgo() {
project_id: { $exists: true },
v_end: { $exists: true },
_id: { $lt: ObjectIdFromDate(oneWeekAgo) },
last_checked: { $lt: oneWeekAgo }
last_checked: { $lt: oneWeekAgo },
},
{ projection: { _id: 1, doc_id: 1, project_id: 1 } }
)
.sort({
last_checked: 1
last_checked: 1,
})
.limit(LIMIT)
.toArray(function (err, results) {
@ -198,7 +198,7 @@ function processFromOneWeekAgo() {
finish()
return
}
pending = _.uniq(results, false, (result) => result.doc_id.toString())
pending = _.uniq(results, false, result => result.doc_id.toString())
TOTAL = pending.length
logger.log(`found ${TOTAL} documents to archive`)
return processUpdates(pending)
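processFromOneWeekAgo above bounds the query with _id: { $lt: ObjectIdFromDate(oneWeekAgo) }. The helper's body lies outside this hunk; the usual construction (assumed here) encodes the unix timestamp into the first four bytes of an ObjectId and zeroes the rest, since ObjectIds sort by their embedded creation time:

const { ObjectId } = require('mongodb')

function ObjectIdFromDate(date) {
  const hexSeconds = Math.floor(date.getTime() / 1000).toString(16)
  return ObjectId(hexSeconds.padStart(8, '0') + '0000000000000000')
}

// { _id: { $lt: ObjectIdFromDate(oneWeekAgo) } } then matches only documents
// created before oneWeekAgo.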

View file

@ -14,96 +14,100 @@
let ProjectIterator
const Heap = require('heap')
module.exports = ProjectIterator = ProjectIterator = class ProjectIterator {
constructor(packs, before, getPackByIdFn) {
this.before = before
this.getPackByIdFn = getPackByIdFn
const byEndTs = (a, b) =>
b.meta.end_ts - a.meta.end_ts || a.fromIndex - b.fromIndex
this.packs = packs.slice().sort(byEndTs)
this.queue = new Heap(byEndTs)
}
module.exports =
ProjectIterator =
ProjectIterator =
class ProjectIterator {
constructor(packs, before, getPackByIdFn) {
this.before = before
this.getPackByIdFn = getPackByIdFn
const byEndTs = (a, b) =>
b.meta.end_ts - a.meta.end_ts || a.fromIndex - b.fromIndex
this.packs = packs.slice().sort(byEndTs)
this.queue = new Heap(byEndTs)
}
next(callback) {
// what's up next
// console.log ">>> top item", iterator.packs[0]
const iterator = this
const { before } = this
const { queue } = iterator
const opsToReturn = []
let nextPack = iterator.packs[0]
let lowWaterMark =
(nextPack != null ? nextPack.meta.end_ts : undefined) || 0
let nextItem = queue.peek()
next(callback) {
// what's up next
// console.log ">>> top item", iterator.packs[0]
const iterator = this
const { before } = this
const { queue } = iterator
const opsToReturn = []
let nextPack = iterator.packs[0]
let lowWaterMark =
(nextPack != null ? nextPack.meta.end_ts : undefined) || 0
let nextItem = queue.peek()
// console.log "queue empty?", queue.empty()
// console.log "nextItem", nextItem
// console.log "nextItem.meta.end_ts", nextItem?.meta.end_ts
// console.log "lowWaterMark", lowWaterMark
// console.log "queue empty?", queue.empty()
// console.log "nextItem", nextItem
// console.log "nextItem.meta.end_ts", nextItem?.meta.end_ts
// console.log "lowWaterMark", lowWaterMark
while (
before != null &&
(nextPack != null ? nextPack.meta.start_ts : undefined) > before
) {
// discard pack that is outside range
iterator.packs.shift()
nextPack = iterator.packs[0]
lowWaterMark = (nextPack != null ? nextPack.meta.end_ts : undefined) || 0
}
if (
(queue.empty() ||
(nextItem != null ? nextItem.meta.end_ts : undefined) <=
lowWaterMark) &&
nextPack != null
) {
// retrieve the next pack and populate the queue
return this.getPackByIdFn(
nextPack.project_id,
nextPack.doc_id,
nextPack._id,
function (err, pack) {
if (err != null) {
return callback(err)
}
iterator.packs.shift() // have now retrieved this pack, remove it
// console.log "got pack", pack
for (const op of Array.from(pack.pack)) {
// console.log "adding op", op
if (before == null || op.meta.end_ts < before) {
op.doc_id = nextPack.doc_id
op.project_id = nextPack.project_id
queue.push(op)
}
}
// now try again
return iterator.next(callback)
while (
before != null &&
(nextPack != null ? nextPack.meta.start_ts : undefined) > before
) {
// discard pack that is outside range
iterator.packs.shift()
nextPack = iterator.packs[0]
lowWaterMark =
(nextPack != null ? nextPack.meta.end_ts : undefined) || 0
}
)
if (
(queue.empty() ||
(nextItem != null ? nextItem.meta.end_ts : undefined) <=
lowWaterMark) &&
nextPack != null
) {
// retrieve the next pack and populate the queue
return this.getPackByIdFn(
nextPack.project_id,
nextPack.doc_id,
nextPack._id,
function (err, pack) {
if (err != null) {
return callback(err)
}
iterator.packs.shift() // have now retrieved this pack, remove it
// console.log "got pack", pack
for (const op of Array.from(pack.pack)) {
// console.log "adding op", op
if (before == null || op.meta.end_ts < before) {
op.doc_id = nextPack.doc_id
op.project_id = nextPack.project_id
queue.push(op)
}
}
// now try again
return iterator.next(callback)
}
)
}
// console.log "nextItem", nextItem, "lowWaterMark", lowWaterMark
while (
nextItem != null &&
(nextItem != null ? nextItem.meta.end_ts : undefined) > lowWaterMark
) {
opsToReturn.push(nextItem)
queue.pop()
nextItem = queue.peek()
}
// console.log "queue empty?", queue.empty()
// console.log "nextPack", nextPack?
if (queue.empty() && nextPack == null) {
// got everything
iterator._done = true
}
return callback(null, opsToReturn)
}
done() {
return this._done
}
}
// console.log "nextItem", nextItem, "lowWaterMark", lowWaterMark
while (
nextItem != null &&
(nextItem != null ? nextItem.meta.end_ts : undefined) > lowWaterMark
) {
opsToReturn.push(nextItem)
queue.pop()
nextItem = queue.peek()
}
// console.log "queue empty?", queue.empty()
// console.log "nextPack", nextPack?
if (queue.empty() && nextPack == null) {
// got everything
iterator._done = true
}
return callback(null, opsToReturn)
}
done() {
return this._done
}
}
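The byEndTs comparator orders ops newest-first by end_ts, breaking ties by fromIndex, and the same ordering drives both the sorted pack list and the heap. In isolation, with the same heap package:

const Heap = require('heap')

const byEndTs = (a, b) =>
  b.meta.end_ts - a.meta.end_ts || a.fromIndex - b.fromIndex

const queue = new Heap(byEndTs)
queue.push({ meta: { end_ts: 20 }, fromIndex: 0 })
queue.push({ meta: { end_ts: 30 }, fromIndex: 1 })
queue.peek() // the end_ts: 30 op: most recent comes out first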

View file

@ -13,7 +13,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let RedisManager
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const redis = require('@overleaf/redis-wrapper')
const rclient = redis.createClient(Settings.redis.history)
const Keys = Settings.redis.history.key_schema
@ -34,7 +34,7 @@ module.exports = RedisManager = {
callback = function (error, rawUpdates) {}
}
try {
rawUpdates = Array.from(jsonUpdates || []).map((update) =>
rawUpdates = Array.from(jsonUpdates || []).map(update =>
JSON.parse(update)
)
} catch (e) {
@ -93,26 +93,30 @@ module.exports = RedisManager = {
let cursor = 0 // redis iterator
const keySet = {} // use hash to avoid duplicate results
// scan over all keys looking for pattern
var doIteration = (cb) =>
node.scan(cursor, 'MATCH', pattern, 'COUNT', 1000, function (
error,
reply
) {
let keys
if (error != null) {
return callback(error)
var doIteration = cb =>
node.scan(
cursor,
'MATCH',
pattern,
'COUNT',
1000,
function (error, reply) {
let keys
if (error != null) {
return callback(error)
}
;[cursor, keys] = Array.from(reply)
for (const key of Array.from(keys)) {
keySet[key] = true
}
if (cursor === '0') {
// note redis returns string result not numeric
return callback(null, Object.keys(keySet))
} else {
return doIteration()
}
}
;[cursor, keys] = Array.from(reply)
for (const key of Array.from(keys)) {
keySet[key] = true
}
if (cursor === '0') {
// note redis returns string result not numeric
return callback(null, Object.keys(keySet))
} else {
return doIteration()
}
})
)
return doIteration()
},
@ -162,5 +166,5 @@ module.exports = RedisManager = {
return callback(error, doc_ids)
}
)
}
},
}
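The doIteration loop above is the standard Redis SCAN cursor walk: each call returns [nextCursor, keys], keys can repeat across iterations (hence the keySet hash), and a returned cursor of '0' (a string) means the scan is complete. Simplified:

function scanAll(node, pattern, callback) {
  const keySet = {} // dedupe: SCAN may return a key more than once
  function iterate(cursor) {
    node.scan(cursor, 'MATCH', pattern, 'COUNT', 1000, (error, reply) => {
      if (error != null) return callback(error)
      const [nextCursor, keys] = reply
      for (const key of keys) keySet[key] = true
      if (nextCursor === '0') return callback(null, Object.keys(keySet))
      iterate(nextCursor)
    })
  }
  iterate(0)
}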

View file

@ -44,5 +44,5 @@ module.exports = RestoreManager = {
)
}
)
}
},
}

View file

@ -42,16 +42,16 @@ module.exports = UpdateCompressor = {
const splitUpdates = []
for (const update of Array.from(updates)) {
// Reject any non-insert or delete ops, i.e. comments
const ops = update.op.filter((o) => o.i != null || o.d != null)
const ops = update.op.filter(o => o.i != null || o.d != null)
if (ops.length === 0) {
splitUpdates.push({
op: UpdateCompressor.NOOP,
meta: {
start_ts: update.meta.start_ts || update.meta.ts,
end_ts: update.meta.end_ts || update.meta.ts,
user_id: update.meta.user_id
user_id: update.meta.user_id,
},
v: update.v
v: update.v,
})
} else {
for (const op of Array.from(ops)) {
@ -60,9 +60,9 @@ module.exports = UpdateCompressor = {
meta: {
start_ts: update.meta.start_ts || update.meta.ts,
end_ts: update.meta.end_ts || update.meta.ts,
user_id: update.meta.user_id
user_id: update.meta.user_id,
},
v: update.v
v: update.v,
})
}
}
@ -82,7 +82,7 @@ module.exports = UpdateCompressor = {
const nextUpdate = {
op: [],
meta: update.meta,
v: update.v
v: update.v,
}
if (update.op !== UpdateCompressor.NOOP) {
nextUpdate.op.push(update.op)
@ -97,7 +97,7 @@ module.exports = UpdateCompressor = {
if (
__guard__(
lastPreviousUpdate != null ? lastPreviousUpdate.op : undefined,
(x) => x.length
x => x.length
) > 1
) {
// if the last previous update was an array op, don't compress onto it.
@ -144,18 +144,18 @@ module.exports = UpdateCompressor = {
meta: {
user_id: firstUpdate.meta.user_id || null,
start_ts: firstUpdate.meta.start_ts || firstUpdate.meta.ts,
end_ts: firstUpdate.meta.end_ts || firstUpdate.meta.ts
end_ts: firstUpdate.meta.end_ts || firstUpdate.meta.ts,
},
v: firstUpdate.v
v: firstUpdate.v,
}
secondUpdate = {
op: secondUpdate.op,
meta: {
user_id: secondUpdate.meta.user_id || null,
start_ts: secondUpdate.meta.start_ts || secondUpdate.meta.ts,
end_ts: secondUpdate.meta.end_ts || secondUpdate.meta.ts
end_ts: secondUpdate.meta.end_ts || secondUpdate.meta.ts,
},
v: secondUpdate.v
v: secondUpdate.v,
}
if (firstUpdate.meta.user_id !== secondUpdate.meta.user_id) {
@ -192,14 +192,14 @@ module.exports = UpdateCompressor = {
meta: {
start_ts: firstUpdate.meta.start_ts,
end_ts: secondUpdate.meta.end_ts,
user_id: firstUpdate.meta.user_id
user_id: firstUpdate.meta.user_id,
},
op: {
p: firstOp.p,
i: strInject(firstOp.i, secondOp.p - firstOp.p, secondOp.i)
i: strInject(firstOp.i, secondOp.p - firstOp.p, secondOp.i),
},
v: secondUpdate.v
}
v: secondUpdate.v,
},
]
// Two deletes
} else if (
@ -214,14 +214,14 @@ module.exports = UpdateCompressor = {
meta: {
start_ts: firstUpdate.meta.start_ts,
end_ts: secondUpdate.meta.end_ts,
user_id: firstUpdate.meta.user_id
user_id: firstUpdate.meta.user_id,
},
op: {
p: secondOp.p,
d: strInject(secondOp.d, firstOp.p - secondOp.p, firstOp.d)
d: strInject(secondOp.d, firstOp.p - secondOp.p, firstOp.d),
},
v: secondUpdate.v
}
v: secondUpdate.v,
},
]
// An insert and then a delete
} else if (
@ -240,14 +240,14 @@ module.exports = UpdateCompressor = {
meta: {
start_ts: firstUpdate.meta.start_ts,
end_ts: secondUpdate.meta.end_ts,
user_id: firstUpdate.meta.user_id
user_id: firstUpdate.meta.user_id,
},
op: {
p: firstOp.p,
i: insert
i: insert,
},
v: secondUpdate.v
}
v: secondUpdate.v,
},
]
} else {
// This will only happen if the delete extends outside the insert
@ -269,14 +269,14 @@ module.exports = UpdateCompressor = {
meta: {
start_ts: firstUpdate.meta.start_ts,
end_ts: secondUpdate.meta.end_ts,
user_id: firstUpdate.meta.user_id
user_id: firstUpdate.meta.user_id,
},
op: {
p: firstOp.p,
i: ''
i: '',
},
v: secondUpdate.v
}
v: secondUpdate.v,
},
]
} else {
return diff_ops.map(function (op) {
@ -285,10 +285,10 @@ module.exports = UpdateCompressor = {
meta: {
start_ts: firstUpdate.meta.start_ts,
end_ts: secondUpdate.meta.end_ts,
user_id: firstUpdate.meta.user_id
user_id: firstUpdate.meta.user_id,
},
op,
v: secondUpdate.v
v: secondUpdate.v,
}
})
}
@ -315,13 +315,13 @@ module.exports = UpdateCompressor = {
if (type === this.ADDED) {
ops.push({
i: content,
p: position
p: position,
})
position += content.length
} else if (type === this.REMOVED) {
ops.push({
d: content,
p: position
p: position,
})
} else if (type === this.UNCHANGED) {
position += content.length
@ -330,7 +330,7 @@ module.exports = UpdateCompressor = {
}
}
return ops
}
},
}
function __guard__(value, transform) {
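To see what the two-inserts branch above does, note that strInject (defined elsewhere in this file; the definition below is the standard string splice and is assumed) splices the second insert into the first at the relative offset:

const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos)

// first update inserts 'foo' at position 5, the second inserts 'bar' at 6,
// i.e. one character into the freshly inserted 'foo':
strInject('foo', 6 - 5, 'bar') // => 'fbaroo'
// merged op: { p: 5, i: 'fbaroo' }, covering both edits in one insert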

View file

@ -22,54 +22,55 @@ module.exports = UpdateTrimmer = {
if (callback == null) {
callback = function (error, shouldTrim) {}
}
return MongoManager.getProjectMetaData(project_id, function (
error,
metadata
) {
if (error != null) {
return callback(error)
}
if (metadata != null ? metadata.preserveHistory : undefined) {
return callback(null, false)
} else {
return WebApiManager.getProjectDetails(project_id, function (
error,
details
) {
if (error != null) {
return callback(error)
}
logger.log({ project_id, details }, 'got details')
if (
__guard__(
details != null ? details.features : undefined,
(x) => x.versioning
)
) {
return MongoManager.setProjectMetaData(
project_id,
{ preserveHistory: true },
function (error) {
if (error != null) {
return callback(error)
}
return MongoManager.upgradeHistory(project_id, function (
error
) {
if (error != null) {
return callback(error)
}
return callback(null, false)
})
return MongoManager.getProjectMetaData(
project_id,
function (error, metadata) {
if (error != null) {
return callback(error)
}
if (metadata != null ? metadata.preserveHistory : undefined) {
return callback(null, false)
} else {
return WebApiManager.getProjectDetails(
project_id,
function (error, details) {
if (error != null) {
return callback(error)
}
)
} else {
return callback(null, true)
}
})
logger.log({ project_id, details }, 'got details')
if (
__guard__(
details != null ? details.features : undefined,
x => x.versioning
)
) {
return MongoManager.setProjectMetaData(
project_id,
{ preserveHistory: true },
function (error) {
if (error != null) {
return callback(error)
}
return MongoManager.upgradeHistory(
project_id,
function (error) {
if (error != null) {
return callback(error)
}
return callback(null, false)
}
)
}
)
} else {
return callback(null, true)
}
}
)
}
}
})
}
)
},
}
function __guard__(value, transform) {

View file

@ -25,7 +25,7 @@ const UpdateTrimmer = require('./UpdateTrimmer')
const logger = require('logger-sharelatex')
const async = require('async')
const _ = require('underscore')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const keys = Settings.redis.lock.key_schema
module.exports = UpdatesManager = {
@ -50,7 +50,7 @@ module.exports = UpdatesManager = {
const op = rawUpdates[i]
if (i > 0) {
const thisVersion = op != null ? op.v : undefined
const prevVersion = __guard__(rawUpdates[i - 1], (x) => x.v)
const prevVersion = __guard__(rawUpdates[i - 1], x => x.v)
if (!(prevVersion < thisVersion)) {
logger.error(
{
@ -59,7 +59,7 @@ module.exports = UpdatesManager = {
rawUpdates,
temporary,
thisVersion,
prevVersion
prevVersion,
},
'op versions out of order'
)
@ -69,138 +69,137 @@ module.exports = UpdatesManager = {
// FIXME: we no longer need the lastCompressedUpdate, so change functions not to need it
// CORRECTION: we do use it to log the time in case of error
return MongoManager.peekLastCompressedUpdate(doc_id, function (
error,
lastCompressedUpdate,
lastVersion
) {
// lastCompressedUpdate is the most recent update in Mongo, and
// lastVersion is its sharejs version number.
//
// The peekLastCompressedUpdate method may pass the update back
// as 'null' (for example if the previous compressed update has
// been archived). In this case it can still pass back the
// lastVersion from the update to allow us to check consistency.
let op
if (error != null) {
return callback(error)
}
// Ensure that raw updates start where lastVersion left off
if (lastVersion != null) {
const discardedUpdates = []
rawUpdates = rawUpdates.slice(0)
while (rawUpdates[0] != null && rawUpdates[0].v <= lastVersion) {
discardedUpdates.push(rawUpdates.shift())
}
if (discardedUpdates.length) {
logger.error(
{ project_id, doc_id, discardedUpdates, temporary, lastVersion },
'discarded updates already present'
)
return MongoManager.peekLastCompressedUpdate(
doc_id,
function (error, lastCompressedUpdate, lastVersion) {
// lastCompressedUpdate is the most recent update in Mongo, and
// lastVersion is its sharejs version number.
//
// The peekLastCompressedUpdate method may pass the update back
// as 'null' (for example if the previous compressed update has
// been archived). In this case it can still pass back the
// lastVersion from the update to allow us to check consistency.
let op
if (error != null) {
return callback(error)
}
if (rawUpdates[0] != null && rawUpdates[0].v !== lastVersion + 1) {
const ts = __guard__(
lastCompressedUpdate != null
? lastCompressedUpdate.meta
: undefined,
(x1) => x1.end_ts
)
const last_timestamp = ts != null ? new Date(ts) : 'unknown time'
error = new Error(
`Tried to apply raw op at version ${rawUpdates[0].v} to last compressed update with version ${lastVersion} from ${last_timestamp}`
)
logger.error(
{
err: error,
doc_id,
project_id,
prev_end_ts: ts,
temporary,
lastCompressedUpdate
},
'inconsistent doc versions'
)
if (
(Settings.trackchanges != null
? Settings.trackchanges.continueOnError
: undefined) &&
rawUpdates[0].v > lastVersion + 1
) {
// we have lost some ops - continue to write into the database, we can't recover at this point
lastCompressedUpdate = null
} else {
return callback(error)
// Ensure that raw updates start where lastVersion left off
if (lastVersion != null) {
const discardedUpdates = []
rawUpdates = rawUpdates.slice(0)
while (rawUpdates[0] != null && rawUpdates[0].v <= lastVersion) {
discardedUpdates.push(rawUpdates.shift())
}
}
}
if (rawUpdates.length === 0) {
return callback()
}
// some old large ops in redis need to be rejected, they predate
// the size limit that now prevents them going through the system
const REJECT_LARGE_OP_SIZE = 4 * 1024 * 1024
for (var rawUpdate of Array.from(rawUpdates)) {
const opSizes = (() => {
const result = []
for (op of Array.from(
(rawUpdate != null ? rawUpdate.op : undefined) || []
)) {
result.push(
(op.i != null ? op.i.length : undefined) ||
(op.d != null ? op.d.length : undefined)
if (discardedUpdates.length) {
logger.error(
{ project_id, doc_id, discardedUpdates, temporary, lastVersion },
'discarded updates already present'
)
}
return result
})()
const size = _.max(opSizes)
if (size > REJECT_LARGE_OP_SIZE) {
error = new Error(
`dropped op exceeding maximum allowed size of ${REJECT_LARGE_OP_SIZE}`
)
logger.error(
{ err: error, doc_id, project_id, size, rawUpdate },
'dropped op - too big'
)
rawUpdate.op = []
}
}
const compressedUpdates = UpdateCompressor.compressRawUpdates(
null,
rawUpdates
)
return PackManager.insertCompressedUpdates(
project_id,
doc_id,
lastCompressedUpdate,
compressedUpdates,
temporary,
function (error, result) {
if (error != null) {
return callback(error)
}
if (result != null) {
logger.log(
if (rawUpdates[0] != null && rawUpdates[0].v !== lastVersion + 1) {
const ts = __guard__(
lastCompressedUpdate != null
? lastCompressedUpdate.meta
: undefined,
x1 => x1.end_ts
)
const last_timestamp = ts != null ? new Date(ts) : 'unknown time'
error = new Error(
`Tried to apply raw op at version ${rawUpdates[0].v} to last compressed update with version ${lastVersion} from ${last_timestamp}`
)
logger.error(
{
project_id,
err: error,
doc_id,
orig_v:
lastCompressedUpdate != null
? lastCompressedUpdate.v
: undefined,
new_v: result.v
project_id,
prev_end_ts: ts,
temporary,
lastCompressedUpdate,
},
'inserted updates into pack'
'inconsistent doc versions'
)
if (
(Settings.trackchanges != null
? Settings.trackchanges.continueOnError
: undefined) &&
rawUpdates[0].v > lastVersion + 1
) {
// we have lost some ops - continue to write into the database, we can't recover at this point
lastCompressedUpdate = null
} else {
return callback(error)
}
}
}
if (rawUpdates.length === 0) {
return callback()
}
)
})
// some old large ops in redis need to be rejected, they predate
// the size limit that now prevents them going through the system
const REJECT_LARGE_OP_SIZE = 4 * 1024 * 1024
for (var rawUpdate of Array.from(rawUpdates)) {
const opSizes = (() => {
const result = []
for (op of Array.from(
(rawUpdate != null ? rawUpdate.op : undefined) || []
)) {
result.push(
(op.i != null ? op.i.length : undefined) ||
(op.d != null ? op.d.length : undefined)
)
}
return result
})()
const size = _.max(opSizes)
if (size > REJECT_LARGE_OP_SIZE) {
error = new Error(
`dropped op exceeding maximum allowed size of ${REJECT_LARGE_OP_SIZE}`
)
logger.error(
{ err: error, doc_id, project_id, size, rawUpdate },
'dropped op - too big'
)
rawUpdate.op = []
}
}
const compressedUpdates = UpdateCompressor.compressRawUpdates(
null,
rawUpdates
)
return PackManager.insertCompressedUpdates(
project_id,
doc_id,
lastCompressedUpdate,
compressedUpdates,
temporary,
function (error, result) {
if (error != null) {
return callback(error)
}
if (result != null) {
logger.log(
{
project_id,
doc_id,
orig_v:
lastCompressedUpdate != null
? lastCompressedUpdate.v
: undefined,
new_v: result.v,
},
'inserted updates into pack'
)
}
return callback()
}
)
}
)
},
// Check whether the updates are temporary (per-project property)
@ -208,15 +207,15 @@ module.exports = UpdatesManager = {
if (callback == null) {
callback = function (error, temporary) {}
}
return UpdateTrimmer.shouldTrimUpdates(project_id, function (
error,
temporary
) {
if (error != null) {
return callback(error)
return UpdateTrimmer.shouldTrimUpdates(
project_id,
function (error, temporary) {
if (error != null) {
return callback(error)
}
return callback(null, temporary)
}
return callback(null, temporary)
})
)
},
// Check for project id on document history (per-document property)
@ -248,71 +247,71 @@ module.exports = UpdatesManager = {
}
const { length } = docUpdates
// parse the redis strings into ShareJs updates
return RedisManager.expandDocUpdates(docUpdates, function (
error,
rawUpdates
) {
if (error != null) {
logger.err(
{ project_id, doc_id, docUpdates },
'failed to parse docUpdates'
)
return callback(error)
}
logger.log(
{ project_id, doc_id, rawUpdates },
'retrieved raw updates from redis'
)
return UpdatesManager.compressAndSaveRawUpdates(
project_id,
doc_id,
rawUpdates,
temporary,
function (error) {
if (error != null) {
return callback(error)
}
logger.log(
{ project_id, doc_id },
'compressed and saved doc updates'
)
// delete the applied updates from redis
return RedisManager.deleteAppliedDocUpdates(
project_id,
doc_id,
docUpdates,
function (error) {
if (error != null) {
return callback(error)
}
if (length === UpdatesManager.REDIS_READ_BATCH_SIZE) {
// There might be more updates
logger.log(
{ project_id, doc_id },
'continuing processing updates'
)
return setTimeout(
() =>
UpdatesManager.processUncompressedUpdates(
project_id,
doc_id,
temporary,
callback
),
0
)
} else {
logger.log(
{ project_id, doc_id },
'all raw updates processed'
)
return callback()
}
}
return RedisManager.expandDocUpdates(
docUpdates,
function (error, rawUpdates) {
if (error != null) {
logger.err(
{ project_id, doc_id, docUpdates },
'failed to parse docUpdates'
)
return callback(error)
}
)
})
logger.log(
{ project_id, doc_id, rawUpdates },
'retrieved raw updates from redis'
)
return UpdatesManager.compressAndSaveRawUpdates(
project_id,
doc_id,
rawUpdates,
temporary,
function (error) {
if (error != null) {
return callback(error)
}
logger.log(
{ project_id, doc_id },
'compressed and saved doc updates'
)
// delete the applied updates from redis
return RedisManager.deleteAppliedDocUpdates(
project_id,
doc_id,
docUpdates,
function (error) {
if (error != null) {
return callback(error)
}
if (length === UpdatesManager.REDIS_READ_BATCH_SIZE) {
// There might be more updates
logger.log(
{ project_id, doc_id },
'continuing processing updates'
)
return setTimeout(
() =>
UpdatesManager.processUncompressedUpdates(
project_id,
doc_id,
temporary,
callback
),
0
)
} else {
logger.log(
{ project_id, doc_id },
'all raw updates processed'
)
return callback()
}
}
)
}
)
}
)
}
)
},
@ -322,20 +321,20 @@ module.exports = UpdatesManager = {
if (callback == null) {
callback = function (error) {}
}
return UpdatesManager._prepareProjectForUpdates(project_id, function (
error,
temporary
) {
if (error != null) {
return callback(error)
return UpdatesManager._prepareProjectForUpdates(
project_id,
function (error, temporary) {
if (error != null) {
return callback(error)
}
return UpdatesManager._processUncompressedUpdatesForDocWithLock(
project_id,
doc_id,
temporary,
callback
)
}
return UpdatesManager._processUncompressedUpdatesForDocWithLock(
project_id,
doc_id,
temporary,
callback
)
})
)
},
// Process updates for a doc when the whole project is flushed (internal method)
@ -348,24 +347,26 @@ module.exports = UpdatesManager = {
if (callback == null) {
callback = function (error) {}
}
return UpdatesManager._prepareDocForUpdates(project_id, doc_id, function (
error
) {
if (error != null) {
return callback(error)
return UpdatesManager._prepareDocForUpdates(
project_id,
doc_id,
function (error) {
if (error != null) {
return callback(error)
}
return LockManager.runWithLock(
keys.historyLock({ doc_id }),
releaseLock =>
UpdatesManager.processUncompressedUpdates(
project_id,
doc_id,
temporary,
releaseLock
),
callback
)
}
return LockManager.runWithLock(
keys.historyLock({ doc_id }),
(releaseLock) =>
UpdatesManager.processUncompressedUpdates(
project_id,
doc_id,
temporary,
releaseLock
),
callback
)
})
)
},
// Process all updates for a project, only check project-level information once
@ -373,32 +374,32 @@ module.exports = UpdatesManager = {
if (callback == null) {
callback = function (error) {}
}
return RedisManager.getDocIdsWithHistoryOps(project_id, function (
error,
doc_ids
) {
if (error != null) {
return callback(error)
}
return UpdatesManager._prepareProjectForUpdates(project_id, function (
error,
temporary
) {
const jobs = []
for (const doc_id of Array.from(doc_ids)) {
;((doc_id) =>
jobs.push((cb) =>
UpdatesManager._processUncompressedUpdatesForDocWithLock(
project_id,
doc_id,
temporary,
cb
)
))(doc_id)
return RedisManager.getDocIdsWithHistoryOps(
project_id,
function (error, doc_ids) {
if (error != null) {
return callback(error)
}
return async.parallelLimit(jobs, 5, callback)
})
})
return UpdatesManager._prepareProjectForUpdates(
project_id,
function (error, temporary) {
const jobs = []
for (const doc_id of Array.from(doc_ids)) {
;(doc_id =>
jobs.push(cb =>
UpdatesManager._processUncompressedUpdatesForDocWithLock(
project_id,
doc_id,
temporary,
cb
)
))(doc_id)
}
return async.parallelLimit(jobs, 5, callback)
}
)
}
)
},
// flush all outstanding changes
@ -417,7 +418,7 @@ module.exports = UpdatesManager = {
logger.log(
{
count: project_ids != null ? project_ids.length : undefined,
project_ids
project_ids,
},
'found projects'
)
@ -426,11 +427,11 @@ module.exports = UpdatesManager = {
const selectedProjects =
limit < 0 ? project_ids : project_ids.slice(0, limit)
for (project_id of Array.from(selectedProjects)) {
;((project_id) =>
jobs.push((cb) =>
;(project_id =>
jobs.push(cb =>
UpdatesManager.processUncompressedUpdatesForProject(
project_id,
(err) => cb(null, { failed: err != null, project_id })
err => cb(null, { failed: err != null, project_id })
)
))(project_id)
}
@ -460,7 +461,7 @@ module.exports = UpdatesManager = {
return callback(null, {
failed: failedProjects,
succeeded: succeededProjects,
all: project_ids
all: project_ids,
})
})
})
@ -485,7 +486,7 @@ module.exports = UpdatesManager = {
return callback(error)
}
// function to get doc_ids for each project
const task = (cb) =>
const task = cb =>
async.concatSeries(
all_project_ids,
RedisManager.getDocIdsWithHistoryOps,
@ -542,20 +543,22 @@ module.exports = UpdatesManager = {
if (callback == null) {
callback = function (error, updates) {}
}
return UpdatesManager.getDocUpdates(project_id, doc_id, options, function (
error,
updates
) {
if (error != null) {
return callback(error)
}
return UpdatesManager.fillUserInfo(updates, function (error, updates) {
return UpdatesManager.getDocUpdates(
project_id,
doc_id,
options,
function (error, updates) {
if (error != null) {
return callback(error)
}
return callback(null, updates)
})
})
return UpdatesManager.fillUserInfo(updates, function (error, updates) {
if (error != null) {
return callback(error)
}
return callback(null, updates)
})
}
)
},
getSummarizedProjectUpdates(project_id, options, callback) {
@ -577,63 +580,65 @@ module.exports = UpdatesManager = {
if (error != null) {
return callback(error)
}
      return PackManager.makeProjectIterator(project_id, before, function (
        err,
        iterator
      ) {
        if (err != null) {
          return callback(err)
        }
        // repeatedly get updates and pass them through the summariser to get a final output with user info
        return async.whilst(
          () =>
            // console.log "checking iterator.done", iterator.done()
            summarizedUpdates.length < options.min_count && !iterator.done(),
          (cb) =>
            iterator.next(function (err, partialUpdates) {
              if (err != null) {
                return callback(err)
              }
              // logger.log {partialUpdates}, 'got partialUpdates'
              if (partialUpdates.length === 0) {
                return cb()
              } // # FIXME should try to avoid this happening
              nextBeforeTimestamp =
                partialUpdates[partialUpdates.length - 1].meta.end_ts
              // add the updates to the summary list
              summarizedUpdates = UpdatesManager._summarizeUpdates(
                partialUpdates,
                summarizedUpdates
              )
              return cb()
            }),
          () =>
            // finally done all updates
            // console.log 'summarized Updates', summarizedUpdates
            UpdatesManager.fillSummarizedUserInfo(
              summarizedUpdates,
              function (err, results) {
                if (err != null) {
                  return callback(err)
                }
                return callback(
                  null,
                  results,
                  !iterator.done() ? nextBeforeTimestamp : undefined
                )
              }
            )
        )
      })
      return PackManager.makeProjectIterator(
        project_id,
        before,
        function (err, iterator) {
          if (err != null) {
            return callback(err)
          }
          // repeatedly get updates and pass them through the summariser to get a final output with user info
          return async.whilst(
            () =>
              // console.log "checking iterator.done", iterator.done()
              summarizedUpdates.length < options.min_count &&
              !iterator.done(),
            cb =>
              iterator.next(function (err, partialUpdates) {
                if (err != null) {
                  return callback(err)
                }
                // logger.log {partialUpdates}, 'got partialUpdates'
                if (partialUpdates.length === 0) {
                  return cb()
                } // # FIXME should try to avoid this happening
                nextBeforeTimestamp =
                  partialUpdates[partialUpdates.length - 1].meta.end_ts
                // add the updates to the summary list
                summarizedUpdates = UpdatesManager._summarizeUpdates(
                  partialUpdates,
                  summarizedUpdates
                )
                return cb()
              }),
            () =>
              // finally done all updates
              // console.log 'summarized Updates', summarizedUpdates
              UpdatesManager.fillSummarizedUserInfo(
                summarizedUpdates,
                function (err, results) {
                  if (err != null) {
                    return callback(err)
                  }
                  return callback(
                    null,
                    results,
                    !iterator.done() ? nextBeforeTimestamp : undefined
                  )
                }
              )
          )
        }
      )
}
)
},
exportProject(projectId, consumer) {
// Flush anything before collecting updates.
UpdatesManager.processUncompressedUpdatesForProject(projectId, (err) => {
UpdatesManager.processUncompressedUpdatesForProject(projectId, err => {
if (err) return consumer(err)
// Fetch all the packs.
@ -646,7 +651,7 @@ module.exports = UpdatesManager = {
async.whilst(
() => !iterator.done(),
(cb) =>
cb =>
iterator.next((err, updatesFromASinglePack) => {
if (err) return cb(err)
@ -656,7 +661,7 @@ module.exports = UpdatesManager = {
// call.
return cb()
}
updatesFromASinglePack.forEach((update) => {
updatesFromASinglePack.forEach(update => {
accumulatedUserIds.add(
// Super defensive access on update details.
String(update && update.meta && update.meta.user_id)
@ -666,7 +671,7 @@ module.exports = UpdatesManager = {
consumer(null, { updates: updatesFromASinglePack }, cb)
}),
(err) => {
err => {
if (err) return consumer(err)
// Adding undefined can happen for broken updates.
@ -674,7 +679,7 @@ module.exports = UpdatesManager = {
consumer(null, {
updates: [],
userIds: Array.from(accumulatedUserIds).sort()
userIds: Array.from(accumulatedUserIds).sort(),
})
}
)
@ -689,8 +694,8 @@ module.exports = UpdatesManager = {
const jobs = []
const fetchedUserInfo = {}
for (const user_id in users) {
;((user_id) =>
jobs.push((callback) =>
;(user_id =>
jobs.push(callback =>
WebApiManager.getUserInfo(user_id, function (error, userInfo) {
if (error != null) {
return callback(error)
@ -722,22 +727,22 @@ module.exports = UpdatesManager = {
}
}
return UpdatesManager.fetchUserInfo(users, function (
error,
fetchedUserInfo
) {
if (error != null) {
return callback(error)
}
for (update of Array.from(updates)) {
;({ user_id } = update.meta)
delete update.meta.user_id
if (UpdatesManager._validUserId(user_id)) {
update.meta.user = fetchedUserInfo[user_id]
return UpdatesManager.fetchUserInfo(
users,
function (error, fetchedUserInfo) {
if (error != null) {
return callback(error)
}
for (update of Array.from(updates)) {
;({ user_id } = update.meta)
delete update.meta.user_id
if (UpdatesManager._validUserId(user_id)) {
update.meta.user = fetchedUserInfo[user_id]
}
}
return callback(null, updates)
}
return callback(null, updates)
})
)
},
fillSummarizedUserInfo(updates, callback) {
@ -755,27 +760,27 @@ module.exports = UpdatesManager = {
}
}
return UpdatesManager.fetchUserInfo(users, function (
error,
fetchedUserInfo
) {
if (error != null) {
return callback(error)
}
for (update of Array.from(updates)) {
user_ids = update.meta.user_ids || []
update.meta.users = []
delete update.meta.user_ids
for (user_id of Array.from(user_ids)) {
if (UpdatesManager._validUserId(user_id)) {
update.meta.users.push(fetchedUserInfo[user_id])
} else {
update.meta.users.push(null)
return UpdatesManager.fetchUserInfo(
users,
function (error, fetchedUserInfo) {
if (error != null) {
return callback(error)
}
for (update of Array.from(updates)) {
user_ids = update.meta.user_ids || []
update.meta.users = []
delete update.meta.user_ids
for (user_id of Array.from(user_ids)) {
if (UpdatesManager._validUserId(user_id)) {
update.meta.users.push(fetchedUserInfo[user_id])
} else {
update.meta.users.push(null)
}
}
}
return callback(null, updates)
}
return callback(null, updates)
})
)
},
_validUserId(user_id) {
@ -830,7 +835,7 @@ module.exports = UpdatesManager = {
// check if the user in this update is already present in the earliest update,
// if not, add them to the users list of the earliest update
earliestUpdate.meta.user_ids = _.union(earliestUpdate.meta.user_ids, [
update.meta.user_id
update.meta.user_id,
])
doc_id = update.doc_id.toString()
@ -841,7 +846,7 @@ module.exports = UpdatesManager = {
} else {
earliestUpdate.docs[doc_id] = {
fromV: update.v,
toV: update.v
toV: update.v,
}
}
@ -858,14 +863,14 @@ module.exports = UpdatesManager = {
meta: {
user_ids: [],
start_ts: update.meta.start_ts,
end_ts: update.meta.end_ts
end_ts: update.meta.end_ts,
},
docs: {}
docs: {},
}
newUpdate.docs[update.doc_id.toString()] = {
fromV: update.v,
toV: update.v
toV: update.v,
}
newUpdate.meta.user_ids.push(update.meta.user_id)
summarizedUpdates.push(newUpdate)
@ -873,7 +878,7 @@ module.exports = UpdatesManager = {
}
return summarizedUpdates
}
},
}
function __guard__(value, transform) {

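The UpdatesManager hunks above are almost entirely prettier reflow (trailing commas, dropped arrow parens, re-indented callbacks); the flow itself is unchanged. The one guard worth pausing on is the oversized-op rejection. A condensed sketch of that check, with a hypothetical helper name and without the logging the real code does:

const _ = require('underscore')

const REJECT_LARGE_OP_SIZE = 4 * 1024 * 1024

// Drop any update whose largest insert/delete exceeds the cap,
// keeping the version bump but discarding the op content.
function dropOversizedOps(rawUpdates) {
  for (const rawUpdate of rawUpdates) {
    const opSizes = (rawUpdate.op || []).map(op => (op.i || op.d || '').length)
    if (_.max(opSizes) > REJECT_LARGE_OP_SIZE) {
      rawUpdate.op = []
    }
  }
}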
View file

@ -13,7 +13,7 @@
let WebApiManager
const request = require('requestretry') // allow retry on error https://github.com/FGRibreau/node-request-retry
const logger = require('logger-sharelatex')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
// Don't let HTTP calls hang for a long time
const MAX_HTTP_REQUEST_LENGTH = 15000 // 15 seconds
@ -36,8 +36,8 @@ module.exports = WebApiManager = {
auth: {
user: Settings.apis.web.user,
pass: Settings.apis.web.pass,
sendImmediately: true
}
sendImmediately: true,
},
},
function (error, res, body) {
if (error != null) {
@ -86,7 +86,7 @@ module.exports = WebApiManager = {
id: user.id,
email: user.email,
first_name: user.first_name,
last_name: user.last_name
last_name: user.last_name,
})
})
},
@ -112,5 +112,5 @@ module.exports = WebApiManager = {
}
return callback(null, project)
})
}
},
}

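WebApiManager itself only swaps settings-sharelatex for @overleaf/settings; the retry behaviour is untouched. A rough sketch of how a requestretry call like the one above is shaped (URL and credential values here are illustrative, not the service's real config):

const request = require('requestretry')

request(
  {
    url: 'http://localhost:3000/user/some-user-id/personal_info', // illustrative
    timeout: 15000, // mirrors MAX_HTTP_REQUEST_LENGTH above
    maxAttempts: 2, // requestretry re-issues on network errors and 5xx responses
    auth: { user: 'sharelatex', pass: 'password', sendImmediately: true },
  },
  function (error, res, body) {
    if (error) throw error
    console.log(res.statusCode, body)
  }
)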
View file

@ -1,4 +1,4 @@
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const { MongoClient, ObjectId } = require('mongodb')
const clientPromise = MongoClient.connect(
@ -38,5 +38,5 @@ module.exports = {
db,
ObjectId,
closeDb,
waitForDb
waitForDb,
}

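The mongodb helper likewise only changes its settings import. The pattern it implements is connect-once-at-require-time; a minimal sketch of the same idea (hard-coded URL for illustration, where the real file reads Settings.mongo):

const { MongoClient } = require('mongodb')

// One shared connection promise, created as soon as the module loads...
const clientPromise = MongoClient.connect('mongodb://localhost/sharelatex', {
  useUnifiedTopology: true,
})

// ...which callers await before serving traffic.
async function waitForDb() {
  const client = await clientPromise
  return client.db()
}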
View file

@ -178,7 +178,7 @@ diff_match_patch.prototype.diff_compute_ = function (
diffs = [
[DIFF_INSERT, longtext.substring(0, i)],
[DIFF_EQUAL, shorttext],
[DIFF_INSERT, longtext.substring(i + shorttext.length)]
[DIFF_INSERT, longtext.substring(i + shorttext.length)],
]
// Swap insertions for deletions if diff is reversed.
if (text1.length > text2.length) {
@ -192,7 +192,7 @@ diff_match_patch.prototype.diff_compute_ = function (
// After the previous speedup, the character can't be an equality.
return [
[DIFF_DELETE, text1],
[DIFF_INSERT, text2]
[DIFF_INSERT, text2],
]
}
@ -415,7 +415,7 @@ diff_match_patch.prototype.diff_bisect_ = function (text1, text2, deadline) {
// number of diffs equals number of characters, no commonality at all.
return [
[DIFF_DELETE, text1],
[DIFF_INSERT, text2]
[DIFF_INSERT, text2],
]
}
@ -716,7 +716,7 @@ diff_match_patch.prototype.diff_halfMatch_ = function (text1, text2) {
best_longtext_b,
best_shorttext_a,
best_shorttext_b,
best_common
best_common,
]
} else {
return null
@ -809,7 +809,7 @@ diff_match_patch.prototype.diff_cleanupSemantic = function (diffs) {
// Duplicate record.
diffs.splice(equalities[equalitiesLength - 1], 0, [
DIFF_DELETE,
lastequality
lastequality,
])
// Change second copy to insert.
diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT
@ -859,7 +859,7 @@ diff_match_patch.prototype.diff_cleanupSemantic = function (diffs) {
// Overlap found. Insert an equality and trim the surrounding edits.
diffs.splice(pointer, 0, [
DIFF_EQUAL,
insertion.substring(0, overlap_length1)
insertion.substring(0, overlap_length1),
])
diffs[pointer - 1][1] = deletion.substring(
0,
@ -877,7 +877,7 @@ diff_match_patch.prototype.diff_cleanupSemantic = function (diffs) {
// Insert an equality and swap and trim the surrounding edits.
diffs.splice(pointer, 0, [
DIFF_EQUAL,
deletion.substring(0, overlap_length2)
deletion.substring(0, overlap_length2),
])
diffs[pointer - 1][0] = DIFF_INSERT
diffs[pointer - 1][1] = insertion.substring(
@ -1093,7 +1093,7 @@ diff_match_patch.prototype.diff_cleanupEfficiency = function (diffs) {
// Duplicate record.
diffs.splice(equalities[equalitiesLength - 1], 0, [
DIFF_DELETE,
lastequality
lastequality,
])
// Change second copy to insert.
diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT
@ -1156,13 +1156,12 @@ diff_match_patch.prototype.diff_cleanupMerge = function (diffs) {
diffs[pointer - count_delete - count_insert - 1][0] ==
DIFF_EQUAL
) {
diffs[
pointer - count_delete - count_insert - 1
][1] += text_insert.substring(0, commonlength)
diffs[pointer - count_delete - count_insert - 1][1] +=
text_insert.substring(0, commonlength)
} else {
diffs.splice(0, 0, [
DIFF_EQUAL,
text_insert.substring(0, commonlength)
text_insert.substring(0, commonlength),
])
pointer++
}
@ -1189,12 +1188,12 @@ diff_match_patch.prototype.diff_cleanupMerge = function (diffs) {
if (count_delete === 0) {
diffs.splice(pointer - count_insert, count_delete + count_insert, [
DIFF_INSERT,
text_insert
text_insert,
])
} else if (count_insert === 0) {
diffs.splice(pointer - count_delete, count_delete + count_insert, [
DIFF_DELETE,
text_delete
text_delete,
])
} else {
diffs.splice(

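Every diff_match_patch change above is a trailing comma or an indentation reflow; the tuple format the library works with is untouched. For orientation, a diff is an array of [operation, text] pairs:

// DIFF_DELETE = -1, DIFF_EQUAL = 0, DIFF_INSERT = 1
const diffs = [
  [0, 'hello '], // present in both texts
  [-1, 'world'], // only in text1 (deleted)
  [1, 'there'], // only in text2 (inserted)
]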
View file

@ -3,6 +3,6 @@ track-changes
--docker-repos=gcr.io/overleaf-ops
--env-add=AWS_BUCKET=bucket
--env-pass-through=
--node-version=12.21.0
--node-version=12.22.3
--public-repo=True
--script-version=3.8.0
--script-version=3.11.0

View file

@ -6,18 +6,18 @@ module.exports = {
mongo: {
options: {
useUnifiedTopology:
(process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true'
(process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true',
},
url:
process.env.MONGO_CONNECTION_STRING ||
`mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`
`mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`,
},
internal: {
trackchanges: {
port: 3015,
host: process.env.LISTEN_ADDRESS || 'localhost'
}
host: process.env.LISTEN_ADDRESS || 'localhost',
},
},
apis: {
documentupdater: {
@ -25,18 +25,18 @@ module.exports = {
process.env.DOCUMENT_UPDATER_HOST ||
process.env.DOCUPDATER_HOST ||
'localhost'
}:3003`
}:3003`,
},
docstore: {
url: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`
url: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`,
},
web: {
url: `http://${
process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
}:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
user: process.env.WEB_API_USER || 'sharelatex',
pass: process.env.WEB_API_PASSWORD || 'password'
}
pass: process.env.WEB_API_PASSWORD || 'password',
},
},
redis: {
lock: {
@ -49,8 +49,8 @@ module.exports = {
},
historyIndexLock({ project_id: projectId }) {
return `HistoryIndexLock:{${projectId}}`
}
}
},
},
},
history: {
host: process.env.REDIS_HOST || 'localhost',
@ -62,9 +62,9 @@ module.exports = {
},
docsWithHistoryOps({ project_id: projectId }) {
return `DocsWithHistoryOps:{${projectId}}`
}
}
}
},
},
},
},
trackchanges: {
@ -72,19 +72,19 @@ module.exports = {
key: process.env.AWS_ACCESS_KEY_ID,
secret: process.env.AWS_SECRET_ACCESS_KEY,
endpoint: process.env.AWS_S3_ENDPOINT,
pathStyle: process.env.AWS_S3_PATH_STYLE === 'true'
pathStyle: process.env.AWS_S3_PATH_STYLE === 'true',
},
stores: {
doc_history: process.env.AWS_BUCKET
doc_history: process.env.AWS_BUCKET,
},
continueOnError: process.env.TRACK_CHANGES_CONTINUE_ON_ERROR || false
continueOnError: process.env.TRACK_CHANGES_CONTINUE_ON_ERROR || false,
},
path: {
dumpFolder: Path.join(TMP_DIR, 'dumpFolder')
dumpFolder: Path.join(TMP_DIR, 'dumpFolder'),
},
sentry: {
dsn: process.env.SENTRY_DSN
}
dsn: process.env.SENTRY_DSN,
},
}

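All of the settings changes are trailing commas; every default and environment variable is preserved. One idiom worth noting is the string comparison used for boolean flags, since environment variables always arrive as strings:

// Defaults to true unless the variable is set to exactly 'false'
const useUnifiedTopology =
  (process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true'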
View file

@ -6,7 +6,7 @@ version: "2.3"
services:
test_unit:
image: node:12.21.0
image: node:12.22.3
volumes:
- .:/app
working_dir: /app
@ -18,7 +18,7 @@ services:
user: node
test_acceptance:
image: node:12.21.0
image: node:12.22.3
volumes:
- .:/app
working_dir: /app

File diff suppressed because it is too large

View file

@ -13,19 +13,22 @@
"test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
"test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
"nodemon": "nodemon --config nodemon.json",
"lint": "node_modules/.bin/eslint --max-warnings 0 .",
"format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different",
"format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write"
"lint": "eslint --max-warnings 0 --format unix .",
"format": "prettier --list-different $PWD/'**/*.js'",
"format:fix": "prettier --write $PWD/'**/*.js'",
"lint:fix": "eslint --fix ."
},
"dependencies": {
"@overleaf/metrics": "^3.5.1",
"@overleaf/o-error": "^3.1.0",
"@overleaf/redis-wrapper": "^2.0.0",
"@overleaf/settings": "^2.1.1",
"JSONStream": "^1.3.5",
"async": "^2.6.3",
"aws-sdk": "^2.643.0",
"body-parser": "^1.19.0",
"bson": "^1.1.5",
"bunyan": "^1.8.15",
"byline": "^5.0.0",
"express": "4.17.1",
"heap": "^0.2.6",
@ -37,33 +40,25 @@
"request": "~2.88.2",
"requestretry": "^4.1.0",
"s3-streams": "^0.4.0",
"settings-sharelatex": "^1.1.0",
"underscore": "~1.13.1"
},
"devDependencies": {
"babel-eslint": "^10.1.0",
"bunyan": "~2.0.2",
"chai": "~4.2.0",
"chai": "^4.2.0",
"chai-as-promised": "^7.1.1",
"cli": "^1.0.1",
"eslint": "^6.8.0",
"eslint-config-prettier": "^6.10.0",
"eslint-config-standard": "^14.1.0",
"eslint-config-standard-jsx": "^8.1.0",
"eslint-config-standard-react": "^9.2.0",
"eslint-plugin-chai-expect": "^2.1.0",
"eslint-plugin-chai-friendly": "^0.5.0",
"eslint-plugin-import": "^2.20.1",
"eslint-plugin-jsx-a11y": "^6.2.3",
"eslint-plugin-mocha": "^6.3.0",
"eslint-plugin-node": "^11.0.0",
"eslint": "^7.21.0",
"eslint-config-prettier": "^8.1.0",
"eslint-config-standard": "^16.0.2",
"eslint-plugin-chai-expect": "^2.2.0",
"eslint-plugin-chai-friendly": "^0.6.0",
"eslint-plugin-import": "^2.22.1",
"eslint-plugin-mocha": "^8.0.0",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "^3.1.2",
"eslint-plugin-promise": "^4.2.1",
"eslint-plugin-react": "^7.19.0",
"eslint-plugin-standard": "^4.0.1",
"memorystream": "0.3.1",
"mocha": "^7.1.1",
"prettier": "^2.0.0",
"prettier-eslint-cli": "^5.0.0",
"mocha": "^8.3.2",
"prettier": "^2.2.1",
"sandboxed-module": "~2.0.3",
"sinon": "~9.0.1",
"timekeeper": "2.2.0"

View file

@ -13,7 +13,7 @@
const sinon = require('sinon')
const { expect } = require('chai')
const { ObjectId } = require('../../../app/js/mongodb')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const request = require('request')
const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
@ -39,20 +39,20 @@ describe('Appending doc ops to the history', function () {
{
op: [{ i: 'f', p: 3 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 3
v: 3,
},
{
op: [{ i: 'o', p: 4 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 4
v: 4,
},
{
op: [{ i: 'o', p: 5 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 5
}
v: 5,
},
],
(error) => {
error => {
if (error != null) {
throw error
}
@ -76,8 +76,8 @@ describe('Appending doc ops to the history', function () {
return expect(this.updates[0].pack[0].op).to.deep.equal([
{
p: 3,
i: 'foo'
}
i: 'foo',
},
])
})
@ -121,20 +121,20 @@ describe('Appending doc ops to the history', function () {
{
op: [{ i: 'f', p: 3 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 3
v: 3,
},
{
op: [{ i: 'o', p: 4 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 4
v: 4,
},
{
op: [{ i: 'o', p: 5 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 5
}
v: 5,
},
],
(error) => {
error => {
if (error != null) {
throw error
}
@ -162,20 +162,20 @@ describe('Appending doc ops to the history', function () {
{
op: [{ i: 'b', p: 6 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 6
v: 6,
},
{
op: [{ i: 'a', p: 7 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 7
v: 7,
},
{
op: [{ i: 'r', p: 8 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 8
}
v: 8,
},
],
(error) => {
error => {
if (error != null) {
throw error
}
@ -199,8 +199,8 @@ describe('Appending doc ops to the history', function () {
return expect(this.updates[0].pack[1].op).to.deep.equal([
{
p: 6,
i: 'bar'
}
i: 'bar',
},
])
})
@ -219,20 +219,20 @@ describe('Appending doc ops to the history', function () {
{
op: [{ i: 'b', p: 6 }],
meta: { ts: Date.now() + oneDay, user_id: this.user_id },
v: 6
v: 6,
},
{
op: [{ i: 'a', p: 7 }],
meta: { ts: Date.now() + oneDay, user_id: this.user_id },
v: 7
v: 7,
},
{
op: [{ i: 'r', p: 8 }],
meta: { ts: Date.now() + oneDay, user_id: this.user_id },
v: 8
}
v: 8,
},
],
(error) => {
error => {
if (error != null) {
throw error
}
@ -256,14 +256,14 @@ describe('Appending doc ops to the history', function () {
expect(this.updates[0].pack[0].op).to.deep.equal([
{
p: 3,
i: 'foo'
}
i: 'foo',
},
])
return expect(this.updates[0].pack[1].op).to.deep.equal([
{
p: 6,
i: 'bar'
}
i: 'bar',
},
])
})
})
@ -281,7 +281,7 @@ describe('Appending doc ops to the history', function () {
updates.push({
op: [{ i: 'a', p: 0 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: i
v: i,
})
this.expectedOp[0].i = `a${this.expectedOp[0].i}`
}
@ -290,7 +290,7 @@ describe('Appending doc ops to the history', function () {
this.project_id,
this.doc_id,
updates,
(error) => {
error => {
if (error != null) {
throw error
}
@ -334,22 +334,22 @@ describe('Appending doc ops to the history', function () {
op: [
{ i: 'f', p: 3 },
{ i: 'o', p: 4 },
{ i: 'o', p: 5 }
{ i: 'o', p: 5 },
],
meta: { ts: Date.now(), user_id: this.user_id },
v: 3
v: 3,
},
{
op: [
{ i: 'b', p: 6 },
{ i: 'a', p: 7 },
{ i: 'r', p: 8 }
{ i: 'r', p: 8 },
],
meta: { ts: Date.now() + oneDay, user_id: this.user_id },
v: 4
}
v: 4,
},
],
(error) => {
error => {
if (error != null) {
throw error
}
@ -373,14 +373,14 @@ describe('Appending doc ops to the history', function () {
expect(this.updates[0].pack[0].op).to.deep.equal([
{
p: 3,
i: 'foo'
}
i: 'foo',
},
])
return expect(this.updates[0].pack[1].op).to.deep.equal([
{
p: 6,
i: 'bar'
}
i: 'bar',
},
])
})
@ -404,15 +404,15 @@ describe('Appending doc ops to the history', function () {
{
op: [],
meta: { ts: Date.now(), user_id: this.user_id },
v: 3
v: 3,
},
{
op: [{ i: 'foo', p: 3 }],
meta: { ts: Date.now() + oneDay, user_id: this.user_id },
v: 4
}
v: 4,
},
],
(error) => {
error => {
if (error != null) {
throw error
}
@ -440,8 +440,8 @@ describe('Appending doc ops to the history', function () {
return expect(this.updates[0].pack[1].op).to.deep.equal([
{
p: 3,
i: 'foo'
}
i: 'foo',
},
])
})
@ -464,13 +464,13 @@ describe('Appending doc ops to the history', function () {
{
op: [
{ c: 'foo', p: 3 },
{ d: 'bar', p: 6 }
{ d: 'bar', p: 6 },
],
meta: { ts: Date.now(), user_id: this.user_id },
v: 3
}
v: 3,
},
],
(error) => {
error => {
if (error != null) {
throw error
}
@ -492,7 +492,7 @@ describe('Appending doc ops to the history', function () {
it('should ignore the comment op', function () {
return expect(this.updates[0].pack[0].op).to.deep.equal([
{ d: 'bar', p: 6 }
{ d: 'bar', p: 6 },
])
})
@ -515,10 +515,10 @@ describe('Appending doc ops to the history', function () {
{
op: [{ i: 'f', p: 3 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 3
}
v: 3,
},
],
(error) => {
error => {
if (error != null) {
throw error
}
@ -557,10 +557,10 @@ describe('Appending doc ops to the history', function () {
{
op: [{ i: 'f', p: 3 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 3
}
v: 3,
},
],
(error) => {
error => {
if (error != null) {
throw error
}

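These acceptance tests pin down the compression semantics the reformat must not disturb: consecutive single-character inserts at adjacent positions within the cutoff window collapse into one op. Schematically (data shapes taken from the assertions above, not the compressor's real API):

// Three raw single-character inserts...
const raw = [
  { op: [{ i: 'f', p: 3 }], v: 3 },
  { op: [{ i: 'o', p: 4 }], v: 4 },
  { op: [{ i: 'o', p: 5 }], v: 5 },
]
// ...are stored as a single compressed insert at the final version:
const compressed = [{ op: [{ i: 'foo', p: 3 }], v: 5 }]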
View file

@ -17,7 +17,7 @@
const sinon = require('sinon')
const { expect } = require('chai')
const { db, ObjectId } = require('../../../app/js/mongodb')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const request = require('request')
const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
@ -32,9 +32,9 @@ describe('Archiving updates', function () {
__guard__(
__guard__(
Settings != null ? Settings.trackchanges : undefined,
(x1) => x1.s3
x1 => x1.s3
),
(x) => x.key.length
x => x.key.length
) < 1
) {
const message = new Error('s3 keys not setup, this test setup will fail')
@ -57,8 +57,8 @@ describe('Archiving updates', function () {
MockWebApi.projects[this.project_id] = {
features: {
versioning: true
}
versioning: true,
},
}
sinon.spy(MockWebApi, 'getProjectDetails')
@ -66,13 +66,13 @@ describe('Archiving updates', function () {
email: 'user@sharelatex.com',
first_name: 'Leo',
last_name: 'Lion',
id: this.user_id
id: this.user_id,
}
sinon.spy(MockWebApi, 'getUserInfo')
MockDocStoreApi.docs[this.doc_id] = this.doc = {
_id: this.doc_id,
project_id: this.project_id
project_id: this.project_id,
}
sinon.spy(MockDocStoreApi, 'getAllDoc')
@ -85,15 +85,15 @@ describe('Archiving updates', function () {
this.updates.push({
op: [{ i: 'a', p: 0 }],
meta: { ts: this.now + (i - 2048) * this.hours, user_id: this.user_id },
v: 2 * i + 1
v: 2 * i + 1,
})
this.updates.push({
op: [{ i: 'b', p: 0 }],
meta: {
ts: this.now + (i - 2048) * this.hours + 10 * this.minutes,
user_id: this.user_id_2
user_id: this.user_id_2,
},
v: 2 * i + 2
v: 2 * i + 2,
})
}
TrackChangesApp.ensureRunning(() => {
@ -101,14 +101,14 @@ describe('Archiving updates', function () {
this.project_id,
this.doc_id,
this.updates,
(error) => {
error => {
if (error != null) {
throw error
}
return TrackChangesClient.flushDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -163,7 +163,7 @@ describe('Archiving updates', function () {
const expectedExportedUpdates = this.updates
.slice()
.reverse()
.map((update) => {
.map(update => {
// clone object, updates are created once in before handler
const exportedUpdate = Object.assign({}, update)
exportedUpdate.meta = Object.assign({}, update.meta)
@ -180,7 +180,7 @@ describe('Archiving updates', function () {
expect(this.exportedUpdates).to.deep.equal(expectedExportedUpdates)
expect(this.exportedUserIds).to.deep.equal([
this.user_id,
this.user_id_2
this.user_id_2,
])
})
})
@ -192,16 +192,12 @@ describe('Archiving updates', function () {
describe("archiving a doc's updates", function () {
before(function (done) {
TrackChangesClient.pushDocHistory(
this.project_id,
this.doc_id,
(error) => {
if (error != null) {
throw error
}
return done()
TrackChangesClient.pushDocHistory(this.project_id, this.doc_id, error => {
if (error != null) {
throw error
}
)
return done()
})
return null
})
@ -222,7 +218,7 @@ describe('Archiving updates', function () {
return db.docHistory.deleteMany(
{
doc_id: ObjectId(this.doc_id),
expiresAt: { $exists: true }
expiresAt: { $exists: true },
},
(err, result) => {
if (typeof error !== 'undefined' && error !== null) {
@ -295,16 +291,12 @@ describe('Archiving updates', function () {
return describe("unarchiving a doc's updates", function () {
before(function (done) {
TrackChangesClient.pullDocHistory(
this.project_id,
this.doc_id,
(error) => {
if (error != null) {
throw error
}
return done()
TrackChangesClient.pullDocHistory(this.project_id, this.doc_id, error => {
if (error != null) {
throw error
}
)
return done()
})
return null
})

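The archiving tests round-trip packs through S3 and then prune the local copies. The prune step relies on archived packs carrying an expiresAt field, so only archived history is deleted (doc_id here is a hypothetical variable standing in for the test's this.doc_id):

// Remove only packs that have been archived and scheduled to expire
db.docHistory.deleteMany(
  { doc_id: ObjectId(doc_id), expiresAt: { $exists: true } },
  callback
)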
View file

@ -13,7 +13,7 @@
const sinon = require('sinon')
const { expect } = require('chai')
const { ObjectId } = require('../../../app/js/mongodb')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const request = require('request')
const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
@ -40,17 +40,17 @@ describe('Flushing updates', function () {
{
op: [{ i: 'f', p: 3 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 3
}
v: 3,
},
],
(error) => {
error => {
if (error != null) {
throw error
}
return TrackChangesClient.flushDoc(
this.project_id,
this.doc_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -67,8 +67,8 @@ describe('Flushing updates', function () {
expect(updates[0].pack[0].op).to.deep.equal([
{
p: 3,
i: 'f'
}
i: 'f',
},
])
return done()
})
@ -87,8 +87,8 @@ describe('Flushing updates', function () {
MockWebApi.projects[this.project_id] = {
features: {
versioning: true
}
versioning: true,
},
}
TrackChangesClient.pushRawUpdates(
@ -98,19 +98,19 @@ describe('Flushing updates', function () {
{
op: [{ i: 'g', p: 2 }],
meta: { ts: Date.now() - 2 * this.weeks, user_id: this.user_id },
v: 2
v: 2,
},
{
op: [{ i: 'f', p: 3 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 3
}
v: 3,
},
],
(error) => {
error => {
if (error != null) {
throw error
}
return TrackChangesClient.flushProject(this.project_id, (error) => {
return TrackChangesClient.flushProject(this.project_id, error => {
if (error != null) {
throw error
}
@ -154,8 +154,8 @@ describe('Flushing updates', function () {
MockWebApi.projects[this.project_id] = {
features: {
versioning: false
}
versioning: false,
},
}
TrackChangesClient.pushRawUpdates(
@ -165,19 +165,19 @@ describe('Flushing updates', function () {
{
op: [{ i: 'g', p: 2 }],
meta: { ts: Date.now() - 2 * this.weeks, user_id: this.user_id },
v: 2
v: 2,
},
{
op: [{ i: 'f', p: 3 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 3
}
v: 3,
},
],
(error) => {
error => {
if (error != null) {
throw error
}
return TrackChangesClient.flushProject(this.project_id, (error) => {
return TrackChangesClient.flushProject(this.project_id, error => {
if (error != null) {
throw error
}
@ -210,13 +210,13 @@ describe('Flushing updates', function () {
MockWebApi.projects[this.project_id] = {
features: {
versioning: false
}
versioning: false,
},
}
TrackChangesClient.setPreserveHistoryForProject(
this.project_id,
(error) => {
error => {
if (error != null) {
throw error
}
@ -228,23 +228,23 @@ describe('Flushing updates', function () {
op: [{ i: 'g', p: 2 }],
meta: {
ts: Date.now() - 2 * this.weeks,
user_id: this.user_id
user_id: this.user_id,
},
v: 2
v: 2,
},
{
op: [{ i: 'f', p: 3 }],
meta: { ts: Date.now(), user_id: this.user_id },
v: 3
}
v: 3,
},
],
(error) => {
error => {
if (error != null) {
throw error
}
return TrackChangesClient.flushProject(
this.project_id,
(error) => {
error => {
if (error != null) {
throw error
}

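The third scenario above hinges on the per-project preserveHistory flag, which the test helper writes with an upsert so the metadata document does not need to exist beforehand (project_id stands in for the test's this.project_id):

db.projectHistoryMetaData.updateOne(
  { project_id: ObjectId(project_id) },
  { $set: { preserveHistory: true } },
  { upsert: true },
  callback
)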
View file

@ -12,7 +12,7 @@
const sinon = require('sinon')
const { expect } = require('chai')
const { ObjectId } = require('../../../app/js/mongodb')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const TrackChangesApp = require('./helpers/TrackChangesApp')
const TrackChangesClient = require('./helpers/TrackChangesClient')
@ -35,7 +35,7 @@ describe('Getting a diff', function () {
email: 'user@sharelatex.com',
first_name: 'Leo',
last_name: 'Lion',
id: this.user_id
id: this.user_id,
}
sinon.spy(MockWebApi, 'getUserInfo')
@ -45,23 +45,23 @@ describe('Getting a diff', function () {
{
op: [{ i: 'one ', p: 0 }],
meta: { ts: this.from - twoMinutes, user_id: this.user_id },
v: 3
v: 3,
},
{
op: [{ i: 'two ', p: 4 }],
meta: { ts: this.from + twoMinutes, user_id: this.user_id },
v: (this.fromVersion = 4)
v: (this.fromVersion = 4),
},
{
op: [{ i: 'three ', p: 8 }],
meta: { ts: this.to - twoMinutes, user_id: this.user_id },
v: (this.toVersion = 5)
v: (this.toVersion = 5),
},
{
op: [{ i: 'four', p: 14 }],
meta: { ts: this.to + twoMinutes, user_id: this.user_id },
v: 6
}
v: 6,
},
]
this.lines = ['one two three four']
this.expected_diff = [
@ -71,21 +71,21 @@ describe('Getting a diff', function () {
meta: {
start_ts: this.from + twoMinutes,
end_ts: this.to - twoMinutes,
user: this.user
}
}
user: this.user,
},
},
]
MockDocUpdaterApi.docs[this.doc_id] = {
lines: this.lines,
version: 7
version: 7,
}
TrackChangesApp.ensureRunning(() => {
return TrackChangesClient.pushRawUpdates(
this.project_id,
this.doc_id,
this.updates,
(error) => {
error => {
if (error != null) {
throw error
}

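The diff endpoint exercised here returns a list of parts: u for text unchanged between the two versions, and i/d for inserted or deleted text, with the changed parts carrying meta (timestamps and user). Schematically, following the expected_diff built above with illustrative values:

const diff = [
  { u: 'one ' }, // unchanged text
  {
    i: 'two three ', // inserted between fromVersion and toVersion
    meta: { start_ts: 0, end_ts: 1, user: { id: 'user-id' } }, // illustrative
  },
]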
View file

@ -13,7 +13,7 @@
const sinon = require('sinon')
const { expect } = require('chai')
const { ObjectId } = require('../../../app/js/mongodb')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const TrackChangesApp = require('./helpers/TrackChangesApp')
const TrackChangesClient = require('./helpers/TrackChangesClient')
@ -33,15 +33,15 @@ describe('Getting updates', function () {
MockWebApi.projects[this.project_id] = {
features: {
versioning: true
}
versioning: true,
},
}
MockWebApi.users[this.user_id] = this.user = {
email: 'user@sharelatex.com',
first_name: 'Leo',
last_name: 'Lion',
id: this.user_id
id: this.user_id,
}
sinon.spy(MockWebApi, 'getUserInfo')
@ -51,14 +51,14 @@ describe('Getting updates', function () {
op: [{ i: 'a', p: 0 }],
meta: {
ts: this.now - (9 - i) * this.hours - 2 * this.minutes,
user_id: this.user_id
user_id: this.user_id,
},
v: 2 * i + 1
v: 2 * i + 1,
})
this.updates.push({
op: [{ i: 'b', p: 0 }],
meta: { ts: this.now - (9 - i) * this.hours, user_id: this.user_id },
v: 2 * i + 2
v: 2 * i + 2,
})
}
this.updates[0].meta.user_id = this.deleted_user_id
@ -68,7 +68,7 @@ describe('Getting updates', function () {
this.project_id,
this.doc_id,
this.updates,
(error) => {
error => {
if (error != null) {
throw error
}
@ -82,7 +82,7 @@ describe('Getting updates', function () {
after() {
MockWebApi.getUserInfo.restore()
return null
}
},
})
describe('getting updates up to the limit', function () {
@ -118,25 +118,25 @@ describe('Getting updates', function () {
meta: {
start_ts: this.to - 2 * this.minutes,
end_ts: this.to,
users: [this.user]
}
users: [this.user],
},
},
{
docs: docs2,
meta: {
start_ts: this.to - 1 * this.hours - 2 * this.minutes,
end_ts: this.to - 1 * this.hours,
users: [this.user]
}
users: [this.user],
},
},
{
docs: docs3,
meta: {
start_ts: this.to - 2 * this.hours - 2 * this.minutes,
end_ts: this.to - 2 * this.hours,
users: [this.user]
}
}
users: [this.user],
},
},
])
})
})
@ -168,17 +168,17 @@ describe('Getting updates', function () {
meta: {
start_ts: this.to - 8 * this.hours - 2 * this.minutes,
end_ts: this.to - 8 * this.hours,
users: [this.user]
}
users: [this.user],
},
},
{
docs: docs2,
meta: {
start_ts: this.to - 9 * this.hours - 2 * this.minutes,
end_ts: this.to - 9 * this.hours,
users: [this.user, null]
}
}
users: [this.user, null],
},
},
])
})
})

View file

@ -11,7 +11,7 @@
*/
const sinon = require('sinon')
const { expect } = require('chai')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const LockManager = require('../../../app/js/LockManager')
const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
const TrackChangesApp = require('./helpers/TrackChangesApp')
@ -27,24 +27,24 @@ describe('Locking document', function () {
LockManager.LOCK_TTL = 1 // second
LockManager.runWithLock(
'doc123',
(releaseA) => {
releaseA => {
// we create a lock A and allow it to expire in redis
return setTimeout(
() =>
// now we create a new lock B and try to release A
LockManager.runWithLock(
'doc123',
(releaseB) => {
releaseB => {
return releaseA()
}, // try to release lock A to see if it wipes out lock B
(error) => {}
error => {}
),
// we never release lock B so nothing should happen here
1500
)
}, // enough time to wait until the lock has expired
(error) =>
error =>
// we get here after trying to release lock A
done()
)

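This test encodes the classic lock-safety property: releasing a lock that has already expired must not delete the lock a newer holder acquired in the meantime. The usual way to get that property, sketched here as an idea rather than LockManager's actual implementation, is a per-acquisition token that is checked before deletion:

const crypto = require('crypto')

function acquireLock(rclient, key, ttlSeconds, callback) {
  const token = crypto.randomBytes(16).toString('hex')
  // NX: only set if the key is absent; EX: let redis expire it
  rclient.set(key, token, 'EX', ttlSeconds, 'NX', (err, ok) =>
    callback(err, ok === 'OK' ? token : null)
  )
}

function releaseLock(rclient, key, token, callback) {
  rclient.get(key, (err, current) => {
    if (err) return callback(err)
    if (current !== token) return callback() // a newer holder owns the key now
    rclient.del(key, callback)
  })
}

// A production version would make the get/del pair atomic with a Lua script.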
View file

@ -12,7 +12,7 @@
const sinon = require('sinon')
const { expect } = require('chai')
const { ObjectId } = require('../../../app/js/mongodb')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const TrackChangesApp = require('./helpers/TrackChangesApp')
const TrackChangesClient = require('./helpers/TrackChangesClient')
@ -35,23 +35,23 @@ describe('Restoring a version', function () {
{
op: [{ i: 'one ', p: 0 }],
meta: { ts: this.now - 6 * minutes, user_id: this.user_id },
v: 3
v: 3,
},
{
op: [{ i: 'two ', p: 4 }],
meta: { ts: this.now - 4 * minutes, user_id: this.user_id },
v: 4
v: 4,
},
{
op: [{ i: 'three ', p: 8 }],
meta: { ts: this.now - 2 * minutes, user_id: this.user_id },
v: 5
v: 5,
},
{
op: [{ i: 'four', p: 14 }],
meta: { ts: this.now, user_id: this.user_id },
v: 6
}
v: 6,
},
]
this.lines = ['one two three four']
this.restored_lines = ['one two ']
@ -61,12 +61,12 @@ describe('Restoring a version', function () {
email: 'user@sharelatex.com',
first_name: 'Leo',
last_name: 'Lion',
id: this.user_id
id: this.user_id,
}
MockDocUpdaterApi.docs[this.doc_id] = {
lines: this.lines,
version: 7
version: 7,
}
TrackChangesApp.ensureRunning(() => {
@ -74,7 +74,7 @@ describe('Restoring a version', function () {
this.project_id,
this.doc_id,
this.updates,
(error) => {
error => {
if (error != null) {
throw error
}
@ -83,7 +83,7 @@ describe('Restoring a version', function () {
this.doc_id,
this.beforeVersion,
this.user_id,
(error) => {
error => {
if (error != null) {
throw error
}

View file

@ -39,16 +39,16 @@ module.exports = MockDocUpdaterApi = {
})
return app
.listen(3016, (error) => {
.listen(3016, error => {
if (error != null) {
throw error
}
})
.on('error', (error) => {
.on('error', error => {
console.error('error starting MockDocStoreApi:', error.message)
return process.exit(1)
})
}
},
}
MockDocUpdaterApi.run()

View file

@ -74,16 +74,16 @@ module.exports = MockDocUpdaterApi = {
})
return app
.listen(3003, (error) => {
.listen(3003, error => {
if (error != null) {
throw error
}
})
.on('error', (error) => {
.on('error', error => {
console.error('error starting MockDocUpdaterApi:', error.message)
return process.exit(1)
})
}
},
}
MockDocUpdaterApi.run()

View file

@ -61,16 +61,16 @@ module.exports = MockWebApi = {
})
return app
.listen(3000, (error) => {
.listen(3000, error => {
if (error != null) {
throw error
}
})
.on('error', (error) => {
.on('error', error => {
console.error('error starting MockWebApiServer:', error.message)
return process.exit(1)
})
}
},
}
MockWebApi.run()

View file

@ -15,7 +15,7 @@
const app = require('../../../../app')
const { waitForDb } = require('../../../../app/js/mongodb')
const logger = require('logger-sharelatex')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
module.exports = {
running: false,
@ -38,10 +38,10 @@ module.exports = {
Settings.internal != null
? Settings.internal.trackchanges
: undefined,
(x) => x.port
x => x.port
),
'localhost',
(error) => {
error => {
if (error != null) {
throw error
}
@ -58,7 +58,7 @@ module.exports = {
}
)
})
}
},
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null

View file

@ -16,7 +16,7 @@ let TrackChangesClient
const async = require('async')
const zlib = require('zlib')
const request = require('request')
const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')
const rclient = require('@overleaf/redis-wrapper').createClient(
Settings.redis.history
) // Only works locally for now
@ -28,7 +28,7 @@ const s3 = new aws.S3({
accessKeyId: Settings.trackchanges.s3.key,
secretAccessKey: Settings.trackchanges.s3.secret,
endpoint: Settings.trackchanges.s3.endpoint,
s3ForcePathStyle: Settings.trackchanges.s3.pathStyle
s3ForcePathStyle: Settings.trackchanges.s3.pathStyle,
})
const S3_BUCKET = Settings.trackchanges.stores.doc_history
@ -37,7 +37,7 @@ module.exports = TrackChangesClient = {
if (callback == null) {
callback = function (error, updates) {}
}
return TrackChangesClient.flushDoc(project_id, doc_id, (error) => {
return TrackChangesClient.flushDoc(project_id, doc_id, error => {
if (error != null) {
return callback(error)
}
@ -51,7 +51,7 @@ module.exports = TrackChangesClient = {
}
return request.post(
{
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/flush`
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/flush`,
},
(error, response, body) => {
response.statusCode.should.equal(204)
@ -66,7 +66,7 @@ module.exports = TrackChangesClient = {
}
return request.post(
{
url: `http://localhost:3015/project/${project_id}/flush`
url: `http://localhost:3015/project/${project_id}/flush`,
},
(error, response, body) => {
response.statusCode.should.equal(204)
@ -91,7 +91,7 @@ module.exports = TrackChangesClient = {
}
return db.projectHistoryMetaData.findOne(
{
project_id: ObjectId(project_id)
project_id: ObjectId(project_id),
},
callback
)
@ -103,13 +103,13 @@ module.exports = TrackChangesClient = {
}
return db.projectHistoryMetaData.updateOne(
{
project_id: ObjectId(project_id)
project_id: ObjectId(project_id),
},
{
$set: { preserveHistory: true }
$set: { preserveHistory: true },
},
{
upsert: true
upsert: true,
},
callback
)
@ -122,13 +122,13 @@ module.exports = TrackChangesClient = {
return rclient.sadd(
Keys.docsWithHistoryOps({ project_id }),
doc_id,
(error) => {
error => {
if (error != null) {
return callback(error)
}
return rclient.rpush(
Keys.uncompressedHistoryOps({ doc_id }),
...Array.from(Array.from(updates).map((u) => JSON.stringify(u))),
...Array.from(Array.from(updates).map(u => JSON.stringify(u))),
callback
)
}
@ -141,7 +141,7 @@ module.exports = TrackChangesClient = {
}
return request.get(
{
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/diff?from=${from}&to=${to}`
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/diff?from=${from}&to=${to}`,
},
(error, response, body) => {
response.statusCode.should.equal(200)
@ -156,7 +156,7 @@ module.exports = TrackChangesClient = {
}
return request.get(
{
url: `http://localhost:3015/project/${project_id}/updates?before=${options.before}&min_count=${options.min_count}`
url: `http://localhost:3015/project/${project_id}/updates?before=${options.before}&min_count=${options.min_count}`,
},
(error, response, body) => {
response.statusCode.should.equal(200)
@ -184,8 +184,8 @@ module.exports = TrackChangesClient = {
{
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/version/${version}/restore`,
headers: {
'X-User-Id': user_id
}
'X-User-Id': user_id,
},
},
(error, response, body) => {
response.statusCode.should.equal(204)
@ -200,7 +200,7 @@ module.exports = TrackChangesClient = {
}
return request.post(
{
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/push`
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/push`,
},
(error, response, body) => {
response.statusCode.should.equal(204)
@ -215,7 +215,7 @@ module.exports = TrackChangesClient = {
}
return request.post(
{
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/pull`
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/pull`,
},
(error, response, body) => {
response.statusCode.should.equal(204)
@ -254,7 +254,7 @@ module.exports = TrackChangesClient = {
}
const params = {
Bucket: S3_BUCKET,
Key: `${project_id}/changes-${doc_id}/pack-${pack_id}`
Key: `${project_id}/changes-${doc_id}/pack-${pack_id}`,
}
return s3.getObject(params, (error, data) => {
@ -280,7 +280,7 @@ module.exports = TrackChangesClient = {
}
let params = {
Bucket: S3_BUCKET,
Prefix: `${project_id}/changes-${doc_id}`
Prefix: `${project_id}/changes-${doc_id}`,
}
return s3.listObjects(params, (error, data) => {
@ -291,11 +291,11 @@ module.exports = TrackChangesClient = {
params = {
Bucket: S3_BUCKET,
Delete: {
Objects: data.Contents.map((s3object) => ({ Key: s3object.Key }))
}
Objects: data.Contents.map(s3object => ({ Key: s3object.Key })),
},
}
return s3.deleteObjects(params, callback)
})
}
},
}

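The S3 clean-up at the end of this helper is the standard list-then-batch-delete pattern, needed because S3 offers no prefix delete. The same two aws-sdk v2 calls, reduced to their core (bucket and prefix are placeholders):

s3.listObjects({ Bucket: bucket, Prefix: prefix }, (error, data) => {
  if (error) return callback(error)
  s3.deleteObjects(
    {
      Bucket: bucket,
      Delete: { Objects: data.Contents.map(o => ({ Key: o.Key })) },
    },
    callback
  )
})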
View file

@ -14,8 +14,8 @@ SandboxedModule.configure({
warn() {},
err() {},
error() {},
fatal() {}
}
fatal() {},
},
},
globals: { Buffer, JSON, console, process }
globals: { Buffer, JSON, console, process },
})

View file

@ -24,7 +24,7 @@ describe('DiffGenerator', function () {
return (this.meta = {
start_ts: this.ts,
end_ts: this.ts,
user_id: this.user_id
user_id: this.user_id,
})
})
@ -34,7 +34,7 @@ describe('DiffGenerator', function () {
const content = 'hello world'
const rewoundContent = this.DiffGenerator.rewindOp(content, {
p: 6,
i: 'wo'
i: 'wo',
})
return rewoundContent.should.equal('hello rld')
})
@ -45,7 +45,7 @@ describe('DiffGenerator', function () {
const content = 'hello rld'
const rewoundContent = this.DiffGenerator.rewindOp(content, {
p: 6,
d: 'wo'
d: 'wo',
})
return rewoundContent.should.equal('hello world')
})
@ -65,7 +65,7 @@ describe('DiffGenerator', function () {
const content = 'foobar'
const rewoundContent = this.DiffGenerator.rewindOp(content, {
p: 4,
i: 'bar'
i: 'bar',
})
return rewoundContent.should.equal('foo')
})
@ -78,8 +78,8 @@ describe('DiffGenerator', function () {
const update = {
op: [
{ p: 3, i: 'bbb' },
{ p: 6, i: 'ccc' }
]
{ p: 6, i: 'ccc' },
],
}
const rewoundContent = this.DiffGenerator.rewindUpdate(content, update)
return rewoundContent.should.equal('aaa')
@ -91,7 +91,7 @@ describe('DiffGenerator', function () {
const content = 'aaabbbccc'
const updates = [
{ op: [{ p: 3, i: 'bbb' }] },
{ op: [{ p: 6, i: 'ccc' }] }
{ op: [{ p: 6, i: 'ccc' }] },
]
const rewoundContent = this.DiffGenerator.rewindUpdates(content, updates)
return rewoundContent.should.equal('aaa')
@ -105,7 +105,7 @@ describe('DiffGenerator', function () {
this.updates = [
{ i: 'mock-update-1' },
{ i: 'mock-update-2' },
{ i: 'mock-update-3' }
{ i: 'mock-update-3' },
]
this.DiffGenerator.applyUpdateToDiff = sinon.stub().returns(this.diff)
this.DiffGenerator.compressDiff = sinon.stub().returns(this.diff)
@ -124,8 +124,8 @@ describe('DiffGenerator', function () {
.calledWith(
[
{
u: this.content
}
u: this.content,
},
],
this.updates[0]
)
@ -133,7 +133,7 @@ describe('DiffGenerator', function () {
})
it('should apply each update', function () {
return Array.from(this.updates).map((update) =>
return Array.from(this.updates).map(update =>
this.DiffGenerator.applyUpdateToDiff
.calledWith(sinon.match.any, update)
.should.equal(true)
@ -153,18 +153,18 @@ describe('DiffGenerator', function () {
const diff = this.DiffGenerator.compressDiff([
{
i: 'foo',
meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } }
meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } },
},
{
i: 'bar',
meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id } }
}
meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id } },
},
])
return expect(diff).to.deep.equal([
{
i: 'foobar',
meta: { start_ts: 5, end_ts: 20, user: { id: this.user_id } }
}
meta: { start_ts: 5, end_ts: 20, user: { id: this.user_id } },
},
])
})
})
@ -174,12 +174,12 @@ describe('DiffGenerator', function () {
const input = [
{
i: 'foo',
meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } }
meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } },
},
{
i: 'bar',
meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id_2 } }
}
meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id_2 } },
},
]
const output = this.DiffGenerator.compressDiff(input)
return expect(output).to.deep.equal(input)
@ -191,18 +191,18 @@ describe('DiffGenerator', function () {
const diff = this.DiffGenerator.compressDiff([
{
d: 'foo',
meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } }
meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } },
},
{
d: 'bar',
meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id } }
}
meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id } },
},
])
return expect(diff).to.deep.equal([
{
d: 'foobar',
meta: { start_ts: 5, end_ts: 20, user: { id: this.user_id } }
}
meta: { start_ts: 5, end_ts: 20, user: { id: this.user_id } },
},
])
})
})
@ -212,12 +212,12 @@ describe('DiffGenerator', function () {
const input = [
{
d: 'foo',
meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } }
meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } },
},
{
d: 'bar',
meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id_2 } }
}
meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id_2 } },
},
]
const output = this.DiffGenerator.compressDiff(input)
return expect(output).to.deep.equal(input)
@ -230,34 +230,34 @@ describe('DiffGenerator', function () {
it('should insert into the middle of (u)nchanged text', function () {
const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
op: [{ p: 3, i: 'baz' }],
meta: this.meta
meta: this.meta,
})
return expect(diff).to.deep.equal([
{ u: 'foo' },
{ i: 'baz', meta: this.meta },
{ u: 'bar' }
{ u: 'bar' },
])
})
it('should insert into the start of (u)changed text', function () {
const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
op: [{ p: 0, i: 'baz' }],
meta: this.meta
meta: this.meta,
})
return expect(diff).to.deep.equal([
{ i: 'baz', meta: this.meta },
{ u: 'foobar' }
{ u: 'foobar' },
])
})
it('should insert into the end of (u)changed text', function () {
const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
op: [{ p: 6, i: 'baz' }],
meta: this.meta
meta: this.meta,
})
return expect(diff).to.deep.equal([
{ u: 'foobar' },
{ i: 'baz', meta: this.meta }
{ i: 'baz', meta: this.meta },
])
})
@ -269,7 +269,7 @@ describe('DiffGenerator', function () {
return expect(diff).to.deep.equal([
{ i: 'foo', meta: this.meta },
{ i: 'baz', meta: this.meta },
{ i: 'bar', meta: this.meta }
{ i: 'bar', meta: this.meta },
])
})
@ -282,7 +282,7 @@ describe('DiffGenerator', function () {
{ d: 'deleted', meta: this.meta },
{ u: 'foo' },
{ i: 'baz', meta: this.meta },
{ u: 'bar' }
{ u: 'bar' },
])
})
})
@ -297,7 +297,7 @@ describe('DiffGenerator', function () {
return expect(diff).to.deep.equal([
{ u: 'foo' },
{ d: 'baz', meta: this.meta },
{ u: 'bar' }
{ u: 'bar' },
])
})
@ -308,7 +308,7 @@ describe('DiffGenerator', function () {
)
return expect(diff).to.deep.equal([
{ d: 'foo', meta: this.meta },
{ u: 'bazbar' }
{ u: 'bazbar' },
])
})
@ -319,7 +319,7 @@ describe('DiffGenerator', function () {
)
return expect(diff).to.deep.equal([
{ u: 'foobaz' },
{ d: 'bar', meta: this.meta }
{ d: 'bar', meta: this.meta },
])
})
@ -333,7 +333,7 @@ describe('DiffGenerator', function () {
{ d: 'o', meta: this.meta },
{ d: 'baz', meta: this.meta },
{ d: 'b', meta: this.meta },
{ u: 'ar' }
{ u: 'ar' },
])
})
})
@ -346,7 +346,7 @@ describe('DiffGenerator', function () {
)
return expect(diff).to.deep.equal([
{ i: 'foo', meta: this.meta },
{ i: 'bar', meta: this.meta }
{ i: 'bar', meta: this.meta },
])
})
@ -375,7 +375,7 @@ describe('DiffGenerator', function () {
{ u: 'fo' },
{ d: 'o', meta: this.meta },
{ d: 'b', meta: this.meta },
{ u: 'ar' }
{ u: 'ar' },
])
})
})
@ -391,7 +391,7 @@ describe('DiffGenerator', function () {
{ d: 'o', meta: this.meta },
{ d: 'baz', meta: this.meta },
{ d: 'b', meta: this.meta },
{ u: 'ar' }
{ u: 'ar' },
])
})
})
@ -401,7 +401,7 @@ describe('DiffGenerator', function () {
return expect(() =>
this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
op: [{ p: 3, d: 'xxx' }],
meta: this.meta
meta: this.meta,
})
).to.throw(this.DiffGenerator.ConsistencyError)
})
@ -410,7 +410,7 @@ describe('DiffGenerator', function () {
return expect(() =>
this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
op: [{ p: 0, d: 'xxx' }],
meta: this.meta
meta: this.meta,
})
).to.throw(this.DiffGenerator.ConsistencyError)
})
@ -419,7 +419,7 @@ describe('DiffGenerator', function () {
return expect(() =>
this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
op: [{ p: 6, d: 'xxx' }],
meta: this.meta
meta: this.meta,
})
).to.throw(this.DiffGenerator.ConsistencyError)
})
@ -434,7 +434,7 @@ describe('DiffGenerator', function () {
return expect(diff).to.deep.equal([
{ u: 'foo' },
{ i: 'baz', meta: this.meta },
{ d: 'bar', meta: this.meta }
{ d: 'bar', meta: this.meta },
])
})
})
@ -447,7 +447,7 @@ describe('DiffGenerator', function () {
)
return expect(diff).to.deep.equal([
{ d: 'bar', meta: this.meta },
{ i: 'baz', meta: this.meta }
{ i: 'baz', meta: this.meta },
])
})
})

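Note: the bulk of the churn in these test files is mechanical. Every multi-line array and object literal gains a comma after its last entry, matching Prettier's es5 trailing-comma style. A minimal before/after sketch with illustrative values:

// before: no comma after the final entry
const diff = [
  { u: 'foo' },
  { i: 'baz' }
]

// after: a trailing comma is added wherever ES5 permits one
const diffAfter = [
  { u: 'foo' },
  { i: 'baz' },
]
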
View file

@ -21,8 +21,8 @@ describe('DiffManager', function () {
requires: {
'./UpdatesManager': (this.UpdatesManager = {}),
'./DocumentUpdaterManager': (this.DocumentUpdaterManager = {}),
'./DiffGenerator': (this.DiffGenerator = {})
}
'./DiffGenerator': (this.DiffGenerator = {}),
},
})
this.callback = sinon.stub()
this.from = new Date()
@ -114,23 +114,23 @@ describe('DiffManager', function () {
{
op: 'mock-4',
v: 42,
meta: { start_ts: new Date(this.to.getTime() + 20) }
meta: { start_ts: new Date(this.to.getTime() + 20) },
},
{
op: 'mock-3',
v: 41,
meta: { start_ts: new Date(this.to.getTime() + 10) }
meta: { start_ts: new Date(this.to.getTime() + 10) },
},
{
op: 'mock-2',
v: 40,
meta: { start_ts: new Date(this.to.getTime() - 10) }
meta: { start_ts: new Date(this.to.getTime() - 10) },
},
{
op: 'mock-1',
v: 39,
meta: { start_ts: new Date(this.to.getTime() - 20) }
}
meta: { start_ts: new Date(this.to.getTime() - 20) },
},
]
this.fromVersion = 39
this.toVersion = 40
@ -333,23 +333,23 @@ describe('DiffManager', function () {
{
op: 'mock-4',
v: 42,
meta: { start_ts: new Date(this.to.getTime() + 20) }
meta: { start_ts: new Date(this.to.getTime() + 20) },
},
{
op: 'mock-3',
v: 41,
meta: { start_ts: new Date(this.to.getTime() + 10) }
meta: { start_ts: new Date(this.to.getTime() + 10) },
},
{
op: 'mock-2',
v: 40,
meta: { start_ts: new Date(this.to.getTime() - 10) }
meta: { start_ts: new Date(this.to.getTime() - 10) },
},
{
op: 'mock-1',
v: 39,
meta: { start_ts: new Date(this.to.getTime() - 20) }
}
meta: { start_ts: new Date(this.to.getTime() - 20) },
},
]
this.fromVersion = 39
this.rewound_content = 'rewound-content'
@ -400,7 +400,7 @@ describe('DiffManager', function () {
this.version = 50
this.updates = [
{ op: 'mock-1', v: 40 },
{ op: 'mock-1', v: 39 }
{ op: 'mock-1', v: 39 },
]
this.DiffManager.getLatestDocAndUpdates = sinon
.stub()

View file

@ -21,16 +21,16 @@ describe('MongoAWS', function () {
this.MongoAWS = SandboxedModule.require(modulePath, {
singleOnly: true,
requires: {
'settings-sharelatex': (this.settings = {
'@overleaf/settings': (this.settings = {
trackchanges: {
s3: {
secret: 's3-secret',
key: 's3-key'
key: 's3-key',
},
stores: {
doc_history: 's3-bucket'
}
}
doc_history: 's3-bucket',
},
},
}),
child_process: (this.child_process = {}),
'mongo-uri': (this.mongouri = {}),
@ -40,8 +40,8 @@ describe('MongoAWS', function () {
'./mongodb': { db: (this.db = {}), ObjectId },
JSONStream: (this.JSONStream = {}),
'readline-stream': (this.readline = sinon.stub()),
'@overleaf/metrics': { inc() {} }
}
'@overleaf/metrics': { inc() {} },
},
})
this.project_id = ObjectId().toString()

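Note: a second recurring change swaps the settings-sharelatex require path for the scoped @overleaf/settings package. SandboxedModule matches the keys of its requires map against the exact paths the module under test requires, so every stub key has to be renamed along with the import. A minimal sketch, using a hypothetical MyManager module:

const SandboxedModule = require('sandboxed-module')

// The stub key must match the new require path character for character.
const MyManager = SandboxedModule.require('../../../../app/js/MyManager', {
  requires: {
    '@overleaf/settings': { apis: { web: { url: 'http://example.com' } } },
  },
})
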
View file

@ -19,10 +19,10 @@ describe('DocumentUpdaterManager', function () {
this.DocumentUpdaterManager = SandboxedModule.require(modulePath, {
requires: {
request: (this.request = {}),
'settings-sharelatex': (this.settings = {
apis: { documentupdater: { url: 'http://example.com' } }
})
}
'@overleaf/settings': (this.settings = {
apis: { documentupdater: { url: 'http://example.com' } },
}),
},
})
this.callback = sinon.stub()
this.lines = ['one', 'two', 'three']
@ -35,7 +35,7 @@ describe('DocumentUpdaterManager', function () {
this.body = JSON.stringify({
lines: this.lines,
version: this.version,
ops: []
ops: [],
})
this.request.get = sinon
.stub()
@ -135,8 +135,8 @@ describe('DocumentUpdaterManager', function () {
lines: this.content.split('\n'),
source: 'restore',
user_id: this.user_id,
undoing: true
}
undoing: true,
},
})
.should.equal(true)
})

View file

@ -24,8 +24,8 @@ describe('HttpController', function () {
'./RestoreManager': (this.RestoreManager = {}),
'./PackManager': (this.PackManager = {}),
'./DocArchiveManager': (this.DocArchiveManager = {}),
'./HealthChecker': (this.HealthChecker = {})
}
'./HealthChecker': (this.HealthChecker = {}),
},
})
this.doc_id = 'doc-id-123'
this.project_id = 'project-id-123'
@ -39,8 +39,8 @@ describe('HttpController', function () {
this.req = {
params: {
doc_id: this.doc_id,
project_id: this.project_id
}
project_id: this.project_id,
},
}
this.res = { sendStatus: sinon.stub() }
this.UpdatesManager.processUncompressedUpdatesWithLock = sinon
@ -64,8 +64,8 @@ describe('HttpController', function () {
beforeEach(function () {
this.req = {
params: {
project_id: this.project_id
}
project_id: this.project_id,
},
}
this.res = { sendStatus: sinon.stub() }
this.UpdatesManager.processUncompressedUpdatesForProject = sinon
@ -92,12 +92,12 @@ describe('HttpController', function () {
this.req = {
params: {
doc_id: this.doc_id,
project_id: this.project_id
project_id: this.project_id,
},
query: {
from: this.from.toString(),
to: this.to.toString()
}
to: this.to.toString(),
},
}
this.res = { json: sinon.stub() }
this.diff = [{ u: 'mock-diff' }]
@ -128,12 +128,12 @@ describe('HttpController', function () {
this.min_count = 10
this.req = {
params: {
project_id: this.project_id
project_id: this.project_id,
},
query: {
before: this.before.toString(),
min_count: this.min_count.toString()
}
min_count: this.min_count.toString(),
},
}
this.res = { json: sinon.stub() }
this.updates = ['mock-summarized-updates']
@ -147,7 +147,7 @@ describe('HttpController', function () {
return this.UpdatesManager.getSummarizedProjectUpdates
.calledWith(this.project_id, {
before: this.before,
min_count: this.min_count
min_count: this.min_count,
})
.should.equal(true)
})
@ -156,7 +156,7 @@ describe('HttpController', function () {
return this.res.json
.calledWith({
updates: this.updates,
nextBeforeTimestamp: this.nextBeforeTimestamp
nextBeforeTimestamp: this.nextBeforeTimestamp,
})
.should.equal(true)
})
@ -169,11 +169,11 @@ describe('HttpController', function () {
params: {
doc_id: this.doc_id,
project_id: this.project_id,
version: this.version
version: this.version,
},
headers: {
'x-user-id': this.user_id
}
'x-user-id': this.user_id,
},
}
this.res = { sendStatus: sinon.stub() }

View file

@ -24,18 +24,18 @@ describe('LockManager', function () {
beforeEach(function () {
this.Settings = {
redis: {
lock: {}
}
lock: {},
},
}
this.LockManager = SandboxedModule.require(modulePath, {
requires: {
'@overleaf/redis-wrapper': {
createClient: () => {
return (this.rclient = { auth: sinon.stub() })
}
},
},
'settings-sharelatex': this.Settings
}
'@overleaf/settings': this.Settings,
},
})
this.key = 'lock-key'
@ -240,7 +240,7 @@ describe('LockManager', function () {
describe('when the runner function returns an error', function () {
beforeEach(function () {
this.error = new Error('oops')
this.runner = (releaseLock) => {
this.runner = releaseLock => {
if (releaseLock == null) {
releaseLock = function (error) {}
}

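Note: the other formatting rewrite visible here drops the parentheses around lone arrow-function parameters (Prettier's avoid style for arrow parens). Only a single simple parameter qualifies; a small illustration:

// before
const ids = docs.map((doc) => doc.id)

// after: a lone simple parameter loses its parens
const idsAfter = docs.map(doc => doc.id)

// unchanged: multiple or destructured parameters keep theirs
const pairs = docs.map(({ id }, index) => [index, id])
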
View file

@ -24,8 +24,8 @@ describe('MongoManager', function () {
requires: {
'./mongodb': { db: (this.db = {}), ObjectId },
'./PackManager': (this.PackManager = {}),
'@overleaf/metrics': { timeAsyncMethod() {} }
}
'@overleaf/metrics': { timeAsyncMethod() {} },
},
})
this.callback = sinon.stub()
this.doc_id = ObjectId().toString()
@ -166,10 +166,10 @@ describe('MongoManager', function () {
.calledWith(
{
doc_id: ObjectId(this.doc_id),
project_id: { $exists: false }
project_id: { $exists: false },
},
{
$set: { project_id: ObjectId(this.project_id) }
$set: { project_id: ObjectId(this.project_id) },
}
)
.should.equal(true)
@ -184,7 +184,7 @@ describe('MongoManager', function () {
beforeEach(function () {
this.metadata = { mock: 'metadata' }
this.db.projectHistoryMetaData = {
findOne: sinon.stub().callsArgWith(1, null, this.metadata)
findOne: sinon.stub().callsArgWith(1, null, this.metadata),
}
return this.MongoManager.getProjectMetaData(
this.project_id,
@ -207,7 +207,7 @@ describe('MongoManager', function () {
beforeEach(function () {
this.metadata = { mock: 'metadata' }
this.db.projectHistoryMetaData = {
updateOne: sinon.stub().yields()
updateOne: sinon.stub().yields(),
}
return this.MongoManager.setProjectMetaData(
this.project_id,
@ -220,13 +220,13 @@ describe('MongoManager', function () {
return this.db.projectHistoryMetaData.updateOne
.calledWith(
{
project_id: ObjectId(this.project_id)
project_id: ObjectId(this.project_id),
},
{
$set: this.metadata
$set: this.metadata,
},
{
upsert: true
upsert: true,
}
)
.should.equal(true)

View file

@ -30,10 +30,10 @@ describe('PackManager', function () {
'./MongoAWS': {},
'@overleaf/metrics': { inc() {} },
'./ProjectIterator': require('../../../../app/js/ProjectIterator.js'), // Cache for speed
'settings-sharelatex': {
redis: { lock: { key_schema: {} } }
}
}
'@overleaf/settings': {
redis: { lock: { key_schema: {} } },
},
},
})
this.callback = sinon.stub()
this.doc_id = ObjectId().toString()
@ -51,20 +51,20 @@ describe('PackManager', function () {
_id: '12345',
pack: [
{ op: 'op-1', meta: 'meta-1', v: 1 },
{ op: 'op-2', meta: 'meta-2', v: 2 }
{ op: 'op-2', meta: 'meta-2', v: 2 },
],
n: 2,
sz: 100
sz: 100,
}
this.newUpdates = [
{ op: 'op-3', meta: 'meta-3', v: 3 },
{ op: 'op-4', meta: 'meta-4', v: 4 }
{ op: 'op-4', meta: 'meta-4', v: 4 },
]
return (this.db.docHistory = {
insertOne: sinon.stub().yields(),
insert: sinon.stub().callsArg(1),
updateOne: sinon.stub().yields(),
findAndModify: sinon.stub().callsArg(1)
findAndModify: sinon.stub().callsArg(1),
})
})
@ -95,10 +95,10 @@ describe('PackManager', function () {
return describe('for many small updates', function () {
beforeEach(function () {
this.newUpdates = __range__(0, 2048, true).map((i) => ({
this.newUpdates = __range__(0, 2048, true).map(i => ({
op: `op-${i}`,
meta: `meta-${i}`,
v: i
v: i,
}))
return this.PackManager.insertCompressedUpdates(
this.project_id,
@ -209,10 +209,10 @@ describe('PackManager', function () {
describe('for many small updates', function () {
beforeEach(function () {
this.newUpdates = __range__(0, 2048, true).map((i) => ({
this.newUpdates = __range__(0, 2048, true).map(i => ({
op: `op-${i}`,
meta: `meta-${i}`,
v: i
v: i,
}))
return this.PackManager.insertCompressedUpdates(
this.project_id,
@ -292,12 +292,12 @@ describe('PackManager', function () {
0.75 * this.PackManager.MAX_SIZE,
true
)
.map((j) => 'a')
.map(j => 'a')
.join('')
this.newUpdates = [0, 1, 2, 3, 4].map((i) => ({
this.newUpdates = [0, 1, 2, 3, 4].map(i => ({
op: `op-${i}-${longString}`,
meta: `meta-${i}`,
v: i
v: i,
}))
return this.PackManager.insertCompressedUpdates(
this.project_id,
@ -393,7 +393,7 @@ describe('PackManager', function () {
doc_id: ObjectId(this.doc_id),
n: this.newUpdates.length,
v: this.newUpdates[0].v,
v_end: this.newUpdates[this.newUpdates.length - 1].v
v_end: this.newUpdates[this.newUpdates.length - 1].v,
})
.should.equal(true)
})
@ -401,7 +401,7 @@ describe('PackManager', function () {
it('should set an expiry time in the future', function () {
return this.db.docHistory.insertOne
.calledWithMatch({
expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000)
expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000),
})
.should.equal(true)
})
@ -419,12 +419,12 @@ describe('PackManager', function () {
_id: '12345',
pack: [
{ op: 'op-1', meta: 'meta-1', v: 1 },
{ op: 'op-2', meta: 'meta-2', v: 2 }
{ op: 'op-2', meta: 'meta-2', v: 2 },
],
n: 2,
sz: 100,
meta: { start_ts: Date.now() - 6 * 3600 * 1000 },
expiresAt: new Date(Date.now())
expiresAt: new Date(Date.now()),
}
return this.PackManager.flushCompressedUpdates(
@ -444,7 +444,7 @@ describe('PackManager', function () {
{ _id: this.lastUpdate._id },
{
$push: { pack: { $each: this.newUpdates } },
$set: { v_end: this.newUpdates[this.newUpdates.length - 1].v }
$set: { v_end: this.newUpdates[this.newUpdates.length - 1].v },
}
)
.should.equal(true)
@ -453,7 +453,7 @@ describe('PackManager', function () {
it('should set an expiry time in the future', function () {
return this.db.docHistory.updateOne
.calledWithMatch(sinon.match.any, {
$set: { expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000) }
$set: { expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000) },
})
.should.equal(true)
})
@ -472,12 +472,12 @@ describe('PackManager', function () {
_id: '12345',
pack: [
{ op: 'op-1', meta: 'meta-1', v: 1 },
{ op: 'op-2', meta: 'meta-2', v: 2 }
{ op: 'op-2', meta: 'meta-2', v: 2 },
],
n: 2,
sz: 100,
meta: { start_ts: Date.now() - 6 * 3600 * 1000 },
expiresAt: new Date(Date.now())
expiresAt: new Date(Date.now()),
}
return this.PackManager.flushCompressedUpdates(
@ -499,7 +499,7 @@ describe('PackManager', function () {
doc_id: ObjectId(this.doc_id),
n: this.newUpdates.length,
v: this.newUpdates[0].v,
v_end: this.newUpdates[this.newUpdates.length - 1].v
v_end: this.newUpdates[this.newUpdates.length - 1].v,
})
.should.equal(true)
})
@ -522,12 +522,12 @@ describe('PackManager', function () {
_id: '12345',
pack: [
{ op: 'op-1', meta: 'meta-1', v: 1 },
{ op: 'op-2', meta: 'meta-2', v: 2 }
{ op: 'op-2', meta: 'meta-2', v: 2 },
],
n: 2,
sz: 100,
meta: { start_ts: Date.now() - 30 * 24 * 3600 * 1000 },
expiresAt: new Date(Date.now() - 30 * 24 * 3600 * 1000)
expiresAt: new Date(Date.now() - 30 * 24 * 3600 * 1000),
}
return this.PackManager.flushCompressedUpdates(
@ -549,7 +549,7 @@ describe('PackManager', function () {
doc_id: ObjectId(this.doc_id),
n: this.newUpdates.length,
v: this.newUpdates[0].v,
v_end: this.newUpdates[this.newUpdates.length - 1].v
v_end: this.newUpdates[this.newUpdates.length - 1].v,
})
.should.equal(true)
})
@ -557,7 +557,7 @@ describe('PackManager', function () {
it('should set an expiry time in the future', function () {
return this.db.docHistory.insertOne
.calledWithMatch({
expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000)
expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000),
})
.should.equal(true)
})
@ -602,7 +602,7 @@ describe('PackManager', function () {
describe('when an archive is in progress', function () {
beforeEach(function () {
this.db.docHistoryIndex = {
findOne: sinon.stub().callsArgWith(2, null, { inS3: false })
findOne: sinon.stub().callsArgWith(2, null, { inS3: false }),
}
return this.PackManager.checkArchiveNotInProgress(
this.project_id,
@ -624,7 +624,7 @@ describe('PackManager', function () {
describe('when an archive is completed', function () {
beforeEach(function () {
this.db.docHistoryIndex = {
findOne: sinon.stub().callsArgWith(2, null, { inS3: true })
findOne: sinon.stub().callsArgWith(2, null, { inS3: true }),
}
return this.PackManager.checkArchiveNotInProgress(
this.project_id,
@ -646,7 +646,7 @@ describe('PackManager', function () {
return describe('when the archive has not started or completed', function () {
beforeEach(function () {
this.db.docHistoryIndex = {
findOne: sinon.stub().callsArgWith(2, null, {})
findOne: sinon.stub().callsArgWith(2, null, {}),
}
return this.PackManager.checkArchiveNotInProgress(
this.project_id,

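Note: the PackManager stubs above lean on several sinon callback helpers that are easy to confuse. On invocation, .yields(...args) calls the first function argument the stub receives, .callsArg(n) calls the argument at index n with no arguments, and .callsArgWith(n, ...args) calls the argument at index n with the given arguments. A minimal sketch with made-up values:

const sinon = require('sinon')

const insertOne = sinon.stub().yields(null) // calls the first callback it finds
insertOne({ doc: 1 }, err => console.log(err)) // -> null

const findOne = sinon.stub().callsArgWith(2, null, { inS3: true })
findOne({}, {}, (err, index) => console.log(err, index.inS3)) // -> null true
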
View file

@ -24,11 +24,11 @@ describe('RedisManager', function () {
createClient: () => {
return (this.rclient = {
auth: sinon.stub(),
multi: () => this.rclient
multi: () => this.rclient,
})
}
},
},
'settings-sharelatex': {
'@overleaf/settings': {
redis: {
history: {
key_schema: {
@ -37,12 +37,12 @@ describe('RedisManager', function () {
},
docsWithHistoryOps({ project_id }) {
return `DocsWithHistoryOps:${project_id}`
}
}
}
}
}
}
},
},
},
},
},
},
})
this.doc_id = 'doc-id-123'
this.project_id = 'project-id-123'
@ -54,9 +54,9 @@ describe('RedisManager', function () {
beforeEach(function () {
this.rawUpdates = [
{ v: 42, op: 'mock-op-42' },
{ v: 45, op: 'mock-op-45' }
{ v: 45, op: 'mock-op-45' },
]
this.jsonUpdates = Array.from(this.rawUpdates).map((update) =>
this.jsonUpdates = Array.from(this.rawUpdates).map(update =>
JSON.stringify(update)
)
this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonUpdates)

View file

@ -19,8 +19,8 @@ describe('RestoreManager', function () {
this.RestoreManager = SandboxedModule.require(modulePath, {
requires: {
'./DocumentUpdaterManager': (this.DocumentUpdaterManager = {}),
'./DiffManager': (this.DiffManager = {})
}
'./DiffManager': (this.DiffManager = {}),
},
})
this.callback = sinon.stub()
this.project_id = 'mock-project-id'

View file

@ -15,18 +15,18 @@ const modulePath = '../../../../app/js/UpdateCompressor.js'
const SandboxedModule = require('sandboxed-module')
const bigstring = __range__(0, 2 * 1024 * 1024, true)
.map((i) => 'a')
.map(i => 'a')
.join('')
const mediumstring = __range__(0, 1024 * 1024, true)
.map((j) => 'a')
.map(j => 'a')
.join('')
describe('UpdateCompressor', function () {
beforeEach(function () {
this.UpdateCompressor = SandboxedModule.require(modulePath, {
requires: {
'../lib/diff_match_patch': require('../../../../app/lib/diff_match_patch')
}
'../lib/diff_match_patch': require('../../../../app/lib/diff_match_patch'),
},
})
this.user_id = 'user-id-1'
this.other_user_id = 'user-id-2'
@ -41,37 +41,37 @@ describe('UpdateCompressor', function () {
{
op: [
(this.op1 = { p: 0, i: 'Foo' }),
(this.op2 = { p: 6, i: 'bar' })
(this.op2 = { p: 6, i: 'bar' }),
],
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: [(this.op3 = { p: 10, i: 'baz' })],
meta: { ts: this.ts2, user_id: this.other_user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
op: this.op1,
meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: this.op2,
meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: this.op3,
meta: {
start_ts: this.ts2,
end_ts: this.ts2,
user_id: this.other_user_id
user_id: this.other_user_id,
},
v: 43
}
v: 43,
},
])
})
@ -81,15 +81,15 @@ describe('UpdateCompressor', function () {
{
op: [],
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
}
v: 42,
},
])
).to.deep.equal([
{
op: this.UpdateCompressor.NOOP,
meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
v: 42
}
v: 42,
},
])
})
@ -100,23 +100,23 @@ describe('UpdateCompressor', function () {
op: [
(this.op1 = { p: 0, i: 'Foo' }),
(this.op2 = { p: 9, c: 'baz' }),
(this.op3 = { p: 6, i: 'bar' })
(this.op3 = { p: 6, i: 'bar' }),
],
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
}
v: 42,
},
])
).to.deep.equal([
{
op: this.op1,
meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: this.op3,
meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
v: 42
}
v: 42,
},
])
})
})
@ -130,44 +130,44 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
{
op: (this.op2 = { p: 6, i: 'bar' }),
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
{
op: (this.op3 = { p: 10, i: 'baz' }),
meta: {
start_ts: this.ts2,
end_ts: this.ts2,
user_id: this.other_user_id
user_id: this.other_user_id,
},
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
op: [this.op1, this.op2],
meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: [this.op3],
meta: {
start_ts: this.ts2,
end_ts: this.ts2,
user_id: this.other_user_id
user_id: this.other_user_id,
},
v: 43
}
v: 43,
},
])
})
@ -179,17 +179,17 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
}
v: 42,
},
])
).to.deep.equal([
{
op: [],
meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
v: 42
}
v: 42,
},
])
})
})
@ -202,13 +202,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 6, i: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -216,10 +216,10 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
@ -229,13 +229,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 5, i: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -243,10 +243,10 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
@ -256,13 +256,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 9, i: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -270,19 +270,19 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
{
op: { p: 9, i: 'bar' },
meta: {
start_ts: this.ts2,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
@ -292,13 +292,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 6, i: bigstring },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -306,19 +306,19 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
{
op: { p: 6, i: bigstring },
meta: {
start_ts: this.ts2,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
@ -328,13 +328,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, i: bigstring },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 3 + bigstring.length, i: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -342,19 +342,19 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
{
op: { p: 3 + bigstring.length, i: 'bar' },
meta: {
start_ts: this.ts2,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
@ -364,13 +364,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, i: mediumstring },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 3 + mediumstring.length, i: mediumstring },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -378,19 +378,19 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
{
op: { p: 3 + mediumstring.length, i: mediumstring },
meta: {
start_ts: this.ts2,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
})
@ -402,13 +402,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, d: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 3, d: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -416,10 +416,10 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
@ -429,13 +429,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, d: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 1, d: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -443,10 +443,10 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
@ -456,13 +456,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, d: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 9, d: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -470,19 +470,19 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
{
op: { p: 9, d: 'bar' },
meta: {
start_ts: this.ts2,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
})
@ -494,13 +494,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 5, d: 'o' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -508,10 +508,10 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
@ -521,13 +521,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, i: 'fobaro' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 5, d: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -535,10 +535,10 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
@ -548,13 +548,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 3, d: 'foo' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -562,10 +562,10 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
@ -575,13 +575,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 9, d: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -589,19 +589,19 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
{
op: { p: 9, d: 'bar' },
meta: {
start_ts: this.ts2,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
@ -611,13 +611,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, i: 'foobar' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 6, d: 'bardle' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -625,19 +625,19 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
{
op: { p: 6, d: 'bardle' },
meta: {
start_ts: this.ts2,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
})
@ -649,13 +649,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, d: 'one two three four five six seven eight' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 3, i: 'one 2 three four five six seven eight' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -663,19 +663,19 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
v: 43,
},
{
op: { p: 7, i: '2' },
meta: {
start_ts: this.ts1,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
@ -685,13 +685,13 @@ describe('UpdateCompressor', function () {
{
op: { p: 3, d: 'one two three four five six seven eight' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 3, i: 'one two three four five six seven eight' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -699,10 +699,10 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
})
@ -714,13 +714,13 @@ describe('UpdateCompressor', function () {
{
op: this.UpdateCompressor.NOOP,
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 6, i: 'bar' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -728,19 +728,19 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
{
op: { p: 6, i: 'bar' },
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
})
@ -752,13 +752,13 @@ describe('UpdateCompressor', function () {
{
op: this.UpdateCompressor.NOOP,
meta: { ts: this.ts1, user_id: this.user_id },
v: 42
v: 42,
},
{
op: { p: 6, d: 'bar' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 43
}
v: 43,
},
])
).to.deep.equal([
{
@ -766,19 +766,19 @@ describe('UpdateCompressor', function () {
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
{
op: { p: 6, d: 'bar' },
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
})
@ -792,45 +792,45 @@ describe('UpdateCompressor', function () {
{
op: [
{ p: 1000, d: 'hello' },
{ p: 1000, i: 'HELLO()' }
{ p: 1000, i: 'HELLO()' },
],
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
[
{
op: [{ p: 1006, i: 'WORLD' }],
meta: { ts: this.ts2, user_id: this.user_id },
v: 43
}
v: 43,
},
]
)
).to.deep.equal([
{
op: [
{ p: 1000, d: 'hello' },
{ p: 1000, i: 'HELLO()' }
{ p: 1000, i: 'HELLO()' },
],
meta: {
start_ts: this.ts1,
end_ts: this.ts1,
user_id: this.user_id
user_id: this.user_id,
},
v: 42
v: 42,
},
{
op: [{ p: 1006, i: 'WORLD' }],
meta: {
start_ts: this.ts2,
end_ts: this.ts2,
user_id: this.user_id
user_id: this.user_id,
},
v: 43
}
v: 43,
},
])
})
})

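Note: the assertions above pin down the compressor's merge rule for consecutive inserts by the same user: the second insert is folded into the first when it lands inside or immediately after it, and the merged update keeps the earlier start_ts, the later end_ts, and the later version. The bigstring and mediumstring cases check the exception: updates are left unmerged when the combined op would be too large. A rough sketch of the merge rule (an illustration of the behaviour the tests describe, not the actual implementation):

// Hypothetical helper mirroring the tested insert-merge behaviour.
function mergeInserts(first, second) {
  const offset = second.op.p - first.op.p
  if (offset < 0 || offset > first.op.i.length) return null // too far apart to merge
  const text = first.op.i
  return {
    op: { p: first.op.p, i: text.slice(0, offset) + second.op.i + text.slice(offset) },
    meta: {
      start_ts: first.meta.start_ts,
      end_ts: second.meta.end_ts,
      user_id: first.meta.user_id,
    },
    v: second.v,
  }
}

// e.g. inserting 'bar' at p=6 after 'foo' at p=3 yields { p: 3, i: 'foobar' },
// while inserting at p=9 is out of range and stays a separate update.
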
View file

@ -23,8 +23,8 @@ describe('UpdateTrimmer', function () {
this.UpdateTrimmer = SandboxedModule.require(modulePath, {
requires: {
'./WebApiManager': (this.WebApiManager = {}),
'./MongoManager': (this.MongoManager = {})
}
'./MongoManager': (this.MongoManager = {}),
},
})
this.callback = sinon.stub()

View file

@ -32,18 +32,18 @@ describe('UpdatesManager', function () {
'./WebApiManager': (this.WebApiManager = {}),
'./UpdateTrimmer': (this.UpdateTrimmer = {}),
'./DocArchiveManager': (this.DocArchiveManager = {}),
'settings-sharelatex': {
'@overleaf/settings': {
redis: {
lock: {
key_schema: {
historyLock({ doc_id }) {
return `HistoryLock:${doc_id}`
}
}
}
}
}
}
},
},
},
},
},
},
})
this.doc_id = 'doc-id-123'
this.project_id = 'project-id-123'
@ -79,7 +79,7 @@ describe('UpdatesManager', function () {
beforeEach(function () {
this.rawUpdates = [
{ v: 12, op: 'mock-op-12' },
{ v: 13, op: 'mock-op-13' }
{ v: 13, op: 'mock-op-13' },
]
this.compressedUpdates = [{ v: 13, op: 'compressed-op-12' }]
@ -127,7 +127,7 @@ describe('UpdatesManager', function () {
this.lastCompressedUpdate = { v: 11, op: 'compressed-op-11' }
this.compressedUpdates = [
{ v: 12, op: 'compressed-op-11+12' },
{ v: 13, op: 'compressed-op-12' }
{ v: 13, op: 'compressed-op-12' },
]
this.MongoManager.peekLastCompressedUpdate = sinon
@ -148,7 +148,7 @@ describe('UpdatesManager', function () {
beforeEach(function () {
this.rawUpdates = [
{ v: 12, op: 'mock-op-12' },
{ v: 13, op: 'mock-op-13' }
{ v: 13, op: 'mock-op-13' },
]
return this.UpdatesManager.compressAndSaveRawUpdates(
this.project_id,
@ -192,11 +192,11 @@ describe('UpdatesManager', function () {
beforeEach(function () {
this.lastCompressedUpdate = {
pack: [{ v: 11, op: 'compressed-op-11' }],
v: 11
v: 11,
}
this.rawUpdates = [
{ v: 12, op: 'mock-op-12' },
{ v: 13, op: 'mock-op-13' }
{ v: 13, op: 'mock-op-13' },
]
this.MongoManager.peekLastCompressedUpdate = sinon
.stub()
@ -250,7 +250,7 @@ describe('UpdatesManager', function () {
{ v: 10, op: 'mock-op-10' },
{ v: 11, op: 'mock-op-11' },
{ v: 12, op: 'mock-op-12' },
{ v: 13, op: 'mock-op-13' }
{ v: 13, op: 'mock-op-13' },
]
return this.UpdatesManager.compressAndSaveRawUpdates(
@ -303,7 +303,7 @@ describe('UpdatesManager', function () {
beforeEach(function () {
this.rawUpdates = [
{ v: 13, op: 'mock-op-13' },
{ v: 12, op: 'mock-op-12' }
{ v: 12, op: 'mock-op-12' },
]
return this.UpdatesManager.compressAndSaveRawUpdates(
this.project_id,
@ -347,7 +347,7 @@ describe('UpdatesManager', function () {
beforeEach(function () {
this.rawUpdates = [
{ v: 12, op: 'mock-op-12' },
{ v: 13, op: 'mock-op-13' }
{ v: 13, op: 'mock-op-13' },
]
return this.UpdatesManager.compressAndSaveRawUpdates(
this.project_id,
@ -454,7 +454,7 @@ describe('UpdatesManager', function () {
'mock-update-1',
'mock-update-2',
'mock-update-3',
'mock-update-4'
'mock-update-4',
]
this.redisArray = this.updates.slice()
this.RedisManager.getOldestDocUpdates = (
@ -673,7 +673,7 @@ describe('UpdatesManager', function () {
})
it('should process the doc ops for the each doc_id', function () {
return Array.from(this.doc_ids).map((doc_id) =>
return Array.from(this.doc_ids).map(doc_id =>
this.UpdatesManager._processUncompressedUpdatesForDocWithLock
.calledWith(this.project_id, doc_id, this.temporary)
.should.equal(true)
@ -692,26 +692,26 @@ describe('UpdatesManager', function () {
doc_id: 123,
v: 456,
op: 'mock-updates',
meta: { user_id: 123, start_ts: 1233, end_ts: 1234 }
}
meta: { user_id: 123, start_ts: 1233, end_ts: 1234 },
},
]
this.options = { before: 'mock-before', limit: 'mock-limit' }
this.summarizedUpdates = [
{
meta: { user_ids: [123], start_ts: 1233, end_ts: 1234 },
docs: { '123': { fromV: 456, toV: 456 } }
}
docs: { 123: { fromV: 456, toV: 456 } },
},
]
this.updatesWithUserInfo = ['updates-with-user-info']
this.done_state = false
this.iterator = {
next: (cb) => {
next: cb => {
this.done_state = true
return cb(null, this.updates)
},
done: () => {
return this.done_state
}
},
}
this.PackManager.makeProjectIterator = sinon
.stub()
@ -867,22 +867,22 @@ describe('UpdatesManager', function () {
this.updates = [
{
meta: {
user_id: this.user_id_1
user_id: this.user_id_1,
},
op: 'mock-op-1'
op: 'mock-op-1',
},
{
meta: {
user_id: this.user_id_1
user_id: this.user_id_1,
},
op: 'mock-op-2'
op: 'mock-op-2',
},
{
meta: {
user_id: this.user_id_2
user_id: this.user_id_2,
},
op: 'mock-op-3'
}
op: 'mock-op-3',
},
]
this.user_info = {}
this.user_info[this.user_id_1] = { email: 'user1@sharelatex.com' }
@ -920,27 +920,27 @@ describe('UpdatesManager', function () {
{
meta: {
user: {
email: 'user1@sharelatex.com'
}
email: 'user1@sharelatex.com',
},
},
op: 'mock-op-1'
op: 'mock-op-1',
},
{
meta: {
user: {
email: 'user1@sharelatex.com'
}
email: 'user1@sharelatex.com',
},
},
op: 'mock-op-2'
op: 'mock-op-2',
},
{
meta: {
user: {
email: 'user2@sharelatex.com'
}
email: 'user2@sharelatex.com',
},
},
op: 'mock-op-3'
}
op: 'mock-op-3',
},
])
})
})
@ -950,16 +950,16 @@ describe('UpdatesManager', function () {
this.updates = [
{
meta: {
user_id: null
user_id: null,
},
op: 'mock-op-1'
op: 'mock-op-1',
},
{
meta: {
user_id: 'anonymous-user'
user_id: 'anonymous-user',
},
op: 'mock-op-2'
}
op: 'mock-op-2',
},
]
this.WebApiManager.getUserInfo = (user_id, callback) => {
if (callback == null) {
@ -986,12 +986,12 @@ describe('UpdatesManager', function () {
return expect(this.results).to.deep.equal([
{
meta: {},
op: 'mock-op-1'
op: 'mock-op-1',
},
{
meta: {},
op: 'mock-op-2'
}
op: 'mock-op-2',
},
])
})
})
@ -1011,19 +1011,19 @@ describe('UpdatesManager', function () {
meta: {
user_id: this.user_1.id,
start_ts: this.now + 20,
end_ts: this.now + 30
end_ts: this.now + 30,
},
v: 5
v: 5,
},
{
doc_id: 'doc-id-1',
meta: {
user_id: this.user_2.id,
start_ts: this.now,
end_ts: this.now + 10
end_ts: this.now + 10,
},
v: 4
}
v: 4,
},
])
return expect(result).to.deep.equal([
@ -1031,15 +1031,15 @@ describe('UpdatesManager', function () {
docs: {
'doc-id-1': {
fromV: 4,
toV: 5
}
toV: 5,
},
},
meta: {
user_ids: [this.user_1.id, this.user_2.id],
start_ts: this.now,
end_ts: this.now + 30
}
}
end_ts: this.now + 30,
},
},
])
})
@ -1051,47 +1051,47 @@ describe('UpdatesManager', function () {
meta: {
user_id: this.user_2.id,
start_ts: this.now + oneDay,
end_ts: this.now + oneDay + 10
end_ts: this.now + oneDay + 10,
},
v: 5
v: 5,
},
{
doc_id: 'doc-id-1',
meta: {
user_id: this.user_1.id,
start_ts: this.now,
end_ts: this.now + 10
end_ts: this.now + 10,
},
v: 4
}
v: 4,
},
])
return expect(result).to.deep.equal([
{
docs: {
'doc-id-1': {
fromV: 5,
toV: 5
}
toV: 5,
},
},
meta: {
user_ids: [this.user_2.id],
start_ts: this.now + oneDay,
end_ts: this.now + oneDay + 10
}
end_ts: this.now + oneDay + 10,
},
},
{
docs: {
'doc-id-1': {
fromV: 4,
toV: 4
}
toV: 4,
},
},
meta: {
user_ids: [this.user_1.id],
start_ts: this.now,
end_ts: this.now + 10
}
}
end_ts: this.now + 10,
},
},
])
})
@ -1103,34 +1103,34 @@ describe('UpdatesManager', function () {
meta: {
user_id: this.user_1.id,
start_ts: this.now + 20,
end_ts: this.now + 30
end_ts: this.now + 30,
},
v: 5
v: 5,
},
{
doc_id: 'doc-id-2',
meta: {
user_id: this.user_2.id,
start_ts: this.now,
end_ts: this.now + 10
end_ts: this.now + 10,
},
v: 4
}
v: 4,
},
],
[
{
docs: {
'doc-id-1': {
fromV: 6,
toV: 8
}
toV: 8,
},
},
meta: {
user_ids: [this.user_1.id],
start_ts: this.now + 40,
end_ts: this.now + 50
}
}
end_ts: this.now + 50,
},
},
]
)
return expect(result).to.deep.equal([
@ -1138,19 +1138,19 @@ describe('UpdatesManager', function () {
docs: {
'doc-id-1': {
toV: 8,
fromV: 6
fromV: 6,
},
'doc-id-2': {
toV: 5,
fromV: 4
}
fromV: 4,
},
},
meta: {
user_ids: [this.user_1.id, this.user_2.id],
start_ts: this.now,
end_ts: this.now + 50
}
}
end_ts: this.now + 50,
},
},
])
})
@ -1161,34 +1161,34 @@ describe('UpdatesManager', function () {
meta: {
user_id: this.user_1.id,
start_ts: this.now + 20,
end_ts: this.now + 30
end_ts: this.now + 30,
},
v: 5
v: 5,
},
{
doc_id: 'doc-id-1',
meta: {
user_id: null,
start_ts: this.now,
end_ts: this.now + 10
end_ts: this.now + 10,
},
v: 4
}
v: 4,
},
])
return expect(result).to.deep.equal([
{
docs: {
'doc-id-1': {
fromV: 4,
toV: 5
}
toV: 5,
},
},
meta: {
user_ids: [this.user_1.id, null],
start_ts: this.now,
end_ts: this.now + 30
}
}
end_ts: this.now + 30,
},
},
])
})
@ -1199,34 +1199,34 @@ describe('UpdatesManager', function () {
meta: {
user_id: null,
start_ts: this.now,
end_ts: this.now + 10
end_ts: this.now + 10,
},
v: 4
v: 4,
},
{
doc_id: 'doc-id-1',
meta: {
user_id: this.user_1.id,
start_ts: this.now + 20,
end_ts: this.now + 30
end_ts: this.now + 30,
},
v: 5
}
v: 5,
},
])
return expect(result).to.deep.equal([
{
docs: {
'doc-id-1': {
fromV: 4,
toV: 5
}
toV: 5,
},
},
meta: {
user_ids: [null, this.user_1.id],
start_ts: this.now,
end_ts: this.now + 30
}
}
end_ts: this.now + 30,
},
},
])
})
@ -1237,43 +1237,43 @@ describe('UpdatesManager', function () {
meta: {
user_id: this.user_1.id,
start_ts: this.now + 20,
end_ts: this.now + 30
end_ts: this.now + 30,
},
v: 5
v: 5,
},
{
doc_id: 'doc-id-1',
meta: {
user_id: null,
start_ts: this.now,
end_ts: this.now + 10
end_ts: this.now + 10,
},
v: 4
v: 4,
},
{
doc_id: 'doc-id-1',
meta: {
user_id: null,
start_ts: this.now + 2,
end_ts: this.now + 4
end_ts: this.now + 4,
},
v: 4
}
v: 4,
},
])
return expect(result).to.deep.equal([
{
docs: {
'doc-id-1': {
fromV: 4,
toV: 5
}
toV: 5,
},
},
meta: {
user_ids: [this.user_1.id, null],
start_ts: this.now,
end_ts: this.now + 30
}
}
end_ts: this.now + 30,
},
},
])
})
@ -1285,19 +1285,19 @@ describe('UpdatesManager', function () {
meta: {
user_id: this.user_1.id,
start_ts: this.now + 20,
end_ts: this.now + 30
end_ts: this.now + 30,
},
v: 5
v: 5,
},
{
doc_id: 'doc-id-1',
meta: {
user_id: this.user_2.id,
start_ts: this.now,
end_ts: this.now + 10
end_ts: this.now + 10,
},
v: 4
}
v: 4,
},
])
return expect(result).to.deep.equal([
@ -1305,28 +1305,28 @@ describe('UpdatesManager', function () {
docs: {
'doc-id-1': {
fromV: 5,
toV: 5
}
toV: 5,
},
},
meta: {
user_ids: [this.user_1.id],
start_ts: this.now + 20,
end_ts: this.now + 30
}
end_ts: this.now + 30,
},
},
{
docs: {
'doc-id-1': {
fromV: 4,
toV: 4
}
toV: 4,
},
},
meta: {
user_ids: [this.user_2.id],
start_ts: this.now,
end_ts: this.now + 10
}
}
end_ts: this.now + 10,
},
},
])
})
})

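Note: one subtler Prettier rewrite appears in the fixtures above: quotes are dropped from object keys that round-trip cleanly as numbers, so '123' becomes 123 while keys like 'doc-id-1' stay quoted. Both forms name the same string key at runtime:

const a = { '123': { fromV: 456, toV: 456 } }
const b = { 123: { fromV: 456, toV: 456 } } // numeric keys are coerced to strings
console.log(Object.keys(a)[0] === Object.keys(b)[0]) // -> true
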
View file

@ -19,16 +19,16 @@ describe('WebApiManager', function () {
this.WebApiManager = SandboxedModule.require(modulePath, {
requires: {
requestretry: (this.request = {}),
'settings-sharelatex': (this.settings = {
'@overleaf/settings': (this.settings = {
apis: {
web: {
url: 'http://example.com',
user: 'sharelatex',
pass: 'password'
}
}
})
}
pass: 'password',
},
},
}),
},
})
this.callback = sinon.stub()
this.user_id = 'mock-user-id'
@ -38,7 +38,7 @@ describe('WebApiManager', function () {
id: this.user_id,
first_name: 'Leo',
last_nane: 'Lion',
extra_param: 'blah'
extra_param: 'blah',
}
return (this.project = { features: 'mock-features' })
})
@ -60,8 +60,8 @@ describe('WebApiManager', function () {
auth: {
user: this.settings.apis.web.user,
pass: this.settings.apis.web.pass,
sendImmediately: true
}
sendImmediately: true,
},
})
.should.equal(true)
})
@ -72,7 +72,7 @@ describe('WebApiManager', function () {
id: this.user_id,
email: this.user_info.email,
first_name: this.user_info.first_name,
last_name: this.user_info.last_name
last_name: this.user_info.last_name,
})
.should.equal(true)
})
@ -150,8 +150,8 @@ describe('WebApiManager', function () {
auth: {
user: this.settings.apis.web.user,
pass: this.settings.apis.web.pass,
sendImmediately: true
}
sendImmediately: true,
},
})
.should.equal(true)
})