From 90a921cbe6f271664de346f356d0b3cd1868871a Mon Sep 17 00:00:00 2001 From: Eric Mc Sween <5454374+emcsween@users.noreply.github.com> Date: Tue, 21 Mar 2023 08:21:51 -0400 Subject: [PATCH] Merge pull request #12218 from overleaf/em-camel-case-track-changes Camel case variables in track-changes GitOrigin-RevId: 92878e2b7dfa051069e0baaf604e96f4d2e0a501 --- .../track-changes/app/js/DiffGenerator.js | 9 +- services/track-changes/app/js/DiffManager.js | 35 +- .../track-changes/app/js/DocstoreManager.js | 7 +- .../app/js/DocumentUpdaterManager.js | 41 +- .../track-changes/app/js/HealthChecker.js | 15 +- .../track-changes/app/js/HttpController.js | 81 ++-- services/track-changes/app/js/LockManager.js | 2 +- services/track-changes/app/js/MongoAWS.js | 66 ++- services/track-changes/app/js/MongoManager.js | 29 +- services/track-changes/app/js/PackManager.js | 407 ++++++++---------- services/track-changes/app/js/PackWorker.js | 19 +- services/track-changes/app/js/RedisManager.js | 34 +- .../track-changes/app/js/RestoreManager.js | 15 +- .../track-changes/app/js/UpdateCompressor.js | 11 +- .../track-changes/app/js/UpdateTrimmer.js | 13 +- .../track-changes/app/js/UpdatesManager.js | 234 +++++----- .../track-changes/app/js/WebApiManager.js | 21 +- .../acceptance/js/ArchivingUpdatesTests.js | 5 +- .../acceptance/js/helpers/MockDocStoreApi.js | 5 +- .../js/helpers/MockDocUpdaterApi.js | 13 +- .../test/acceptance/js/helpers/MockWebApi.js | 11 +- .../js/helpers/TrackChangesClient.js | 69 ++- .../unit/js/DiffManager/DiffManagerTests.js | 5 +- .../unit/js/LockManager/LockManagerTests.js | 3 +- .../unit/js/RedisManager/RedisManagerTests.js | 9 +- .../js/UpdatesManager/UpdatesManagerTests.js | 19 +- 26 files changed, 538 insertions(+), 640 deletions(-) diff --git a/services/track-changes/app/js/DiffGenerator.js b/services/track-changes/app/js/DiffGenerator.js index 4cdd613008..7f1a420a37 100644 --- a/services/track-changes/app/js/DiffGenerator.js +++ b/services/track-changes/app/js/DiffGenerator.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-proto, no-unused-vars, */ @@ -59,10 +58,10 @@ module.exports = DiffGenerator = { // is the case with this op, and shift p back appropriately to match // ShareJS if so. ;({ p } = op) - const max_p = content.length - op.i.length - if (p > max_p) { - logger.warn({ max_p, p }, 'truncating position to content length') - p = max_p + const maxP = content.length - op.i.length + if (p > maxP) { + logger.warn({ maxP, p }, 'truncating position to content length') + p = maxP op.p = p // fix out of range offsets to avoid invalid history exports in ZipManager } diff --git a/services/track-changes/app/js/DiffManager.js b/services/track-changes/app/js/DiffManager.js index a1d5fd8b9d..b7ac801a2b 100644 --- a/services/track-changes/app/js/DiffManager.js +++ b/services/track-changes/app/js/DiffManager.js @@ -1,6 +1,3 @@ -/* eslint-disable - camelcase, -*/ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. /* @@ -17,15 +14,15 @@ const DiffGenerator = require('./DiffGenerator') const logger = require('@overleaf/logger') module.exports = DiffManager = { - getLatestDocAndUpdates(project_id, doc_id, fromVersion, callback) { + getLatestDocAndUpdates(projectId, docId, fromVersion, callback) { // Get updates last, since then they must be ahead and it // might be possible to rewind to the same version as the doc. 
if (callback == null) { callback = function () {} } return DocumentUpdaterManager.getDocument( - project_id, - doc_id, + projectId, + docId, function (error, content, version) { if (error != null) { return callback(error) @@ -35,8 +32,8 @@ module.exports = DiffManager = { return callback(null, content, version, []) } return UpdatesManager.getDocUpdatesWithUserInfo( - project_id, - doc_id, + projectId, + docId, { from: fromVersion }, function (error, updates) { if (error != null) { @@ -49,13 +46,13 @@ module.exports = DiffManager = { ) }, - getDiff(project_id, doc_id, fromVersion, toVersion, callback) { + getDiff(projectId, docId, fromVersion, toVersion, callback) { if (callback == null) { callback = function () {} } return DiffManager.getDocumentBeforeVersion( - project_id, - doc_id, + projectId, + docId, fromVersion, function (error, startingContent, updates) { let diff @@ -85,7 +82,7 @@ module.exports = DiffManager = { ) }, - getDocumentBeforeVersion(project_id, doc_id, version, _callback) { + getDocumentBeforeVersion(projectId, docId, version, _callback) { // Whichever order we get the latest document and the latest updates, // there is potential for updates to be applied between them so that // they do not return the same 'latest' versions. @@ -100,7 +97,7 @@ module.exports = DiffManager = { if (error != null) { if (error.retry && retries > 0) { logger.warn( - { error, project_id, doc_id, version, retries }, + { error, projectId, docId, version, retries }, 'retrying getDocumentBeforeVersion' ) return retry() @@ -115,25 +112,25 @@ module.exports = DiffManager = { return (retry = function () { retries-- return DiffManager._tryGetDocumentBeforeVersion( - project_id, - doc_id, + projectId, + docId, version, callback ) })() }, - _tryGetDocumentBeforeVersion(project_id, doc_id, version, callback) { + _tryGetDocumentBeforeVersion(projectId, docId, version, callback) { if (callback == null) { callback = function () {} } logger.debug( - { project_id, doc_id, version }, + { projectId, docId, version }, 'getting document before version' ) return DiffManager.getLatestDocAndUpdates( - project_id, - doc_id, + projectId, + docId, version, function (error, content, version, updates) { let startingContent diff --git a/services/track-changes/app/js/DocstoreManager.js b/services/track-changes/app/js/DocstoreManager.js index 99337ff29f..46de335678 100644 --- a/services/track-changes/app/js/DocstoreManager.js +++ b/services/track-changes/app/js/DocstoreManager.js @@ -5,10 +5,7 @@ const Errors = require('./Errors') function peekDocument(projectId, docId, callback) { const url = `${Settings.apis.docstore.url}/project/${projectId}/doc/${docId}/peek` - logger.debug( - { project_id: projectId, doc_id: docId }, - 'getting doc from docstore' - ) + logger.debug({ projectId, docId }, 'getting doc from docstore') request.get(url, function (error, res, body) { if (error != null) { return callback(error) @@ -20,7 +17,7 @@ function peekDocument(projectId, docId, callback) { return callback(error) } logger.debug( - { project_id: projectId, doc_id: docId, version: body.version }, + { projectId, docId, version: body.version }, 'got doc from docstore' ) return callback(null, body.lines.join('\n'), body.version) diff --git a/services/track-changes/app/js/DocumentUpdaterManager.js b/services/track-changes/app/js/DocumentUpdaterManager.js index 69dae684a2..26a6d3ab4b 100644 --- a/services/track-changes/app/js/DocumentUpdaterManager.js +++ b/services/track-changes/app/js/DocumentUpdaterManager.js @@ -1,5 +1,4 @@ /* 
eslint-disable - camelcase, no-unused-vars, */ // TODO: This file was created by bulk-decaffeinate. @@ -17,12 +16,12 @@ const Settings = require('@overleaf/settings') const Errors = require('./Errors') module.exports = DocumentUpdaterManager = { - _requestDocument(project_id, doc_id, url, callback) { + _requestDocument(projectId, docId, url, callback) { if (callback == null) { callback = function () {} } - logger.debug({ project_id, doc_id }, 'getting doc from document updater') + logger.debug({ projectId, docId }, 'getting doc from document updater') return request.get(url, function (error, res, body) { if (error != null) { return callback(error) @@ -35,7 +34,7 @@ module.exports = DocumentUpdaterManager = { return callback(error) } logger.debug( - { project_id, doc_id, version: body.version }, + { projectId, docId, version: body.version }, 'got doc from document updater' ) return callback(null, body.lines.join('\n'), body.version) @@ -44,14 +43,14 @@ module.exports = DocumentUpdaterManager = { `doc updater returned a non-success status code: ${res.statusCode}` ) logger.error( - { err: error, project_id, doc_id, url }, + { err: error, projectId, docId, url }, 'error accessing doc updater' ) if (res.statusCode === 404) { return callback( new Errors.NotFoundError('doc not found', { - projectId: project_id, - docId: doc_id, + projectId, + docId, }) ) } else { @@ -61,29 +60,29 @@ module.exports = DocumentUpdaterManager = { }) }, - getDocument(project_id, doc_id, callback) { - const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}` - DocumentUpdaterManager._requestDocument(project_id, doc_id, url, callback) + getDocument(projectId, docId, callback) { + const url = `${Settings.apis.documentupdater.url}/project/${projectId}/doc/${docId}` + DocumentUpdaterManager._requestDocument(projectId, docId, url, callback) }, - peekDocument(project_id, doc_id, callback) { - const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}/peek` - DocumentUpdaterManager._requestDocument(project_id, doc_id, url, callback) + peekDocument(projectId, docId, callback) { + const url = `${Settings.apis.documentupdater.url}/project/${projectId}/doc/${docId}/peek` + DocumentUpdaterManager._requestDocument(projectId, docId, url, callback) }, - setDocument(project_id, doc_id, content, user_id, callback) { + setDocument(projectId, docId, content, userId, callback) { if (callback == null) { callback = function () {} } - const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}` - logger.debug({ project_id, doc_id }, 'setting doc in document updater') + const url = `${Settings.apis.documentupdater.url}/project/${projectId}/doc/${docId}` + logger.debug({ projectId, docId }, 'setting doc in document updater') return request.post( { url, json: { lines: content.split('\n'), source: 'restore', - user_id, + user_id: userId, undoing: true, }, }, @@ -98,7 +97,7 @@ module.exports = DocumentUpdaterManager = { `doc updater returned a non-success status code: ${res.statusCode}` ) logger.error( - { err: error, project_id, doc_id, url }, + { err: error, projectId, docId, url }, 'error accessing doc updater' ) return callback(error) @@ -111,11 +110,11 @@ module.exports = DocumentUpdaterManager = { module.exports.promises = { // peekDocument returns two arguments so we can't use util.promisify, which only handles a single argument, we need // to treat this it as a special case. 
- peekDocument: (project_id, doc_id) => { + peekDocument: (projectId, docId) => { return new Promise((resolve, reject) => { DocumentUpdaterManager.peekDocument( - project_id, - doc_id, + projectId, + docId, (err, content, version) => { if (err) { reject(err) diff --git a/services/track-changes/app/js/HealthChecker.js b/services/track-changes/app/js/HealthChecker.js index 0a558674ef..9cb5efb182 100644 --- a/services/track-changes/app/js/HealthChecker.js +++ b/services/track-changes/app/js/HealthChecker.js @@ -1,6 +1,3 @@ -/* eslint-disable - camelcase, -*/ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. /* @@ -19,9 +16,9 @@ const LockManager = require('./LockManager') module.exports = { check(callback) { - const project_id = ObjectId(settings.trackchanges.healthCheck.project_id) - const url = `http://localhost:${port}/project/${project_id}` - logger.debug({ project_id }, 'running health check') + const projectId = ObjectId(settings.trackchanges.healthCheck.project_id) + const url = `http://localhost:${port}/project/${projectId}` + logger.debug({ projectId }, 'running health check') const jobs = [ cb => request.get( @@ -29,7 +26,7 @@ module.exports = { function (err, res, body) { if (err != null) { logger.err( - { err, project_id }, + { err, projectId }, 'error checking lock for health check' ) return cb(err) @@ -47,7 +44,7 @@ module.exports = { { url: `${url}/flush`, timeout: 10000 }, function (err, res, body) { if (err != null) { - logger.err({ err, project_id }, 'error flushing for health check') + logger.err({ err, projectId }, 'error flushing for health check') return cb(err) } else if ((res != null ? res.statusCode : undefined) !== 204) { return cb( @@ -64,7 +61,7 @@ module.exports = { function (err, res, body) { if (err != null) { logger.err( - { err, project_id }, + { err, projectId }, 'error getting updates for health check' ) return cb(err) diff --git a/services/track-changes/app/js/HttpController.js b/services/track-changes/app/js/HttpController.js index 7df4224ed7..4b85ce00df 100644 --- a/services/track-changes/app/js/HttpController.js +++ b/services/track-changes/app/js/HttpController.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-unused-vars, */ // TODO: This file was created by bulk-decaffeinate. 
@@ -28,12 +27,12 @@ module.exports = HttpController = { if (next == null) { next = function () {} } - const { doc_id } = req.params - const { project_id } = req.params - logger.debug({ project_id, doc_id }, 'compressing doc history') + const { doc_id: docId } = req.params + const { project_id: projectId } = req.params + logger.debug({ projectId, docId }, 'compressing doc history') return UpdatesManager.processUncompressedUpdatesWithLock( - project_id, - doc_id, + projectId, + docId, function (error) { if (error != null) { return next(error) @@ -47,10 +46,10 @@ module.exports = HttpController = { if (next == null) { next = function () {} } - const { project_id } = req.params - logger.debug({ project_id }, 'compressing project history') + const { project_id: projectId } = req.params + logger.debug({ projectId }, 'compressing project history') return UpdatesManager.processUncompressedUpdatesForProject( - project_id, + projectId, function (error) { if (error != null) { return next(error) @@ -114,12 +113,12 @@ module.exports = HttpController = { if (next == null) { next = function () {} } - const { doc_id } = req.params - const { project_id } = req.params - logger.debug({ project_id, doc_id }, 'checking doc history') + const { doc_id: docId } = req.params + const { project_id: projectId } = req.params + logger.debug({ projectId, docId }, 'checking doc history') return DiffManager.getDocumentBeforeVersion( - project_id, - doc_id, + projectId, + docId, 1, function (error, document, rewoundUpdates) { if (error != null) { @@ -147,8 +146,8 @@ module.exports = HttpController = { if (next == null) { next = function () {} } - const { doc_id } = req.params - const { project_id } = req.params + const { doc_id: docId } = req.params + const { project_id: projectId } = req.params if (req.query.from != null) { from = parseInt(req.query.from, 10) @@ -161,10 +160,10 @@ module.exports = HttpController = { to = null } - logger.debug({ project_id, doc_id, from, to }, 'getting diff') + logger.debug({ projectId, docId, from, to }, 'getting diff') return DiffManager.getDiff( - project_id, - doc_id, + projectId, + docId, from, to, function (error, diff) { @@ -177,22 +176,22 @@ module.exports = HttpController = { }, getUpdates(req, res, next) { - let before, min_count + let before, minCount if (next == null) { next = function () {} } - const { project_id } = req.params + const { project_id: projectId } = req.params if (req.query.before != null) { before = parseInt(req.query.before, 10) } if (req.query.min_count != null) { - min_count = parseInt(req.query.min_count, 10) + minCount = parseInt(req.query.min_count, 10) } return UpdatesManager.getSummarizedProjectUpdates( - project_id, - { before, min_count }, + projectId, + { before, min_count: minCount }, function (error, updates, nextBeforeTimestamp) { if (error != null) { return next(error) @@ -233,10 +232,10 @@ module.exports = HttpController = { // - updates can weight MBs for insert/delete of full doc // - multiple updates form a pack // Flush updates per pack onto the wire. 
- const { project_id } = req.params - logger.debug({ project_id }, 'exporting project history') + const { project_id: projectId } = req.params + logger.debug({ projectId }, 'exporting project history') UpdatesManager.exportProject( - project_id, + projectId, function (err, { updates, userIds }, confirmWrite) { const abortStreaming = req.destroyed || res.finished || res.destroyed if (abortStreaming) { @@ -246,7 +245,7 @@ module.exports = HttpController = { } const hasStartedStreamingResponse = res.headersSent if (err) { - logger.error({ project_id, err }, 'export failed') + logger.error({ projectId, err }, 'export failed') if (!hasStartedStreamingResponse) { // Generate a nice 500 return next(err) @@ -294,14 +293,14 @@ module.exports = HttpController = { if (next == null) { next = function () {} } - let { doc_id, project_id, version } = req.params - const user_id = req.headers['x-user-id'] + let { doc_id: docId, project_id: projectId, version } = req.params + const userId = req.headers['x-user-id'] version = parseInt(version, 10) return RestoreManager.restoreToBeforeVersion( - project_id, - doc_id, + projectId, + docId, version, - user_id, + userId, function (error) { if (error != null) { return next(error) @@ -315,10 +314,10 @@ module.exports = HttpController = { if (next == null) { next = function () {} } - const { project_id } = req.params - const { doc_id } = req.params - logger.debug({ project_id, doc_id }, 'pushing all finalised changes to s3') - return PackManager.pushOldPacks(project_id, doc_id, function (error) { + const { project_id: projectId } = req.params + const { doc_id: docId } = req.params + logger.debug({ projectId, docId }, 'pushing all finalised changes to s3') + return PackManager.pushOldPacks(projectId, docId, function (error) { if (error != null) { return next(error) } @@ -330,10 +329,10 @@ module.exports = HttpController = { if (next == null) { next = function () {} } - const { project_id } = req.params - const { doc_id } = req.params - logger.debug({ project_id, doc_id }, 'pulling all packs from s3') - return PackManager.pullOldPacks(project_id, doc_id, function (error) { + const { project_id: projectId } = req.params + const { doc_id: docId } = req.params + logger.debug({ projectId, docId }, 'pulling all packs from s3') + return PackManager.pullOldPacks(projectId, docId, function (error) { if (error != null) { return next(error) } diff --git a/services/track-changes/app/js/LockManager.js b/services/track-changes/app/js/LockManager.js index 346153103e..3886fca8fb 100644 --- a/services/track-changes/app/js/LockManager.js +++ b/services/track-changes/app/js/LockManager.js @@ -115,7 +115,7 @@ module.exports = LockManager = { if (result != null && result !== 1) { // successful unlock should release exactly one key logger.error( - { key, lockValue, redis_err: err, redis_result: result }, + { key, lockValue, redisErr: err, redisResult: result }, 'unlocking error' ) return callback(new Error('tried to release timed out lock')) diff --git a/services/track-changes/app/js/MongoAWS.js b/services/track-changes/app/js/MongoAWS.js index 9e905d05b5..2fe7bb4b27 100644 --- a/services/track-changes/app/js/MongoAWS.js +++ b/services/track-changes/app/js/MongoAWS.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-return-assign, no-unused-vars, */ @@ -25,7 +24,7 @@ const Metrics = require('@overleaf/metrics') const DAYS = 24 * 3600 * 1000 // one day in milliseconds -const createStream = function (streamConstructor, project_id, doc_id, pack_id) { +const createStream = function 
(streamConstructor, projectId, docId, packId) { const AWS_CONFIG = { accessKeyId: settings.trackchanges.s3.key, secretAccessKey: settings.trackchanges.s3.secret, @@ -35,12 +34,12 @@ const createStream = function (streamConstructor, project_id, doc_id, pack_id) { return streamConstructor(new AWS.S3(AWS_CONFIG), { Bucket: settings.trackchanges.stores.doc_history, - Key: project_id + '/changes-' + doc_id + '/pack-' + pack_id, + Key: projectId + '/changes-' + docId + '/pack-' + packId, }) } module.exports = MongoAWS = { - archivePack(project_id, doc_id, pack_id, _callback) { + archivePack(projectId, docId, packId, _callback) { if (_callback == null) { _callback = function () {} } @@ -50,23 +49,23 @@ module.exports = MongoAWS = { } const query = { - _id: ObjectId(pack_id), - doc_id: ObjectId(doc_id), + _id: ObjectId(packId), + doc_id: ObjectId(docId), } - if (project_id == null) { + if (projectId == null) { return callback(new Error('invalid project id')) } - if (doc_id == null) { + if (docId == null) { return callback(new Error('invalid doc id')) } - if (pack_id == null) { + if (packId == null) { return callback(new Error('invalid pack id')) } - logger.debug({ project_id, doc_id, pack_id }, 'uploading data to s3') + logger.debug({ projectId, docId, packId }, 'uploading data to s3') - const upload = createStream(S3S.WriteStream, project_id, doc_id, pack_id) + const upload = createStream(S3S.WriteStream, projectId, docId, packId) return db.docHistory.findOne(query, function (err, result) { if (err != null) { @@ -81,15 +80,15 @@ module.exports = MongoAWS = { const uncompressedData = JSON.stringify(result) if (uncompressedData.indexOf('\u0000') !== -1) { const error = new Error('null bytes found in upload') - logger.error({ err: error, project_id, doc_id, pack_id }, error.message) + logger.error({ err: error, projectId, docId, packId }, error.message) return callback(error) } return zlib.gzip(uncompressedData, function (err, buf) { logger.debug( { - project_id, - doc_id, - pack_id, + projectId, + docId, + packId, origSize: uncompressedData.length, newSize: buf.length, }, @@ -101,10 +100,7 @@ module.exports = MongoAWS = { upload.on('error', err => callback(err)) upload.on('finish', function () { Metrics.inc('archive-pack') - logger.debug( - { project_id, doc_id, pack_id }, - 'upload to s3 completed' - ) + logger.debug({ projectId, docId, packId }, 'upload to s3 completed') return callback(null) }) upload.write(buf) @@ -113,7 +109,7 @@ module.exports = MongoAWS = { }) }, - readArchivedPack(project_id, doc_id, pack_id, _callback) { + readArchivedPack(projectId, docId, packId, _callback) { if (_callback == null) { _callback = function () {} } @@ -122,19 +118,19 @@ module.exports = MongoAWS = { return (_callback = function () {}) } - if (project_id == null) { + if (projectId == null) { return callback(new Error('invalid project id')) } - if (doc_id == null) { + if (docId == null) { return callback(new Error('invalid doc id')) } - if (pack_id == null) { + if (packId == null) { return callback(new Error('invalid pack id')) } - logger.debug({ project_id, doc_id, pack_id }, 'downloading data from s3') + logger.debug({ projectId, docId, packId }, 'downloading data from s3') - const download = createStream(S3S.ReadStream, project_id, doc_id, pack_id) + const download = createStream(S3S.ReadStream, projectId, docId, packId) const inputStream = download .on('open', obj => 1) @@ -144,7 +140,7 @@ module.exports = MongoAWS = { gunzip.setEncoding('utf8') gunzip.on('error', function (err) { logger.debug( - { 
project_id, doc_id, pack_id, err }, + { projectId, docId, packId, err }, 'error uncompressing gzip stream' ) return callback(err) @@ -155,10 +151,7 @@ module.exports = MongoAWS = { outputStream.on('error', err => callback(err)) outputStream.on('end', function () { let object - logger.debug( - { project_id, doc_id, pack_id }, - 'download from s3 completed' - ) + logger.debug({ projectId, docId, packId }, 'download from s3 completed') try { object = JSON.parse(parts.join('')) } catch (e) { @@ -177,14 +170,14 @@ module.exports = MongoAWS = { return outputStream.on('data', data => parts.push(data)) }, - unArchivePack(project_id, doc_id, pack_id, callback) { + unArchivePack(projectId, docId, packId, callback) { if (callback == null) { callback = function () {} } return MongoAWS.readArchivedPack( - project_id, - doc_id, - pack_id, + projectId, + docId, + packId, function (err, object) { if (err != null) { return callback(err) @@ -192,10 +185,7 @@ module.exports = MongoAWS = { Metrics.inc('unarchive-pack') // allow the object to expire, we can always retrieve it again object.expiresAt = new Date(Date.now() + 7 * DAYS) - logger.debug( - { project_id, doc_id, pack_id }, - 'inserting object from s3' - ) + logger.debug({ projectId, docId, packId }, 'inserting object from s3') return db.docHistory.insertOne(object, (err, confirmation) => { if (err) return callback(err) object._id = confirmation.insertedId diff --git a/services/track-changes/app/js/MongoManager.js b/services/track-changes/app/js/MongoManager.js index f42efef477..19408143af 100644 --- a/services/track-changes/app/js/MongoManager.js +++ b/services/track-changes/app/js/MongoManager.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-unused-vars, */ // TODO: This file was created by bulk-decaffeinate. @@ -19,13 +18,13 @@ const metrics = require('@overleaf/metrics') const logger = require('@overleaf/logger') module.exports = MongoManager = { - getLastCompressedUpdate(doc_id, callback) { + getLastCompressedUpdate(docId, callback) { if (callback == null) { callback = function () {} } return db.docHistory .find( - { doc_id: ObjectId(doc_id.toString()) }, + { doc_id: ObjectId(docId.toString()) }, // only return the last entry in a pack { projection: { pack: { $slice: -1 } } } ) @@ -39,7 +38,7 @@ module.exports = MongoManager = { }) }, - peekLastCompressedUpdate(doc_id, callback) { + peekLastCompressedUpdate(docId, callback) { // under normal use we pass back the last update as // callback(null,update,version). 
// @@ -50,7 +49,7 @@ module.exports = MongoManager = { callback = function () {} } return MongoManager.getLastCompressedUpdate( - doc_id, + docId, function (error, update) { if (error != null) { return callback(error) @@ -79,7 +78,7 @@ module.exports = MongoManager = { } } else { return PackManager.getLastPackFromIndex( - doc_id, + docId, function (error, pack) { if (error != null) { return callback(error) @@ -98,41 +97,41 @@ module.exports = MongoManager = { ) }, - backportProjectId(project_id, doc_id, callback) { + backportProjectId(projectId, docId, callback) { if (callback == null) { callback = function () {} } return db.docHistory.updateMany( { - doc_id: ObjectId(doc_id.toString()), + doc_id: ObjectId(docId.toString()), project_id: { $exists: false }, }, { - $set: { project_id: ObjectId(project_id.toString()) }, + $set: { project_id: ObjectId(projectId.toString()) }, }, callback ) }, - getProjectMetaData(project_id, callback) { + getProjectMetaData(projectId, callback) { if (callback == null) { callback = function () {} } return db.projectHistoryMetaData.findOne( { - project_id: ObjectId(project_id.toString()), + project_id: ObjectId(projectId.toString()), }, callback ) }, - setProjectMetaData(project_id, metadata, callback) { + setProjectMetaData(projectId, metadata, callback) { if (callback == null) { callback = function () {} } return db.projectHistoryMetaData.updateOne( { - project_id: ObjectId(project_id), + project_id: ObjectId(projectId), }, { $set: metadata, @@ -144,14 +143,14 @@ module.exports = MongoManager = { ) }, - upgradeHistory(project_id, callback) { + upgradeHistory(projectId, callback) { // preserve the project's existing history if (callback == null) { callback = function () {} } return db.docHistory.updateMany( { - project_id: ObjectId(project_id), + project_id: ObjectId(projectId), temporary: true, expiresAt: { $exists: true }, }, diff --git a/services/track-changes/app/js/PackManager.js b/services/track-changes/app/js/PackManager.js index 7abdc67e63..7043bf043e 100644 --- a/services/track-changes/app/js/PackManager.js +++ b/services/track-changes/app/js/PackManager.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-unused-vars, */ // TODO: This file was created by bulk-decaffeinate. 
@@ -69,8 +68,8 @@ module.exports = PackManager = { MAX_COUNT: 1024, insertCompressedUpdates( - project_id, - doc_id, + projectId, + docId, lastUpdate, newUpdates, temporary, @@ -113,8 +112,8 @@ module.exports = PackManager = { } return PackManager.flushCompressedUpdates( - project_id, - doc_id, + projectId, + docId, lastUpdate, updatesToFlush, temporary, @@ -123,8 +122,8 @@ module.exports = PackManager = { return callback(error) } return PackManager.insertCompressedUpdates( - project_id, - doc_id, + projectId, + docId, null, updatesRemaining, temporary, @@ -135,8 +134,8 @@ module.exports = PackManager = { }, flushCompressedUpdates( - project_id, - doc_id, + projectId, + docId, lastUpdate, newUpdates, temporary, @@ -167,8 +166,8 @@ module.exports = PackManager = { if (canAppend) { return PackManager.appendUpdatesToExistingPack( - project_id, - doc_id, + projectId, + docId, lastUpdate, newUpdates, temporary, @@ -176,8 +175,8 @@ module.exports = PackManager = { ) } else { return PackManager.insertUpdatesIntoNewPack( - project_id, - doc_id, + projectId, + docId, newUpdates, temporary, callback @@ -185,13 +184,7 @@ module.exports = PackManager = { } }, - insertUpdatesIntoNewPack( - project_id, - doc_id, - newUpdates, - temporary, - callback - ) { + insertUpdatesIntoNewPack(projectId, docId, newUpdates, temporary, callback) { if (callback == null) { callback = function () {} } @@ -200,8 +193,8 @@ module.exports = PackManager = { const n = newUpdates.length const sz = BSON.calculateObjectSize(newUpdates) const newPack = { - project_id: ObjectId(project_id.toString()), - doc_id: ObjectId(doc_id.toString()), + project_id: ObjectId(projectId.toString()), + doc_id: ObjectId(docId.toString()), pack: newUpdates, n, sz, @@ -218,7 +211,7 @@ module.exports = PackManager = { newPack.last_checked = new Date(Date.now() + 30 * DAYS) // never check temporary packs } logger.debug( - { project_id, doc_id, newUpdates }, + { projectId, docId, newUpdates }, 'inserting updates into new pack' ) return db.docHistory.insertOne(newPack, function (err) { @@ -229,14 +222,14 @@ module.exports = PackManager = { if (temporary) { return callback() } else { - return PackManager.updateIndex(project_id, doc_id, callback) + return PackManager.updateIndex(projectId, docId, callback) } }) }, appendUpdatesToExistingPack( - project_id, - doc_id, + projectId, + docId, lastUpdate, newUpdates, temporary, @@ -251,8 +244,8 @@ module.exports = PackManager = { const sz = BSON.calculateObjectSize(newUpdates) const query = { _id: lastUpdate._id, - project_id: ObjectId(project_id.toString()), - doc_id: ObjectId(doc_id.toString()), + project_id: ObjectId(projectId.toString()), + doc_id: ObjectId(docId.toString()), pack: { $exists: true }, } const update = { @@ -272,7 +265,7 @@ module.exports = PackManager = { update.$set.expiresAt = new Date(Date.now() + 7 * DAYS) } logger.debug( - { project_id, doc_id, lastUpdate, newUpdates }, + { projectId, docId, lastUpdate, newUpdates }, 'appending updates to existing pack' ) Metrics.inc(`append-pack-${temporary ? 
'temporary' : 'permanent'}`) @@ -281,18 +274,18 @@ module.exports = PackManager = { // Retrieve all changes for a document - getOpsByVersionRange(project_id, doc_id, fromVersion, toVersion, callback) { + getOpsByVersionRange(projectId, docId, fromVersion, toVersion, callback) { if (callback == null) { callback = function () {} } return PackManager.loadPacksByVersionRange( - project_id, - doc_id, + projectId, + docId, fromVersion, toVersion, function (error) { if (error) return callback(error) - const query = { doc_id: ObjectId(doc_id.toString()) } + const query = { doc_id: ObjectId(docId.toString()) } if (toVersion != null) { query.v = { $lte: toVersion } } @@ -335,14 +328,8 @@ module.exports = PackManager = { ) }, - loadPacksByVersionRange( - project_id, - doc_id, - fromVersion, - toVersion, - callback - ) { - return PackManager.getIndex(doc_id, function (err, indexResult) { + loadPacksByVersionRange(projectId, docId, fromVersion, toVersion, callback) { + return PackManager.getIndex(docId, function (err, indexResult) { let pack if (err != null) { return callback(err) @@ -369,8 +356,8 @@ module.exports = PackManager = { })() if (neededIds.length) { return PackManager.fetchPacksIfNeeded( - project_id, - doc_id, + projectId, + docId, neededIds, callback ) @@ -380,20 +367,17 @@ module.exports = PackManager = { }) }, - fetchPacksIfNeeded(project_id, doc_id, pack_ids, callback) { + fetchPacksIfNeeded(projectId, docId, packIds, callback) { let id return db.docHistory - .find( - { _id: { $in: pack_ids.map(ObjectId) } }, - { projection: { _id: 1 } } - ) + .find({ _id: { $in: packIds.map(ObjectId) } }, { projection: { _id: 1 } }) .toArray(function (err, loadedPacks) { if (err != null) { return callback(err) } const allPackIds = (() => { const result1 = [] - for (id of Array.from(pack_ids)) { + for (id of Array.from(packIds)) { result1.push(id.toString()) } return result1 @@ -403,7 +387,7 @@ module.exports = PackManager = { ) const packIdsToFetch = _.difference(allPackIds, loadedPackIds) logger.debug( - { project_id, doc_id, loadedPackIds, allPackIds, packIdsToFetch }, + { projectId, docId, loadedPackIds, allPackIds, packIdsToFetch }, 'analysed packs' ) if (packIdsToFetch.length === 0) { @@ -412,27 +396,26 @@ module.exports = PackManager = { return async.eachLimit( packIdsToFetch, 4, - (pack_id, cb) => - MongoAWS.unArchivePack(project_id, doc_id, pack_id, cb), + (packId, cb) => MongoAWS.unArchivePack(projectId, docId, packId, cb), function (err) { if (err != null) { return callback(err) } - logger.debug({ project_id, doc_id }, 'done unarchiving') + logger.debug({ projectId, docId }, 'done unarchiving') return callback() } ) }) }, - findAllDocsInProject(project_id, callback) { + findAllDocsInProject(projectId, callback) { const docIdSet = new Set() async.series( [ cb => { db.docHistory .find( - { project_id: ObjectId(project_id) }, + { project_id: ObjectId(projectId) }, { projection: { pack: false } } ) .toArray((err, packs) => { @@ -445,7 +428,7 @@ module.exports = PackManager = { }, cb => { db.docHistoryIndex - .find({ project_id: ObjectId(project_id) }) + .find({ project_id: ObjectId(projectId) }) .toArray((err, indexes) => { if (err) return callback(err) indexes.forEach(index => { @@ -514,9 +497,9 @@ module.exports = PackManager = { }) }, - makeProjectIterator(project_id, before, callback) { + makeProjectIterator(projectId, before, callback) { PackManager._findPacks( - { project_id: ObjectId(project_id) }, + { project_id: ObjectId(projectId) }, { 'meta.end_ts': -1 }, function (err, allPacks) { if 
(err) return callback(err) @@ -528,9 +511,9 @@ module.exports = PackManager = { ) }, - makeDocIterator(doc_id, callback) { + makeDocIterator(docId, callback) { PackManager._findPacks( - { doc_id: ObjectId(doc_id) }, + { doc_id: ObjectId(docId) }, { v: -1 }, function (err, allPacks) { if (err) return callback(err) @@ -539,13 +522,13 @@ module.exports = PackManager = { ) }, - getPackById(project_id, doc_id, pack_id, callback) { - return db.docHistory.findOne({ _id: pack_id }, function (err, pack) { + getPackById(projectId, docId, packId, callback) { + return db.docHistory.findOne({ _id: packId }, function (err, pack) { if (err != null) { return callback(err) } if (pack == null) { - return MongoAWS.unArchivePack(project_id, doc_id, pack_id, callback) + return MongoAWS.unArchivePack(projectId, docId, packId, callback) } else if (pack.expiresAt != null && pack.temporary === false) { // we only need to touch the TTL when listing the changes in the project // because diffs on individual documents are always done after that @@ -573,24 +556,24 @@ module.exports = PackManager = { // Manage docHistoryIndex collection - getIndex(doc_id, callback) { + getIndex(docId, callback) { return db.docHistoryIndex.findOne( - { _id: ObjectId(doc_id.toString()) }, + { _id: ObjectId(docId.toString()) }, callback ) }, - getPackFromIndex(doc_id, pack_id, callback) { + getPackFromIndex(docId, packId, callback) { return db.docHistoryIndex.findOne( - { _id: ObjectId(doc_id.toString()), 'packs._id': pack_id }, + { _id: ObjectId(docId.toString()), 'packs._id': packId }, { projection: { 'packs.$': 1 } }, callback ) }, - getLastPackFromIndex(doc_id, callback) { + getLastPackFromIndex(docId, callback) { return db.docHistoryIndex.findOne( - { _id: ObjectId(doc_id.toString()) }, + { _id: ObjectId(docId.toString()) }, { projection: { packs: { $slice: -1 } } }, function (err, indexPack) { if (err != null) { @@ -604,8 +587,8 @@ module.exports = PackManager = { ) }, - getIndexWithKeys(doc_id, callback) { - return PackManager.getIndex(doc_id, function (err, index) { + getIndexWithKeys(docId, callback) { + return PackManager.getIndex(docId, function (err, index) { if (err != null) { return callback(err) } @@ -621,10 +604,10 @@ module.exports = PackManager = { }) }, - initialiseIndex(project_id, doc_id, callback) { + initialiseIndex(projectId, docId, callback) { return PackManager.findCompletedPacks( - project_id, - doc_id, + projectId, + docId, function (err, packs) { // console.log 'err', err, 'packs', packs, packs?.length if (err != null) { @@ -634,8 +617,8 @@ module.exports = PackManager = { return callback() } return PackManager.insertPacksIntoIndexWithLock( - project_id, - doc_id, + projectId, + docId, packs, callback ) @@ -643,11 +626,11 @@ module.exports = PackManager = { ) }, - updateIndex(project_id, doc_id, callback) { + updateIndex(projectId, docId, callback) { // find all packs prior to current pack return PackManager.findUnindexedPacks( - project_id, - doc_id, + projectId, + docId, function (err, newPacks) { if (err != null) { return callback(err) @@ -656,15 +639,15 @@ module.exports = PackManager = { return callback() } return PackManager.insertPacksIntoIndexWithLock( - project_id, - doc_id, + projectId, + docId, newPacks, function (err) { if (err != null) { return callback(err) } logger.debug( - { project_id, doc_id, newPacks }, + { projectId, docId, newPacks }, 'added new packs to index' ) return callback() @@ -674,9 +657,9 @@ module.exports = PackManager = { ) }, - findCompletedPacks(project_id, doc_id, callback) { 
+ findCompletedPacks(projectId, docId, callback) { const query = { - doc_id: ObjectId(doc_id.toString()), + doc_id: ObjectId(docId.toString()), expiresAt: { $exists: false }, } return db.docHistory @@ -700,9 +683,9 @@ module.exports = PackManager = { }) }, - findPacks(project_id, doc_id, callback) { + findPacks(projectId, docId, callback) { const query = { - doc_id: ObjectId(doc_id.toString()), + doc_id: ObjectId(docId.toString()), expiresAt: { $exists: false }, } return db.docHistory @@ -722,14 +705,14 @@ module.exports = PackManager = { }) }, - findUnindexedPacks(project_id, doc_id, callback) { - return PackManager.getIndexWithKeys(doc_id, function (err, indexResult) { + findUnindexedPacks(projectId, docId, callback) { + return PackManager.getIndexWithKeys(docId, function (err, indexResult) { if (err != null) { return callback(err) } return PackManager.findCompletedPacks( - project_id, - doc_id, + projectId, + docId, function (err, historyPacks) { let pack if (err != null) { @@ -770,7 +753,7 @@ module.exports = PackManager = { })() if (newPacks.length) { logger.debug( - { project_id, doc_id, n: newPacks.length }, + { projectId, docId, n: newPacks.length }, 'found new packs' ) } @@ -780,13 +763,13 @@ module.exports = PackManager = { }) }, - insertPacksIntoIndexWithLock(project_id, doc_id, newPacks, callback) { + insertPacksIntoIndexWithLock(projectId, docId, newPacks, callback) { return LockManager.runWithLock( - keys.historyIndexLock({ doc_id }), + keys.historyIndexLock({ doc_id: docId }), releaseLock => PackManager._insertPacksIntoIndex( - project_id, - doc_id, + projectId, + docId, newPacks, releaseLock ), @@ -794,11 +777,11 @@ module.exports = PackManager = { ) }, - _insertPacksIntoIndex(project_id, doc_id, newPacks, callback) { + _insertPacksIntoIndex(projectId, docId, newPacks, callback) { return db.docHistoryIndex.updateOne( - { _id: ObjectId(doc_id.toString()) }, + { _id: ObjectId(docId.toString()) }, { - $setOnInsert: { project_id: ObjectId(project_id.toString()) }, + $setOnInsert: { project_id: ObjectId(projectId.toString()) }, $push: { packs: { $each: newPacks, $sort: { v: 1 } }, }, @@ -812,14 +795,14 @@ module.exports = PackManager = { // Archiving packs to S3 - archivePack(project_id, doc_id, pack_id, callback) { + archivePack(projectId, docId, packId, callback) { const clearFlagOnError = function (err, cb) { if (err != null) { // clear the inS3 flag on error return PackManager.clearPackAsArchiveInProgress( - project_id, - doc_id, - pack_id, + projectId, + docId, + packId, function (err2) { if (err2 != null) { return cb(err2) @@ -834,42 +817,27 @@ module.exports = PackManager = { return async.series( [ cb => - PackManager.checkArchiveNotInProgress( - project_id, - doc_id, - pack_id, - cb - ), + PackManager.checkArchiveNotInProgress(projectId, docId, packId, cb), cb => - PackManager.markPackAsArchiveInProgress( - project_id, - doc_id, - pack_id, - cb - ), + PackManager.markPackAsArchiveInProgress(projectId, docId, packId, cb), cb => - MongoAWS.archivePack(project_id, doc_id, pack_id, err => + MongoAWS.archivePack(projectId, docId, packId, err => clearFlagOnError(err, cb) ), cb => - PackManager.checkArchivedPack(project_id, doc_id, pack_id, err => + PackManager.checkArchivedPack(projectId, docId, packId, err => clearFlagOnError(err, cb) ), - cb => PackManager.markPackAsArchived(project_id, doc_id, pack_id, cb), + cb => PackManager.markPackAsArchived(projectId, docId, packId, cb), cb => - PackManager.setTTLOnArchivedPack( - project_id, - doc_id, - pack_id, - callback - ), + 
PackManager.setTTLOnArchivedPack(projectId, docId, packId, callback), ], callback ) }, - checkArchivedPack(project_id, doc_id, pack_id, callback) { - return db.docHistory.findOne({ _id: pack_id }, function (err, pack) { + checkArchivedPack(projectId, docId, packId, callback) { + return db.docHistory.findOne({ _id: packId }, function (err, pack) { if (err != null) { return callback(err) } @@ -877,9 +845,9 @@ module.exports = PackManager = { return callback(new Error('pack not found')) } return MongoAWS.readArchivedPack( - project_id, - doc_id, - pack_id, + projectId, + docId, + packId, function (err, result) { if (err) return callback(err) delete result.last_checked @@ -917,8 +885,8 @@ module.exports = PackManager = { }, // Extra methods to test archive/unarchive for a doc_id - pushOldPacks(project_id, doc_id, callback) { - return PackManager.findPacks(project_id, doc_id, function (err, packs) { + pushOldPacks(projectId, docId, callback) { + return PackManager.findPacks(projectId, docId, function (err, packs) { if (err != null) { return callback(err) } @@ -926,18 +894,18 @@ module.exports = PackManager = { return callback() } return PackManager.processOldPack( - project_id, - doc_id, + projectId, + docId, packs[0]._id, callback ) }) }, - pullOldPacks(project_id, doc_id, callback) { + pullOldPacks(projectId, docId, callback) { return PackManager.loadPacksByVersionRange( - project_id, - doc_id, + projectId, + docId, null, null, callback @@ -946,21 +914,16 @@ module.exports = PackManager = { // Processing old packs via worker - processOldPack(project_id, doc_id, pack_id, callback) { + processOldPack(projectId, docId, packId, callback) { const markAsChecked = err => - PackManager.markPackAsChecked( - project_id, - doc_id, - pack_id, - function (err2) { - if (err2 != null) { - return callback(err2) - } - return callback(err) + PackManager.markPackAsChecked(projectId, docId, packId, function (err2) { + if (err2 != null) { + return callback(err2) } - ) - logger.debug({ project_id, doc_id }, 'processing old packs') - return db.docHistory.findOne({ _id: pack_id }, function (err, pack) { + return callback(err) + }) + logger.debug({ projectId, docId }, 'processing old packs') + return db.docHistory.findOne({ _id: packId }, function (err, pack) { if (err != null) { return markAsChecked(err) } @@ -971,8 +934,8 @@ module.exports = PackManager = { return callback() } // return directly return PackManager.finaliseIfNeeded( - project_id, - doc_id, + projectId, + docId, pack._id, pack, function (err) { @@ -980,15 +943,15 @@ module.exports = PackManager = { return markAsChecked(err) } return PackManager.updateIndexIfNeeded( - project_id, - doc_id, + projectId, + docId, function (err) { if (err != null) { return markAsChecked(err) } return PackManager.findUnarchivedPacks( - project_id, - doc_id, + projectId, + docId, function (err, unarchivedPacks) { if (err != null) { return markAsChecked(err) @@ -999,7 +962,7 @@ module.exports = PackManager = { : undefined) ) { logger.debug( - { project_id, doc_id }, + { projectId, docId }, 'no packs need archiving' ) return markAsChecked() @@ -1007,12 +970,12 @@ module.exports = PackManager = { return async.eachSeries( unarchivedPacks, (pack, cb) => - PackManager.archivePack(project_id, doc_id, pack._id, cb), + PackManager.archivePack(projectId, docId, pack._id, cb), function (err) { if (err != null) { return markAsChecked(err) } - logger.debug({ project_id, doc_id }, 'done processing') + logger.debug({ projectId, docId }, 'done processing') return markAsChecked() } ) @@ 
-1025,88 +988,80 @@ module.exports = PackManager = { }) }, - finaliseIfNeeded(project_id, doc_id, pack_id, pack, callback) { + finaliseIfNeeded(projectId, docId, packId, pack, callback) { const sz = pack.sz / (1024 * 1024) // in fractions of a megabyte const n = pack.n / 1024 // in fraction of 1024 ops const age = (Date.now() - pack.meta.end_ts) / DAYS if (age < 30) { // always keep if less than 1 month old - logger.debug( - { project_id, doc_id, pack_id, age }, - 'less than 30 days old' - ) + logger.debug({ projectId, docId, packId, age }, 'less than 30 days old') return callback() } // compute an archiving threshold which decreases for each month of age - const archive_threshold = 30 / age - if (sz > archive_threshold || n > archive_threshold || age > 90) { + const archiveThreshold = 30 / age + if (sz > archiveThreshold || n > archiveThreshold || age > 90) { logger.debug( - { project_id, doc_id, pack_id, age, archive_threshold, sz, n }, + { projectId, docId, packId, age, archiveThreshold, sz, n }, 'meets archive threshold' ) return PackManager.markPackAsFinalisedWithLock( - project_id, - doc_id, - pack_id, + projectId, + docId, + packId, callback ) } else { logger.debug( - { project_id, doc_id, pack_id, age, archive_threshold, sz, n }, + { projectId, docId, packId, age, archiveThreshold, sz, n }, 'does not meet archive threshold' ) return callback() } }, - markPackAsFinalisedWithLock(project_id, doc_id, pack_id, callback) { + markPackAsFinalisedWithLock(projectId, docId, packId, callback) { return LockManager.runWithLock( - keys.historyLock({ doc_id }), + keys.historyLock({ doc_id: docId }), releaseLock => - PackManager._markPackAsFinalised( - project_id, - doc_id, - pack_id, - releaseLock - ), + PackManager._markPackAsFinalised(projectId, docId, packId, releaseLock), callback ) }, - _markPackAsFinalised(project_id, doc_id, pack_id, callback) { - logger.debug({ project_id, doc_id, pack_id }, 'marking pack as finalised') + _markPackAsFinalised(projectId, docId, packId, callback) { + logger.debug({ projectId, docId, packId }, 'marking pack as finalised') return db.docHistory.updateOne( - { _id: pack_id }, + { _id: packId }, { $set: { finalised: true } }, callback ) }, - updateIndexIfNeeded(project_id, doc_id, callback) { - logger.debug({ project_id, doc_id }, 'archiving old packs') - return PackManager.getIndexWithKeys(doc_id, function (err, index) { + updateIndexIfNeeded(projectId, docId, callback) { + logger.debug({ projectId, docId }, 'archiving old packs') + return PackManager.getIndexWithKeys(docId, function (err, index) { if (err != null) { return callback(err) } if (index == null) { - return PackManager.initialiseIndex(project_id, doc_id, callback) + return PackManager.initialiseIndex(projectId, docId, callback) } else { - return PackManager.updateIndex(project_id, doc_id, callback) + return PackManager.updateIndex(projectId, docId, callback) } }) }, - markPackAsChecked(project_id, doc_id, pack_id, callback) { - logger.debug({ project_id, doc_id, pack_id }, 'marking pack as checked') + markPackAsChecked(projectId, docId, packId, callback) { + logger.debug({ projectId, docId, packId }, 'marking pack as checked') return db.docHistory.updateOne( - { _id: pack_id }, + { _id: packId }, { $currentDate: { last_checked: true } }, callback ) }, - findUnarchivedPacks(project_id, doc_id, callback) { - return PackManager.getIndex(doc_id, function (err, indexResult) { + findUnarchivedPacks(projectId, docId, callback) { + return PackManager.getIndex(docId, function (err, indexResult) { if (err != 
null) { return callback(err) } @@ -1123,7 +1078,7 @@ module.exports = PackManager = { })() if (unArchivedPacks.length) { logger.debug( - { project_id, doc_id, n: unArchivedPacks.length }, + { projectId, docId, n: unArchivedPacks.length }, 'find unarchived packs' ) } @@ -1133,41 +1088,37 @@ module.exports = PackManager = { // Archive locking flags - checkArchiveNotInProgress(project_id, doc_id, pack_id, callback) { + checkArchiveNotInProgress(projectId, docId, packId, callback) { logger.debug( - { project_id, doc_id, pack_id }, + { projectId, docId, packId }, 'checking if archive in progress' ) - return PackManager.getPackFromIndex( - doc_id, - pack_id, - function (err, result) { - if (err != null) { - return callback(err) - } - if (result == null) { - return callback(new Error('pack not found in index')) - } - if (result.inS3) { - return callback(new Error('pack archiving already done')) - } else if (result.inS3 != null) { - return callback(new Error('pack archiving already in progress')) - } else { - return callback() - } + return PackManager.getPackFromIndex(docId, packId, function (err, result) { + if (err != null) { + return callback(err) } - ) + if (result == null) { + return callback(new Error('pack not found in index')) + } + if (result.inS3) { + return callback(new Error('pack archiving already done')) + } else if (result.inS3 != null) { + return callback(new Error('pack archiving already in progress')) + } else { + return callback() + } + }) }, - markPackAsArchiveInProgress(project_id, doc_id, pack_id, callback) { + markPackAsArchiveInProgress(projectId, docId, packId, callback) { logger.debug( - { project_id, doc_id }, + { projectId, docId }, 'marking pack as archive in progress status' ) return db.docHistoryIndex.findOneAndUpdate( { - _id: ObjectId(doc_id.toString()), - packs: { $elemMatch: { _id: pack_id, inS3: { $exists: false } } }, + _id: ObjectId(docId.toString()), + packs: { $elemMatch: { _id: packId, inS3: { $exists: false } } }, }, { $set: { 'packs.$.inS3': false } }, { projection: { 'packs.$': 1 } }, @@ -1179,7 +1130,7 @@ module.exports = PackManager = { return callback(new Error('archive is already in progress')) } logger.debug( - { project_id, doc_id, pack_id }, + { projectId, docId, packId }, 'marked as archive in progress' ) return callback() @@ -1187,27 +1138,27 @@ module.exports = PackManager = { ) }, - clearPackAsArchiveInProgress(project_id, doc_id, pack_id, callback) { + clearPackAsArchiveInProgress(projectId, docId, packId, callback) { logger.debug( - { project_id, doc_id, pack_id }, + { projectId, docId, packId }, 'clearing as archive in progress' ) return db.docHistoryIndex.updateOne( { - _id: ObjectId(doc_id.toString()), - packs: { $elemMatch: { _id: pack_id, inS3: false } }, + _id: ObjectId(docId.toString()), + packs: { $elemMatch: { _id: packId, inS3: false } }, }, { $unset: { 'packs.$.inS3': true } }, callback ) }, - markPackAsArchived(project_id, doc_id, pack_id, callback) { - logger.debug({ project_id, doc_id, pack_id }, 'marking pack as archived') + markPackAsArchived(projectId, docId, packId, callback) { + logger.debug({ projectId, docId, packId }, 'marking pack as archived') return db.docHistoryIndex.findOneAndUpdate( { - _id: ObjectId(doc_id.toString()), - packs: { $elemMatch: { _id: pack_id, inS3: false } }, + _id: ObjectId(docId.toString()), + packs: { $elemMatch: { _id: packId, inS3: false } }, }, { $set: { 'packs.$.inS3': true } }, { projection: { 'packs.$': 1 } }, @@ -1218,21 +1169,21 @@ module.exports = PackManager = { if (!result.value) { 
return callback(new Error('archive is not marked as progress')) } - logger.debug({ project_id, doc_id, pack_id }, 'marked as archived') + logger.debug({ projectId, docId, packId }, 'marked as archived') return callback() } ) }, - setTTLOnArchivedPack(project_id, doc_id, pack_id, callback) { + setTTLOnArchivedPack(projectId, docId, packId, callback) { return db.docHistory.updateOne( - { _id: pack_id }, + { _id: packId }, { $set: { expiresAt: new Date(Date.now() + 1 * DAYS) } }, function (err) { if (err) { return callback(err) } - logger.debug({ project_id, doc_id, pack_id }, 'set expiry on pack') + logger.debug({ projectId, docId, packId }, 'set expiry on pack') return callback() } ) diff --git a/services/track-changes/app/js/PackWorker.js b/services/track-changes/app/js/PackWorker.js index a726c7d9c8..88a3af5799 100644 --- a/services/track-changes/app/js/PackWorker.js +++ b/services/track-changes/app/js/PackWorker.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-unused-vars, */ // TODO: This file was created by bulk-decaffeinate. @@ -14,7 +13,7 @@ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ let LIMIT, pending -let project_id, doc_id +let projectId, docId const { callbackify } = require('util') const Settings = require('@overleaf/settings') const async = require('async') @@ -48,8 +47,8 @@ if (!source.match(/^[0-9]+$/)) { const result = (() => { const result1 = [] for (const line of Array.from(file.toString().split('\n'))) { - ;[project_id, doc_id] = Array.from(line.split(' ')) - result1.push({ doc_id, project_id }) + ;[projectId, docId] = Array.from(line.split(' ')) + result1.push({ doc_id: docId, project_id: projectId }) } return result1 })() @@ -108,12 +107,12 @@ const processUpdates = pending => pending, function (result, callback) { let _id - ;({ _id, project_id, doc_id } = result) + ;({ _id, project_id: projectId, doc_id: docId } = result) COUNT++ - logger.debug({ project_id, doc_id }, `processing ${COUNT}/${TOTAL}`) - if (project_id == null || doc_id == null) { + logger.debug({ projectId, docId }, `processing ${COUNT}/${TOTAL}`) + if (projectId == null || docId == null) { logger.debug( - { project_id, doc_id }, + { projectId, docId }, 'skipping pack, missing project/doc id' ) return callback() @@ -138,9 +137,9 @@ const processUpdates = pending => return setTimeout(() => callback(err, result), DOCUMENT_PACK_DELAY) } if (_id == null) { - return PackManager.pushOldPacks(project_id, doc_id, handler) + return PackManager.pushOldPacks(projectId, docId, handler) } else { - return PackManager.processOldPack(project_id, doc_id, _id, handler) + return PackManager.processOldPack(projectId, docId, _id, handler) } }, function (err, results) { diff --git a/services/track-changes/app/js/RedisManager.js b/services/track-changes/app/js/RedisManager.js index dc2f3db2a8..aa90aa0d80 100644 --- a/services/track-changes/app/js/RedisManager.js +++ b/services/track-changes/app/js/RedisManager.js @@ -1,6 +1,3 @@ -/* eslint-disable - camelcase, -*/ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. 
/* @@ -19,11 +16,11 @@ const Keys = Settings.redis.history.key_schema const async = require('async') module.exports = RedisManager = { - getOldestDocUpdates(doc_id, batchSize, callback) { + getOldestDocUpdates(docId, batchSize, callback) { if (callback == null) { callback = function () {} } - const key = Keys.uncompressedHistoryOps({ doc_id }) + const key = Keys.uncompressedHistoryOps({ doc_id: docId }) return rclient.lrange(key, 0, batchSize - 1, callback) }, @@ -42,14 +39,14 @@ module.exports = RedisManager = { return callback(null, rawUpdates) }, - deleteAppliedDocUpdates(project_id, doc_id, docUpdates, callback) { + deleteAppliedDocUpdates(projectId, docId, docUpdates, callback) { if (callback == null) { callback = function () {} } const multi = rclient.multi() // Delete all the updates which have been applied (exact match) for (const update of Array.from(docUpdates || [])) { - multi.lrem(Keys.uncompressedHistoryOps({ doc_id }), 1, update) + multi.lrem(Keys.uncompressedHistoryOps({ doc_id: docId }), 1, update) } return multi.exec(function (error, results) { if (error != null) { @@ -58,8 +55,8 @@ module.exports = RedisManager = { // It's ok to delete the doc_id from the set here. Even though the list // of updates may not be empty, we will continue to process it until it is. return rclient.srem( - Keys.docsWithHistoryOps({ project_id }), - doc_id, + Keys.docsWithHistoryOps({ project_id: projectId }), + docId, function (error) { if (error != null) { return callback(error) @@ -70,11 +67,14 @@ module.exports = RedisManager = { }) }, - getDocIdsWithHistoryOps(project_id, callback) { + getDocIdsWithHistoryOps(projectId, callback) { if (callback == null) { callback = function () {} } - return rclient.smembers(Keys.docsWithHistoryOps({ project_id }), callback) + return rclient.smembers( + Keys.docsWithHistoryOps({ project_id: projectId }), + callback + ) }, // iterate over keys asynchronously using redis scan (non-blocking) @@ -139,12 +139,12 @@ module.exports = RedisManager = { } return RedisManager._getKeys( Keys.docsWithHistoryOps({ project_id: '*' }), - function (error, project_keys) { + function (error, projectKeys) { if (error != null) { return callback(error) } - const project_ids = RedisManager._extractIds(project_keys) - return callback(error, project_ids) + const projectIds = RedisManager._extractIds(projectKeys) + return callback(error, projectIds) } ) }, @@ -157,12 +157,12 @@ module.exports = RedisManager = { } return RedisManager._getKeys( Keys.uncompressedHistoryOps({ doc_id: '*' }), - function (error, doc_keys) { + function (error, docKeys) { if (error != null) { return callback(error) } - const doc_ids = RedisManager._extractIds(doc_keys) - return callback(error, doc_ids) + const docIds = RedisManager._extractIds(docKeys) + return callback(error, docIds) } ) }, diff --git a/services/track-changes/app/js/RestoreManager.js b/services/track-changes/app/js/RestoreManager.js index 07d46712e4..d176a21f4c 100644 --- a/services/track-changes/app/js/RestoreManager.js +++ b/services/track-changes/app/js/RestoreManager.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-unused-vars, */ // TODO: This file was created by bulk-decaffeinate. 
@@ -16,24 +15,24 @@ const DiffManager = require('./DiffManager') const logger = require('@overleaf/logger') module.exports = RestoreManager = { - restoreToBeforeVersion(project_id, doc_id, version, user_id, callback) { + restoreToBeforeVersion(projectId, docId, version, userId, callback) { if (callback == null) { callback = function () {} } - logger.debug({ project_id, doc_id, version, user_id }, 'restoring document') + logger.debug({ projectId, docId, version, userId }, 'restoring document') return DiffManager.getDocumentBeforeVersion( - project_id, - doc_id, + projectId, + docId, version, function (error, content) { if (error != null) { return callback(error) } return DocumentUpdaterManager.setDocument( - project_id, - doc_id, + projectId, + docId, content, - user_id, + userId, function (error) { if (error != null) { return callback(error) diff --git a/services/track-changes/app/js/UpdateCompressor.js b/services/track-changes/app/js/UpdateCompressor.js index 69d10586bb..5a530bc6cf 100644 --- a/services/track-changes/app/js/UpdateCompressor.js +++ b/services/track-changes/app/js/UpdateCompressor.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, new-cap, no-throw-literal, no-unused-vars, @@ -16,8 +15,8 @@ let oneMinute, twoMegabytes, UpdateCompressor const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos) const strRemove = (s1, pos, length) => s1.slice(0, pos) + s1.slice(pos + length) -const { diff_match_patch } = require('../lib/diff_match_patch') -const dmp = new diff_match_patch() +const { diff_match_patch: diffMatchPatch } = require('../lib/diff_match_patch') +const dmp = new diffMatchPatch() module.exports = UpdateCompressor = { NOOP: 'noop', @@ -254,8 +253,8 @@ module.exports = UpdateCompressor = { firstOp.p === secondOp.p ) { offset = firstOp.p - const diff_ops = this.diffAsShareJsOps(firstOp.d, secondOp.i) - if (diff_ops.length === 0) { + const diffOps = this.diffAsShareJsOps(firstOp.d, secondOp.i) + if (diffOps.length === 0) { return [ { // Noop @@ -272,7 +271,7 @@ module.exports = UpdateCompressor = { }, ] } else { - return diff_ops.map(function (op) { + return diffOps.map(function (op) { op.p += offset return { meta: { diff --git a/services/track-changes/app/js/UpdateTrimmer.js b/services/track-changes/app/js/UpdateTrimmer.js index 4ef953a05e..a97aad689b 100644 --- a/services/track-changes/app/js/UpdateTrimmer.js +++ b/services/track-changes/app/js/UpdateTrimmer.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-unused-vars, */ // TODO: This file was created by bulk-decaffeinate. 
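In the UpdateCompressor hunk above, the renamed dmp instance is what diffAsShareJsOps uses to turn a matching delete/insert pair into minimal ops. A rough equivalent, using the npm diff-match-patch package as a stand-in for the vendored ../lib/diff_match_patch:

const DiffMatchPatch = require('diff-match-patch')
const dmp = new DiffMatchPatch()

function diffAsShareJsOps(before, after) {
  const ops = []
  let position = 0
  // diff_main returns [type, text] pairs; type is 1 (insert), -1 (delete) or 0 (equal)
  for (const [type, text] of dmp.diff_main(before, after)) {
    if (type === DiffMatchPatch.DIFF_INSERT) {
      ops.push({ i: text, p: position })
      position += text.length
    } else if (type === DiffMatchPatch.DIFF_DELETE) {
      ops.push({ d: text, p: position }) // deleted text is gone, so p does not advance
    } else {
      position += text.length // equal run: just move past it
    }
  }
  return ops
}

console.log(diffAsShareJsOps('hello world', 'hello brave world'))
// -> [ { i: 'brave ', p: 6 } ]

An empty result from this diff is what the _concatTwoUpdates branch above treats as a noop.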
@@ -16,12 +15,12 @@ const WebApiManager = require('./WebApiManager') const logger = require('@overleaf/logger') module.exports = UpdateTrimmer = { - shouldTrimUpdates(project_id, callback) { + shouldTrimUpdates(projectId, callback) { if (callback == null) { callback = function () {} } return MongoManager.getProjectMetaData( - project_id, + projectId, function (error, metadata) { if (error != null) { return callback(error) @@ -30,22 +29,22 @@ module.exports = UpdateTrimmer = { return callback(null, false) } else { return WebApiManager.getProjectDetails( - project_id, + projectId, function (error, details) { if (error != null) { return callback(error) } - logger.debug({ project_id, details }, 'got details') + logger.debug({ projectId, details }, 'got details') if (details?.features?.versioning) { return MongoManager.setProjectMetaData( - project_id, + projectId, { preserveHistory: true }, function (error) { if (error != null) { return callback(error) } return MongoManager.upgradeHistory( - project_id, + projectId, function (error) { if (error != null) { return callback(error) diff --git a/services/track-changes/app/js/UpdatesManager.js b/services/track-changes/app/js/UpdatesManager.js index 9f03877044..9fae4375b9 100644 --- a/services/track-changes/app/js/UpdatesManager.js +++ b/services/track-changes/app/js/UpdatesManager.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-unused-vars, */ // TODO: This file was created by bulk-decaffeinate. @@ -29,13 +28,7 @@ const keys = Settings.redis.lock.key_schema const util = require('util') module.exports = UpdatesManager = { - compressAndSaveRawUpdates( - project_id, - doc_id, - rawUpdates, - temporary, - callback - ) { + compressAndSaveRawUpdates(projectId, docId, rawUpdates, temporary, callback) { let i if (callback == null) { callback = function () {} @@ -54,8 +47,8 @@ module.exports = UpdatesManager = { if (!(prevVersion < thisVersion)) { logger.error( { - project_id, - doc_id, + projectId, + docId, rawUpdates, temporary, thisVersion, @@ -70,7 +63,7 @@ module.exports = UpdatesManager = { // FIXME: we no longer need the lastCompressedUpdate, so change functions not to need it // CORRECTION: we do use it to log the time in case of error return MongoManager.peekLastCompressedUpdate( - doc_id, + docId, function (error, lastCompressedUpdate, lastVersion) { // lastCompressedUpdate is the most recent update in Mongo, and // lastVersion is its sharejs version number. @@ -93,23 +86,23 @@ module.exports = UpdatesManager = { } if (discardedUpdates.length) { logger.error( - { project_id, doc_id, discardedUpdates, temporary, lastVersion }, + { projectId, docId, discardedUpdates, temporary, lastVersion }, 'discarded updates already present' ) } if (rawUpdates[0] != null && rawUpdates[0].v !== lastVersion + 1) { const ts = lastCompressedUpdate?.meta?.end_ts - const last_timestamp = ts != null ? new Date(ts) : 'unknown time' + const lastTimestamp = ts != null ? 
new Date(ts) : 'unknown time' error = new Error( - `Tried to apply raw op at version ${rawUpdates[0].v} to last compressed update with version ${lastVersion} from ${last_timestamp}` + `Tried to apply raw op at version ${rawUpdates[0].v} to last compressed update with version ${lastVersion} from ${lastTimestamp}` ) logger.error( { err: error, - doc_id, - project_id, - prev_end_ts: ts, + docId, + projectId, + prevEndTs: ts, temporary, lastCompressedUpdate, }, @@ -155,7 +148,7 @@ module.exports = UpdatesManager = { `dropped op exceeding maximum allowed size of ${REJECT_LARGE_OP_SIZE}` ) logger.error( - { err: error, doc_id, project_id, size, rawUpdate }, + { err: error, docId, projectId, size, rawUpdate }, 'dropped op - too big' ) rawUpdate.op = [] @@ -167,8 +160,8 @@ module.exports = UpdatesManager = { rawUpdates ) return PackManager.insertCompressedUpdates( - project_id, - doc_id, + projectId, + docId, lastCompressedUpdate, compressedUpdates, temporary, @@ -179,13 +172,13 @@ module.exports = UpdatesManager = { if (result != null) { logger.debug( { - project_id, - doc_id, - orig_v: + projectId, + docId, + origV: lastCompressedUpdate != null ? lastCompressedUpdate.v : undefined, - new_v: result.v, + newV: result.v, }, 'inserted updates into pack' ) @@ -198,12 +191,12 @@ module.exports = UpdatesManager = { }, // Check whether the updates are temporary (per-project property) - _prepareProjectForUpdates(project_id, callback) { + _prepareProjectForUpdates(projectId, callback) { if (callback == null) { callback = function () {} } return UpdateTrimmer.shouldTrimUpdates( - project_id, + projectId, function (error, temporary) { if (error != null) { return callback(error) @@ -214,11 +207,11 @@ module.exports = UpdatesManager = { }, // Check for project id on document history (per-document property) - _prepareDocForUpdates(project_id, doc_id, callback) { + _prepareDocForUpdates(projectId, docId, callback) { if (callback == null) { callback = function () {} } - return MongoManager.backportProjectId(project_id, doc_id, function (error) { + return MongoManager.backportProjectId(projectId, docId, function (error) { if (error != null) { return callback(error) } @@ -228,13 +221,13 @@ module.exports = UpdatesManager = { // Apply updates for specific project/doc after preparing at project and doc level REDIS_READ_BATCH_SIZE: 100, - processUncompressedUpdates(project_id, doc_id, temporary, callback) { + processUncompressedUpdates(projectId, docId, temporary, callback) { // get the updates as strings from redis (so we can delete them after they are applied) if (callback == null) { callback = function () {} } return RedisManager.getOldestDocUpdates( - doc_id, + docId, UpdatesManager.REDIS_READ_BATCH_SIZE, function (error, docUpdates) { if (error != null) { @@ -247,18 +240,18 @@ module.exports = UpdatesManager = { function (error, rawUpdates) { if (error != null) { logger.err( - { project_id, doc_id, docUpdates }, + { projectId, docId, docUpdates }, 'failed to parse docUpdates' ) return callback(error) } logger.debug( - { project_id, doc_id, rawUpdates }, + { projectId, docId, rawUpdates }, 'retrieved raw updates from redis' ) return UpdatesManager.compressAndSaveRawUpdates( - project_id, - doc_id, + projectId, + docId, rawUpdates, temporary, function (error) { @@ -266,13 +259,13 @@ module.exports = UpdatesManager = { return callback(error) } logger.debug( - { project_id, doc_id }, + { projectId, docId }, 'compressed and saved doc updates' ) // delete the applied updates from redis return 
RedisManager.deleteAppliedDocUpdates( - project_id, - doc_id, + projectId, + docId, docUpdates, function (error) { if (error != null) { @@ -281,14 +274,14 @@ module.exports = UpdatesManager = { if (length === UpdatesManager.REDIS_READ_BATCH_SIZE) { // There might be more updates logger.debug( - { project_id, doc_id }, + { projectId, docId }, 'continuing processing updates' ) return setTimeout( () => UpdatesManager.processUncompressedUpdates( - project_id, - doc_id, + projectId, + docId, temporary, callback ), @@ -296,7 +289,7 @@ module.exports = UpdatesManager = { ) } else { logger.debug( - { project_id, doc_id }, + { projectId, docId }, 'all raw updates processed' ) return callback() @@ -312,19 +305,19 @@ module.exports = UpdatesManager = { }, // Process updates for a doc when we flush it individually - processUncompressedUpdatesWithLock(project_id, doc_id, callback) { + processUncompressedUpdatesWithLock(projectId, docId, callback) { if (callback == null) { callback = function () {} } return UpdatesManager._prepareProjectForUpdates( - project_id, + projectId, function (error, temporary) { if (error != null) { return callback(error) } return UpdatesManager._processUncompressedUpdatesForDocWithLock( - project_id, - doc_id, + projectId, + docId, temporary, callback ) @@ -334,8 +327,8 @@ module.exports = UpdatesManager = { // Process updates for a doc when the whole project is flushed (internal method) _processUncompressedUpdatesForDocWithLock( - project_id, - doc_id, + projectId, + docId, temporary, callback ) { @@ -343,18 +336,18 @@ module.exports = UpdatesManager = { callback = function () {} } return UpdatesManager._prepareDocForUpdates( - project_id, - doc_id, + projectId, + docId, function (error) { if (error != null) { return callback(error) } return LockManager.runWithLock( - keys.historyLock({ doc_id }), + keys.historyLock({ doc_id: docId }), releaseLock => UpdatesManager.processUncompressedUpdates( - project_id, - doc_id, + projectId, + docId, temporary, releaseLock ), @@ -365,31 +358,31 @@ module.exports = UpdatesManager = { }, // Process all updates for a project, only check project-level information once - processUncompressedUpdatesForProject(project_id, callback) { + processUncompressedUpdatesForProject(projectId, callback) { if (callback == null) { callback = function () {} } return RedisManager.getDocIdsWithHistoryOps( - project_id, - function (error, doc_ids) { + projectId, + function (error, docIds) { if (error != null) { return callback(error) } return UpdatesManager._prepareProjectForUpdates( - project_id, + projectId, function (error, temporary) { if (error) return callback(error) const jobs = [] - for (const doc_id of Array.from(doc_ids)) { - ;(doc_id => + for (const docId of Array.from(docIds)) { + ;(docId => jobs.push(cb => UpdatesManager._processUncompressedUpdatesForDocWithLock( - project_id, - doc_id, + projectId, + docId, temporary, cb ) - ))(doc_id) + ))(docId) } return async.parallelLimit(jobs, 5, callback) } @@ -405,31 +398,31 @@ module.exports = UpdatesManager = { } return RedisManager.getProjectIdsWithHistoryOps(function ( error, - project_ids + projectIds ) { - let project_id + let projectId if (error != null) { return callback(error) } logger.debug( { - count: project_ids != null ? project_ids.length : undefined, - project_ids, + count: projectIds != null ? 
projectIds.length : undefined, + projectIds, }, 'found projects' ) const jobs = [] - project_ids = _.shuffle(project_ids) // randomise to avoid hitting same projects each time + projectIds = _.shuffle(projectIds) // randomise to avoid hitting same projects each time const selectedProjects = - limit < 0 ? project_ids : project_ids.slice(0, limit) - for (project_id of Array.from(selectedProjects)) { - ;(project_id => + limit < 0 ? projectIds : projectIds.slice(0, limit) + for (projectId of Array.from(selectedProjects)) { + ;(projectId => jobs.push(cb => UpdatesManager.processUncompressedUpdatesForProject( - project_id, - err => cb(null, { failed: err != null, project_id }) + projectId, + err => cb(null, { failed: err != null, project_id: projectId }) ) - ))(project_id) + ))(projectId) } return async.series(jobs, function (error, result) { let x @@ -457,7 +450,7 @@ module.exports = UpdatesManager = { return callback(null, { failed: failedProjects, succeeded: succeededProjects, - all: project_ids, + all: projectIds, }) }) }) @@ -467,16 +460,13 @@ module.exports = UpdatesManager = { if (callback == null) { callback = function () {} } - return RedisManager.getAllDocIdsWithHistoryOps(function ( - error, - all_doc_ids - ) { + return RedisManager.getAllDocIdsWithHistoryOps(function (error, allDocIds) { if (error != null) { return callback(error) } return RedisManager.getProjectIdsWithHistoryOps(function ( error, - all_project_ids + allProjectIds ) { if (error != null) { return callback(error) @@ -484,25 +474,25 @@ module.exports = UpdatesManager = { // function to get doc_ids for each project const task = cb => async.concatSeries( - all_project_ids, + allProjectIds, RedisManager.getDocIdsWithHistoryOps, cb ) // find the dangling doc ids - return task(function (error, project_doc_ids) { + return task(function (error, projectDocIds) { if (error) return callback(error) - const dangling_doc_ids = _.difference(all_doc_ids, project_doc_ids) + const danglingDocIds = _.difference(allDocIds, projectDocIds) logger.debug( - { all_doc_ids, all_project_ids, project_doc_ids, dangling_doc_ids }, + { allDocIds, allProjectIds, projectDocIds, danglingDocIds }, 'checking for dangling doc ids' ) - return callback(null, dangling_doc_ids) + return callback(null, danglingDocIds) }) }) }) }, - getDocUpdates(project_id, doc_id, options, callback) { + getDocUpdates(projectId, docId, options, callback) { if (options == null) { options = {} } @@ -510,16 +500,16 @@ module.exports = UpdatesManager = { callback = function () {} } return UpdatesManager.processUncompressedUpdatesWithLock( - project_id, - doc_id, + projectId, + docId, function (error) { if (error != null) { return callback(error) } // console.log "options", options return PackManager.getOpsByVersionRange( - project_id, - doc_id, + projectId, + docId, options.from, options.to, function (error, updates) { @@ -533,7 +523,7 @@ module.exports = UpdatesManager = { ) }, - getDocUpdatesWithUserInfo(project_id, doc_id, options, callback) { + getDocUpdatesWithUserInfo(projectId, docId, options, callback) { if (options == null) { options = {} } @@ -541,8 +531,8 @@ module.exports = UpdatesManager = { callback = function () {} } return UpdatesManager.getDocUpdates( - project_id, - doc_id, + projectId, + docId, options, function (error, updates) { if (error != null) { @@ -558,7 +548,7 @@ module.exports = UpdatesManager = { ) }, - getSummarizedProjectUpdates(project_id, options, callback) { + getSummarizedProjectUpdates(projectId, options, callback) { if (options == null) { 
options = {} } @@ -572,13 +562,13 @@ module.exports = UpdatesManager = { const { before } = options let nextBeforeTimestamp = null return UpdatesManager.processUncompressedUpdatesForProject( - project_id, + projectId, function (error) { if (error != null) { return callback(error) } return PackManager.makeProjectIterator( - project_id, + projectId, before, function (err, iterator) { if (err != null) { @@ -692,17 +682,17 @@ module.exports = UpdatesManager = { } const jobs = [] const fetchedUserInfo = {} - for (const user_id in users) { - ;(user_id => + for (const userId in users) { + ;(userId => jobs.push(callback => - WebApiManager.getUserInfo(user_id, function (error, userInfo) { + WebApiManager.getUserInfo(userId, function (error, userInfo) { if (error != null) { return callback(error) } - fetchedUserInfo[user_id] = userInfo + fetchedUserInfo[userId] = userInfo return callback() }) - ))(user_id) + ))(userId) } return async.series(jobs, function (err) { @@ -714,15 +704,15 @@ module.exports = UpdatesManager = { }, fillUserInfo(updates, callback) { - let update, user_id + let update, userId if (callback == null) { callback = function () {} } const users = {} for (update of Array.from(updates)) { - ;({ user_id } = update.meta) - if (UpdatesManager._validUserId(user_id)) { - users[user_id] = true + ;({ user_id: userId } = update.meta) + if (UpdatesManager._validUserId(userId)) { + users[userId] = true } } @@ -733,10 +723,10 @@ module.exports = UpdatesManager = { return callback(error) } for (update of Array.from(updates)) { - ;({ user_id } = update.meta) + ;({ user_id: userId } = update.meta) delete update.meta.user_id - if (UpdatesManager._validUserId(user_id)) { - update.meta.user = fetchedUserInfo[user_id] + if (UpdatesManager._validUserId(userId)) { + update.meta.user = fetchedUserInfo[userId] } } return callback(null, updates) @@ -745,16 +735,16 @@ module.exports = UpdatesManager = { }, fillSummarizedUserInfo(updates, callback) { - let update, user_id, user_ids + let update, userId, userIds if (callback == null) { callback = function () {} } const users = {} for (update of Array.from(updates)) { - user_ids = update.meta.user_ids || [] - for (user_id of Array.from(user_ids)) { - if (UpdatesManager._validUserId(user_id)) { - users[user_id] = true + userIds = update.meta.user_ids || [] + for (userId of Array.from(userIds)) { + if (UpdatesManager._validUserId(userId)) { + users[userId] = true } } } @@ -766,12 +756,12 @@ module.exports = UpdatesManager = { return callback(error) } for (update of Array.from(updates)) { - user_ids = update.meta.user_ids || [] + userIds = update.meta.user_ids || [] update.meta.users = [] delete update.meta.user_ids - for (user_id of Array.from(user_ids)) { - if (UpdatesManager._validUserId(user_id)) { - update.meta.users.push(fetchedUserInfo[user_id]) + for (userId of Array.from(userIds)) { + if (UpdatesManager._validUserId(userId)) { + update.meta.users.push(fetchedUserInfo[userId]) } else { update.meta.users.push(null) } @@ -782,11 +772,11 @@ module.exports = UpdatesManager = { ) }, - _validUserId(user_id) { - if (user_id == null) { + _validUserId(userId) { + if (userId == null) { return false } else { - return !!user_id.match(/^[a-f0-9]{24}$/) + return !!userId.match(/^[a-f0-9]{24}$/) } }, @@ -799,7 +789,7 @@ module.exports = UpdatesManager = { const summarizedUpdates = existingSummarizedUpdates.slice() let previousUpdateWasBigDelete = false for (const update of Array.from(updates)) { - let doc_id + let docId const earliestUpdate = 
summarizedUpdates[summarizedUpdates.length - 1] let shouldConcat = false @@ -837,13 +827,13 @@ module.exports = UpdatesManager = { update.meta.user_id, ]) - doc_id = update.doc_id.toString() - const doc = earliestUpdate.docs[doc_id] + docId = update.doc_id.toString() + const doc = earliestUpdate.docs[docId] if (doc != null) { doc.fromV = Math.min(doc.fromV, update.v) doc.toV = Math.max(doc.toV, update.v) } else { - earliestUpdate.docs[doc_id] = { + earliestUpdate.docs[docId] = { fromV: update.v, toV: update.v, } diff --git a/services/track-changes/app/js/WebApiManager.js b/services/track-changes/app/js/WebApiManager.js index 598c563ba3..b98f0285b2 100644 --- a/services/track-changes/app/js/WebApiManager.js +++ b/services/track-changes/app/js/WebApiManager.js @@ -1,6 +1,3 @@ -/* eslint-disable - camelcase, -*/ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. /* @@ -58,21 +55,21 @@ module.exports = WebApiManager = { ) }, - getUserInfo(user_id, callback) { + getUserInfo(userId, callback) { if (callback == null) { callback = function () {} } - const url = `/user/${user_id}/personal_info` - logger.debug({ user_id }, 'getting user info from web') + const url = `/user/${userId}/personal_info` + logger.debug({ userId }, 'getting user info from web') return WebApiManager.sendRequest(url, function (error, body) { let user if (error != null) { - logger.error({ err: error, user_id, url }, 'error accessing web') + logger.error({ err: error, userId, url }, 'error accessing web') return callback(error) } if (body === null) { - logger.error({ user_id, url }, 'no user found') + logger.error({ userId, url }, 'no user found') return callback(null, null) } try { @@ -90,16 +87,16 @@ module.exports = WebApiManager = { }) }, - getProjectDetails(project_id, callback) { + getProjectDetails(projectId, callback) { if (callback == null) { callback = function () {} } - const url = `/project/${project_id}/details` - logger.debug({ project_id }, 'getting project details from web') + const url = `/project/${projectId}/details` + logger.debug({ projectId }, 'getting project details from web') return WebApiManager.sendRequest(url, function (error, body) { let project if (error != null) { - logger.error({ err: error, project_id, url }, 'error accessing web') + logger.error({ err: error, projectId, url }, 'error accessing web') return callback(error) } diff --git a/services/track-changes/test/acceptance/js/ArchivingUpdatesTests.js b/services/track-changes/test/acceptance/js/ArchivingUpdatesTests.js index 41aff0d6fb..cfbb4a29bc 100644 --- a/services/track-changes/test/acceptance/js/ArchivingUpdatesTests.js +++ b/services/track-changes/test/acceptance/js/ArchivingUpdatesTests.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-undef, no-unused-vars, */ @@ -268,11 +267,11 @@ describe('Archiving updates', function () { if (error != null) { throw error } - const pack_id = index.packs[0]._id + const packId = index.packs[0]._id return TrackChangesClient.getS3Doc( this.project_id, this.doc_id, - pack_id, + packId, (error, doc) => { if (error) return done(error) doc.n.should.equal(1024) diff --git a/services/track-changes/test/acceptance/js/helpers/MockDocStoreApi.js b/services/track-changes/test/acceptance/js/helpers/MockDocStoreApi.js index 8658d302c5..b28a8fcb20 100644 --- a/services/track-changes/test/acceptance/js/helpers/MockDocStoreApi.js +++ b/services/track-changes/test/acceptance/js/helpers/MockDocStoreApi.js @@ -1,6 +1,3 @@ -/* eslint-disable - camelcase, -*/ // TODO: This 
file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. /* @@ -16,7 +13,7 @@ const app = express() module.exports = MockDocUpdaterApi = { docs: {}, - getAllDoc(project_id, callback) { + getAllDoc(projectId, callback) { if (callback == null) { callback = function () {} } diff --git a/services/track-changes/test/acceptance/js/helpers/MockDocUpdaterApi.js b/services/track-changes/test/acceptance/js/helpers/MockDocUpdaterApi.js index 1ff7dc622e..8c83d7d332 100644 --- a/services/track-changes/test/acceptance/js/helpers/MockDocUpdaterApi.js +++ b/services/track-changes/test/acceptance/js/helpers/MockDocUpdaterApi.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-undef, */ // TODO: This file was created by bulk-decaffeinate. @@ -19,21 +18,21 @@ app.use(bodyParser.json()) module.exports = MockDocUpdaterApi = { docs: {}, - getDoc(project_id, doc_id, callback) { + getDoc(projectId, docId, callback) { if (callback == null) { callback = function () {} } - return callback(null, this.docs[doc_id]) + return callback(null, this.docs[docId]) }, - setDoc(project_id, doc_id, lines, user_id, undoing, callback) { + setDoc(projectId, docId, lines, userId, undoing, callback) { if (callback == null) { callback = function () {} } - if (!this.docs[doc_id]) { - this.docs[doc_id] = {} + if (!this.docs[docId]) { + this.docs[docId] = {} } - this.docs[doc_id].lines = lines + this.docs[docId].lines = lines return callback() }, diff --git a/services/track-changes/test/acceptance/js/helpers/MockWebApi.js b/services/track-changes/test/acceptance/js/helpers/MockWebApi.js index fd8d36c058..2b7db026e1 100644 --- a/services/track-changes/test/acceptance/js/helpers/MockWebApi.js +++ b/services/track-changes/test/acceptance/js/helpers/MockWebApi.js @@ -1,6 +1,3 @@ -/* eslint-disable - camelcase, -*/ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. /* @@ -18,18 +15,18 @@ module.exports = MockWebApi = { projects: {}, - getUserInfo(user_id, callback) { + getUserInfo(userId, callback) { if (callback == null) { callback = function () {} } - return callback(null, this.users[user_id] || null) + return callback(null, this.users[userId] || null) }, - getProjectDetails(project_id, callback) { + getProjectDetails(projectId, callback) { if (callback == null) { callback = function () {} } - return callback(null, this.projects[project_id]) + return callback(null, this.projects[projectId]) }, run() { diff --git a/services/track-changes/test/acceptance/js/helpers/TrackChangesClient.js b/services/track-changes/test/acceptance/js/helpers/TrackChangesClient.js index b187c4681b..ad68d88a1e 100644 --- a/services/track-changes/test/acceptance/js/helpers/TrackChangesClient.js +++ b/services/track-changes/test/acceptance/js/helpers/TrackChangesClient.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-unused-vars, */ // TODO: This file was created by bulk-decaffeinate. 
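The mock helpers above stand in for the service's upstream APIs while TrackChangesClient (next file) drives it over HTTP. A compact sketch of such a mock, serving the two web routes that WebApiManager requests, with the URL shapes taken from the WebApiManager hunk earlier; the port and fixture stores are invented:

const express = require('express')
const app = express()

// In-memory fixtures, mirroring the `users` and `projects` objects the mocks keep
const users = {}
const projects = {}

app.get('/user/:userId/personal_info', (req, res) => {
  const user = users[req.params.userId]
  // an empty result is what the client side reports as 'no user found'
  if (!user) return res.sendStatus(404)
  res.json(user)
})

app.get('/project/:projectId/details', (req, res) => {
  const project = projects[req.params.projectId]
  if (!project) return res.sendStatus(404)
  res.json(project)
})

app.listen(3016) // port is an assumption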
@@ -32,25 +31,25 @@ const s3 = new aws.S3({ const S3_BUCKET = Settings.trackchanges.stores.doc_history module.exports = TrackChangesClient = { - flushAndGetCompressedUpdates(project_id, doc_id, callback) { + flushAndGetCompressedUpdates(projectId, docId, callback) { if (callback == null) { callback = function () {} } - return TrackChangesClient.flushDoc(project_id, doc_id, error => { + return TrackChangesClient.flushDoc(projectId, docId, error => { if (error != null) { return callback(error) } - return TrackChangesClient.getCompressedUpdates(doc_id, callback) + return TrackChangesClient.getCompressedUpdates(docId, callback) }) }, - flushDoc(project_id, doc_id, callback) { + flushDoc(projectId, docId, callback) { if (callback == null) { callback = function () {} } return request.post( { - url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/flush`, + url: `http://localhost:3015/project/${projectId}/doc/${docId}/flush`, }, (error, response, body) => { response.statusCode.should.equal(204) @@ -59,13 +58,13 @@ module.exports = TrackChangesClient = { ) }, - flushProject(project_id, callback) { + flushProject(projectId, callback) { if (callback == null) { callback = function () {} } return request.post( { - url: `http://localhost:3015/project/${project_id}/flush`, + url: `http://localhost:3015/project/${projectId}/flush`, }, (error, response, body) => { response.statusCode.should.equal(204) @@ -74,35 +73,35 @@ module.exports = TrackChangesClient = { ) }, - getCompressedUpdates(doc_id, callback) { + getCompressedUpdates(docId, callback) { if (callback == null) { callback = function () {} } return db.docHistory - .find({ doc_id: ObjectId(doc_id) }) + .find({ doc_id: ObjectId(docId) }) .sort({ 'meta.end_ts': 1 }) .toArray(callback) }, - getProjectMetaData(project_id, callback) { + getProjectMetaData(projectId, callback) { if (callback == null) { callback = function () {} } return db.projectHistoryMetaData.findOne( { - project_id: ObjectId(project_id), + project_id: ObjectId(projectId), }, callback ) }, - setPreserveHistoryForProject(project_id, callback) { + setPreserveHistoryForProject(projectId, callback) { if (callback == null) { callback = function () {} } return db.projectHistoryMetaData.updateOne( { - project_id: ObjectId(project_id), + project_id: ObjectId(projectId), }, { $set: { preserveHistory: true }, @@ -114,19 +113,19 @@ module.exports = TrackChangesClient = { ) }, - pushRawUpdates(project_id, doc_id, updates, callback) { + pushRawUpdates(projectId, docId, updates, callback) { if (callback == null) { callback = function () {} } return rclient.sadd( - Keys.docsWithHistoryOps({ project_id }), - doc_id, + Keys.docsWithHistoryOps({ project_id: projectId }), + docId, error => { if (error != null) { return callback(error) } return rclient.rpush( - Keys.uncompressedHistoryOps({ doc_id }), + Keys.uncompressedHistoryOps({ doc_id: docId }), ...Array.from(Array.from(updates).map(u => JSON.stringify(u))), callback ) @@ -134,13 +133,13 @@ module.exports = TrackChangesClient = { ) }, - getDiff(project_id, doc_id, from, to, callback) { + getDiff(projectId, docId, from, to, callback) { if (callback == null) { callback = function () {} } return request.get( { - url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/diff?from=${from}&to=${to}`, + url: `http://localhost:3015/project/${projectId}/doc/${docId}/diff?from=${from}&to=${to}`, }, (error, response, body) => { if (error) return callback(error) @@ -150,13 +149,13 @@ module.exports = TrackChangesClient = { ) }, - 
getUpdates(project_id, options, callback) { + getUpdates(projectId, options, callback) { if (callback == null) { callback = function () {} } return request.get( { - url: `http://localhost:3015/project/${project_id}/updates?before=${options.before}&min_count=${options.min_count}`, + url: `http://localhost:3015/project/${projectId}/updates?before=${options.before}&min_count=${options.min_count}`, }, (error, response, body) => { if (error) return callback(error) @@ -166,9 +165,9 @@ module.exports = TrackChangesClient = { ) }, - exportProject(project_id, callback) { + exportProject(projectId, callback) { request.get( - { url: `http://localhost:3015/project/${project_id}/export`, json: true }, + { url: `http://localhost:3015/project/${projectId}/export`, json: true }, (error, response, updates) => { if (error) return callback(error) response.statusCode.should.equal(200) @@ -177,15 +176,15 @@ module.exports = TrackChangesClient = { ) }, - restoreDoc(project_id, doc_id, version, user_id, callback) { + restoreDoc(projectId, docId, version, userId, callback) { if (callback == null) { callback = function () {} } return request.post( { - url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/version/${version}/restore`, + url: `http://localhost:3015/project/${projectId}/doc/${docId}/version/${version}/restore`, headers: { - 'X-User-Id': user_id, + 'X-User-Id': userId, }, }, (error, response, body) => { @@ -196,13 +195,13 @@ module.exports = TrackChangesClient = { ) }, - pushDocHistory(project_id, doc_id, callback) { + pushDocHistory(projectId, docId, callback) { if (callback == null) { callback = function () {} } return request.post( { - url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/push`, + url: `http://localhost:3015/project/${projectId}/doc/${docId}/push`, }, (error, response, body) => { response.statusCode.should.equal(204) @@ -211,13 +210,13 @@ module.exports = TrackChangesClient = { ) }, - pullDocHistory(project_id, doc_id, callback) { + pullDocHistory(projectId, docId, callback) { if (callback == null) { callback = function () {} } return request.post( { - url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/pull`, + url: `http://localhost:3015/project/${projectId}/doc/${docId}/pull`, }, (error, response, body) => { response.statusCode.should.equal(204) @@ -250,13 +249,13 @@ module.exports = TrackChangesClient = { }) }, - getS3Doc(project_id, doc_id, pack_id, callback) { + getS3Doc(projectId, docId, packId, callback) { if (callback == null) { callback = function () {} } const params = { Bucket: S3_BUCKET, - Key: `${project_id}/changes-${doc_id}/pack-${pack_id}`, + Key: `${projectId}/changes-${docId}/pack-${packId}`, } return s3.getObject(params, (error, data) => { @@ -276,13 +275,13 @@ module.exports = TrackChangesClient = { }) }, - removeS3Doc(project_id, doc_id, callback) { + removeS3Doc(projectId, docId, callback) { if (callback == null) { callback = function () {} } let params = { Bucket: S3_BUCKET, - Prefix: `${project_id}/changes-${doc_id}`, + Prefix: `${projectId}/changes-${docId}`, } return s3.listObjects(params, (error, data) => { diff --git a/services/track-changes/test/unit/js/DiffManager/DiffManagerTests.js b/services/track-changes/test/unit/js/DiffManager/DiffManagerTests.js index 846ad706c2..5cc2977921 100644 --- a/services/track-changes/test/unit/js/DiffManager/DiffManagerTests.js +++ b/services/track-changes/test/unit/js/DiffManager/DiffManagerTests.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-return-assign, no-unused-vars, */ @@ 
-239,8 +238,8 @@ describe('DiffManager', function () { beforeEach(function () { let retried = false this.DiffManager._tryGetDocumentBeforeVersion = ( - project_id, - doc_id, + projectId, + docId, version, callback ) => { diff --git a/services/track-changes/test/unit/js/LockManager/LockManagerTests.js b/services/track-changes/test/unit/js/LockManager/LockManagerTests.js index 3b6a2748c9..d99b3fcb83 100644 --- a/services/track-changes/test/unit/js/LockManager/LockManagerTests.js +++ b/services/track-changes/test/unit/js/LockManager/LockManagerTests.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, mocha/no-nested-tests, no-return-assign, no-undef, @@ -152,7 +151,7 @@ describe('LockManager', function () { beforeEach(function (done) { const startTime = Date.now() this.LockManager.LOCK_TEST_INTERVAL = 5 - this.LockManager.tryLock = function (doc_id, callback) { + this.LockManager.tryLock = function (docId, callback) { if (callback == null) { callback = function () {} } diff --git a/services/track-changes/test/unit/js/RedisManager/RedisManagerTests.js b/services/track-changes/test/unit/js/RedisManager/RedisManagerTests.js index f0c9b0f3d5..a67159e09f 100644 --- a/services/track-changes/test/unit/js/RedisManager/RedisManagerTests.js +++ b/services/track-changes/test/unit/js/RedisManager/RedisManagerTests.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-return-assign, no-unused-vars, */ @@ -32,11 +31,11 @@ describe('RedisManager', function () { redis: { history: { key_schema: { - uncompressedHistoryOps({ doc_id }) { - return `UncompressedHistoryOps:${doc_id}` + uncompressedHistoryOps({ doc_id: docId }) { + return `UncompressedHistoryOps:${docId}` }, - docsWithHistoryOps({ project_id }) { - return `DocsWithHistoryOps:${project_id}` + docsWithHistoryOps({ project_id: projectId }) { + return `DocsWithHistoryOps:${projectId}` }, }, }, diff --git a/services/track-changes/test/unit/js/UpdatesManager/UpdatesManagerTests.js b/services/track-changes/test/unit/js/UpdatesManager/UpdatesManagerTests.js index e73e44bb42..09bb213dca 100644 --- a/services/track-changes/test/unit/js/UpdatesManager/UpdatesManagerTests.js +++ b/services/track-changes/test/unit/js/UpdatesManager/UpdatesManagerTests.js @@ -1,5 +1,4 @@ /* eslint-disable - camelcase, no-return-assign, no-unused-vars, */ @@ -35,8 +34,8 @@ describe('UpdatesManager', function () { redis: { lock: { key_schema: { - historyLock({ doc_id }) { - return `HistoryLock:${doc_id}` + historyLock({ doc_id: docId }) { + return `HistoryLock:${docId}` }, }, }, @@ -457,7 +456,7 @@ describe('UpdatesManager', function () { ] this.redisArray = this.updates.slice() this.RedisManager.getOldestDocUpdates = ( - doc_id, + docId, batchSize, callback ) => { @@ -672,9 +671,9 @@ describe('UpdatesManager', function () { }) it('should process the doc ops for the each doc_id', function () { - return Array.from(this.doc_ids).map(doc_id => + return Array.from(this.doc_ids).map(docId => this.UpdatesManager._processUncompressedUpdatesForDocWithLock - .calledWith(this.project_id, doc_id, this.temporary) + .calledWith(this.project_id, docId, this.temporary) .should.equal(true) ) }) @@ -887,11 +886,11 @@ describe('UpdatesManager', function () { this.user_info[this.user_id_1] = { email: 'user1@sharelatex.com' } this.user_info[this.user_id_2] = { email: 'user2@sharelatex.com' } - this.WebApiManager.getUserInfo = (user_id, callback) => { + this.WebApiManager.getUserInfo = (userId, callback) => { if (callback == null) { callback = function () {} } - return callback(null, 
this.user_info[user_id]) + return callback(null, this.user_info[userId]) } sinon.spy(this.WebApiManager, 'getUserInfo') @@ -961,11 +960,11 @@ describe('UpdatesManager', function () { op: 'mock-op-2', }, ] - this.WebApiManager.getUserInfo = (user_id, callback) => { + this.WebApiManager.getUserInfo = (userId, callback) => { if (callback == null) { callback = function () {} } - return callback(null, this.user_info[user_id]) + return callback(null, this.user_info[userId]) } sinon.spy(this.WebApiManager, 'getUserInfo')
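The last two hunks show the test suite's standard pattern for collaborators: replace the method with a canned callback implementation, then wrap it with sinon.spy so calledWith assertions still work. Distilled into a runnable sketch, with invented fixture values:

const sinon = require('sinon')

const WebApiManager = {
  // canned implementation in place of the real HTTP call
  getUserInfo(userId, callback) {
    callback(null, { id: userId, email: `${userId}@example.com` })
  },
}
// wrapping preserves the stubbed behaviour while recording every call
sinon.spy(WebApiManager, 'getUserInfo')

WebApiManager.getUserInfo('user-1', (error, info) => {
  if (error) throw error
  console.log(info.email) // user-1@example.com
})

console.log(WebApiManager.getUserInfo.calledWith('user-1')) // true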