Mirror of https://github.com/overleaf/overleaf.git (synced 2024-11-21 20:47:08 -05:00)

Merge pull request #12218 from overleaf/em-camel-case-track-changes

Camel case variables in track-changes

GitOrigin-RevId: 92878e2b7dfa051069e0baaf604e96f4d2e0a501
Parent: 3831416c2f
Commit: 90a921cbe6
26 changed files with 538 additions and 640 deletions
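The rename is mechanical: snake_case locals such as project_id, doc_id and user_id become projectId, docId and userId, while external names (route parameters, Mongo field names, JSON payload keys) keep their snake_case spelling. A minimal sketch of the pattern, using a hypothetical helper rather than code taken from the diff:

// Local bindings are camelCase; wire and storage keys stay snake_case.
function describeDoc(params, userId) {
  // Destructure-and-rename keeps the external key names stable.
  const { project_id: projectId, doc_id: docId } = params
  // Outgoing payloads and queries keep the snake_case keys their consumers expect.
  return { project_id: projectId, doc_id: docId, user_id: userId }
}

// describeDoc({ project_id: 'p1', doc_id: 'd1' }, 'u1')
// => { project_id: 'p1', doc_id: 'd1', user_id: 'u1' }
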
@@ -1,5 +1,4 @@
 /* eslint-disable
-    camelcase,
     no-proto,
     no-unused-vars,
 */
@@ -59,10 +58,10 @@ module.exports = DiffGenerator = {
       // is the case with this op, and shift p back appropriately to match
       // ShareJS if so.
       ;({ p } = op)
-      const max_p = content.length - op.i.length
-      if (p > max_p) {
-        logger.warn({ max_p, p }, 'truncating position to content length')
-        p = max_p
+      const maxP = content.length - op.i.length
+      if (p > maxP) {
+        logger.warn({ maxP, p }, 'truncating position to content length')
+        p = maxP
         op.p = p // fix out of range offsets to avoid invalid history exports in ZipManager
       }

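The hunk above clamps an insert position when rewinding history, so out-of-range offsets cannot leak into exports. A standalone sketch of the same guard, assuming a ShareJS-style insert op of the form { i: string, p: number }:

// Sketch only: clamp an insert op's position to the rewound content length.
function clampInsertPosition(content, op) {
  const maxP = content.length - op.i.length
  if (op.p > maxP) {
    console.warn('truncating position to content length', { maxP, p: op.p })
    op.p = maxP // avoid invalid history exports caused by out-of-range offsets
  }
  return op
}
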
@ -1,6 +1,3 @@
|
||||||
/* eslint-disable
|
|
||||||
camelcase,
|
|
||||||
*/
|
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
// Fix any style issues and re-enable lint.
|
// Fix any style issues and re-enable lint.
|
||||||
/*
|
/*
|
||||||
|
@ -17,15 +14,15 @@ const DiffGenerator = require('./DiffGenerator')
|
||||||
const logger = require('@overleaf/logger')
|
const logger = require('@overleaf/logger')
|
||||||
|
|
||||||
module.exports = DiffManager = {
|
module.exports = DiffManager = {
|
||||||
getLatestDocAndUpdates(project_id, doc_id, fromVersion, callback) {
|
getLatestDocAndUpdates(projectId, docId, fromVersion, callback) {
|
||||||
// Get updates last, since then they must be ahead and it
|
// Get updates last, since then they must be ahead and it
|
||||||
// might be possible to rewind to the same version as the doc.
|
// might be possible to rewind to the same version as the doc.
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return DocumentUpdaterManager.getDocument(
|
return DocumentUpdaterManager.getDocument(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
function (error, content, version) {
|
function (error, content, version) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
|
@ -35,8 +32,8 @@ module.exports = DiffManager = {
|
||||||
return callback(null, content, version, [])
|
return callback(null, content, version, [])
|
||||||
}
|
}
|
||||||
return UpdatesManager.getDocUpdatesWithUserInfo(
|
return UpdatesManager.getDocUpdatesWithUserInfo(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
{ from: fromVersion },
|
{ from: fromVersion },
|
||||||
function (error, updates) {
|
function (error, updates) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
|
@ -49,13 +46,13 @@ module.exports = DiffManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getDiff(project_id, doc_id, fromVersion, toVersion, callback) {
|
getDiff(projectId, docId, fromVersion, toVersion, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return DiffManager.getDocumentBeforeVersion(
|
return DiffManager.getDocumentBeforeVersion(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
fromVersion,
|
fromVersion,
|
||||||
function (error, startingContent, updates) {
|
function (error, startingContent, updates) {
|
||||||
let diff
|
let diff
|
||||||
|
@ -85,7 +82,7 @@ module.exports = DiffManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getDocumentBeforeVersion(project_id, doc_id, version, _callback) {
|
getDocumentBeforeVersion(projectId, docId, version, _callback) {
|
||||||
// Whichever order we get the latest document and the latest updates,
|
// Whichever order we get the latest document and the latest updates,
|
||||||
// there is potential for updates to be applied between them so that
|
// there is potential for updates to be applied between them so that
|
||||||
// they do not return the same 'latest' versions.
|
// they do not return the same 'latest' versions.
|
||||||
|
@ -100,7 +97,7 @@ module.exports = DiffManager = {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
if (error.retry && retries > 0) {
|
if (error.retry && retries > 0) {
|
||||||
logger.warn(
|
logger.warn(
|
||||||
{ error, project_id, doc_id, version, retries },
|
{ error, projectId, docId, version, retries },
|
||||||
'retrying getDocumentBeforeVersion'
|
'retrying getDocumentBeforeVersion'
|
||||||
)
|
)
|
||||||
return retry()
|
return retry()
|
||||||
|
@ -115,25 +112,25 @@ module.exports = DiffManager = {
|
||||||
return (retry = function () {
|
return (retry = function () {
|
||||||
retries--
|
retries--
|
||||||
return DiffManager._tryGetDocumentBeforeVersion(
|
return DiffManager._tryGetDocumentBeforeVersion(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
version,
|
version,
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
})()
|
})()
|
||||||
},
|
},
|
||||||
|
|
||||||
_tryGetDocumentBeforeVersion(project_id, doc_id, version, callback) {
|
_tryGetDocumentBeforeVersion(projectId, docId, version, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, version },
|
{ projectId, docId, version },
|
||||||
'getting document before version'
|
'getting document before version'
|
||||||
)
|
)
|
||||||
return DiffManager.getLatestDocAndUpdates(
|
return DiffManager.getLatestDocAndUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
version,
|
version,
|
||||||
function (error, content, version, updates) {
|
function (error, content, version, updates) {
|
||||||
let startingContent
|
let startingContent
|
||||||
|
|
|
@@ -5,10 +5,7 @@ const Errors = require('./Errors')

 function peekDocument(projectId, docId, callback) {
   const url = `${Settings.apis.docstore.url}/project/${projectId}/doc/${docId}/peek`
-  logger.debug(
-    { project_id: projectId, doc_id: docId },
-    'getting doc from docstore'
-  )
+  logger.debug({ projectId, docId }, 'getting doc from docstore')
   request.get(url, function (error, res, body) {
     if (error != null) {
       return callback(error)
@@ -20,7 +17,7 @@ function peekDocument(projectId, docId, callback) {
       return callback(error)
     }
     logger.debug(
-      { project_id: projectId, doc_id: docId, version: body.version },
+      { projectId, docId, version: body.version },
       'got doc from docstore'
     )
     return callback(null, body.lines.join('\n'), body.version)

@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
@ -17,12 +16,12 @@ const Settings = require('@overleaf/settings')
|
||||||
const Errors = require('./Errors')
|
const Errors = require('./Errors')
|
||||||
|
|
||||||
module.exports = DocumentUpdaterManager = {
|
module.exports = DocumentUpdaterManager = {
|
||||||
_requestDocument(project_id, doc_id, url, callback) {
|
_requestDocument(projectId, docId, url, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.debug({ project_id, doc_id }, 'getting doc from document updater')
|
logger.debug({ projectId, docId }, 'getting doc from document updater')
|
||||||
return request.get(url, function (error, res, body) {
|
return request.get(url, function (error, res, body) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
|
@ -35,7 +34,7 @@ module.exports = DocumentUpdaterManager = {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, version: body.version },
|
{ projectId, docId, version: body.version },
|
||||||
'got doc from document updater'
|
'got doc from document updater'
|
||||||
)
|
)
|
||||||
return callback(null, body.lines.join('\n'), body.version)
|
return callback(null, body.lines.join('\n'), body.version)
|
||||||
|
@ -44,14 +43,14 @@ module.exports = DocumentUpdaterManager = {
|
||||||
`doc updater returned a non-success status code: ${res.statusCode}`
|
`doc updater returned a non-success status code: ${res.statusCode}`
|
||||||
)
|
)
|
||||||
logger.error(
|
logger.error(
|
||||||
{ err: error, project_id, doc_id, url },
|
{ err: error, projectId, docId, url },
|
||||||
'error accessing doc updater'
|
'error accessing doc updater'
|
||||||
)
|
)
|
||||||
if (res.statusCode === 404) {
|
if (res.statusCode === 404) {
|
||||||
return callback(
|
return callback(
|
||||||
new Errors.NotFoundError('doc not found', {
|
new Errors.NotFoundError('doc not found', {
|
||||||
projectId: project_id,
|
projectId,
|
||||||
docId: doc_id,
|
docId,
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
|
@ -61,29 +60,29 @@ module.exports = DocumentUpdaterManager = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
getDocument(project_id, doc_id, callback) {
|
getDocument(projectId, docId, callback) {
|
||||||
const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}`
|
const url = `${Settings.apis.documentupdater.url}/project/${projectId}/doc/${docId}`
|
||||||
DocumentUpdaterManager._requestDocument(project_id, doc_id, url, callback)
|
DocumentUpdaterManager._requestDocument(projectId, docId, url, callback)
|
||||||
},
|
},
|
||||||
|
|
||||||
peekDocument(project_id, doc_id, callback) {
|
peekDocument(projectId, docId, callback) {
|
||||||
const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}/peek`
|
const url = `${Settings.apis.documentupdater.url}/project/${projectId}/doc/${docId}/peek`
|
||||||
DocumentUpdaterManager._requestDocument(project_id, doc_id, url, callback)
|
DocumentUpdaterManager._requestDocument(projectId, docId, url, callback)
|
||||||
},
|
},
|
||||||
|
|
||||||
setDocument(project_id, doc_id, content, user_id, callback) {
|
setDocument(projectId, docId, content, userId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}`
|
const url = `${Settings.apis.documentupdater.url}/project/${projectId}/doc/${docId}`
|
||||||
logger.debug({ project_id, doc_id }, 'setting doc in document updater')
|
logger.debug({ projectId, docId }, 'setting doc in document updater')
|
||||||
return request.post(
|
return request.post(
|
||||||
{
|
{
|
||||||
url,
|
url,
|
||||||
json: {
|
json: {
|
||||||
lines: content.split('\n'),
|
lines: content.split('\n'),
|
||||||
source: 'restore',
|
source: 'restore',
|
||||||
user_id,
|
user_id: userId,
|
||||||
undoing: true,
|
undoing: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
@ -98,7 +97,7 @@ module.exports = DocumentUpdaterManager = {
|
||||||
`doc updater returned a non-success status code: ${res.statusCode}`
|
`doc updater returned a non-success status code: ${res.statusCode}`
|
||||||
)
|
)
|
||||||
logger.error(
|
logger.error(
|
||||||
{ err: error, project_id, doc_id, url },
|
{ err: error, projectId, docId, url },
|
||||||
'error accessing doc updater'
|
'error accessing doc updater'
|
||||||
)
|
)
|
||||||
return callback(error)
|
return callback(error)
|
||||||
|
@ -111,11 +110,11 @@ module.exports = DocumentUpdaterManager = {
|
||||||
module.exports.promises = {
|
module.exports.promises = {
|
||||||
// peekDocument returns two arguments so we can't use util.promisify, which only handles a single argument, we need
|
// peekDocument returns two arguments so we can't use util.promisify, which only handles a single argument, we need
|
||||||
// to treat this it as a special case.
|
// to treat this it as a special case.
|
||||||
peekDocument: (project_id, doc_id) => {
|
peekDocument: (projectId, docId) => {
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
DocumentUpdaterManager.peekDocument(
|
DocumentUpdaterManager.peekDocument(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
(err, content, version) => {
|
(err, content, version) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
reject(err)
|
reject(err)
|
||||||
|
|
|
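The promises wrapper above exists because peekDocument calls back with two values (content and version), which util.promisify cannot forward. A minimal sketch of that special case, resolving to a two-element array (the array shape is an assumption for illustration):

// Sketch: promisify a callback of the form (err, content, version).
function peekDocumentPromise(projectId, docId) {
  return new Promise((resolve, reject) => {
    DocumentUpdaterManager.peekDocument(projectId, docId, (err, content, version) => {
      if (err) {
        reject(err)
      } else {
        resolve([content, version])
      }
    })
  })
}
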
@@ -1,6 +1,3 @@
-/* eslint-disable
-    camelcase,
-*/
 // TODO: This file was created by bulk-decaffeinate.
 // Fix any style issues and re-enable lint.
 /*
@@ -19,9 +16,9 @@ const LockManager = require('./LockManager')

 module.exports = {
   check(callback) {
-    const project_id = ObjectId(settings.trackchanges.healthCheck.project_id)
-    const url = `http://localhost:${port}/project/${project_id}`
-    logger.debug({ project_id }, 'running health check')
+    const projectId = ObjectId(settings.trackchanges.healthCheck.project_id)
+    const url = `http://localhost:${port}/project/${projectId}`
+    logger.debug({ projectId }, 'running health check')
     const jobs = [
       cb =>
         request.get(
@@ -29,7 +26,7 @@ module.exports = {
           function (err, res, body) {
             if (err != null) {
               logger.err(
-                { err, project_id },
+                { err, projectId },
                 'error checking lock for health check'
               )
               return cb(err)
@@ -47,7 +44,7 @@ module.exports = {
           { url: `${url}/flush`, timeout: 10000 },
           function (err, res, body) {
             if (err != null) {
-              logger.err({ err, project_id }, 'error flushing for health check')
+              logger.err({ err, projectId }, 'error flushing for health check')
               return cb(err)
             } else if ((res != null ? res.statusCode : undefined) !== 204) {
               return cb(
@@ -64,7 +61,7 @@ module.exports = {
           function (err, res, body) {
             if (err != null) {
               logger.err(
-                { err, project_id },
+                { err, projectId },
                 'error getting updates for health check'
              )
              return cb(err)

@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
@ -28,12 +27,12 @@ module.exports = HttpController = {
|
||||||
if (next == null) {
|
if (next == null) {
|
||||||
next = function () {}
|
next = function () {}
|
||||||
}
|
}
|
||||||
const { doc_id } = req.params
|
const { doc_id: docId } = req.params
|
||||||
const { project_id } = req.params
|
const { project_id: projectId } = req.params
|
||||||
logger.debug({ project_id, doc_id }, 'compressing doc history')
|
logger.debug({ projectId, docId }, 'compressing doc history')
|
||||||
return UpdatesManager.processUncompressedUpdatesWithLock(
|
return UpdatesManager.processUncompressedUpdatesWithLock(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
function (error) {
|
function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return next(error)
|
return next(error)
|
||||||
|
@ -47,10 +46,10 @@ module.exports = HttpController = {
|
||||||
if (next == null) {
|
if (next == null) {
|
||||||
next = function () {}
|
next = function () {}
|
||||||
}
|
}
|
||||||
const { project_id } = req.params
|
const { project_id: projectId } = req.params
|
||||||
logger.debug({ project_id }, 'compressing project history')
|
logger.debug({ projectId }, 'compressing project history')
|
||||||
return UpdatesManager.processUncompressedUpdatesForProject(
|
return UpdatesManager.processUncompressedUpdatesForProject(
|
||||||
project_id,
|
projectId,
|
||||||
function (error) {
|
function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return next(error)
|
return next(error)
|
||||||
|
@ -114,12 +113,12 @@ module.exports = HttpController = {
|
||||||
if (next == null) {
|
if (next == null) {
|
||||||
next = function () {}
|
next = function () {}
|
||||||
}
|
}
|
||||||
const { doc_id } = req.params
|
const { doc_id: docId } = req.params
|
||||||
const { project_id } = req.params
|
const { project_id: projectId } = req.params
|
||||||
logger.debug({ project_id, doc_id }, 'checking doc history')
|
logger.debug({ projectId, docId }, 'checking doc history')
|
||||||
return DiffManager.getDocumentBeforeVersion(
|
return DiffManager.getDocumentBeforeVersion(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
1,
|
1,
|
||||||
function (error, document, rewoundUpdates) {
|
function (error, document, rewoundUpdates) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
|
@ -147,8 +146,8 @@ module.exports = HttpController = {
|
||||||
if (next == null) {
|
if (next == null) {
|
||||||
next = function () {}
|
next = function () {}
|
||||||
}
|
}
|
||||||
const { doc_id } = req.params
|
const { doc_id: docId } = req.params
|
||||||
const { project_id } = req.params
|
const { project_id: projectId } = req.params
|
||||||
|
|
||||||
if (req.query.from != null) {
|
if (req.query.from != null) {
|
||||||
from = parseInt(req.query.from, 10)
|
from = parseInt(req.query.from, 10)
|
||||||
|
@ -161,10 +160,10 @@ module.exports = HttpController = {
|
||||||
to = null
|
to = null
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.debug({ project_id, doc_id, from, to }, 'getting diff')
|
logger.debug({ projectId, docId, from, to }, 'getting diff')
|
||||||
return DiffManager.getDiff(
|
return DiffManager.getDiff(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
from,
|
from,
|
||||||
to,
|
to,
|
||||||
function (error, diff) {
|
function (error, diff) {
|
||||||
|
@ -177,22 +176,22 @@ module.exports = HttpController = {
|
||||||
},
|
},
|
||||||
|
|
||||||
getUpdates(req, res, next) {
|
getUpdates(req, res, next) {
|
||||||
let before, min_count
|
let before, minCount
|
||||||
if (next == null) {
|
if (next == null) {
|
||||||
next = function () {}
|
next = function () {}
|
||||||
}
|
}
|
||||||
const { project_id } = req.params
|
const { project_id: projectId } = req.params
|
||||||
|
|
||||||
if (req.query.before != null) {
|
if (req.query.before != null) {
|
||||||
before = parseInt(req.query.before, 10)
|
before = parseInt(req.query.before, 10)
|
||||||
}
|
}
|
||||||
if (req.query.min_count != null) {
|
if (req.query.min_count != null) {
|
||||||
min_count = parseInt(req.query.min_count, 10)
|
minCount = parseInt(req.query.min_count, 10)
|
||||||
}
|
}
|
||||||
|
|
||||||
return UpdatesManager.getSummarizedProjectUpdates(
|
return UpdatesManager.getSummarizedProjectUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
{ before, min_count },
|
{ before, min_count: minCount },
|
||||||
function (error, updates, nextBeforeTimestamp) {
|
function (error, updates, nextBeforeTimestamp) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return next(error)
|
return next(error)
|
||||||
|
@ -233,10 +232,10 @@ module.exports = HttpController = {
|
||||||
// - updates can weight MBs for insert/delete of full doc
|
// - updates can weight MBs for insert/delete of full doc
|
||||||
// - multiple updates form a pack
|
// - multiple updates form a pack
|
||||||
// Flush updates per pack onto the wire.
|
// Flush updates per pack onto the wire.
|
||||||
const { project_id } = req.params
|
const { project_id: projectId } = req.params
|
||||||
logger.debug({ project_id }, 'exporting project history')
|
logger.debug({ projectId }, 'exporting project history')
|
||||||
UpdatesManager.exportProject(
|
UpdatesManager.exportProject(
|
||||||
project_id,
|
projectId,
|
||||||
function (err, { updates, userIds }, confirmWrite) {
|
function (err, { updates, userIds }, confirmWrite) {
|
||||||
const abortStreaming = req.destroyed || res.finished || res.destroyed
|
const abortStreaming = req.destroyed || res.finished || res.destroyed
|
||||||
if (abortStreaming) {
|
if (abortStreaming) {
|
||||||
|
@ -246,7 +245,7 @@ module.exports = HttpController = {
|
||||||
}
|
}
|
||||||
const hasStartedStreamingResponse = res.headersSent
|
const hasStartedStreamingResponse = res.headersSent
|
||||||
if (err) {
|
if (err) {
|
||||||
logger.error({ project_id, err }, 'export failed')
|
logger.error({ projectId, err }, 'export failed')
|
||||||
if (!hasStartedStreamingResponse) {
|
if (!hasStartedStreamingResponse) {
|
||||||
// Generate a nice 500
|
// Generate a nice 500
|
||||||
return next(err)
|
return next(err)
|
||||||
|
@ -294,14 +293,14 @@ module.exports = HttpController = {
|
||||||
if (next == null) {
|
if (next == null) {
|
||||||
next = function () {}
|
next = function () {}
|
||||||
}
|
}
|
||||||
let { doc_id, project_id, version } = req.params
|
let { doc_id: docId, project_id: projectId, version } = req.params
|
||||||
const user_id = req.headers['x-user-id']
|
const userId = req.headers['x-user-id']
|
||||||
version = parseInt(version, 10)
|
version = parseInt(version, 10)
|
||||||
return RestoreManager.restoreToBeforeVersion(
|
return RestoreManager.restoreToBeforeVersion(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
version,
|
version,
|
||||||
user_id,
|
userId,
|
||||||
function (error) {
|
function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return next(error)
|
return next(error)
|
||||||
|
@ -315,10 +314,10 @@ module.exports = HttpController = {
|
||||||
if (next == null) {
|
if (next == null) {
|
||||||
next = function () {}
|
next = function () {}
|
||||||
}
|
}
|
||||||
const { project_id } = req.params
|
const { project_id: projectId } = req.params
|
||||||
const { doc_id } = req.params
|
const { doc_id: docId } = req.params
|
||||||
logger.debug({ project_id, doc_id }, 'pushing all finalised changes to s3')
|
logger.debug({ projectId, docId }, 'pushing all finalised changes to s3')
|
||||||
return PackManager.pushOldPacks(project_id, doc_id, function (error) {
|
return PackManager.pushOldPacks(projectId, docId, function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return next(error)
|
return next(error)
|
||||||
}
|
}
|
||||||
|
@ -330,10 +329,10 @@ module.exports = HttpController = {
|
||||||
if (next == null) {
|
if (next == null) {
|
||||||
next = function () {}
|
next = function () {}
|
||||||
}
|
}
|
||||||
const { project_id } = req.params
|
const { project_id: projectId } = req.params
|
||||||
const { doc_id } = req.params
|
const { doc_id: docId } = req.params
|
||||||
logger.debug({ project_id, doc_id }, 'pulling all packs from s3')
|
logger.debug({ projectId, docId }, 'pulling all packs from s3')
|
||||||
return PackManager.pullOldPacks(project_id, doc_id, function (error) {
|
return PackManager.pullOldPacks(projectId, docId, function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return next(error)
|
return next(error)
|
||||||
}
|
}
|
||||||
|
|
|
@@ -115,7 +115,7 @@ module.exports = LockManager = {
       if (result != null && result !== 1) {
         // successful unlock should release exactly one key
         logger.error(
-          { key, lockValue, redis_err: err, redis_result: result },
+          { key, lockValue, redisErr: err, redisResult: result },
           'unlocking error'
         )
         return callback(new Error('tried to release timed out lock'))

@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-return-assign,
|
no-return-assign,
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
|
@ -25,7 +24,7 @@ const Metrics = require('@overleaf/metrics')
|
||||||
|
|
||||||
const DAYS = 24 * 3600 * 1000 // one day in milliseconds
|
const DAYS = 24 * 3600 * 1000 // one day in milliseconds
|
||||||
|
|
||||||
const createStream = function (streamConstructor, project_id, doc_id, pack_id) {
|
const createStream = function (streamConstructor, projectId, docId, packId) {
|
||||||
const AWS_CONFIG = {
|
const AWS_CONFIG = {
|
||||||
accessKeyId: settings.trackchanges.s3.key,
|
accessKeyId: settings.trackchanges.s3.key,
|
||||||
secretAccessKey: settings.trackchanges.s3.secret,
|
secretAccessKey: settings.trackchanges.s3.secret,
|
||||||
|
@ -35,12 +34,12 @@ const createStream = function (streamConstructor, project_id, doc_id, pack_id) {
|
||||||
|
|
||||||
return streamConstructor(new AWS.S3(AWS_CONFIG), {
|
return streamConstructor(new AWS.S3(AWS_CONFIG), {
|
||||||
Bucket: settings.trackchanges.stores.doc_history,
|
Bucket: settings.trackchanges.stores.doc_history,
|
||||||
Key: project_id + '/changes-' + doc_id + '/pack-' + pack_id,
|
Key: projectId + '/changes-' + docId + '/pack-' + packId,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = MongoAWS = {
|
module.exports = MongoAWS = {
|
||||||
archivePack(project_id, doc_id, pack_id, _callback) {
|
archivePack(projectId, docId, packId, _callback) {
|
||||||
if (_callback == null) {
|
if (_callback == null) {
|
||||||
_callback = function () {}
|
_callback = function () {}
|
||||||
}
|
}
|
||||||
|
@ -50,23 +49,23 @@ module.exports = MongoAWS = {
|
||||||
}
|
}
|
||||||
|
|
||||||
const query = {
|
const query = {
|
||||||
_id: ObjectId(pack_id),
|
_id: ObjectId(packId),
|
||||||
doc_id: ObjectId(doc_id),
|
doc_id: ObjectId(docId),
|
||||||
}
|
}
|
||||||
|
|
||||||
if (project_id == null) {
|
if (projectId == null) {
|
||||||
return callback(new Error('invalid project id'))
|
return callback(new Error('invalid project id'))
|
||||||
}
|
}
|
||||||
if (doc_id == null) {
|
if (docId == null) {
|
||||||
return callback(new Error('invalid doc id'))
|
return callback(new Error('invalid doc id'))
|
||||||
}
|
}
|
||||||
if (pack_id == null) {
|
if (packId == null) {
|
||||||
return callback(new Error('invalid pack id'))
|
return callback(new Error('invalid pack id'))
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.debug({ project_id, doc_id, pack_id }, 'uploading data to s3')
|
logger.debug({ projectId, docId, packId }, 'uploading data to s3')
|
||||||
|
|
||||||
const upload = createStream(S3S.WriteStream, project_id, doc_id, pack_id)
|
const upload = createStream(S3S.WriteStream, projectId, docId, packId)
|
||||||
|
|
||||||
return db.docHistory.findOne(query, function (err, result) {
|
return db.docHistory.findOne(query, function (err, result) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
|
@ -81,15 +80,15 @@ module.exports = MongoAWS = {
|
||||||
const uncompressedData = JSON.stringify(result)
|
const uncompressedData = JSON.stringify(result)
|
||||||
if (uncompressedData.indexOf('\u0000') !== -1) {
|
if (uncompressedData.indexOf('\u0000') !== -1) {
|
||||||
const error = new Error('null bytes found in upload')
|
const error = new Error('null bytes found in upload')
|
||||||
logger.error({ err: error, project_id, doc_id, pack_id }, error.message)
|
logger.error({ err: error, projectId, docId, packId }, error.message)
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
return zlib.gzip(uncompressedData, function (err, buf) {
|
return zlib.gzip(uncompressedData, function (err, buf) {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{
|
{
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
pack_id,
|
packId,
|
||||||
origSize: uncompressedData.length,
|
origSize: uncompressedData.length,
|
||||||
newSize: buf.length,
|
newSize: buf.length,
|
||||||
},
|
},
|
||||||
|
@ -101,10 +100,7 @@ module.exports = MongoAWS = {
|
||||||
upload.on('error', err => callback(err))
|
upload.on('error', err => callback(err))
|
||||||
upload.on('finish', function () {
|
upload.on('finish', function () {
|
||||||
Metrics.inc('archive-pack')
|
Metrics.inc('archive-pack')
|
||||||
logger.debug(
|
logger.debug({ projectId, docId, packId }, 'upload to s3 completed')
|
||||||
{ project_id, doc_id, pack_id },
|
|
||||||
'upload to s3 completed'
|
|
||||||
)
|
|
||||||
return callback(null)
|
return callback(null)
|
||||||
})
|
})
|
||||||
upload.write(buf)
|
upload.write(buf)
|
||||||
|
@ -113,7 +109,7 @@ module.exports = MongoAWS = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
readArchivedPack(project_id, doc_id, pack_id, _callback) {
|
readArchivedPack(projectId, docId, packId, _callback) {
|
||||||
if (_callback == null) {
|
if (_callback == null) {
|
||||||
_callback = function () {}
|
_callback = function () {}
|
||||||
}
|
}
|
||||||
|
@ -122,19 +118,19 @@ module.exports = MongoAWS = {
|
||||||
return (_callback = function () {})
|
return (_callback = function () {})
|
||||||
}
|
}
|
||||||
|
|
||||||
if (project_id == null) {
|
if (projectId == null) {
|
||||||
return callback(new Error('invalid project id'))
|
return callback(new Error('invalid project id'))
|
||||||
}
|
}
|
||||||
if (doc_id == null) {
|
if (docId == null) {
|
||||||
return callback(new Error('invalid doc id'))
|
return callback(new Error('invalid doc id'))
|
||||||
}
|
}
|
||||||
if (pack_id == null) {
|
if (packId == null) {
|
||||||
return callback(new Error('invalid pack id'))
|
return callback(new Error('invalid pack id'))
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.debug({ project_id, doc_id, pack_id }, 'downloading data from s3')
|
logger.debug({ projectId, docId, packId }, 'downloading data from s3')
|
||||||
|
|
||||||
const download = createStream(S3S.ReadStream, project_id, doc_id, pack_id)
|
const download = createStream(S3S.ReadStream, projectId, docId, packId)
|
||||||
|
|
||||||
const inputStream = download
|
const inputStream = download
|
||||||
.on('open', obj => 1)
|
.on('open', obj => 1)
|
||||||
|
@ -144,7 +140,7 @@ module.exports = MongoAWS = {
|
||||||
gunzip.setEncoding('utf8')
|
gunzip.setEncoding('utf8')
|
||||||
gunzip.on('error', function (err) {
|
gunzip.on('error', function (err) {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, pack_id, err },
|
{ projectId, docId, packId, err },
|
||||||
'error uncompressing gzip stream'
|
'error uncompressing gzip stream'
|
||||||
)
|
)
|
||||||
return callback(err)
|
return callback(err)
|
||||||
|
@ -155,10 +151,7 @@ module.exports = MongoAWS = {
|
||||||
outputStream.on('error', err => callback(err))
|
outputStream.on('error', err => callback(err))
|
||||||
outputStream.on('end', function () {
|
outputStream.on('end', function () {
|
||||||
let object
|
let object
|
||||||
logger.debug(
|
logger.debug({ projectId, docId, packId }, 'download from s3 completed')
|
||||||
{ project_id, doc_id, pack_id },
|
|
||||||
'download from s3 completed'
|
|
||||||
)
|
|
||||||
try {
|
try {
|
||||||
object = JSON.parse(parts.join(''))
|
object = JSON.parse(parts.join(''))
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
|
@ -177,14 +170,14 @@ module.exports = MongoAWS = {
|
||||||
return outputStream.on('data', data => parts.push(data))
|
return outputStream.on('data', data => parts.push(data))
|
||||||
},
|
},
|
||||||
|
|
||||||
unArchivePack(project_id, doc_id, pack_id, callback) {
|
unArchivePack(projectId, docId, packId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return MongoAWS.readArchivedPack(
|
return MongoAWS.readArchivedPack(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
pack_id,
|
packId,
|
||||||
function (err, object) {
|
function (err, object) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
|
@ -192,10 +185,7 @@ module.exports = MongoAWS = {
|
||||||
Metrics.inc('unarchive-pack')
|
Metrics.inc('unarchive-pack')
|
||||||
// allow the object to expire, we can always retrieve it again
|
// allow the object to expire, we can always retrieve it again
|
||||||
object.expiresAt = new Date(Date.now() + 7 * DAYS)
|
object.expiresAt = new Date(Date.now() + 7 * DAYS)
|
||||||
logger.debug(
|
logger.debug({ projectId, docId, packId }, 'inserting object from s3')
|
||||||
{ project_id, doc_id, pack_id },
|
|
||||||
'inserting object from s3'
|
|
||||||
)
|
|
||||||
return db.docHistory.insertOne(object, (err, confirmation) => {
|
return db.docHistory.insertOne(object, (err, confirmation) => {
|
||||||
if (err) return callback(err)
|
if (err) return callback(err)
|
||||||
object._id = confirmation.insertedId
|
object._id = confirmation.insertedId
|
||||||
|
|
|
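MongoAWS above serialises a pack to JSON, rejects payloads containing null bytes, and gzips the result before streaming it to S3. A standalone sketch of that preparation step (the S3 write stream itself is omitted):

const zlib = require('zlib')

// Sketch: prepare a pack document for archiving; calls back with (err, gzippedBuffer).
function preparePackForUpload(pack, callback) {
  const uncompressedData = JSON.stringify(pack)
  if (uncompressedData.indexOf('\u0000') !== -1) {
    // Null bytes would corrupt the archived history, so refuse to upload.
    return callback(new Error('null bytes found in upload'))
  }
  zlib.gzip(uncompressedData, callback)
}
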
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
@ -19,13 +18,13 @@ const metrics = require('@overleaf/metrics')
|
||||||
const logger = require('@overleaf/logger')
|
const logger = require('@overleaf/logger')
|
||||||
|
|
||||||
module.exports = MongoManager = {
|
module.exports = MongoManager = {
|
||||||
getLastCompressedUpdate(doc_id, callback) {
|
getLastCompressedUpdate(docId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return db.docHistory
|
return db.docHistory
|
||||||
.find(
|
.find(
|
||||||
{ doc_id: ObjectId(doc_id.toString()) },
|
{ doc_id: ObjectId(docId.toString()) },
|
||||||
// only return the last entry in a pack
|
// only return the last entry in a pack
|
||||||
{ projection: { pack: { $slice: -1 } } }
|
{ projection: { pack: { $slice: -1 } } }
|
||||||
)
|
)
|
||||||
|
@ -39,7 +38,7 @@ module.exports = MongoManager = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
peekLastCompressedUpdate(doc_id, callback) {
|
peekLastCompressedUpdate(docId, callback) {
|
||||||
// under normal use we pass back the last update as
|
// under normal use we pass back the last update as
|
||||||
// callback(null,update,version).
|
// callback(null,update,version).
|
||||||
//
|
//
|
||||||
|
@ -50,7 +49,7 @@ module.exports = MongoManager = {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return MongoManager.getLastCompressedUpdate(
|
return MongoManager.getLastCompressedUpdate(
|
||||||
doc_id,
|
docId,
|
||||||
function (error, update) {
|
function (error, update) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
|
@ -79,7 +78,7 @@ module.exports = MongoManager = {
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
return PackManager.getLastPackFromIndex(
|
return PackManager.getLastPackFromIndex(
|
||||||
doc_id,
|
docId,
|
||||||
function (error, pack) {
|
function (error, pack) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
|
@ -98,41 +97,41 @@ module.exports = MongoManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
backportProjectId(project_id, doc_id, callback) {
|
backportProjectId(projectId, docId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return db.docHistory.updateMany(
|
return db.docHistory.updateMany(
|
||||||
{
|
{
|
||||||
doc_id: ObjectId(doc_id.toString()),
|
doc_id: ObjectId(docId.toString()),
|
||||||
project_id: { $exists: false },
|
project_id: { $exists: false },
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
$set: { project_id: ObjectId(project_id.toString()) },
|
$set: { project_id: ObjectId(projectId.toString()) },
|
||||||
},
|
},
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getProjectMetaData(project_id, callback) {
|
getProjectMetaData(projectId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return db.projectHistoryMetaData.findOne(
|
return db.projectHistoryMetaData.findOne(
|
||||||
{
|
{
|
||||||
project_id: ObjectId(project_id.toString()),
|
project_id: ObjectId(projectId.toString()),
|
||||||
},
|
},
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
setProjectMetaData(project_id, metadata, callback) {
|
setProjectMetaData(projectId, metadata, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return db.projectHistoryMetaData.updateOne(
|
return db.projectHistoryMetaData.updateOne(
|
||||||
{
|
{
|
||||||
project_id: ObjectId(project_id),
|
project_id: ObjectId(projectId),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
$set: metadata,
|
$set: metadata,
|
||||||
|
@ -144,14 +143,14 @@ module.exports = MongoManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
upgradeHistory(project_id, callback) {
|
upgradeHistory(projectId, callback) {
|
||||||
// preserve the project's existing history
|
// preserve the project's existing history
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return db.docHistory.updateMany(
|
return db.docHistory.updateMany(
|
||||||
{
|
{
|
||||||
project_id: ObjectId(project_id),
|
project_id: ObjectId(projectId),
|
||||||
temporary: true,
|
temporary: true,
|
||||||
expiresAt: { $exists: true },
|
expiresAt: { $exists: true },
|
||||||
},
|
},
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
@ -69,8 +68,8 @@ module.exports = PackManager = {
|
||||||
MAX_COUNT: 1024,
|
MAX_COUNT: 1024,
|
||||||
|
|
||||||
insertCompressedUpdates(
|
insertCompressedUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
lastUpdate,
|
lastUpdate,
|
||||||
newUpdates,
|
newUpdates,
|
||||||
temporary,
|
temporary,
|
||||||
|
@ -113,8 +112,8 @@ module.exports = PackManager = {
|
||||||
}
|
}
|
||||||
|
|
||||||
return PackManager.flushCompressedUpdates(
|
return PackManager.flushCompressedUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
lastUpdate,
|
lastUpdate,
|
||||||
updatesToFlush,
|
updatesToFlush,
|
||||||
temporary,
|
temporary,
|
||||||
|
@ -123,8 +122,8 @@ module.exports = PackManager = {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
return PackManager.insertCompressedUpdates(
|
return PackManager.insertCompressedUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
null,
|
null,
|
||||||
updatesRemaining,
|
updatesRemaining,
|
||||||
temporary,
|
temporary,
|
||||||
|
@ -135,8 +134,8 @@ module.exports = PackManager = {
|
||||||
},
|
},
|
||||||
|
|
||||||
flushCompressedUpdates(
|
flushCompressedUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
lastUpdate,
|
lastUpdate,
|
||||||
newUpdates,
|
newUpdates,
|
||||||
temporary,
|
temporary,
|
||||||
|
@ -167,8 +166,8 @@ module.exports = PackManager = {
|
||||||
|
|
||||||
if (canAppend) {
|
if (canAppend) {
|
||||||
return PackManager.appendUpdatesToExistingPack(
|
return PackManager.appendUpdatesToExistingPack(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
lastUpdate,
|
lastUpdate,
|
||||||
newUpdates,
|
newUpdates,
|
||||||
temporary,
|
temporary,
|
||||||
|
@ -176,8 +175,8 @@ module.exports = PackManager = {
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
return PackManager.insertUpdatesIntoNewPack(
|
return PackManager.insertUpdatesIntoNewPack(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
newUpdates,
|
newUpdates,
|
||||||
temporary,
|
temporary,
|
||||||
callback
|
callback
|
||||||
|
@ -185,13 +184,7 @@ module.exports = PackManager = {
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
insertUpdatesIntoNewPack(
|
insertUpdatesIntoNewPack(projectId, docId, newUpdates, temporary, callback) {
|
||||||
project_id,
|
|
||||||
doc_id,
|
|
||||||
newUpdates,
|
|
||||||
temporary,
|
|
||||||
callback
|
|
||||||
) {
|
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
|
@ -200,8 +193,8 @@ module.exports = PackManager = {
|
||||||
const n = newUpdates.length
|
const n = newUpdates.length
|
||||||
const sz = BSON.calculateObjectSize(newUpdates)
|
const sz = BSON.calculateObjectSize(newUpdates)
|
||||||
const newPack = {
|
const newPack = {
|
||||||
project_id: ObjectId(project_id.toString()),
|
project_id: ObjectId(projectId.toString()),
|
||||||
doc_id: ObjectId(doc_id.toString()),
|
doc_id: ObjectId(docId.toString()),
|
||||||
pack: newUpdates,
|
pack: newUpdates,
|
||||||
n,
|
n,
|
||||||
sz,
|
sz,
|
||||||
|
@ -218,7 +211,7 @@ module.exports = PackManager = {
|
||||||
newPack.last_checked = new Date(Date.now() + 30 * DAYS) // never check temporary packs
|
newPack.last_checked = new Date(Date.now() + 30 * DAYS) // never check temporary packs
|
||||||
}
|
}
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, newUpdates },
|
{ projectId, docId, newUpdates },
|
||||||
'inserting updates into new pack'
|
'inserting updates into new pack'
|
||||||
)
|
)
|
||||||
return db.docHistory.insertOne(newPack, function (err) {
|
return db.docHistory.insertOne(newPack, function (err) {
|
||||||
|
@ -229,14 +222,14 @@ module.exports = PackManager = {
|
||||||
if (temporary) {
|
if (temporary) {
|
||||||
return callback()
|
return callback()
|
||||||
} else {
|
} else {
|
||||||
return PackManager.updateIndex(project_id, doc_id, callback)
|
return PackManager.updateIndex(projectId, docId, callback)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
appendUpdatesToExistingPack(
|
appendUpdatesToExistingPack(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
lastUpdate,
|
lastUpdate,
|
||||||
newUpdates,
|
newUpdates,
|
||||||
temporary,
|
temporary,
|
||||||
|
@ -251,8 +244,8 @@ module.exports = PackManager = {
|
||||||
const sz = BSON.calculateObjectSize(newUpdates)
|
const sz = BSON.calculateObjectSize(newUpdates)
|
||||||
const query = {
|
const query = {
|
||||||
_id: lastUpdate._id,
|
_id: lastUpdate._id,
|
||||||
project_id: ObjectId(project_id.toString()),
|
project_id: ObjectId(projectId.toString()),
|
||||||
doc_id: ObjectId(doc_id.toString()),
|
doc_id: ObjectId(docId.toString()),
|
||||||
pack: { $exists: true },
|
pack: { $exists: true },
|
||||||
}
|
}
|
||||||
const update = {
|
const update = {
|
||||||
|
@ -272,7 +265,7 @@ module.exports = PackManager = {
|
||||||
update.$set.expiresAt = new Date(Date.now() + 7 * DAYS)
|
update.$set.expiresAt = new Date(Date.now() + 7 * DAYS)
|
||||||
}
|
}
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, lastUpdate, newUpdates },
|
{ projectId, docId, lastUpdate, newUpdates },
|
||||||
'appending updates to existing pack'
|
'appending updates to existing pack'
|
||||||
)
|
)
|
||||||
Metrics.inc(`append-pack-${temporary ? 'temporary' : 'permanent'}`)
|
Metrics.inc(`append-pack-${temporary ? 'temporary' : 'permanent'}`)
|
||||||
|
@ -281,18 +274,18 @@ module.exports = PackManager = {
|
||||||
|
|
||||||
// Retrieve all changes for a document
|
// Retrieve all changes for a document
|
||||||
|
|
||||||
getOpsByVersionRange(project_id, doc_id, fromVersion, toVersion, callback) {
|
getOpsByVersionRange(projectId, docId, fromVersion, toVersion, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return PackManager.loadPacksByVersionRange(
|
return PackManager.loadPacksByVersionRange(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
fromVersion,
|
fromVersion,
|
||||||
toVersion,
|
toVersion,
|
||||||
function (error) {
|
function (error) {
|
||||||
if (error) return callback(error)
|
if (error) return callback(error)
|
||||||
const query = { doc_id: ObjectId(doc_id.toString()) }
|
const query = { doc_id: ObjectId(docId.toString()) }
|
||||||
if (toVersion != null) {
|
if (toVersion != null) {
|
||||||
query.v = { $lte: toVersion }
|
query.v = { $lte: toVersion }
|
||||||
}
|
}
|
||||||
|
@ -335,14 +328,8 @@ module.exports = PackManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
loadPacksByVersionRange(
|
loadPacksByVersionRange(projectId, docId, fromVersion, toVersion, callback) {
|
||||||
project_id,
|
return PackManager.getIndex(docId, function (err, indexResult) {
|
||||||
doc_id,
|
|
||||||
fromVersion,
|
|
||||||
toVersion,
|
|
||||||
callback
|
|
||||||
) {
|
|
||||||
return PackManager.getIndex(doc_id, function (err, indexResult) {
|
|
||||||
let pack
|
let pack
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
|
@ -369,8 +356,8 @@ module.exports = PackManager = {
|
||||||
})()
|
})()
|
||||||
if (neededIds.length) {
|
if (neededIds.length) {
|
||||||
return PackManager.fetchPacksIfNeeded(
|
return PackManager.fetchPacksIfNeeded(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
neededIds,
|
neededIds,
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
|
@ -380,20 +367,17 @@ module.exports = PackManager = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
fetchPacksIfNeeded(project_id, doc_id, pack_ids, callback) {
|
fetchPacksIfNeeded(projectId, docId, packIds, callback) {
|
||||||
let id
|
let id
|
||||||
return db.docHistory
|
return db.docHistory
|
||||||
.find(
|
.find({ _id: { $in: packIds.map(ObjectId) } }, { projection: { _id: 1 } })
|
||||||
{ _id: { $in: pack_ids.map(ObjectId) } },
|
|
||||||
{ projection: { _id: 1 } }
|
|
||||||
)
|
|
||||||
.toArray(function (err, loadedPacks) {
|
.toArray(function (err, loadedPacks) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
const allPackIds = (() => {
|
const allPackIds = (() => {
|
||||||
const result1 = []
|
const result1 = []
|
||||||
for (id of Array.from(pack_ids)) {
|
for (id of Array.from(packIds)) {
|
||||||
result1.push(id.toString())
|
result1.push(id.toString())
|
||||||
}
|
}
|
||||||
return result1
|
return result1
|
||||||
|
@ -403,7 +387,7 @@ module.exports = PackManager = {
|
||||||
)
|
)
|
||||||
const packIdsToFetch = _.difference(allPackIds, loadedPackIds)
|
const packIdsToFetch = _.difference(allPackIds, loadedPackIds)
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, loadedPackIds, allPackIds, packIdsToFetch },
|
{ projectId, docId, loadedPackIds, allPackIds, packIdsToFetch },
|
||||||
'analysed packs'
|
'analysed packs'
|
||||||
)
|
)
|
||||||
if (packIdsToFetch.length === 0) {
|
if (packIdsToFetch.length === 0) {
|
||||||
|
@ -412,27 +396,26 @@ module.exports = PackManager = {
|
||||||
return async.eachLimit(
|
return async.eachLimit(
|
||||||
packIdsToFetch,
|
packIdsToFetch,
|
||||||
4,
|
4,
|
||||||
(pack_id, cb) =>
|
(packId, cb) => MongoAWS.unArchivePack(projectId, docId, packId, cb),
|
||||||
MongoAWS.unArchivePack(project_id, doc_id, pack_id, cb),
|
|
||||||
function (err) {
|
function (err) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
logger.debug({ project_id, doc_id }, 'done unarchiving')
|
logger.debug({ projectId, docId }, 'done unarchiving')
|
||||||
return callback()
|
return callback()
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
findAllDocsInProject(project_id, callback) {
|
findAllDocsInProject(projectId, callback) {
|
||||||
const docIdSet = new Set()
|
const docIdSet = new Set()
|
||||||
async.series(
|
async.series(
|
||||||
[
|
[
|
||||||
cb => {
|
cb => {
|
||||||
db.docHistory
|
db.docHistory
|
||||||
.find(
|
.find(
|
||||||
{ project_id: ObjectId(project_id) },
|
{ project_id: ObjectId(projectId) },
|
||||||
{ projection: { pack: false } }
|
{ projection: { pack: false } }
|
||||||
)
|
)
|
||||||
.toArray((err, packs) => {
|
.toArray((err, packs) => {
|
||||||
|
@ -445,7 +428,7 @@ module.exports = PackManager = {
|
||||||
},
|
},
|
||||||
cb => {
|
cb => {
|
||||||
db.docHistoryIndex
|
db.docHistoryIndex
|
||||||
.find({ project_id: ObjectId(project_id) })
|
.find({ project_id: ObjectId(projectId) })
|
||||||
.toArray((err, indexes) => {
|
.toArray((err, indexes) => {
|
||||||
if (err) return callback(err)
|
if (err) return callback(err)
|
||||||
indexes.forEach(index => {
|
indexes.forEach(index => {
|
||||||
|
@ -514,9 +497,9 @@ module.exports = PackManager = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
makeProjectIterator(project_id, before, callback) {
|
makeProjectIterator(projectId, before, callback) {
|
||||||
PackManager._findPacks(
|
PackManager._findPacks(
|
||||||
{ project_id: ObjectId(project_id) },
|
{ project_id: ObjectId(projectId) },
|
||||||
{ 'meta.end_ts': -1 },
|
{ 'meta.end_ts': -1 },
|
||||||
function (err, allPacks) {
|
function (err, allPacks) {
|
||||||
if (err) return callback(err)
|
if (err) return callback(err)
|
||||||
|
@ -528,9 +511,9 @@ module.exports = PackManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
makeDocIterator(doc_id, callback) {
|
makeDocIterator(docId, callback) {
|
||||||
PackManager._findPacks(
|
PackManager._findPacks(
|
||||||
{ doc_id: ObjectId(doc_id) },
|
{ doc_id: ObjectId(docId) },
|
||||||
{ v: -1 },
|
{ v: -1 },
|
||||||
function (err, allPacks) {
|
function (err, allPacks) {
|
||||||
if (err) return callback(err)
|
if (err) return callback(err)
|
||||||
|
@ -539,13 +522,13 @@ module.exports = PackManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getPackById(project_id, doc_id, pack_id, callback) {
|
getPackById(projectId, docId, packId, callback) {
|
||||||
return db.docHistory.findOne({ _id: pack_id }, function (err, pack) {
|
return db.docHistory.findOne({ _id: packId }, function (err, pack) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
if (pack == null) {
|
if (pack == null) {
|
||||||
return MongoAWS.unArchivePack(project_id, doc_id, pack_id, callback)
|
return MongoAWS.unArchivePack(projectId, docId, packId, callback)
|
||||||
} else if (pack.expiresAt != null && pack.temporary === false) {
|
} else if (pack.expiresAt != null && pack.temporary === false) {
|
||||||
// we only need to touch the TTL when listing the changes in the project
|
// we only need to touch the TTL when listing the changes in the project
|
||||||
// because diffs on individual documents are always done after that
|
// because diffs on individual documents are always done after that
|
||||||
|
@ -573,24 +556,24 @@ module.exports = PackManager = {
|
||||||
|
|
||||||
// Manage docHistoryIndex collection
|
// Manage docHistoryIndex collection
|
||||||
|
|
||||||
getIndex(doc_id, callback) {
|
getIndex(docId, callback) {
|
||||||
return db.docHistoryIndex.findOne(
|
return db.docHistoryIndex.findOne(
|
||||||
{ _id: ObjectId(doc_id.toString()) },
|
{ _id: ObjectId(docId.toString()) },
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getPackFromIndex(doc_id, pack_id, callback) {
|
getPackFromIndex(docId, packId, callback) {
|
||||||
return db.docHistoryIndex.findOne(
|
return db.docHistoryIndex.findOne(
|
||||||
{ _id: ObjectId(doc_id.toString()), 'packs._id': pack_id },
|
{ _id: ObjectId(docId.toString()), 'packs._id': packId },
|
||||||
{ projection: { 'packs.$': 1 } },
|
{ projection: { 'packs.$': 1 } },
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getLastPackFromIndex(doc_id, callback) {
|
getLastPackFromIndex(docId, callback) {
|
||||||
return db.docHistoryIndex.findOne(
|
return db.docHistoryIndex.findOne(
|
||||||
{ _id: ObjectId(doc_id.toString()) },
|
{ _id: ObjectId(docId.toString()) },
|
||||||
{ projection: { packs: { $slice: -1 } } },
|
{ projection: { packs: { $slice: -1 } } },
|
||||||
function (err, indexPack) {
|
function (err, indexPack) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
|
@ -604,8 +587,8 @@ module.exports = PackManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getIndexWithKeys(doc_id, callback) {
|
getIndexWithKeys(docId, callback) {
|
||||||
return PackManager.getIndex(doc_id, function (err, index) {
|
return PackManager.getIndex(docId, function (err, index) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
|
@ -621,10 +604,10 @@ module.exports = PackManager = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
initialiseIndex(project_id, doc_id, callback) {
|
initialiseIndex(projectId, docId, callback) {
|
||||||
return PackManager.findCompletedPacks(
|
return PackManager.findCompletedPacks(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
function (err, packs) {
|
function (err, packs) {
|
||||||
// console.log 'err', err, 'packs', packs, packs?.length
|
// console.log 'err', err, 'packs', packs, packs?.length
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
|
@ -634,8 +617,8 @@ module.exports = PackManager = {
|
||||||
return callback()
|
return callback()
|
||||||
}
|
}
|
||||||
return PackManager.insertPacksIntoIndexWithLock(
|
return PackManager.insertPacksIntoIndexWithLock(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
packs,
|
packs,
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
|
@ -643,11 +626,11 @@ module.exports = PackManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
updateIndex(project_id, doc_id, callback) {
|
updateIndex(projectId, docId, callback) {
|
||||||
// find all packs prior to current pack
|
// find all packs prior to current pack
|
||||||
return PackManager.findUnindexedPacks(
|
return PackManager.findUnindexedPacks(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
function (err, newPacks) {
|
function (err, newPacks) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
|
@ -656,15 +639,15 @@ module.exports = PackManager = {
|
||||||
return callback()
|
return callback()
|
||||||
}
|
}
|
||||||
return PackManager.insertPacksIntoIndexWithLock(
|
return PackManager.insertPacksIntoIndexWithLock(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
newPacks,
|
newPacks,
|
||||||
function (err) {
|
function (err) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, newPacks },
|
{ projectId, docId, newPacks },
|
||||||
'added new packs to index'
|
'added new packs to index'
|
||||||
)
|
)
|
||||||
return callback()
|
return callback()
|
||||||
|
@ -674,9 +657,9 @@ module.exports = PackManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
findCompletedPacks(project_id, doc_id, callback) {
|
findCompletedPacks(projectId, docId, callback) {
|
||||||
const query = {
|
const query = {
|
||||||
doc_id: ObjectId(doc_id.toString()),
|
doc_id: ObjectId(docId.toString()),
|
||||||
expiresAt: { $exists: false },
|
expiresAt: { $exists: false },
|
||||||
}
|
}
|
||||||
return db.docHistory
|
return db.docHistory
|
||||||
|
@ -700,9 +683,9 @@ module.exports = PackManager = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
findPacks(project_id, doc_id, callback) {
|
findPacks(projectId, docId, callback) {
|
||||||
const query = {
|
const query = {
|
||||||
doc_id: ObjectId(doc_id.toString()),
|
doc_id: ObjectId(docId.toString()),
|
||||||
expiresAt: { $exists: false },
|
expiresAt: { $exists: false },
|
||||||
}
|
}
|
||||||
return db.docHistory
|
return db.docHistory
|
||||||
|
@ -722,14 +705,14 @@ module.exports = PackManager = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
findUnindexedPacks(project_id, doc_id, callback) {
|
findUnindexedPacks(projectId, docId, callback) {
|
||||||
return PackManager.getIndexWithKeys(doc_id, function (err, indexResult) {
|
return PackManager.getIndexWithKeys(docId, function (err, indexResult) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
return PackManager.findCompletedPacks(
|
return PackManager.findCompletedPacks(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
function (err, historyPacks) {
|
function (err, historyPacks) {
|
||||||
let pack
|
let pack
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
|
@ -770,7 +753,7 @@ module.exports = PackManager = {
|
||||||
})()
|
})()
|
||||||
if (newPacks.length) {
|
if (newPacks.length) {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, n: newPacks.length },
|
{ projectId, docId, n: newPacks.length },
|
||||||
'found new packs'
|
'found new packs'
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
@ -780,13 +763,13 @@ module.exports = PackManager = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
insertPacksIntoIndexWithLock(project_id, doc_id, newPacks, callback) {
|
insertPacksIntoIndexWithLock(projectId, docId, newPacks, callback) {
|
||||||
return LockManager.runWithLock(
|
return LockManager.runWithLock(
|
||||||
keys.historyIndexLock({ doc_id }),
|
keys.historyIndexLock({ doc_id: docId }),
|
||||||
releaseLock =>
|
releaseLock =>
|
||||||
PackManager._insertPacksIntoIndex(
|
PackManager._insertPacksIntoIndex(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
newPacks,
|
newPacks,
|
||||||
releaseLock
|
releaseLock
|
||||||
),
|
),
|
||||||
|
@ -794,11 +777,11 @@ module.exports = PackManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
_insertPacksIntoIndex(project_id, doc_id, newPacks, callback) {
|
_insertPacksIntoIndex(projectId, docId, newPacks, callback) {
|
||||||
return db.docHistoryIndex.updateOne(
|
return db.docHistoryIndex.updateOne(
|
||||||
{ _id: ObjectId(doc_id.toString()) },
|
{ _id: ObjectId(docId.toString()) },
|
||||||
{
|
{
|
||||||
$setOnInsert: { project_id: ObjectId(project_id.toString()) },
|
$setOnInsert: { project_id: ObjectId(projectId.toString()) },
|
||||||
$push: {
|
$push: {
|
||||||
packs: { $each: newPacks, $sort: { v: 1 } },
|
packs: { $each: newPacks, $sort: { v: 1 } },
|
||||||
},
|
},
|
||||||
|
@ -812,14 +795,14 @@ module.exports = PackManager = {
|
||||||
|
|
||||||
// Archiving packs to S3
|
// Archiving packs to S3
|
||||||
|
|
||||||
archivePack(project_id, doc_id, pack_id, callback) {
|
archivePack(projectId, docId, packId, callback) {
|
||||||
const clearFlagOnError = function (err, cb) {
|
const clearFlagOnError = function (err, cb) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
// clear the inS3 flag on error
|
// clear the inS3 flag on error
|
||||||
return PackManager.clearPackAsArchiveInProgress(
|
return PackManager.clearPackAsArchiveInProgress(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
pack_id,
|
packId,
|
||||||
function (err2) {
|
function (err2) {
|
||||||
if (err2 != null) {
|
if (err2 != null) {
|
||||||
return cb(err2)
|
return cb(err2)
|
||||||
|
@ -834,42 +817,27 @@ module.exports = PackManager = {
|
||||||
return async.series(
|
return async.series(
|
||||||
[
|
[
|
||||||
cb =>
|
cb =>
|
||||||
PackManager.checkArchiveNotInProgress(
|
PackManager.checkArchiveNotInProgress(projectId, docId, packId, cb),
|
||||||
project_id,
|
|
||||||
doc_id,
|
|
||||||
pack_id,
|
|
||||||
cb
|
|
||||||
),
|
|
||||||
cb =>
|
cb =>
|
||||||
PackManager.markPackAsArchiveInProgress(
|
PackManager.markPackAsArchiveInProgress(projectId, docId, packId, cb),
|
||||||
project_id,
|
|
||||||
doc_id,
|
|
||||||
pack_id,
|
|
||||||
cb
|
|
||||||
),
|
|
||||||
cb =>
|
cb =>
|
||||||
MongoAWS.archivePack(project_id, doc_id, pack_id, err =>
|
MongoAWS.archivePack(projectId, docId, packId, err =>
|
||||||
clearFlagOnError(err, cb)
|
clearFlagOnError(err, cb)
|
||||||
),
|
),
|
||||||
cb =>
|
cb =>
|
||||||
PackManager.checkArchivedPack(project_id, doc_id, pack_id, err =>
|
PackManager.checkArchivedPack(projectId, docId, packId, err =>
|
||||||
clearFlagOnError(err, cb)
|
clearFlagOnError(err, cb)
|
||||||
),
|
),
|
||||||
cb => PackManager.markPackAsArchived(project_id, doc_id, pack_id, cb),
|
cb => PackManager.markPackAsArchived(projectId, docId, packId, cb),
|
||||||
cb =>
|
cb =>
|
||||||
PackManager.setTTLOnArchivedPack(
|
PackManager.setTTLOnArchivedPack(projectId, docId, packId, callback),
|
||||||
project_id,
|
|
||||||
doc_id,
|
|
||||||
pack_id,
|
|
||||||
callback
|
|
||||||
),
|
|
||||||
],
|
],
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
checkArchivedPack(project_id, doc_id, pack_id, callback) {
|
checkArchivedPack(projectId, docId, packId, callback) {
|
||||||
return db.docHistory.findOne({ _id: pack_id }, function (err, pack) {
|
return db.docHistory.findOne({ _id: packId }, function (err, pack) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
|
@ -877,9 +845,9 @@ module.exports = PackManager = {
|
||||||
return callback(new Error('pack not found'))
|
return callback(new Error('pack not found'))
|
||||||
}
|
}
|
||||||
return MongoAWS.readArchivedPack(
|
return MongoAWS.readArchivedPack(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
pack_id,
|
packId,
|
||||||
function (err, result) {
|
function (err, result) {
|
||||||
if (err) return callback(err)
|
if (err) return callback(err)
|
||||||
delete result.last_checked
|
delete result.last_checked
|
||||||
|
@ -917,8 +885,8 @@ module.exports = PackManager = {
|
||||||
},
|
},
|
||||||
// Extra methods to test archive/unarchive for a doc_id
|
// Extra methods to test archive/unarchive for a doc_id
|
||||||
|
|
||||||
pushOldPacks(project_id, doc_id, callback) {
|
pushOldPacks(projectId, docId, callback) {
|
||||||
return PackManager.findPacks(project_id, doc_id, function (err, packs) {
|
return PackManager.findPacks(projectId, docId, function (err, packs) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
|
@ -926,18 +894,18 @@ module.exports = PackManager = {
|
||||||
return callback()
|
return callback()
|
||||||
}
|
}
|
||||||
return PackManager.processOldPack(
|
return PackManager.processOldPack(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
packs[0]._id,
|
packs[0]._id,
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
pullOldPacks(project_id, doc_id, callback) {
|
pullOldPacks(projectId, docId, callback) {
|
||||||
return PackManager.loadPacksByVersionRange(
|
return PackManager.loadPacksByVersionRange(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
null,
|
null,
|
||||||
null,
|
null,
|
||||||
callback
|
callback
|
||||||
|
@ -946,21 +914,16 @@ module.exports = PackManager = {
|
||||||
|
|
||||||
// Processing old packs via worker
|
// Processing old packs via worker
|
||||||
|
|
||||||
processOldPack(project_id, doc_id, pack_id, callback) {
|
processOldPack(projectId, docId, packId, callback) {
|
||||||
const markAsChecked = err =>
|
const markAsChecked = err =>
|
||||||
PackManager.markPackAsChecked(
|
PackManager.markPackAsChecked(projectId, docId, packId, function (err2) {
|
||||||
project_id,
|
if (err2 != null) {
|
||||||
doc_id,
|
return callback(err2)
|
||||||
pack_id,
|
|
||||||
function (err2) {
|
|
||||||
if (err2 != null) {
|
|
||||||
return callback(err2)
|
|
||||||
}
|
|
||||||
return callback(err)
|
|
||||||
}
|
}
|
||||||
)
|
return callback(err)
|
||||||
logger.debug({ project_id, doc_id }, 'processing old packs')
|
})
|
||||||
return db.docHistory.findOne({ _id: pack_id }, function (err, pack) {
|
logger.debug({ projectId, docId }, 'processing old packs')
|
||||||
|
return db.docHistory.findOne({ _id: packId }, function (err, pack) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return markAsChecked(err)
|
return markAsChecked(err)
|
||||||
}
|
}
|
||||||
|
@ -971,8 +934,8 @@ module.exports = PackManager = {
|
||||||
return callback()
|
return callback()
|
||||||
} // return directly
|
} // return directly
|
||||||
return PackManager.finaliseIfNeeded(
|
return PackManager.finaliseIfNeeded(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
pack._id,
|
pack._id,
|
||||||
pack,
|
pack,
|
||||||
function (err) {
|
function (err) {
|
||||||
|
@ -980,15 +943,15 @@ module.exports = PackManager = {
|
||||||
return markAsChecked(err)
|
return markAsChecked(err)
|
||||||
}
|
}
|
||||||
return PackManager.updateIndexIfNeeded(
|
return PackManager.updateIndexIfNeeded(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
function (err) {
|
function (err) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return markAsChecked(err)
|
return markAsChecked(err)
|
||||||
}
|
}
|
||||||
return PackManager.findUnarchivedPacks(
|
return PackManager.findUnarchivedPacks(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
function (err, unarchivedPacks) {
|
function (err, unarchivedPacks) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return markAsChecked(err)
|
return markAsChecked(err)
|
||||||
|
@ -999,7 +962,7 @@ module.exports = PackManager = {
|
||||||
: undefined)
|
: undefined)
|
||||||
) {
|
) {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id },
|
{ projectId, docId },
|
||||||
'no packs need archiving'
|
'no packs need archiving'
|
||||||
)
|
)
|
||||||
return markAsChecked()
|
return markAsChecked()
|
||||||
|
@ -1007,12 +970,12 @@ module.exports = PackManager = {
|
||||||
return async.eachSeries(
|
return async.eachSeries(
|
||||||
unarchivedPacks,
|
unarchivedPacks,
|
||||||
(pack, cb) =>
|
(pack, cb) =>
|
||||||
PackManager.archivePack(project_id, doc_id, pack._id, cb),
|
PackManager.archivePack(projectId, docId, pack._id, cb),
|
||||||
function (err) {
|
function (err) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return markAsChecked(err)
|
return markAsChecked(err)
|
||||||
}
|
}
|
||||||
logger.debug({ project_id, doc_id }, 'done processing')
|
logger.debug({ projectId, docId }, 'done processing')
|
||||||
return markAsChecked()
|
return markAsChecked()
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
@ -1025,88 +988,80 @@ module.exports = PackManager = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
finaliseIfNeeded(project_id, doc_id, pack_id, pack, callback) {
|
finaliseIfNeeded(projectId, docId, packId, pack, callback) {
|
||||||
const sz = pack.sz / (1024 * 1024) // in fractions of a megabyte
|
const sz = pack.sz / (1024 * 1024) // in fractions of a megabyte
|
||||||
const n = pack.n / 1024 // in fraction of 1024 ops
|
const n = pack.n / 1024 // in fraction of 1024 ops
|
||||||
const age = (Date.now() - pack.meta.end_ts) / DAYS
|
const age = (Date.now() - pack.meta.end_ts) / DAYS
|
||||||
if (age < 30) {
|
if (age < 30) {
|
||||||
// always keep if less than 1 month old
|
// always keep if less than 1 month old
|
||||||
logger.debug(
|
logger.debug({ projectId, docId, packId, age }, 'less than 30 days old')
|
||||||
{ project_id, doc_id, pack_id, age },
|
|
||||||
'less than 30 days old'
|
|
||||||
)
|
|
||||||
return callback()
|
return callback()
|
||||||
}
|
}
|
||||||
// compute an archiving threshold which decreases for each month of age
|
// compute an archiving threshold which decreases for each month of age
|
||||||
const archive_threshold = 30 / age
|
const archiveThreshold = 30 / age
|
||||||
if (sz > archive_threshold || n > archive_threshold || age > 90) {
|
if (sz > archiveThreshold || n > archiveThreshold || age > 90) {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, pack_id, age, archive_threshold, sz, n },
|
{ projectId, docId, packId, age, archiveThreshold, sz, n },
|
||||||
'meets archive threshold'
|
'meets archive threshold'
|
||||||
)
|
)
|
||||||
return PackManager.markPackAsFinalisedWithLock(
|
return PackManager.markPackAsFinalisedWithLock(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
pack_id,
|
packId,
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, pack_id, age, archive_threshold, sz, n },
|
{ projectId, docId, packId, age, archiveThreshold, sz, n },
|
||||||
'does not meet archive threshold'
|
'does not meet archive threshold'
|
||||||
)
|
)
|
||||||
return callback()
|
return callback()
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
markPackAsFinalisedWithLock(project_id, doc_id, pack_id, callback) {
|
markPackAsFinalisedWithLock(projectId, docId, packId, callback) {
|
||||||
return LockManager.runWithLock(
|
return LockManager.runWithLock(
|
||||||
keys.historyLock({ doc_id }),
|
keys.historyLock({ doc_id: docId }),
|
||||||
releaseLock =>
|
releaseLock =>
|
||||||
PackManager._markPackAsFinalised(
|
PackManager._markPackAsFinalised(projectId, docId, packId, releaseLock),
|
||||||
project_id,
|
|
||||||
doc_id,
|
|
||||||
pack_id,
|
|
||||||
releaseLock
|
|
||||||
),
|
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
_markPackAsFinalised(project_id, doc_id, pack_id, callback) {
|
_markPackAsFinalised(projectId, docId, packId, callback) {
|
||||||
logger.debug({ project_id, doc_id, pack_id }, 'marking pack as finalised')
|
logger.debug({ projectId, docId, packId }, 'marking pack as finalised')
|
||||||
return db.docHistory.updateOne(
|
return db.docHistory.updateOne(
|
||||||
{ _id: pack_id },
|
{ _id: packId },
|
||||||
{ $set: { finalised: true } },
|
{ $set: { finalised: true } },
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
updateIndexIfNeeded(project_id, doc_id, callback) {
|
updateIndexIfNeeded(projectId, docId, callback) {
|
||||||
logger.debug({ project_id, doc_id }, 'archiving old packs')
|
logger.debug({ projectId, docId }, 'archiving old packs')
|
||||||
return PackManager.getIndexWithKeys(doc_id, function (err, index) {
|
return PackManager.getIndexWithKeys(docId, function (err, index) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
if (index == null) {
|
if (index == null) {
|
||||||
return PackManager.initialiseIndex(project_id, doc_id, callback)
|
return PackManager.initialiseIndex(projectId, docId, callback)
|
||||||
} else {
|
} else {
|
||||||
return PackManager.updateIndex(project_id, doc_id, callback)
|
return PackManager.updateIndex(projectId, docId, callback)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
markPackAsChecked(project_id, doc_id, pack_id, callback) {
|
markPackAsChecked(projectId, docId, packId, callback) {
|
||||||
logger.debug({ project_id, doc_id, pack_id }, 'marking pack as checked')
|
logger.debug({ projectId, docId, packId }, 'marking pack as checked')
|
||||||
return db.docHistory.updateOne(
|
return db.docHistory.updateOne(
|
||||||
{ _id: pack_id },
|
{ _id: packId },
|
||||||
{ $currentDate: { last_checked: true } },
|
{ $currentDate: { last_checked: true } },
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
findUnarchivedPacks(project_id, doc_id, callback) {
|
findUnarchivedPacks(projectId, docId, callback) {
|
||||||
return PackManager.getIndex(doc_id, function (err, indexResult) {
|
return PackManager.getIndex(docId, function (err, indexResult) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
|
@ -1123,7 +1078,7 @@ module.exports = PackManager = {
|
||||||
})()
|
})()
|
||||||
if (unArchivedPacks.length) {
|
if (unArchivedPacks.length) {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, n: unArchivedPacks.length },
|
{ projectId, docId, n: unArchivedPacks.length },
|
||||||
'find unarchived packs'
|
'find unarchived packs'
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
@ -1133,41 +1088,37 @@ module.exports = PackManager = {
|
||||||
|
|
||||||
// Archive locking flags
|
// Archive locking flags
|
||||||
|
|
||||||
checkArchiveNotInProgress(project_id, doc_id, pack_id, callback) {
|
checkArchiveNotInProgress(projectId, docId, packId, callback) {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, pack_id },
|
{ projectId, docId, packId },
|
||||||
'checking if archive in progress'
|
'checking if archive in progress'
|
||||||
)
|
)
|
||||||
return PackManager.getPackFromIndex(
|
return PackManager.getPackFromIndex(docId, packId, function (err, result) {
|
||||||
doc_id,
|
if (err != null) {
|
||||||
pack_id,
|
return callback(err)
|
||||||
function (err, result) {
|
|
||||||
if (err != null) {
|
|
||||||
return callback(err)
|
|
||||||
}
|
|
||||||
if (result == null) {
|
|
||||||
return callback(new Error('pack not found in index'))
|
|
||||||
}
|
|
||||||
if (result.inS3) {
|
|
||||||
return callback(new Error('pack archiving already done'))
|
|
||||||
} else if (result.inS3 != null) {
|
|
||||||
return callback(new Error('pack archiving already in progress'))
|
|
||||||
} else {
|
|
||||||
return callback()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
)
|
if (result == null) {
|
||||||
|
return callback(new Error('pack not found in index'))
|
||||||
|
}
|
||||||
|
if (result.inS3) {
|
||||||
|
return callback(new Error('pack archiving already done'))
|
||||||
|
} else if (result.inS3 != null) {
|
||||||
|
return callback(new Error('pack archiving already in progress'))
|
||||||
|
} else {
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
markPackAsArchiveInProgress(project_id, doc_id, pack_id, callback) {
|
markPackAsArchiveInProgress(projectId, docId, packId, callback) {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id },
|
{ projectId, docId },
|
||||||
'marking pack as archive in progress status'
|
'marking pack as archive in progress status'
|
||||||
)
|
)
|
||||||
return db.docHistoryIndex.findOneAndUpdate(
|
return db.docHistoryIndex.findOneAndUpdate(
|
||||||
{
|
{
|
||||||
_id: ObjectId(doc_id.toString()),
|
_id: ObjectId(docId.toString()),
|
||||||
packs: { $elemMatch: { _id: pack_id, inS3: { $exists: false } } },
|
packs: { $elemMatch: { _id: packId, inS3: { $exists: false } } },
|
||||||
},
|
},
|
||||||
{ $set: { 'packs.$.inS3': false } },
|
{ $set: { 'packs.$.inS3': false } },
|
||||||
{ projection: { 'packs.$': 1 } },
|
{ projection: { 'packs.$': 1 } },
|
||||||
|
@ -1179,7 +1130,7 @@ module.exports = PackManager = {
|
||||||
return callback(new Error('archive is already in progress'))
|
return callback(new Error('archive is already in progress'))
|
||||||
}
|
}
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, pack_id },
|
{ projectId, docId, packId },
|
||||||
'marked as archive in progress'
|
'marked as archive in progress'
|
||||||
)
|
)
|
||||||
return callback()
|
return callback()
|
||||||
|
@ -1187,27 +1138,27 @@ module.exports = PackManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
clearPackAsArchiveInProgress(project_id, doc_id, pack_id, callback) {
|
clearPackAsArchiveInProgress(projectId, docId, packId, callback) {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, pack_id },
|
{ projectId, docId, packId },
|
||||||
'clearing as archive in progress'
|
'clearing as archive in progress'
|
||||||
)
|
)
|
||||||
return db.docHistoryIndex.updateOne(
|
return db.docHistoryIndex.updateOne(
|
||||||
{
|
{
|
||||||
_id: ObjectId(doc_id.toString()),
|
_id: ObjectId(docId.toString()),
|
||||||
packs: { $elemMatch: { _id: pack_id, inS3: false } },
|
packs: { $elemMatch: { _id: packId, inS3: false } },
|
||||||
},
|
},
|
||||||
{ $unset: { 'packs.$.inS3': true } },
|
{ $unset: { 'packs.$.inS3': true } },
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
markPackAsArchived(project_id, doc_id, pack_id, callback) {
|
markPackAsArchived(projectId, docId, packId, callback) {
|
||||||
logger.debug({ project_id, doc_id, pack_id }, 'marking pack as archived')
|
logger.debug({ projectId, docId, packId }, 'marking pack as archived')
|
||||||
return db.docHistoryIndex.findOneAndUpdate(
|
return db.docHistoryIndex.findOneAndUpdate(
|
||||||
{
|
{
|
||||||
_id: ObjectId(doc_id.toString()),
|
_id: ObjectId(docId.toString()),
|
||||||
packs: { $elemMatch: { _id: pack_id, inS3: false } },
|
packs: { $elemMatch: { _id: packId, inS3: false } },
|
||||||
},
|
},
|
||||||
{ $set: { 'packs.$.inS3': true } },
|
{ $set: { 'packs.$.inS3': true } },
|
||||||
{ projection: { 'packs.$': 1 } },
|
{ projection: { 'packs.$': 1 } },
|
||||||
|
@ -1218,21 +1169,21 @@ module.exports = PackManager = {
|
||||||
if (!result.value) {
|
if (!result.value) {
|
||||||
return callback(new Error('archive is not marked as progress'))
|
return callback(new Error('archive is not marked as progress'))
|
||||||
}
|
}
|
||||||
logger.debug({ project_id, doc_id, pack_id }, 'marked as archived')
|
logger.debug({ projectId, docId, packId }, 'marked as archived')
|
||||||
return callback()
|
return callback()
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
setTTLOnArchivedPack(project_id, doc_id, pack_id, callback) {
|
setTTLOnArchivedPack(projectId, docId, packId, callback) {
|
||||||
return db.docHistory.updateOne(
|
return db.docHistory.updateOne(
|
||||||
{ _id: pack_id },
|
{ _id: packId },
|
||||||
{ $set: { expiresAt: new Date(Date.now() + 1 * DAYS) } },
|
{ $set: { expiresAt: new Date(Date.now() + 1 * DAYS) } },
|
||||||
function (err) {
|
function (err) {
|
||||||
if (err) {
|
if (err) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
logger.debug({ project_id, doc_id, pack_id }, 'set expiry on pack')
|
logger.debug({ projectId, docId, packId }, 'set expiry on pack')
|
||||||
return callback()
|
return callback()
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
@ -14,7 +13,7 @@
|
||||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
*/
|
*/
|
||||||
let LIMIT, pending
|
let LIMIT, pending
|
||||||
let project_id, doc_id
|
let projectId, docId
|
||||||
const { callbackify } = require('util')
|
const { callbackify } = require('util')
|
||||||
const Settings = require('@overleaf/settings')
|
const Settings = require('@overleaf/settings')
|
||||||
const async = require('async')
|
const async = require('async')
|
||||||
|
@ -48,8 +47,8 @@ if (!source.match(/^[0-9]+$/)) {
|
||||||
const result = (() => {
|
const result = (() => {
|
||||||
const result1 = []
|
const result1 = []
|
||||||
for (const line of Array.from(file.toString().split('\n'))) {
|
for (const line of Array.from(file.toString().split('\n'))) {
|
||||||
;[project_id, doc_id] = Array.from(line.split(' '))
|
;[projectId, docId] = Array.from(line.split(' '))
|
||||||
result1.push({ doc_id, project_id })
|
result1.push({ doc_id: docId, project_id: projectId })
|
||||||
}
|
}
|
||||||
return result1
|
return result1
|
||||||
})()
|
})()
|
||||||
|
@ -108,12 +107,12 @@ const processUpdates = pending =>
|
||||||
pending,
|
pending,
|
||||||
function (result, callback) {
|
function (result, callback) {
|
||||||
let _id
|
let _id
|
||||||
;({ _id, project_id, doc_id } = result)
|
;({ _id, project_id: projectId, doc_id: docId } = result)
|
||||||
COUNT++
|
COUNT++
|
||||||
logger.debug({ project_id, doc_id }, `processing ${COUNT}/${TOTAL}`)
|
logger.debug({ projectId, docId }, `processing ${COUNT}/${TOTAL}`)
|
||||||
if (project_id == null || doc_id == null) {
|
if (projectId == null || docId == null) {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id },
|
{ projectId, docId },
|
||||||
'skipping pack, missing project/doc id'
|
'skipping pack, missing project/doc id'
|
||||||
)
|
)
|
||||||
return callback()
|
return callback()
|
||||||
|
@ -138,9 +137,9 @@ const processUpdates = pending =>
|
||||||
return setTimeout(() => callback(err, result), DOCUMENT_PACK_DELAY)
|
return setTimeout(() => callback(err, result), DOCUMENT_PACK_DELAY)
|
||||||
}
|
}
|
||||||
if (_id == null) {
|
if (_id == null) {
|
||||||
return PackManager.pushOldPacks(project_id, doc_id, handler)
|
return PackManager.pushOldPacks(projectId, docId, handler)
|
||||||
} else {
|
} else {
|
||||||
return PackManager.processOldPack(project_id, doc_id, _id, handler)
|
return PackManager.processOldPack(projectId, docId, _id, handler)
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
function (err, results) {
|
function (err, results) {
|
||||||
|
|
|
@ -1,6 +1,3 @@
|
||||||
/* eslint-disable
|
|
||||||
camelcase,
|
|
||||||
*/
|
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
// Fix any style issues and re-enable lint.
|
// Fix any style issues and re-enable lint.
|
||||||
/*
|
/*
|
||||||
|
@ -19,11 +16,11 @@ const Keys = Settings.redis.history.key_schema
|
||||||
const async = require('async')
|
const async = require('async')
|
||||||
|
|
||||||
module.exports = RedisManager = {
|
module.exports = RedisManager = {
|
||||||
getOldestDocUpdates(doc_id, batchSize, callback) {
|
getOldestDocUpdates(docId, batchSize, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
const key = Keys.uncompressedHistoryOps({ doc_id })
|
const key = Keys.uncompressedHistoryOps({ doc_id: docId })
|
||||||
return rclient.lrange(key, 0, batchSize - 1, callback)
|
return rclient.lrange(key, 0, batchSize - 1, callback)
|
||||||
},
|
},
|
||||||
|
|
||||||
|
@ -42,14 +39,14 @@ module.exports = RedisManager = {
|
||||||
return callback(null, rawUpdates)
|
return callback(null, rawUpdates)
|
||||||
},
|
},
|
||||||
|
|
||||||
deleteAppliedDocUpdates(project_id, doc_id, docUpdates, callback) {
|
deleteAppliedDocUpdates(projectId, docId, docUpdates, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
const multi = rclient.multi()
|
const multi = rclient.multi()
|
||||||
// Delete all the updates which have been applied (exact match)
|
// Delete all the updates which have been applied (exact match)
|
||||||
for (const update of Array.from(docUpdates || [])) {
|
for (const update of Array.from(docUpdates || [])) {
|
||||||
multi.lrem(Keys.uncompressedHistoryOps({ doc_id }), 1, update)
|
multi.lrem(Keys.uncompressedHistoryOps({ doc_id: docId }), 1, update)
|
||||||
}
|
}
|
||||||
return multi.exec(function (error, results) {
|
return multi.exec(function (error, results) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
|
@ -58,8 +55,8 @@ module.exports = RedisManager = {
|
||||||
// It's ok to delete the doc_id from the set here. Even though the list
|
// It's ok to delete the doc_id from the set here. Even though the list
|
||||||
// of updates may not be empty, we will continue to process it until it is.
|
// of updates may not be empty, we will continue to process it until it is.
|
||||||
return rclient.srem(
|
return rclient.srem(
|
||||||
Keys.docsWithHistoryOps({ project_id }),
|
Keys.docsWithHistoryOps({ project_id: projectId }),
|
||||||
doc_id,
|
docId,
|
||||||
function (error) {
|
function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
|
@ -70,11 +67,14 @@ module.exports = RedisManager = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
getDocIdsWithHistoryOps(project_id, callback) {
|
getDocIdsWithHistoryOps(projectId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return rclient.smembers(Keys.docsWithHistoryOps({ project_id }), callback)
|
return rclient.smembers(
|
||||||
|
Keys.docsWithHistoryOps({ project_id: projectId }),
|
||||||
|
callback
|
||||||
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
// iterate over keys asynchronously using redis scan (non-blocking)
|
// iterate over keys asynchronously using redis scan (non-blocking)
|
||||||
|
@ -139,12 +139,12 @@ module.exports = RedisManager = {
|
||||||
}
|
}
|
||||||
return RedisManager._getKeys(
|
return RedisManager._getKeys(
|
||||||
Keys.docsWithHistoryOps({ project_id: '*' }),
|
Keys.docsWithHistoryOps({ project_id: '*' }),
|
||||||
function (error, project_keys) {
|
function (error, projectKeys) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
const project_ids = RedisManager._extractIds(project_keys)
|
const projectIds = RedisManager._extractIds(projectKeys)
|
||||||
return callback(error, project_ids)
|
return callback(error, projectIds)
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
@ -157,12 +157,12 @@ module.exports = RedisManager = {
|
||||||
}
|
}
|
||||||
return RedisManager._getKeys(
|
return RedisManager._getKeys(
|
||||||
Keys.uncompressedHistoryOps({ doc_id: '*' }),
|
Keys.uncompressedHistoryOps({ doc_id: '*' }),
|
||||||
function (error, doc_keys) {
|
function (error, docKeys) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
const doc_ids = RedisManager._extractIds(doc_keys)
|
const docIds = RedisManager._extractIds(docKeys)
|
||||||
return callback(error, doc_ids)
|
return callback(error, docIds)
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
@ -16,24 +15,24 @@ const DiffManager = require('./DiffManager')
|
||||||
const logger = require('@overleaf/logger')
|
const logger = require('@overleaf/logger')
|
||||||
|
|
||||||
module.exports = RestoreManager = {
|
module.exports = RestoreManager = {
|
||||||
restoreToBeforeVersion(project_id, doc_id, version, user_id, callback) {
|
restoreToBeforeVersion(projectId, docId, version, userId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
logger.debug({ project_id, doc_id, version, user_id }, 'restoring document')
|
logger.debug({ projectId, docId, version, userId }, 'restoring document')
|
||||||
return DiffManager.getDocumentBeforeVersion(
|
return DiffManager.getDocumentBeforeVersion(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
version,
|
version,
|
||||||
function (error, content) {
|
function (error, content) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
return DocumentUpdaterManager.setDocument(
|
return DocumentUpdaterManager.setDocument(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
content,
|
content,
|
||||||
user_id,
|
userId,
|
||||||
function (error) {
|
function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
new-cap,
|
new-cap,
|
||||||
no-throw-literal,
|
no-throw-literal,
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
|
@ -16,8 +15,8 @@ let oneMinute, twoMegabytes, UpdateCompressor
|
||||||
const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos)
|
const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos)
|
||||||
const strRemove = (s1, pos, length) => s1.slice(0, pos) + s1.slice(pos + length)
|
const strRemove = (s1, pos, length) => s1.slice(0, pos) + s1.slice(pos + length)
|
||||||
|
|
||||||
const { diff_match_patch } = require('../lib/diff_match_patch')
|
const { diff_match_patch: diffMatchPatch } = require('../lib/diff_match_patch')
|
||||||
const dmp = new diff_match_patch()
|
const dmp = new diffMatchPatch()
|
||||||
|
|
||||||
module.exports = UpdateCompressor = {
|
module.exports = UpdateCompressor = {
|
||||||
NOOP: 'noop',
|
NOOP: 'noop',
|
||||||
|
@ -254,8 +253,8 @@ module.exports = UpdateCompressor = {
|
||||||
firstOp.p === secondOp.p
|
firstOp.p === secondOp.p
|
||||||
) {
|
) {
|
||||||
offset = firstOp.p
|
offset = firstOp.p
|
||||||
const diff_ops = this.diffAsShareJsOps(firstOp.d, secondOp.i)
|
const diffOps = this.diffAsShareJsOps(firstOp.d, secondOp.i)
|
||||||
if (diff_ops.length === 0) {
|
if (diffOps.length === 0) {
|
||||||
return [
|
return [
|
||||||
{
|
{
|
||||||
// Noop
|
// Noop
|
||||||
|
@ -272,7 +271,7 @@ module.exports = UpdateCompressor = {
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
} else {
|
} else {
|
||||||
return diff_ops.map(function (op) {
|
return diffOps.map(function (op) {
|
||||||
op.p += offset
|
op.p += offset
|
||||||
return {
|
return {
|
||||||
meta: {
|
meta: {
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
@ -16,12 +15,12 @@ const WebApiManager = require('./WebApiManager')
|
||||||
const logger = require('@overleaf/logger')
|
const logger = require('@overleaf/logger')
|
||||||
|
|
||||||
module.exports = UpdateTrimmer = {
|
module.exports = UpdateTrimmer = {
|
||||||
shouldTrimUpdates(project_id, callback) {
|
shouldTrimUpdates(projectId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return MongoManager.getProjectMetaData(
|
return MongoManager.getProjectMetaData(
|
||||||
project_id,
|
projectId,
|
||||||
function (error, metadata) {
|
function (error, metadata) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
|
@ -30,22 +29,22 @@ module.exports = UpdateTrimmer = {
|
||||||
return callback(null, false)
|
return callback(null, false)
|
||||||
} else {
|
} else {
|
||||||
return WebApiManager.getProjectDetails(
|
return WebApiManager.getProjectDetails(
|
||||||
project_id,
|
projectId,
|
||||||
function (error, details) {
|
function (error, details) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
logger.debug({ project_id, details }, 'got details')
|
logger.debug({ projectId, details }, 'got details')
|
||||||
if (details?.features?.versioning) {
|
if (details?.features?.versioning) {
|
||||||
return MongoManager.setProjectMetaData(
|
return MongoManager.setProjectMetaData(
|
||||||
project_id,
|
projectId,
|
||||||
{ preserveHistory: true },
|
{ preserveHistory: true },
|
||||||
function (error) {
|
function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
return MongoManager.upgradeHistory(
|
return MongoManager.upgradeHistory(
|
||||||
project_id,
|
projectId,
|
||||||
function (error) {
|
function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
@ -29,13 +28,7 @@ const keys = Settings.redis.lock.key_schema
|
||||||
const util = require('util')
|
const util = require('util')
|
||||||
|
|
||||||
module.exports = UpdatesManager = {
|
module.exports = UpdatesManager = {
|
||||||
compressAndSaveRawUpdates(
|
compressAndSaveRawUpdates(projectId, docId, rawUpdates, temporary, callback) {
|
||||||
project_id,
|
|
||||||
doc_id,
|
|
||||||
rawUpdates,
|
|
||||||
temporary,
|
|
||||||
callback
|
|
||||||
) {
|
|
||||||
let i
|
let i
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
|
@ -54,8 +47,8 @@ module.exports = UpdatesManager = {
|
||||||
if (!(prevVersion < thisVersion)) {
|
if (!(prevVersion < thisVersion)) {
|
||||||
logger.error(
|
logger.error(
|
||||||
{
|
{
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
rawUpdates,
|
rawUpdates,
|
||||||
temporary,
|
temporary,
|
||||||
thisVersion,
|
thisVersion,
|
||||||
|
@ -70,7 +63,7 @@ module.exports = UpdatesManager = {
|
||||||
// FIXME: we no longer need the lastCompressedUpdate, so change functions not to need it
|
// FIXME: we no longer need the lastCompressedUpdate, so change functions not to need it
|
||||||
// CORRECTION: we do use it to log the time in case of error
|
// CORRECTION: we do use it to log the time in case of error
|
||||||
return MongoManager.peekLastCompressedUpdate(
|
return MongoManager.peekLastCompressedUpdate(
|
||||||
doc_id,
|
docId,
|
||||||
function (error, lastCompressedUpdate, lastVersion) {
|
function (error, lastCompressedUpdate, lastVersion) {
|
||||||
// lastCompressedUpdate is the most recent update in Mongo, and
|
// lastCompressedUpdate is the most recent update in Mongo, and
|
||||||
// lastVersion is its sharejs version number.
|
// lastVersion is its sharejs version number.
|
||||||
|
@ -93,23 +86,23 @@ module.exports = UpdatesManager = {
|
||||||
}
|
}
|
||||||
if (discardedUpdates.length) {
|
if (discardedUpdates.length) {
|
||||||
logger.error(
|
logger.error(
|
||||||
{ project_id, doc_id, discardedUpdates, temporary, lastVersion },
|
{ projectId, docId, discardedUpdates, temporary, lastVersion },
|
||||||
'discarded updates already present'
|
'discarded updates already present'
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
if (rawUpdates[0] != null && rawUpdates[0].v !== lastVersion + 1) {
|
if (rawUpdates[0] != null && rawUpdates[0].v !== lastVersion + 1) {
|
||||||
const ts = lastCompressedUpdate?.meta?.end_ts
|
const ts = lastCompressedUpdate?.meta?.end_ts
|
||||||
const last_timestamp = ts != null ? new Date(ts) : 'unknown time'
|
const lastTimestamp = ts != null ? new Date(ts) : 'unknown time'
|
||||||
error = new Error(
|
error = new Error(
|
||||||
`Tried to apply raw op at version ${rawUpdates[0].v} to last compressed update with version ${lastVersion} from ${last_timestamp}`
|
`Tried to apply raw op at version ${rawUpdates[0].v} to last compressed update with version ${lastVersion} from ${lastTimestamp}`
|
||||||
)
|
)
|
||||||
logger.error(
|
logger.error(
|
||||||
{
|
{
|
||||||
err: error,
|
err: error,
|
||||||
doc_id,
|
docId,
|
||||||
project_id,
|
projectId,
|
||||||
prev_end_ts: ts,
|
prevEndTs: ts,
|
||||||
temporary,
|
temporary,
|
||||||
lastCompressedUpdate,
|
lastCompressedUpdate,
|
||||||
},
|
},
|
||||||
|
@ -155,7 +148,7 @@ module.exports = UpdatesManager = {
|
||||||
`dropped op exceeding maximum allowed size of ${REJECT_LARGE_OP_SIZE}`
|
`dropped op exceeding maximum allowed size of ${REJECT_LARGE_OP_SIZE}`
|
||||||
)
|
)
|
||||||
logger.error(
|
logger.error(
|
||||||
{ err: error, doc_id, project_id, size, rawUpdate },
|
{ err: error, docId, projectId, size, rawUpdate },
|
||||||
'dropped op - too big'
|
'dropped op - too big'
|
||||||
)
|
)
|
||||||
rawUpdate.op = []
|
rawUpdate.op = []
|
||||||
|
@ -167,8 +160,8 @@ module.exports = UpdatesManager = {
|
||||||
rawUpdates
|
rawUpdates
|
||||||
)
|
)
|
||||||
return PackManager.insertCompressedUpdates(
|
return PackManager.insertCompressedUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
lastCompressedUpdate,
|
lastCompressedUpdate,
|
||||||
compressedUpdates,
|
compressedUpdates,
|
||||||
temporary,
|
temporary,
|
||||||
|
@ -179,13 +172,13 @@ module.exports = UpdatesManager = {
|
||||||
if (result != null) {
|
if (result != null) {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{
|
{
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
orig_v:
|
origV:
|
||||||
lastCompressedUpdate != null
|
lastCompressedUpdate != null
|
||||||
? lastCompressedUpdate.v
|
? lastCompressedUpdate.v
|
||||||
: undefined,
|
: undefined,
|
||||||
new_v: result.v,
|
newV: result.v,
|
||||||
},
|
},
|
||||||
'inserted updates into pack'
|
'inserted updates into pack'
|
||||||
)
|
)
|
||||||
|
@ -198,12 +191,12 @@ module.exports = UpdatesManager = {
|
||||||
},
|
},
|
||||||
|
|
||||||
// Check whether the updates are temporary (per-project property)
|
// Check whether the updates are temporary (per-project property)
|
||||||
_prepareProjectForUpdates(project_id, callback) {
|
_prepareProjectForUpdates(projectId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return UpdateTrimmer.shouldTrimUpdates(
|
return UpdateTrimmer.shouldTrimUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
function (error, temporary) {
|
function (error, temporary) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
|
@ -214,11 +207,11 @@ module.exports = UpdatesManager = {
|
||||||
},
|
},
|
||||||
|
|
||||||
// Check for project id on document history (per-document property)
|
// Check for project id on document history (per-document property)
|
||||||
_prepareDocForUpdates(project_id, doc_id, callback) {
|
_prepareDocForUpdates(projectId, docId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return MongoManager.backportProjectId(project_id, doc_id, function (error) {
|
return MongoManager.backportProjectId(projectId, docId, function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
|
@ -228,13 +221,13 @@ module.exports = UpdatesManager = {
|
||||||
|
|
||||||
// Apply updates for specific project/doc after preparing at project and doc level
|
// Apply updates for specific project/doc after preparing at project and doc level
|
||||||
REDIS_READ_BATCH_SIZE: 100,
|
REDIS_READ_BATCH_SIZE: 100,
|
||||||
processUncompressedUpdates(project_id, doc_id, temporary, callback) {
|
processUncompressedUpdates(projectId, docId, temporary, callback) {
|
||||||
// get the updates as strings from redis (so we can delete them after they are applied)
|
// get the updates as strings from redis (so we can delete them after they are applied)
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return RedisManager.getOldestDocUpdates(
|
return RedisManager.getOldestDocUpdates(
|
||||||
doc_id,
|
docId,
|
||||||
UpdatesManager.REDIS_READ_BATCH_SIZE,
|
UpdatesManager.REDIS_READ_BATCH_SIZE,
|
||||||
function (error, docUpdates) {
|
function (error, docUpdates) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
|
@ -247,18 +240,18 @@ module.exports = UpdatesManager = {
|
||||||
function (error, rawUpdates) {
|
function (error, rawUpdates) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
logger.err(
|
logger.err(
|
||||||
{ project_id, doc_id, docUpdates },
|
{ projectId, docId, docUpdates },
|
||||||
'failed to parse docUpdates'
|
'failed to parse docUpdates'
|
||||||
)
|
)
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id, rawUpdates },
|
{ projectId, docId, rawUpdates },
|
||||||
'retrieved raw updates from redis'
|
'retrieved raw updates from redis'
|
||||||
)
|
)
|
||||||
return UpdatesManager.compressAndSaveRawUpdates(
|
return UpdatesManager.compressAndSaveRawUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
rawUpdates,
|
rawUpdates,
|
||||||
temporary,
|
temporary,
|
||||||
function (error) {
|
function (error) {
|
||||||
|
@ -266,13 +259,13 @@ module.exports = UpdatesManager = {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id },
|
{ projectId, docId },
|
||||||
'compressed and saved doc updates'
|
'compressed and saved doc updates'
|
||||||
)
|
)
|
||||||
// delete the applied updates from redis
|
// delete the applied updates from redis
|
||||||
return RedisManager.deleteAppliedDocUpdates(
|
return RedisManager.deleteAppliedDocUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
docUpdates,
|
docUpdates,
|
||||||
function (error) {
|
function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
|
@ -281,14 +274,14 @@ module.exports = UpdatesManager = {
|
||||||
if (length === UpdatesManager.REDIS_READ_BATCH_SIZE) {
|
if (length === UpdatesManager.REDIS_READ_BATCH_SIZE) {
|
||||||
// There might be more updates
|
// There might be more updates
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id },
|
{ projectId, docId },
|
||||||
'continuing processing updates'
|
'continuing processing updates'
|
||||||
)
|
)
|
||||||
return setTimeout(
|
return setTimeout(
|
||||||
() =>
|
() =>
|
||||||
UpdatesManager.processUncompressedUpdates(
|
UpdatesManager.processUncompressedUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
temporary,
|
temporary,
|
||||||
callback
|
callback
|
||||||
),
|
),
|
||||||
|
@ -296,7 +289,7 @@ module.exports = UpdatesManager = {
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ project_id, doc_id },
|
{ projectId, docId },
|
||||||
'all raw updates processed'
|
'all raw updates processed'
|
||||||
)
|
)
|
||||||
return callback()
|
return callback()
|
||||||
|
@ -312,19 +305,19 @@ module.exports = UpdatesManager = {
|
||||||
},
|
},
|
||||||
|
|
||||||
// Process updates for a doc when we flush it individually
|
// Process updates for a doc when we flush it individually
|
||||||
processUncompressedUpdatesWithLock(project_id, doc_id, callback) {
|
processUncompressedUpdatesWithLock(projectId, docId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return UpdatesManager._prepareProjectForUpdates(
|
return UpdatesManager._prepareProjectForUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
function (error, temporary) {
|
function (error, temporary) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
return UpdatesManager._processUncompressedUpdatesForDocWithLock(
|
return UpdatesManager._processUncompressedUpdatesForDocWithLock(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
temporary,
|
temporary,
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
|
@ -334,8 +327,8 @@ module.exports = UpdatesManager = {
|
||||||
|
|
||||||
// Process updates for a doc when the whole project is flushed (internal method)
|
// Process updates for a doc when the whole project is flushed (internal method)
|
||||||
_processUncompressedUpdatesForDocWithLock(
|
_processUncompressedUpdatesForDocWithLock(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
temporary,
|
temporary,
|
||||||
callback
|
callback
|
||||||
) {
|
) {
|
||||||
|
@ -343,18 +336,18 @@ module.exports = UpdatesManager = {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return UpdatesManager._prepareDocForUpdates(
|
return UpdatesManager._prepareDocForUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
function (error) {
|
function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
return LockManager.runWithLock(
|
return LockManager.runWithLock(
|
||||||
keys.historyLock({ doc_id }),
|
keys.historyLock({ doc_id: docId }),
|
||||||
releaseLock =>
|
releaseLock =>
|
||||||
UpdatesManager.processUncompressedUpdates(
|
UpdatesManager.processUncompressedUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
temporary,
|
temporary,
|
||||||
releaseLock
|
releaseLock
|
||||||
),
|
),
|
||||||
|
@ -365,31 +358,31 @@ module.exports = UpdatesManager = {
|
||||||
},
|
},
|
||||||
|
|
||||||
// Process all updates for a project, only check project-level information once
|
// Process all updates for a project, only check project-level information once
|
||||||
processUncompressedUpdatesForProject(project_id, callback) {
|
processUncompressedUpdatesForProject(projectId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return RedisManager.getDocIdsWithHistoryOps(
|
return RedisManager.getDocIdsWithHistoryOps(
|
||||||
project_id,
|
projectId,
|
||||||
function (error, doc_ids) {
|
function (error, docIds) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
return UpdatesManager._prepareProjectForUpdates(
|
return UpdatesManager._prepareProjectForUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
function (error, temporary) {
|
function (error, temporary) {
|
||||||
if (error) return callback(error)
|
if (error) return callback(error)
|
||||||
const jobs = []
|
const jobs = []
|
||||||
for (const doc_id of Array.from(doc_ids)) {
|
for (const docId of Array.from(docIds)) {
|
||||||
;(doc_id =>
|
;(docId =>
|
||||||
jobs.push(cb =>
|
jobs.push(cb =>
|
||||||
UpdatesManager._processUncompressedUpdatesForDocWithLock(
|
UpdatesManager._processUncompressedUpdatesForDocWithLock(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
temporary,
|
temporary,
|
||||||
cb
|
cb
|
||||||
)
|
)
|
||||||
))(doc_id)
|
))(docId)
|
||||||
}
|
}
|
||||||
return async.parallelLimit(jobs, 5, callback)
|
return async.parallelLimit(jobs, 5, callback)
|
||||||
}
|
}
|
||||||
|
@ -405,31 +398,31 @@ module.exports = UpdatesManager = {
|
||||||
}
|
}
|
||||||
return RedisManager.getProjectIdsWithHistoryOps(function (
|
return RedisManager.getProjectIdsWithHistoryOps(function (
|
||||||
error,
|
error,
|
||||||
project_ids
|
projectIds
|
||||||
) {
|
) {
|
||||||
let project_id
|
let projectId
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{
|
{
|
||||||
count: project_ids != null ? project_ids.length : undefined,
|
count: projectIds != null ? projectIds.length : undefined,
|
||||||
project_ids,
|
projectIds,
|
||||||
},
|
},
|
||||||
'found projects'
|
'found projects'
|
||||||
)
|
)
|
||||||
const jobs = []
|
const jobs = []
|
||||||
project_ids = _.shuffle(project_ids) // randomise to avoid hitting same projects each time
|
projectIds = _.shuffle(projectIds) // randomise to avoid hitting same projects each time
|
||||||
const selectedProjects =
|
const selectedProjects =
|
||||||
limit < 0 ? project_ids : project_ids.slice(0, limit)
|
limit < 0 ? projectIds : projectIds.slice(0, limit)
|
||||||
for (project_id of Array.from(selectedProjects)) {
|
for (projectId of Array.from(selectedProjects)) {
|
||||||
;(project_id =>
|
;(projectId =>
|
||||||
jobs.push(cb =>
|
jobs.push(cb =>
|
||||||
UpdatesManager.processUncompressedUpdatesForProject(
|
UpdatesManager.processUncompressedUpdatesForProject(
|
||||||
project_id,
|
projectId,
|
||||||
err => cb(null, { failed: err != null, project_id })
|
err => cb(null, { failed: err != null, project_id: projectId })
|
||||||
)
|
)
|
||||||
))(project_id)
|
))(projectId)
|
||||||
}
|
}
|
||||||
return async.series(jobs, function (error, result) {
|
return async.series(jobs, function (error, result) {
|
||||||
let x
|
let x
|
||||||
|
@ -457,7 +450,7 @@ module.exports = UpdatesManager = {
|
||||||
return callback(null, {
|
return callback(null, {
|
||||||
failed: failedProjects,
|
failed: failedProjects,
|
||||||
succeeded: succeededProjects,
|
succeeded: succeededProjects,
|
||||||
all: project_ids,
|
all: projectIds,
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
@ -467,16 +460,13 @@ module.exports = UpdatesManager = {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return RedisManager.getAllDocIdsWithHistoryOps(function (
|
return RedisManager.getAllDocIdsWithHistoryOps(function (error, allDocIds) {
|
||||||
error,
|
|
||||||
all_doc_ids
|
|
||||||
) {
|
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
return RedisManager.getProjectIdsWithHistoryOps(function (
|
return RedisManager.getProjectIdsWithHistoryOps(function (
|
||||||
error,
|
error,
|
||||||
all_project_ids
|
allProjectIds
|
||||||
) {
|
) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
|
@ -484,25 +474,25 @@ module.exports = UpdatesManager = {
|
||||||
// function to get doc_ids for each project
|
// function to get doc_ids for each project
|
||||||
const task = cb =>
|
const task = cb =>
|
||||||
async.concatSeries(
|
async.concatSeries(
|
||||||
all_project_ids,
|
allProjectIds,
|
||||||
RedisManager.getDocIdsWithHistoryOps,
|
RedisManager.getDocIdsWithHistoryOps,
|
||||||
cb
|
cb
|
||||||
)
|
)
|
||||||
// find the dangling doc ids
|
// find the dangling doc ids
|
||||||
return task(function (error, project_doc_ids) {
|
return task(function (error, projectDocIds) {
|
||||||
if (error) return callback(error)
|
if (error) return callback(error)
|
||||||
const dangling_doc_ids = _.difference(all_doc_ids, project_doc_ids)
|
const danglingDocIds = _.difference(allDocIds, projectDocIds)
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ all_doc_ids, all_project_ids, project_doc_ids, dangling_doc_ids },
|
{ allDocIds, allProjectIds, projectDocIds, danglingDocIds },
|
||||||
'checking for dangling doc ids'
|
'checking for dangling doc ids'
|
||||||
)
|
)
|
||||||
return callback(null, dangling_doc_ids)
|
return callback(null, danglingDocIds)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
getDocUpdates(project_id, doc_id, options, callback) {
|
getDocUpdates(projectId, docId, options, callback) {
|
||||||
if (options == null) {
|
if (options == null) {
|
||||||
options = {}
|
options = {}
|
||||||
}
|
}
|
||||||
|
@ -510,16 +500,16 @@ module.exports = UpdatesManager = {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return UpdatesManager.processUncompressedUpdatesWithLock(
|
return UpdatesManager.processUncompressedUpdatesWithLock(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
function (error) {
|
function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
// console.log "options", options
|
// console.log "options", options
|
||||||
return PackManager.getOpsByVersionRange(
|
return PackManager.getOpsByVersionRange(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
options.from,
|
options.from,
|
||||||
options.to,
|
options.to,
|
||||||
function (error, updates) {
|
function (error, updates) {
|
||||||
|
@ -533,7 +523,7 @@ module.exports = UpdatesManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getDocUpdatesWithUserInfo(project_id, doc_id, options, callback) {
|
getDocUpdatesWithUserInfo(projectId, docId, options, callback) {
|
||||||
if (options == null) {
|
if (options == null) {
|
||||||
options = {}
|
options = {}
|
||||||
}
|
}
|
||||||
|
@ -541,8 +531,8 @@ module.exports = UpdatesManager = {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return UpdatesManager.getDocUpdates(
|
return UpdatesManager.getDocUpdates(
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
options,
|
options,
|
||||||
function (error, updates) {
|
function (error, updates) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
|
@ -558,7 +548,7 @@ module.exports = UpdatesManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getSummarizedProjectUpdates(project_id, options, callback) {
|
getSummarizedProjectUpdates(projectId, options, callback) {
|
||||||
if (options == null) {
|
if (options == null) {
|
||||||
options = {}
|
options = {}
|
||||||
}
|
}
|
||||||
|
@ -572,13 +562,13 @@ module.exports = UpdatesManager = {
|
||||||
const { before } = options
|
const { before } = options
|
||||||
let nextBeforeTimestamp = null
|
let nextBeforeTimestamp = null
|
||||||
return UpdatesManager.processUncompressedUpdatesForProject(
|
return UpdatesManager.processUncompressedUpdatesForProject(
|
||||||
project_id,
|
projectId,
|
||||||
function (error) {
|
function (error) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
return PackManager.makeProjectIterator(
|
return PackManager.makeProjectIterator(
|
||||||
project_id,
|
projectId,
|
||||||
before,
|
before,
|
||||||
function (err, iterator) {
|
function (err, iterator) {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
|
@ -692,17 +682,17 @@ module.exports = UpdatesManager = {
|
||||||
}
|
}
|
||||||
const jobs = []
|
const jobs = []
|
||||||
const fetchedUserInfo = {}
|
const fetchedUserInfo = {}
|
||||||
for (const user_id in users) {
|
for (const userId in users) {
|
||||||
;(user_id =>
|
;(userId =>
|
||||||
jobs.push(callback =>
|
jobs.push(callback =>
|
||||||
WebApiManager.getUserInfo(user_id, function (error, userInfo) {
|
WebApiManager.getUserInfo(userId, function (error, userInfo) {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
fetchedUserInfo[user_id] = userInfo
|
fetchedUserInfo[userId] = userInfo
|
||||||
return callback()
|
return callback()
|
||||||
})
|
})
|
||||||
))(user_id)
|
))(userId)
|
||||||
}
|
}
|
||||||
|
|
||||||
return async.series(jobs, function (err) {
|
return async.series(jobs, function (err) {
|
||||||
|
@ -714,15 +704,15 @@ module.exports = UpdatesManager = {
|
||||||
},
|
},
|
||||||
|
|
||||||
fillUserInfo(updates, callback) {
|
fillUserInfo(updates, callback) {
|
||||||
let update, user_id
|
let update, userId
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
const users = {}
|
const users = {}
|
||||||
for (update of Array.from(updates)) {
|
for (update of Array.from(updates)) {
|
||||||
;({ user_id } = update.meta)
|
;({ user_id: userId } = update.meta)
|
||||||
if (UpdatesManager._validUserId(user_id)) {
|
if (UpdatesManager._validUserId(userId)) {
|
||||||
users[user_id] = true
|
users[userId] = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
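The rename in fillUserInfo relies on destructuring with a rename, so the update objects keep their snake_case meta.user_id field and only the local binding changes. A tiny sketch (the id is illustrative):

const update = { meta: { user_id: '507f191e810c19729de860ea' } }

let userId
;({ user_id: userId } = update.meta) // read meta.user_id into the camelCase local

console.log(userId)      // '507f191e810c19729de860ea'
console.log(update.meta) // { user_id: '507f...' } – the stored field name is untouched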
|
@ -733,10 +723,10 @@ module.exports = UpdatesManager = {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
for (update of Array.from(updates)) {
|
for (update of Array.from(updates)) {
|
||||||
;({ user_id } = update.meta)
|
;({ user_id: userId } = update.meta)
|
||||||
delete update.meta.user_id
|
delete update.meta.user_id
|
||||||
if (UpdatesManager._validUserId(user_id)) {
|
if (UpdatesManager._validUserId(userId)) {
|
||||||
update.meta.user = fetchedUserInfo[user_id]
|
update.meta.user = fetchedUserInfo[userId]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return callback(null, updates)
|
return callback(null, updates)
|
||||||
|
@ -745,16 +735,16 @@ module.exports = UpdatesManager = {
|
||||||
},
|
},
|
||||||
|
|
||||||
fillSummarizedUserInfo(updates, callback) {
|
fillSummarizedUserInfo(updates, callback) {
|
||||||
let update, user_id, user_ids
|
let update, userId, userIds
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
const users = {}
|
const users = {}
|
||||||
for (update of Array.from(updates)) {
|
for (update of Array.from(updates)) {
|
||||||
user_ids = update.meta.user_ids || []
|
userIds = update.meta.user_ids || []
|
||||||
for (user_id of Array.from(user_ids)) {
|
for (userId of Array.from(userIds)) {
|
||||||
if (UpdatesManager._validUserId(user_id)) {
|
if (UpdatesManager._validUserId(userId)) {
|
||||||
users[user_id] = true
|
users[userId] = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -766,12 +756,12 @@ module.exports = UpdatesManager = {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
for (update of Array.from(updates)) {
|
for (update of Array.from(updates)) {
|
||||||
user_ids = update.meta.user_ids || []
|
userIds = update.meta.user_ids || []
|
||||||
update.meta.users = []
|
update.meta.users = []
|
||||||
delete update.meta.user_ids
|
delete update.meta.user_ids
|
||||||
for (user_id of Array.from(user_ids)) {
|
for (userId of Array.from(userIds)) {
|
||||||
if (UpdatesManager._validUserId(user_id)) {
|
if (UpdatesManager._validUserId(userId)) {
|
||||||
update.meta.users.push(fetchedUserInfo[user_id])
|
update.meta.users.push(fetchedUserInfo[userId])
|
||||||
} else {
|
} else {
|
||||||
update.meta.users.push(null)
|
update.meta.users.push(null)
|
||||||
}
|
}
|
||||||
|
@ -782,11 +772,11 @@ module.exports = UpdatesManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
_validUserId(user_id) {
|
_validUserId(userId) {
|
||||||
if (user_id == null) {
|
if (userId == null) {
|
||||||
return false
|
return false
|
||||||
} else {
|
} else {
|
||||||
return !!user_id.match(/^[a-f0-9]{24}$/)
|
return !!userId.match(/^[a-f0-9]{24}$/)
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
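_validUserId above accepts exactly the 24-character lowercase-hex string form of a MongoDB ObjectId. A sketch of that behaviour (using test instead of match, which is equivalent for a boolean check):

const validUserId = userId => userId != null && /^[a-f0-9]{24}$/.test(userId)

console.log(validUserId('507f191e810c19729de860ea')) // true
console.log(validUserId('anonymous-user'))           // false
console.log(validUserId(null))                       // false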
|
|
||||||
|
@ -799,7 +789,7 @@ module.exports = UpdatesManager = {
|
||||||
const summarizedUpdates = existingSummarizedUpdates.slice()
|
const summarizedUpdates = existingSummarizedUpdates.slice()
|
||||||
let previousUpdateWasBigDelete = false
|
let previousUpdateWasBigDelete = false
|
||||||
for (const update of Array.from(updates)) {
|
for (const update of Array.from(updates)) {
|
||||||
let doc_id
|
let docId
|
||||||
const earliestUpdate = summarizedUpdates[summarizedUpdates.length - 1]
|
const earliestUpdate = summarizedUpdates[summarizedUpdates.length - 1]
|
||||||
let shouldConcat = false
|
let shouldConcat = false
|
||||||
|
|
||||||
|
@ -837,13 +827,13 @@ module.exports = UpdatesManager = {
|
||||||
update.meta.user_id,
|
update.meta.user_id,
|
||||||
])
|
])
|
||||||
|
|
||||||
doc_id = update.doc_id.toString()
|
docId = update.doc_id.toString()
|
||||||
const doc = earliestUpdate.docs[doc_id]
|
const doc = earliestUpdate.docs[docId]
|
||||||
if (doc != null) {
|
if (doc != null) {
|
||||||
doc.fromV = Math.min(doc.fromV, update.v)
|
doc.fromV = Math.min(doc.fromV, update.v)
|
||||||
doc.toV = Math.max(doc.toV, update.v)
|
doc.toV = Math.max(doc.toV, update.v)
|
||||||
} else {
|
} else {
|
||||||
earliestUpdate.docs[doc_id] = {
|
earliestUpdate.docs[docId] = {
|
||||||
fromV: update.v,
|
fromV: update.v,
|
||||||
toV: update.v,
|
toV: update.v,
|
||||||
}
|
}
|
||||||
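The loop above folds each raw update into a per-doc version range on the summarized update. A reduced sketch of that merge step (doc ids and versions are made up):

const earliestUpdate = { docs: {} }

function mergeDocVersion(update) {
  const docId = update.doc_id.toString()
  const doc = earliestUpdate.docs[docId]
  if (doc != null) {
    // widen the existing range
    doc.fromV = Math.min(doc.fromV, update.v)
    doc.toV = Math.max(doc.toV, update.v)
  } else {
    earliestUpdate.docs[docId] = { fromV: update.v, toV: update.v }
  }
}

mergeDocVersion({ doc_id: 'doc-1', v: 7 })
mergeDocVersion({ doc_id: 'doc-1', v: 4 })
console.log(earliestUpdate.docs) // { 'doc-1': { fromV: 4, toV: 7 } }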
|
|
|
@ -1,6 +1,3 @@
|
||||||
/* eslint-disable
|
|
||||||
camelcase,
|
|
||||||
*/
|
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
// Fix any style issues and re-enable lint.
|
// Fix any style issues and re-enable lint.
|
||||||
/*
|
/*
|
||||||
|
@ -58,21 +55,21 @@ module.exports = WebApiManager = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getUserInfo(user_id, callback) {
|
getUserInfo(userId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
const url = `/user/${user_id}/personal_info`
|
const url = `/user/${userId}/personal_info`
|
||||||
logger.debug({ user_id }, 'getting user info from web')
|
logger.debug({ userId }, 'getting user info from web')
|
||||||
return WebApiManager.sendRequest(url, function (error, body) {
|
return WebApiManager.sendRequest(url, function (error, body) {
|
||||||
let user
|
let user
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
logger.error({ err: error, user_id, url }, 'error accessing web')
|
logger.error({ err: error, userId, url }, 'error accessing web')
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
|
|
||||||
if (body === null) {
|
if (body === null) {
|
||||||
logger.error({ user_id, url }, 'no user found')
|
logger.error({ userId, url }, 'no user found')
|
||||||
return callback(null, null)
|
return callback(null, null)
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
|
@ -90,16 +87,16 @@ module.exports = WebApiManager = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
getProjectDetails(project_id, callback) {
|
getProjectDetails(projectId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
const url = `/project/${project_id}/details`
|
const url = `/project/${projectId}/details`
|
||||||
logger.debug({ project_id }, 'getting project details from web')
|
logger.debug({ projectId }, 'getting project details from web')
|
||||||
return WebApiManager.sendRequest(url, function (error, body) {
|
return WebApiManager.sendRequest(url, function (error, body) {
|
||||||
let project
|
let project
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
logger.error({ err: error, project_id, url }, 'error accessing web')
|
logger.error({ err: error, projectId, url }, 'error accessing web')
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
|
|
||||||
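One practical side effect of switching the logger calls to camelCase shorthand, worth noting even though the diff does not call it out: shorthand properties take the field name from the variable, so these entries now log userId / projectId instead of user_id / project_id.

const userId = '507f191e810c19729de860ea' // illustrative
console.log(JSON.stringify({ user_id: userId })) // {"user_id":"507f..."}  (old field name)
console.log(JSON.stringify({ userId }))          // {"userId":"507f..."}   (new field name)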
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-undef,
|
no-undef,
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
|
@ -268,11 +267,11 @@ describe('Archiving updates', function () {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
throw error
|
throw error
|
||||||
}
|
}
|
||||||
const pack_id = index.packs[0]._id
|
const packId = index.packs[0]._id
|
||||||
return TrackChangesClient.getS3Doc(
|
return TrackChangesClient.getS3Doc(
|
||||||
this.project_id,
|
this.project_id,
|
||||||
this.doc_id,
|
this.doc_id,
|
||||||
pack_id,
|
packId,
|
||||||
(error, doc) => {
|
(error, doc) => {
|
||||||
if (error) return done(error)
|
if (error) return done(error)
|
||||||
doc.n.should.equal(1024)
|
doc.n.should.equal(1024)
|
||||||
|
|
|
@ -1,6 +1,3 @@
|
||||||
/* eslint-disable
|
|
||||||
camelcase,
|
|
||||||
*/
|
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
// Fix any style issues and re-enable lint.
|
// Fix any style issues and re-enable lint.
|
||||||
/*
|
/*
|
||||||
|
@ -16,7 +13,7 @@ const app = express()
|
||||||
module.exports = MockDocUpdaterApi = {
|
module.exports = MockDocUpdaterApi = {
|
||||||
docs: {},
|
docs: {},
|
||||||
|
|
||||||
getAllDoc(project_id, callback) {
|
getAllDoc(projectId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-undef,
|
no-undef,
|
||||||
*/
|
*/
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
@ -19,21 +18,21 @@ app.use(bodyParser.json())
|
||||||
module.exports = MockDocUpdaterApi = {
|
module.exports = MockDocUpdaterApi = {
|
||||||
docs: {},
|
docs: {},
|
||||||
|
|
||||||
getDoc(project_id, doc_id, callback) {
|
getDoc(projectId, docId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return callback(null, this.docs[doc_id])
|
return callback(null, this.docs[docId])
|
||||||
},
|
},
|
||||||
|
|
||||||
setDoc(project_id, doc_id, lines, user_id, undoing, callback) {
|
setDoc(projectId, docId, lines, userId, undoing, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
if (!this.docs[doc_id]) {
|
if (!this.docs[docId]) {
|
||||||
this.docs[doc_id] = {}
|
this.docs[docId] = {}
|
||||||
}
|
}
|
||||||
this.docs[doc_id].lines = lines
|
this.docs[docId].lines = lines
|
||||||
return callback()
|
return callback()
|
||||||
},
|
},
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,3 @@
|
||||||
/* eslint-disable
|
|
||||||
camelcase,
|
|
||||||
*/
|
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
// Fix any style issues and re-enable lint.
|
// Fix any style issues and re-enable lint.
|
||||||
/*
|
/*
|
||||||
|
@ -18,18 +15,18 @@ module.exports = MockWebApi = {
|
||||||
|
|
||||||
projects: {},
|
projects: {},
|
||||||
|
|
||||||
getUserInfo(user_id, callback) {
|
getUserInfo(userId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return callback(null, this.users[user_id] || null)
|
return callback(null, this.users[userId] || null)
|
||||||
},
|
},
|
||||||
|
|
||||||
getProjectDetails(project_id, callback) {
|
getProjectDetails(projectId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return callback(null, this.projects[project_id])
|
return callback(null, this.projects[projectId])
|
||||||
},
|
},
|
||||||
|
|
||||||
run() {
|
run() {
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
@ -32,25 +31,25 @@ const s3 = new aws.S3({
|
||||||
const S3_BUCKET = Settings.trackchanges.stores.doc_history
|
const S3_BUCKET = Settings.trackchanges.stores.doc_history
|
||||||
|
|
||||||
module.exports = TrackChangesClient = {
|
module.exports = TrackChangesClient = {
|
||||||
flushAndGetCompressedUpdates(project_id, doc_id, callback) {
|
flushAndGetCompressedUpdates(projectId, docId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return TrackChangesClient.flushDoc(project_id, doc_id, error => {
|
return TrackChangesClient.flushDoc(projectId, docId, error => {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
return TrackChangesClient.getCompressedUpdates(doc_id, callback)
|
return TrackChangesClient.getCompressedUpdates(docId, callback)
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
flushDoc(project_id, doc_id, callback) {
|
flushDoc(projectId, docId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return request.post(
|
return request.post(
|
||||||
{
|
{
|
||||||
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/flush`,
|
url: `http://localhost:3015/project/${projectId}/doc/${docId}/flush`,
|
||||||
},
|
},
|
||||||
(error, response, body) => {
|
(error, response, body) => {
|
||||||
response.statusCode.should.equal(204)
|
response.statusCode.should.equal(204)
|
||||||
|
@ -59,13 +58,13 @@ module.exports = TrackChangesClient = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
flushProject(project_id, callback) {
|
flushProject(projectId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return request.post(
|
return request.post(
|
||||||
{
|
{
|
||||||
url: `http://localhost:3015/project/${project_id}/flush`,
|
url: `http://localhost:3015/project/${projectId}/flush`,
|
||||||
},
|
},
|
||||||
(error, response, body) => {
|
(error, response, body) => {
|
||||||
response.statusCode.should.equal(204)
|
response.statusCode.should.equal(204)
|
||||||
|
@ -74,35 +73,35 @@ module.exports = TrackChangesClient = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getCompressedUpdates(doc_id, callback) {
|
getCompressedUpdates(docId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return db.docHistory
|
return db.docHistory
|
||||||
.find({ doc_id: ObjectId(doc_id) })
|
.find({ doc_id: ObjectId(docId) })
|
||||||
.sort({ 'meta.end_ts': 1 })
|
.sort({ 'meta.end_ts': 1 })
|
||||||
.toArray(callback)
|
.toArray(callback)
|
||||||
},
|
},
|
||||||
|
|
||||||
getProjectMetaData(project_id, callback) {
|
getProjectMetaData(projectId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return db.projectHistoryMetaData.findOne(
|
return db.projectHistoryMetaData.findOne(
|
||||||
{
|
{
|
||||||
project_id: ObjectId(project_id),
|
project_id: ObjectId(projectId),
|
||||||
},
|
},
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
setPreserveHistoryForProject(project_id, callback) {
|
setPreserveHistoryForProject(projectId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return db.projectHistoryMetaData.updateOne(
|
return db.projectHistoryMetaData.updateOne(
|
||||||
{
|
{
|
||||||
project_id: ObjectId(project_id),
|
project_id: ObjectId(projectId),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
$set: { preserveHistory: true },
|
$set: { preserveHistory: true },
|
||||||
|
@ -114,19 +113,19 @@ module.exports = TrackChangesClient = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
pushRawUpdates(project_id, doc_id, updates, callback) {
|
pushRawUpdates(projectId, docId, updates, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return rclient.sadd(
|
return rclient.sadd(
|
||||||
Keys.docsWithHistoryOps({ project_id }),
|
Keys.docsWithHistoryOps({ project_id: projectId }),
|
||||||
doc_id,
|
docId,
|
||||||
error => {
|
error => {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
return callback(error)
|
return callback(error)
|
||||||
}
|
}
|
||||||
return rclient.rpush(
|
return rclient.rpush(
|
||||||
Keys.uncompressedHistoryOps({ doc_id }),
|
Keys.uncompressedHistoryOps({ doc_id: docId }),
|
||||||
...Array.from(Array.from(updates).map(u => JSON.stringify(u))),
|
...Array.from(Array.from(updates).map(u => JSON.stringify(u))),
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
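The key builders used above keep taking objects with snake_case keys; only the destructured locals are renamed, so calls like Keys.docsWithHistoryOps({ project_id: projectId }) still produce the same Redis keys. A sketch of that shape, modelled on the key_schema stubs later in this diff:

const keySchema = {
  docsWithHistoryOps({ project_id: projectId }) {
    return `DocsWithHistoryOps:${projectId}`
  },
  uncompressedHistoryOps({ doc_id: docId }) {
    return `UncompressedHistoryOps:${docId}`
  },
}

console.log(keySchema.docsWithHistoryOps({ project_id: 'project-1' })) // DocsWithHistoryOps:project-1
console.log(keySchema.uncompressedHistoryOps({ doc_id: 'doc-1' }))     // UncompressedHistoryOps:doc-1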
|
@ -134,13 +133,13 @@ module.exports = TrackChangesClient = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getDiff(project_id, doc_id, from, to, callback) {
|
getDiff(projectId, docId, from, to, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return request.get(
|
return request.get(
|
||||||
{
|
{
|
||||||
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/diff?from=${from}&to=${to}`,
|
url: `http://localhost:3015/project/${projectId}/doc/${docId}/diff?from=${from}&to=${to}`,
|
||||||
},
|
},
|
||||||
(error, response, body) => {
|
(error, response, body) => {
|
||||||
if (error) return callback(error)
|
if (error) return callback(error)
|
||||||
|
@ -150,13 +149,13 @@ module.exports = TrackChangesClient = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
getUpdates(project_id, options, callback) {
|
getUpdates(projectId, options, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return request.get(
|
return request.get(
|
||||||
{
|
{
|
||||||
url: `http://localhost:3015/project/${project_id}/updates?before=${options.before}&min_count=${options.min_count}`,
|
url: `http://localhost:3015/project/${projectId}/updates?before=${options.before}&min_count=${options.min_count}`,
|
||||||
},
|
},
|
||||||
(error, response, body) => {
|
(error, response, body) => {
|
||||||
if (error) return callback(error)
|
if (error) return callback(error)
|
||||||
|
@ -166,9 +165,9 @@ module.exports = TrackChangesClient = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
exportProject(project_id, callback) {
|
exportProject(projectId, callback) {
|
||||||
request.get(
|
request.get(
|
||||||
{ url: `http://localhost:3015/project/${project_id}/export`, json: true },
|
{ url: `http://localhost:3015/project/${projectId}/export`, json: true },
|
||||||
(error, response, updates) => {
|
(error, response, updates) => {
|
||||||
if (error) return callback(error)
|
if (error) return callback(error)
|
||||||
response.statusCode.should.equal(200)
|
response.statusCode.should.equal(200)
|
||||||
|
@ -177,15 +176,15 @@ module.exports = TrackChangesClient = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
restoreDoc(project_id, doc_id, version, user_id, callback) {
|
restoreDoc(projectId, docId, version, userId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return request.post(
|
return request.post(
|
||||||
{
|
{
|
||||||
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/version/${version}/restore`,
|
url: `http://localhost:3015/project/${projectId}/doc/${docId}/version/${version}/restore`,
|
||||||
headers: {
|
headers: {
|
||||||
'X-User-Id': user_id,
|
'X-User-Id': userId,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
(error, response, body) => {
|
(error, response, body) => {
|
||||||
|
@ -196,13 +195,13 @@ module.exports = TrackChangesClient = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
pushDocHistory(project_id, doc_id, callback) {
|
pushDocHistory(projectId, docId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return request.post(
|
return request.post(
|
||||||
{
|
{
|
||||||
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/push`,
|
url: `http://localhost:3015/project/${projectId}/doc/${docId}/push`,
|
||||||
},
|
},
|
||||||
(error, response, body) => {
|
(error, response, body) => {
|
||||||
response.statusCode.should.equal(204)
|
response.statusCode.should.equal(204)
|
||||||
|
@ -211,13 +210,13 @@ module.exports = TrackChangesClient = {
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
|
||||||
pullDocHistory(project_id, doc_id, callback) {
|
pullDocHistory(projectId, docId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return request.post(
|
return request.post(
|
||||||
{
|
{
|
||||||
url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/pull`,
|
url: `http://localhost:3015/project/${projectId}/doc/${docId}/pull`,
|
||||||
},
|
},
|
||||||
(error, response, body) => {
|
(error, response, body) => {
|
||||||
response.statusCode.should.equal(204)
|
response.statusCode.should.equal(204)
|
||||||
|
@ -250,13 +249,13 @@ module.exports = TrackChangesClient = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
getS3Doc(project_id, doc_id, pack_id, callback) {
|
getS3Doc(projectId, docId, packId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: S3_BUCKET,
|
Bucket: S3_BUCKET,
|
||||||
Key: `${project_id}/changes-${doc_id}/pack-${pack_id}`,
|
Key: `${projectId}/changes-${docId}/pack-${packId}`,
|
||||||
}
|
}
|
||||||
|
|
||||||
return s3.getObject(params, (error, data) => {
|
return s3.getObject(params, (error, data) => {
|
||||||
|
@ -276,13 +275,13 @@ module.exports = TrackChangesClient = {
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
removeS3Doc(project_id, doc_id, callback) {
|
removeS3Doc(projectId, docId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
let params = {
|
let params = {
|
||||||
Bucket: S3_BUCKET,
|
Bucket: S3_BUCKET,
|
||||||
Prefix: `${project_id}/changes-${doc_id}`,
|
Prefix: `${projectId}/changes-${docId}`,
|
||||||
}
|
}
|
||||||
|
|
||||||
return s3.listObjects(params, (error, data) => {
|
return s3.listObjects(params, (error, data) => {
|
||||||
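The S3 helpers above address one object per pack, grouped by project and doc. A sketch of the key layout they assume (the ids are placeholders):

const s3PackKey = (projectId, docId, packId) => `${projectId}/changes-${docId}/pack-${packId}`
const s3PackPrefix = (projectId, docId) => `${projectId}/changes-${docId}`

console.log(s3PackKey('project-1', 'doc-1', 'pack-1')) // project-1/changes-doc-1/pack-1
console.log(s3PackPrefix('project-1', 'doc-1'))        // project-1/changes-doc-1  (used to list/remove all packs)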
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-return-assign,
|
no-return-assign,
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
|
@ -239,8 +238,8 @@ describe('DiffManager', function () {
|
||||||
beforeEach(function () {
|
beforeEach(function () {
|
||||||
let retried = false
|
let retried = false
|
||||||
this.DiffManager._tryGetDocumentBeforeVersion = (
|
this.DiffManager._tryGetDocumentBeforeVersion = (
|
||||||
project_id,
|
projectId,
|
||||||
doc_id,
|
docId,
|
||||||
version,
|
version,
|
||||||
callback
|
callback
|
||||||
) => {
|
) => {
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
mocha/no-nested-tests,
|
mocha/no-nested-tests,
|
||||||
no-return-assign,
|
no-return-assign,
|
||||||
no-undef,
|
no-undef,
|
||||||
|
@ -152,7 +151,7 @@ describe('LockManager', function () {
|
||||||
beforeEach(function (done) {
|
beforeEach(function (done) {
|
||||||
const startTime = Date.now()
|
const startTime = Date.now()
|
||||||
this.LockManager.LOCK_TEST_INTERVAL = 5
|
this.LockManager.LOCK_TEST_INTERVAL = 5
|
||||||
this.LockManager.tryLock = function (doc_id, callback) {
|
this.LockManager.tryLock = function (docId, callback) {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-return-assign,
|
no-return-assign,
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
|
@ -32,11 +31,11 @@ describe('RedisManager', function () {
|
||||||
redis: {
|
redis: {
|
||||||
history: {
|
history: {
|
||||||
key_schema: {
|
key_schema: {
|
||||||
uncompressedHistoryOps({ doc_id }) {
|
uncompressedHistoryOps({ doc_id: docId }) {
|
||||||
return `UncompressedHistoryOps:${doc_id}`
|
return `UncompressedHistoryOps:${docId}`
|
||||||
},
|
},
|
||||||
docsWithHistoryOps({ project_id }) {
|
docsWithHistoryOps({ project_id: projectId }) {
|
||||||
return `DocsWithHistoryOps:${project_id}`
|
return `DocsWithHistoryOps:${projectId}`
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
/* eslint-disable
|
/* eslint-disable
|
||||||
camelcase,
|
|
||||||
no-return-assign,
|
no-return-assign,
|
||||||
no-unused-vars,
|
no-unused-vars,
|
||||||
*/
|
*/
|
||||||
|
@ -35,8 +34,8 @@ describe('UpdatesManager', function () {
|
||||||
redis: {
|
redis: {
|
||||||
lock: {
|
lock: {
|
||||||
key_schema: {
|
key_schema: {
|
||||||
historyLock({ doc_id }) {
|
historyLock({ doc_id: docId }) {
|
||||||
return `HistoryLock:${doc_id}`
|
return `HistoryLock:${docId}`
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
@ -457,7 +456,7 @@ describe('UpdatesManager', function () {
|
||||||
]
|
]
|
||||||
this.redisArray = this.updates.slice()
|
this.redisArray = this.updates.slice()
|
||||||
this.RedisManager.getOldestDocUpdates = (
|
this.RedisManager.getOldestDocUpdates = (
|
||||||
doc_id,
|
docId,
|
||||||
batchSize,
|
batchSize,
|
||||||
callback
|
callback
|
||||||
) => {
|
) => {
|
||||||
|
@ -672,9 +671,9 @@ describe('UpdatesManager', function () {
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should process the doc ops for the each doc_id', function () {
|
it('should process the doc ops for the each doc_id', function () {
|
||||||
return Array.from(this.doc_ids).map(doc_id =>
|
return Array.from(this.doc_ids).map(docId =>
|
||||||
this.UpdatesManager._processUncompressedUpdatesForDocWithLock
|
this.UpdatesManager._processUncompressedUpdatesForDocWithLock
|
||||||
.calledWith(this.project_id, doc_id, this.temporary)
|
.calledWith(this.project_id, docId, this.temporary)
|
||||||
.should.equal(true)
|
.should.equal(true)
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
@ -887,11 +886,11 @@ describe('UpdatesManager', function () {
|
||||||
this.user_info[this.user_id_1] = { email: 'user1@sharelatex.com' }
|
this.user_info[this.user_id_1] = { email: 'user1@sharelatex.com' }
|
||||||
this.user_info[this.user_id_2] = { email: 'user2@sharelatex.com' }
|
this.user_info[this.user_id_2] = { email: 'user2@sharelatex.com' }
|
||||||
|
|
||||||
this.WebApiManager.getUserInfo = (user_id, callback) => {
|
this.WebApiManager.getUserInfo = (userId, callback) => {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return callback(null, this.user_info[user_id])
|
return callback(null, this.user_info[userId])
|
||||||
}
|
}
|
||||||
sinon.spy(this.WebApiManager, 'getUserInfo')
|
sinon.spy(this.WebApiManager, 'getUserInfo')
|
||||||
|
|
||||||
|
@ -961,11 +960,11 @@ describe('UpdatesManager', function () {
|
||||||
op: 'mock-op-2',
|
op: 'mock-op-2',
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
this.WebApiManager.getUserInfo = (user_id, callback) => {
|
this.WebApiManager.getUserInfo = (userId, callback) => {
|
||||||
if (callback == null) {
|
if (callback == null) {
|
||||||
callback = function () {}
|
callback = function () {}
|
||||||
}
|
}
|
||||||
return callback(null, this.user_info[user_id])
|
return callback(null, this.user_info[userId])
|
||||||
}
|
}
|
||||||
sinon.spy(this.WebApiManager, 'getUserInfo')
|
sinon.spy(this.WebApiManager, 'getUserInfo')
|
||||||
|
|
||||||
|
|