Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-21 20:47:08 -05:00

Merge pull request #133 from overleaf/em-json-request-size

Make max JSON request size configurable and default to 8 MB

Commit a4de5848fb: 5 changed files with 808 additions and 906 deletions
@@ -1,11 +1,3 @@
-/*
- * decaffeinate suggestions:
- * DS101: Remove unnecessary use of Array.from
- * DS102: Remove unnecessary code created because of implicit returns
- * DS103: Rewrite code to no longer use __guard__
- * DS207: Consider shorter variations of null checks
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
 const Metrics = require('metrics-sharelatex')
 Metrics.initialize('doc-updater')

@@ -16,7 +8,7 @@ logger.initialize('document-updater')
 logger.logger.addSerializers(require('./app/js/LoggerSerializers'))

-if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
+if (Settings.sentry != null && Settings.sentry.dsn != null) {
   logger.initializeErrorReporting(Settings.sentry.dsn)
 }

@@ -39,21 +31,21 @@ Metrics.event_loop.monitor(logger, 100)
 const app = express()
 app.use(Metrics.http.monitor(logger))
-app.use(bodyParser.json({ limit: Settings.max_doc_length + 64 * 1024 }))
+app.use(bodyParser.json({ limit: Settings.maxJsonRequestSize }))
 Metrics.injectMetricsRoute(app)

 DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10)

-app.param('project_id', function (req, res, next, projectId) {
-  if (projectId != null ? projectId.match(/^[0-9a-f]{24}$/) : undefined) {
+app.param('project_id', (req, res, next, projectId) => {
+  if (projectId != null && projectId.match(/^[0-9a-f]{24}$/)) {
     return next()
   } else {
     return next(new Error('invalid project id'))
   }
 })

-app.param('doc_id', function (req, res, next, docId) {
-  if (docId != null ? docId.match(/^[0-9a-f]{24}$/) : undefined) {
+app.param('doc_id', (req, res, next, docId) => {
+  if (docId != null && docId.match(/^[0-9a-f]{24}$/)) {
     return next()
   } else {
     return next(new Error('invalid doc id'))
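The bodyParser line above is the heart of this pull request: the JSON body limit is no longer derived from max_doc_length plus a 64 KB margin, but taken from the new Settings.maxJsonRequestSize (8 MB by default; see the settings change further down). A minimal standalone sketch of the same wiring, assuming express and body-parser are installed (the route and port below are illustrative, not the service's real ones):

// Sketch: configurable JSON request size limit for an Express app.
const express = require('express')
const bodyParser = require('body-parser')

// 8 MB default, overridable via the MAX_JSON_REQUEST_SIZE environment variable.
const maxJsonRequestSize =
  parseInt(process.env.MAX_JSON_REQUEST_SIZE, 10) || 8 * 1024 * 1024

const app = express()
// Bodies larger than the limit are rejected with HTTP 413 before any route runs.
app.use(bodyParser.json({ limit: maxJsonRequestSize }))

app.post('/doc', (req, res) => {
  res.sendStatus(204) // illustrative handler
})

app.listen(3003)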
@@ -99,18 +91,18 @@ app.delete(
 app.get('/flush_all_projects', HttpController.flushAllProjects)
 app.get('/flush_queued_projects', HttpController.flushQueuedProjects)

-app.get('/total', function (req, res, next) {
+app.get('/total', (req, res, next) => {
   const timer = new Metrics.Timer('http.allDocList')
-  return RedisManager.getCountOfDocsInMemory(function (err, count) {
+  RedisManager.getCountOfDocsInMemory((err, count) => {
     if (err) {
       return next(err)
     }
     timer.done()
-    return res.send({ total: count })
+    res.send({ total: count })
   })
 })

-app.get('/status', function (req, res) {
+app.get('/status', (req, res) => {
   if (Settings.shuttingDown) {
     return res.sendStatus(503) // Service unavailable
   } else {
@ -121,67 +113,70 @@ app.get('/status', function (req, res) {
|
|||
const pubsubClient = require('redis-sharelatex').createClient(
|
||||
Settings.redis.pubsub
|
||||
)
|
||||
app.get('/health_check/redis', (req, res, next) =>
|
||||
pubsubClient.healthCheck(function (error) {
|
||||
if (error != null) {
|
||||
app.get('/health_check/redis', (req, res, next) => {
|
||||
pubsubClient.healthCheck((error) => {
|
||||
if (error) {
|
||||
logger.err({ err: error }, 'failed redis health check')
|
||||
return res.sendStatus(500)
|
||||
} else {
|
||||
return res.sendStatus(200)
|
||||
}
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
const docUpdaterRedisClient = require('redis-sharelatex').createClient(
|
||||
Settings.redis.documentupdater
|
||||
)
|
||||
app.get('/health_check/redis_cluster', (req, res, next) =>
|
||||
docUpdaterRedisClient.healthCheck(function (error) {
|
||||
if (error != null) {
|
||||
app.get('/health_check/redis_cluster', (req, res, next) => {
|
||||
docUpdaterRedisClient.healthCheck((error) => {
|
||||
if (error) {
|
||||
logger.err({ err: error }, 'failed redis cluster health check')
|
||||
return res.sendStatus(500)
|
||||
} else {
|
||||
return res.sendStatus(200)
|
||||
}
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
app.get('/health_check', (req, res, next) =>
|
||||
app.get('/health_check', (req, res, next) => {
|
||||
async.series(
|
||||
[
|
||||
(cb) =>
|
||||
pubsubClient.healthCheck(function (error) {
|
||||
if (error != null) {
|
||||
(cb) => {
|
||||
pubsubClient.healthCheck((error) => {
|
||||
if (error) {
|
||||
logger.err({ err: error }, 'failed redis health check')
|
||||
}
|
||||
return cb(error)
|
||||
}),
|
||||
(cb) =>
|
||||
docUpdaterRedisClient.healthCheck(function (error) {
|
||||
if (error != null) {
|
||||
cb(error)
|
||||
})
|
||||
},
|
||||
(cb) => {
|
||||
docUpdaterRedisClient.healthCheck((error) => {
|
||||
if (error) {
|
||||
logger.err({ err: error }, 'failed redis cluster health check')
|
||||
}
|
||||
return cb(error)
|
||||
}),
|
||||
(cb) =>
|
||||
mongojs.healthCheck(function (error) {
|
||||
if (error != null) {
|
||||
cb(error)
|
||||
})
|
||||
},
|
||||
(cb) => {
|
||||
mongojs.healthCheck((error) => {
|
||||
if (error) {
|
||||
logger.err({ err: error }, 'failed mongo health check')
|
||||
}
|
||||
return cb(error)
|
||||
cb(error)
|
||||
})
|
||||
}
|
||||
],
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
(error) => {
|
||||
if (error) {
|
||||
return res.sendStatus(500)
|
||||
} else {
|
||||
return res.sendStatus(200)
|
||||
}
|
||||
}
|
||||
)
|
||||
)
|
||||
})
|
||||
|
||||
app.use(function (error, req, res, next) {
|
||||
app.use((error, req, res, next) => {
|
||||
if (error instanceof Errors.NotFoundError) {
|
||||
return res.sendStatus(404)
|
||||
} else if (error instanceof Errors.OpRangeNotAvailableError) {
|
||||
|
@ -194,45 +189,41 @@ app.use(function (error, req, res, next) {
|
|||
}
|
||||
})
|
||||
|
||||
const shutdownCleanly = (signal) =>
|
||||
function () {
|
||||
const shutdownCleanly = (signal) => () => {
|
||||
logger.log({ signal }, 'received interrupt, cleaning up')
|
||||
Settings.shuttingDown = true
|
||||
return setTimeout(function () {
|
||||
setTimeout(() => {
|
||||
logger.log({ signal }, 'shutting down')
|
||||
return process.exit()
|
||||
process.exit()
|
||||
}, 10000)
|
||||
}
|
||||
|
||||
const watchForEvent = (eventName) =>
|
||||
docUpdaterRedisClient.on(
|
||||
eventName,
|
||||
(e) => console.log(`redis event: ${eventName} ${e}`) // eslint-disable-line no-console
|
||||
)
|
||||
const watchForEvent = (eventName) => {
|
||||
docUpdaterRedisClient.on(eventName, (e) => {
|
||||
console.log(`redis event: ${eventName} ${e}`) // eslint-disable-line no-console
|
||||
})
|
||||
}
|
||||
|
||||
const events = ['connect', 'ready', 'error', 'close', 'reconnecting', 'end']
|
||||
for (const eventName of Array.from(events)) {
|
||||
for (const eventName of events) {
|
||||
watchForEvent(eventName)
|
||||
}
|
||||
|
||||
const port =
|
||||
__guard__(
|
||||
Settings.internal != null ? Settings.internal.documentupdater : undefined,
|
||||
(x) => x.port
|
||||
) ||
|
||||
__guard__(
|
||||
Settings.apis != null ? Settings.apis.documentupdater : undefined,
|
||||
(x1) => x1.port
|
||||
) ||
|
||||
Settings.internal.documentupdater.port ||
|
||||
(Settings.api &&
|
||||
Settings.api.documentupdater &&
|
||||
Settings.api.documentupdater.port) ||
|
||||
3003
|
||||
const host = Settings.internal.documentupdater.host || 'localhost'
|
||||
|
||||
if (!module.parent) {
|
||||
// Called directly
|
||||
app.listen(port, host, function () {
|
||||
app.listen(port, host, () => {
|
||||
logger.info(`Document-updater starting up, listening on ${host}:${port}`)
|
||||
if (Settings.continuousBackgroundFlush) {
|
||||
logger.info('Starting continuous background flush')
|
||||
return DeleteQueueManager.startBackgroundFlush()
|
||||
DeleteQueueManager.startBackgroundFlush()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -250,9 +241,3 @@ for (const signal of [
|
|||
]) {
|
||||
process.on(signal, shutdownCleanly(signal))
|
||||
}
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return typeof value !== 'undefined' && value !== null
|
||||
? transform(value)
|
||||
: undefined
|
||||
}
|
||||
|
|
|
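One more pattern from the file above worth calling out: shutdownCleanly is rewritten from a function that returns a function into a curried arrow, (signal) => () => { ... }, so calling it with a signal name yields the listener that is later registered with process.on. A minimal sketch of the pattern, with the 10 second delay from the diff and illustrative log messages:

// Curried shutdown handler: shutdownCleanly('SIGTERM') returns the listener.
const shutdownCleanly = (signal) => () => {
  console.log(`received ${signal}, cleaning up`)
  setTimeout(() => {
    console.log(`${signal}: shutting down`)
    process.exit()
  }, 10000)
}

for (const signal of ['SIGINT', 'SIGTERM']) {
  process.on(signal, shutdownCleanly(signal))
}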
@@ -1,62 +1,60 @@
-/* eslint-disable
-    camelcase,
-    handle-callback-err,
-*/
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS101: Remove unnecessary use of Array.from
- * DS102: Remove unnecessary code created because of implicit returns
- * DS103: Rewrite code to no longer use __guard__
- * DS207: Consider shorter variations of null checks
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
-let HttpController
 const DocumentManager = require('./DocumentManager')
 const HistoryManager = require('./HistoryManager')
 const ProjectManager = require('./ProjectManager')
 const Errors = require('./Errors')
 const logger = require('logger-sharelatex')
 const Settings = require('settings-sharelatex')
 const Metrics = require('./Metrics')
 const ProjectFlusher = require('./ProjectFlusher')
 const DeleteQueueManager = require('./DeleteQueueManager')
 const async = require('async')

-const TWO_MEGABYTES = 2 * 1024 * 1024
-
-module.exports = HttpController = {
-  getDoc(req, res, next) {
-    let fromVersion
-    if (next == null) {
-      next = function (error) {}
-    }
-    const { doc_id } = req.params
-    const { project_id } = req.params
-    logger.log({ project_id, doc_id }, 'getting doc via http')
+module.exports = {
+  getDoc,
+  getProjectDocsAndFlushIfOld,
+  clearProjectState,
+  setDoc,
+  flushDocIfLoaded,
+  deleteDoc,
+  flushProject,
+  deleteProject,
+  deleteMultipleProjects,
+  acceptChanges,
+  deleteComment,
+  updateProject,
+  resyncProjectHistory,
+  flushAllProjects,
+  flushQueuedProjects
+}
+
+function getDoc(req, res, next) {
+  let fromVersion
+  const docId = req.params.doc_id
+  const projectId = req.params.project_id
+  logger.log({ projectId, docId }, 'getting doc via http')
   const timer = new Metrics.Timer('http.getDoc')
-  if ((req.query != null ? req.query.fromVersion : undefined) != null) {
+
+  if (req.query.fromVersion != null) {
     fromVersion = parseInt(req.query.fromVersion, 10)
   } else {
     fromVersion = -1
   }
-  return DocumentManager.getDocAndRecentOpsWithLock(
-    project_id,
-    doc_id,
+
+  DocumentManager.getDocAndRecentOpsWithLock(
+    projectId,
+    docId,
     fromVersion,
-    function (error, lines, version, ops, ranges, pathname) {
+    (error, lines, version, ops, ranges, pathname) => {
       timer.done()
-      if (error != null) {
+      if (error) {
         return next(error)
       }
-      logger.log({ project_id, doc_id }, 'got doc via http')
+      logger.log({ projectId, docId }, 'got doc via http')
       if (lines == null || version == null) {
         return next(new Errors.NotFoundError('document not found'))
       }
-      return res.json({
-        id: doc_id,
+      res.json({
+        id: docId,
         lines,
         version,
         ops,
@ -65,311 +63,268 @@ module.exports = HttpController = {
|
|||
})
|
||||
}
|
||||
)
|
||||
},
|
||||
}
|
||||
|
||||
_getTotalSizeOfLines(lines) {
|
||||
function _getTotalSizeOfLines(lines) {
|
||||
let size = 0
|
||||
for (const line of Array.from(lines)) {
|
||||
for (const line of lines) {
|
||||
size += line.length + 1
|
||||
}
|
||||
return size
|
||||
},
|
||||
|
||||
getProjectDocsAndFlushIfOld(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
const { project_id } = req.params
|
||||
const projectStateHash = req.query != null ? req.query.state : undefined
|
||||
|
||||
function getProjectDocsAndFlushIfOld(req, res, next) {
|
||||
const projectId = req.params.project_id
|
||||
const projectStateHash = req.query.state
|
||||
// exclude is string of existing docs "id:version,id:version,..."
|
||||
const excludeItems =
|
||||
__guard__(req.query != null ? req.query.exclude : undefined, (x) =>
|
||||
x.split(',')
|
||||
) || []
|
||||
logger.log({ project_id, exclude: excludeItems }, 'getting docs via http')
|
||||
req.query.exclude != null ? req.query.exclude.split(',') : []
|
||||
logger.log({ projectId, exclude: excludeItems }, 'getting docs via http')
|
||||
const timer = new Metrics.Timer('http.getAllDocs')
|
||||
const excludeVersions = {}
|
||||
for (const item of Array.from(excludeItems)) {
|
||||
const [id, version] = Array.from(
|
||||
item != null ? item.split(':') : undefined
|
||||
)
|
||||
for (const item of excludeItems) {
|
||||
const [id, version] = item.split(':')
|
||||
excludeVersions[id] = version
|
||||
}
|
||||
logger.log(
|
||||
{ project_id, projectStateHash, excludeVersions },
|
||||
{ projectId, projectStateHash, excludeVersions },
|
||||
'excluding versions'
|
||||
)
|
||||
return ProjectManager.getProjectDocsAndFlushIfOld(
|
||||
project_id,
|
||||
ProjectManager.getProjectDocsAndFlushIfOld(
|
||||
projectId,
|
||||
projectStateHash,
|
||||
excludeVersions,
|
||||
function (error, result) {
|
||||
(error, result) => {
|
||||
timer.done()
|
||||
if (error instanceof Errors.ProjectStateChangedError) {
|
||||
return res.sendStatus(409) // conflict
|
||||
} else if (error != null) {
|
||||
return next(error)
|
||||
res.sendStatus(409) // conflict
|
||||
} else if (error) {
|
||||
next(error)
|
||||
} else {
|
||||
logger.log(
|
||||
{
|
||||
project_id,
|
||||
result: Array.from(result).map((doc) => `${doc._id}:${doc.v}`)
|
||||
projectId,
|
||||
result: result.map((doc) => `${doc._id}:${doc.v}`)
|
||||
},
|
||||
'got docs via http'
|
||||
)
|
||||
return res.send(result)
|
||||
res.send(result)
|
||||
}
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
clearProjectState(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
const { project_id } = req.params
|
||||
|
||||
function clearProjectState(req, res, next) {
|
||||
const projectId = req.params.project_id
|
||||
const timer = new Metrics.Timer('http.clearProjectState')
|
||||
logger.log({ project_id }, 'clearing project state via http')
|
||||
return ProjectManager.clearProjectState(project_id, function (error) {
|
||||
logger.log({ projectId }, 'clearing project state via http')
|
||||
ProjectManager.clearProjectState(projectId, (error) => {
|
||||
timer.done()
|
||||
if (error != null) {
|
||||
return next(error)
|
||||
if (error) {
|
||||
next(error)
|
||||
} else {
|
||||
return res.sendStatus(200)
|
||||
res.sendStatus(200)
|
||||
}
|
||||
})
|
||||
},
|
||||
|
||||
-  setDoc(req, res, next) {
-    if (next == null) {
-      next = function (error) {}
-    }
-    const { doc_id } = req.params
-    const { project_id } = req.params
-    const { lines, source, user_id, undoing } = req.body
-    const lineSize = HttpController._getTotalSizeOfLines(lines)
-    if (lineSize > TWO_MEGABYTES) {
+function setDoc(req, res, next) {
+  const docId = req.params.doc_id
+  const projectId = req.params.project_id
+  const { lines, source, user_id: userId, undoing } = req.body
+  const lineSize = _getTotalSizeOfLines(lines)
+  if (lineSize > Settings.max_doc_length) {
     logger.log(
-      { project_id, doc_id, source, lineSize, user_id },
+      { projectId, docId, source, lineSize, userId },
       'document too large, returning 406 response'
     )
     return res.sendStatus(406)
   }
   logger.log(
-    { project_id, doc_id, lines, source, user_id, undoing },
+    { projectId, docId, lines, source, userId, undoing },
     'setting doc via http'
   )
   const timer = new Metrics.Timer('http.setDoc')
-  return DocumentManager.setDocWithLock(
-    project_id,
-    doc_id,
+  DocumentManager.setDocWithLock(
+    projectId,
+    docId,
     lines,
     source,
-    user_id,
+    userId,
     undoing,
-    function (error) {
+    (error) => {
       timer.done()
-      if (error != null) {
+      if (error) {
         return next(error)
       }
-      logger.log({ project_id, doc_id }, 'set doc via http')
-      return res.sendStatus(204)
+      logger.log({ projectId, docId }, 'set doc via http')
+      res.sendStatus(204) // No Content
     }
   )
-  }, // No Content
+}
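After this change the service enforces two independent size limits: the Express body parser rejects any request whose JSON payload exceeds Settings.maxJsonRequestSize with HTTP 413, and setDoc rejects documents whose combined line length exceeds Settings.max_doc_length with HTTP 406. A rough sketch of the check order, using illustrative constants rather than the real settings module:

// Sketch of the two-tier size check (values are illustrative defaults).
const maxJsonRequestSize = 8 * 1024 * 1024 // body parser limit -> HTTP 413
const maxDocLength = 2 * 1024 * 1024 // controller limit -> HTTP 406

function getTotalSizeOfLines(lines) {
  // One unit per character plus one per line break, as in _getTotalSizeOfLines.
  return lines.reduce((size, line) => size + line.length + 1, 0)
}

function classifyRequest(jsonBodyLength, lines) {
  if (jsonBodyLength > maxJsonRequestSize) {
    return 413 // rejected by bodyParser.json({ limit }) before the route runs
  }
  if (getTotalSizeOfLines(lines) > maxDocLength) {
    return 406 // rejected by setDoc's explicit check
  }
  return 204 // accepted
}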
|
||||
flushDocIfLoaded(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
const { doc_id } = req.params
|
||||
const { project_id } = req.params
|
||||
logger.log({ project_id, doc_id }, 'flushing doc via http')
|
||||
|
||||
function flushDocIfLoaded(req, res, next) {
|
||||
const docId = req.params.doc_id
|
||||
const projectId = req.params.project_id
|
||||
logger.log({ projectId, docId }, 'flushing doc via http')
|
||||
const timer = new Metrics.Timer('http.flushDoc')
|
||||
return DocumentManager.flushDocIfLoadedWithLock(
|
||||
project_id,
|
||||
doc_id,
|
||||
function (error) {
|
||||
DocumentManager.flushDocIfLoadedWithLock(projectId, docId, (error) => {
|
||||
timer.done()
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
logger.log({ project_id, doc_id }, 'flushed doc via http')
|
||||
return res.sendStatus(204)
|
||||
logger.log({ projectId, docId }, 'flushed doc via http')
|
||||
res.sendStatus(204) // No Content
|
||||
})
|
||||
}
|
||||
)
|
||||
}, // No Content
|
||||
|
||||
deleteDoc(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
const { doc_id } = req.params
|
||||
const { project_id } = req.params
|
||||
function deleteDoc(req, res, next) {
|
||||
const docId = req.params.doc_id
|
||||
const projectId = req.params.project_id
|
||||
const ignoreFlushErrors = req.query.ignore_flush_errors === 'true'
|
||||
const timer = new Metrics.Timer('http.deleteDoc')
|
||||
logger.log({ project_id, doc_id }, 'deleting doc via http')
|
||||
return DocumentManager.flushAndDeleteDocWithLock(
|
||||
project_id,
|
||||
doc_id,
|
||||
logger.log({ projectId, docId }, 'deleting doc via http')
|
||||
DocumentManager.flushAndDeleteDocWithLock(
|
||||
projectId,
|
||||
docId,
|
||||
{ ignoreFlushErrors },
|
||||
function (error) {
|
||||
(error) => {
|
||||
timer.done()
|
||||
// There is no harm in flushing project history if the previous call
|
||||
// failed and sometimes it is required
|
||||
HistoryManager.flushProjectChangesAsync(project_id)
|
||||
HistoryManager.flushProjectChangesAsync(projectId)
|
||||
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
logger.log({ project_id, doc_id }, 'deleted doc via http')
|
||||
return res.sendStatus(204)
|
||||
logger.log({ projectId, docId }, 'deleted doc via http')
|
||||
res.sendStatus(204) // No Content
|
||||
}
|
||||
)
|
||||
}, // No Content
|
||||
|
||||
flushProject(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
const { project_id } = req.params
|
||||
logger.log({ project_id }, 'flushing project via http')
|
||||
|
||||
function flushProject(req, res, next) {
|
||||
const projectId = req.params.project_id
|
||||
logger.log({ projectId }, 'flushing project via http')
|
||||
const timer = new Metrics.Timer('http.flushProject')
|
||||
return ProjectManager.flushProjectWithLocks(project_id, function (error) {
|
||||
ProjectManager.flushProjectWithLocks(projectId, (error) => {
|
||||
timer.done()
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
logger.log({ project_id }, 'flushed project via http')
|
||||
return res.sendStatus(204)
|
||||
logger.log({ projectId }, 'flushed project via http')
|
||||
res.sendStatus(204) // No Content
|
||||
})
|
||||
}, // No Content
|
||||
|
||||
deleteProject(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
const { project_id } = req.params
|
||||
logger.log({ project_id }, 'deleting project via http')
|
||||
|
||||
function deleteProject(req, res, next) {
|
||||
const projectId = req.params.project_id
|
||||
logger.log({ projectId }, 'deleting project via http')
|
||||
const options = {}
|
||||
if (req.query != null ? req.query.background : undefined) {
|
||||
if (req.query.background) {
|
||||
options.background = true
|
||||
} // allow non-urgent flushes to be queued
|
||||
if (req.query != null ? req.query.shutdown : undefined) {
|
||||
if (req.query.shutdown) {
|
||||
options.skip_history_flush = true
|
||||
} // don't flush history when realtime shuts down
|
||||
if (req.query != null ? req.query.background : undefined) {
|
||||
return ProjectManager.queueFlushAndDeleteProject(project_id, function (
|
||||
error
|
||||
) {
|
||||
if (error != null) {
|
||||
if (req.query.background) {
|
||||
ProjectManager.queueFlushAndDeleteProject(projectId, (error) => {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
logger.log({ project_id }, 'queue delete of project via http')
|
||||
return res.sendStatus(204)
|
||||
logger.log({ projectId }, 'queue delete of project via http')
|
||||
res.sendStatus(204)
|
||||
}) // No Content
|
||||
} else {
|
||||
const timer = new Metrics.Timer('http.deleteProject')
|
||||
return ProjectManager.flushAndDeleteProjectWithLocks(
|
||||
project_id,
|
||||
ProjectManager.flushAndDeleteProjectWithLocks(
|
||||
projectId,
|
||||
options,
|
||||
function (error) {
|
||||
(error) => {
|
||||
timer.done()
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
logger.log({ project_id }, 'deleted project via http')
|
||||
return res.sendStatus(204)
|
||||
logger.log({ projectId }, 'deleted project via http')
|
||||
res.sendStatus(204) // No Content
|
||||
}
|
||||
)
|
||||
}
|
||||
}, // No Content
|
||||
|
||||
deleteMultipleProjects(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
const project_ids =
|
||||
(req.body != null ? req.body.project_ids : undefined) || []
|
||||
logger.log({ project_ids }, 'deleting multiple projects via http')
|
||||
return async.eachSeries(
|
||||
project_ids,
|
||||
function (project_id, cb) {
|
||||
logger.log({ project_id }, 'queue delete of project via http')
|
||||
return ProjectManager.queueFlushAndDeleteProject(project_id, cb)
|
||||
|
||||
function deleteMultipleProjects(req, res, next) {
|
||||
const projectIds = req.body.project_ids || []
|
||||
logger.log({ projectIds }, 'deleting multiple projects via http')
|
||||
async.eachSeries(
|
||||
projectIds,
|
||||
(projectId, cb) => {
|
||||
logger.log({ projectId }, 'queue delete of project via http')
|
||||
ProjectManager.queueFlushAndDeleteProject(projectId, cb)
|
||||
},
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
(error) => {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
return res.sendStatus(204)
|
||||
res.sendStatus(204) // No Content
|
||||
}
|
||||
)
|
||||
}, // No Content
|
||||
|
||||
acceptChanges(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
const { project_id, doc_id } = req.params
|
||||
let change_ids = req.body != null ? req.body.change_ids : undefined
|
||||
if (change_ids == null) {
|
||||
change_ids = [req.params.change_id]
|
||||
|
||||
function acceptChanges(req, res, next) {
|
||||
const { project_id: projectId, doc_id: docId } = req.params
|
||||
let changeIds = req.body.change_ids
|
||||
if (changeIds == null) {
|
||||
changeIds = [req.params.change_id]
|
||||
}
|
||||
logger.log(
|
||||
{ project_id, doc_id },
|
||||
`accepting ${change_ids.length} changes via http`
|
||||
{ projectId, docId },
|
||||
`accepting ${changeIds.length} changes via http`
|
||||
)
|
||||
const timer = new Metrics.Timer('http.acceptChanges')
|
||||
return DocumentManager.acceptChangesWithLock(
|
||||
project_id,
|
||||
doc_id,
|
||||
change_ids,
|
||||
function (error) {
|
||||
DocumentManager.acceptChangesWithLock(
|
||||
projectId,
|
||||
docId,
|
||||
changeIds,
|
||||
(error) => {
|
||||
timer.done()
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
logger.log(
|
||||
{ project_id, doc_id },
|
||||
`accepted ${change_ids.length} changes via http`
|
||||
{ projectId, docId },
|
||||
`accepted ${changeIds.length} changes via http`
|
||||
)
|
||||
return res.sendStatus(204)
|
||||
res.sendStatus(204) // No Content
|
||||
}
|
||||
)
|
||||
}, // No Content
|
||||
}
|
||||
|
||||
deleteComment(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
const { project_id, doc_id, comment_id } = req.params
|
||||
logger.log({ project_id, doc_id, comment_id }, 'deleting comment via http')
|
||||
function deleteComment(req, res, next) {
|
||||
const {
|
||||
project_id: projectId,
|
||||
doc_id: docId,
|
||||
comment_id: commentId
|
||||
} = req.params
|
||||
logger.log({ projectId, docId, commentId }, 'deleting comment via http')
|
||||
const timer = new Metrics.Timer('http.deleteComment')
|
||||
return DocumentManager.deleteCommentWithLock(
|
||||
project_id,
|
||||
doc_id,
|
||||
comment_id,
|
||||
function (error) {
|
||||
DocumentManager.deleteCommentWithLock(
|
||||
projectId,
|
||||
docId,
|
||||
commentId,
|
||||
(error) => {
|
||||
timer.done()
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
logger.log(
|
||||
{ project_id, doc_id, comment_id },
|
||||
'deleted comment via http'
|
||||
)
|
||||
return res.sendStatus(204)
|
||||
logger.log({ projectId, docId, commentId }, 'deleted comment via http')
|
||||
res.sendStatus(204) // No Content
|
||||
}
|
||||
)
|
||||
}, // No Content
|
||||
}
|
||||
|
||||
updateProject(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
function updateProject(req, res, next) {
|
||||
const timer = new Metrics.Timer('http.updateProject')
|
||||
const { project_id } = req.params
|
||||
const projectId = req.params.project_id
|
||||
const {
|
||||
projectHistoryId,
|
||||
userId,
|
||||
|
@ -378,104 +333,82 @@ module.exports = HttpController = {
|
|||
version
|
||||
} = req.body
|
||||
logger.log(
|
||||
{ project_id, docUpdates, fileUpdates, version },
|
||||
{ projectId, docUpdates, fileUpdates, version },
|
||||
'updating project via http'
|
||||
)
|
||||
|
||||
return ProjectManager.updateProjectWithLocks(
|
||||
project_id,
|
||||
ProjectManager.updateProjectWithLocks(
|
||||
projectId,
|
||||
projectHistoryId,
|
||||
userId,
|
||||
docUpdates,
|
||||
fileUpdates,
|
||||
version,
|
||||
function (error) {
|
||||
(error) => {
|
||||
timer.done()
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
logger.log({ project_id }, 'updated project via http')
|
||||
return res.sendStatus(204)
|
||||
logger.log({ projectId }, 'updated project via http')
|
||||
res.sendStatus(204) // No Content
|
||||
}
|
||||
)
|
||||
}, // No Content
|
||||
|
||||
resyncProjectHistory(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
const { project_id } = req.params
|
||||
|
||||
function resyncProjectHistory(req, res, next) {
|
||||
const projectId = req.params.project_id
|
||||
const { projectHistoryId, docs, files } = req.body
|
||||
|
||||
logger.log(
|
||||
{ project_id, docs, files },
|
||||
{ projectId, docs, files },
|
||||
'queuing project history resync via http'
|
||||
)
|
||||
return HistoryManager.resyncProjectHistory(
|
||||
project_id,
|
||||
HistoryManager.resyncProjectHistory(
|
||||
projectId,
|
||||
projectHistoryId,
|
||||
docs,
|
||||
files,
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
(error) => {
|
||||
if (error) {
|
||||
return next(error)
|
||||
}
|
||||
logger.log({ project_id }, 'queued project history resync via http')
|
||||
return res.sendStatus(204)
|
||||
logger.log({ projectId }, 'queued project history resync via http')
|
||||
res.sendStatus(204)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
flushAllProjects(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
|
||||
function flushAllProjects(req, res, next) {
|
||||
res.setTimeout(5 * 60 * 1000)
|
||||
const options = {
|
||||
limit: req.query.limit || 1000,
|
||||
concurrency: req.query.concurrency || 5,
|
||||
dryRun: req.query.dryRun || false
|
||||
}
|
||||
return ProjectFlusher.flushAllProjects(options, function (
|
||||
err,
|
||||
project_ids
|
||||
) {
|
||||
if (err != null) {
|
||||
ProjectFlusher.flushAllProjects(options, (err, projectIds) => {
|
||||
if (err) {
|
||||
logger.err({ err }, 'error bulk flushing projects')
|
||||
return res.sendStatus(500)
|
||||
res.sendStatus(500)
|
||||
} else {
|
||||
return res.send(project_ids)
|
||||
res.send(projectIds)
|
||||
}
|
||||
})
|
||||
},
|
||||
|
||||
flushQueuedProjects(req, res, next) {
|
||||
if (next == null) {
|
||||
next = function (error) {}
|
||||
}
|
||||
|
||||
function flushQueuedProjects(req, res, next) {
|
||||
res.setTimeout(10 * 60 * 1000)
|
||||
const options = {
|
||||
limit: req.query.limit || 1000,
|
||||
timeout: 5 * 60 * 1000,
|
||||
min_delete_age: req.query.min_delete_age || 5 * 60 * 1000
|
||||
}
|
||||
return DeleteQueueManager.flushAndDeleteOldProjects(options, function (
|
||||
err,
|
||||
flushed
|
||||
) {
|
||||
if (err != null) {
|
||||
DeleteQueueManager.flushAndDeleteOldProjects(options, (err, flushed) => {
|
||||
if (err) {
|
||||
logger.err({ err }, 'error flushing old projects')
|
||||
return res.sendStatus(500)
|
||||
res.sendStatus(500)
|
||||
} else {
|
||||
logger.log({ flushed }, 'flush of queued projects completed')
|
||||
return res.send({ flushed })
|
||||
res.send({ flushed })
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return typeof value !== 'undefined' && value !== null
|
||||
? transform(value)
|
||||
: undefined
|
||||
}
|
||||
|
|
|
@@ -168,6 +168,8 @@ module.exports = {
   },

   max_doc_length: 2 * 1024 * 1024, // 2mb
+  maxJsonRequestSize:
+    parseInt(process.env.MAX_JSON_REQUEST_SIZE, 10) || 8 * 1024 * 1024,

   dispatcherCount: process.env.DISPATCHER_COUNT,

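Operators can raise or lower the limit by setting MAX_JSON_REQUEST_SIZE (in bytes) in the service's environment; when the variable is unset, parseInt returns NaN, which is falsy, so the 8 MB default applies. A small sketch of that fallback behaviour (the console.log is only for illustration):

// MAX_JSON_REQUEST_SIZE unset    -> parseInt(undefined, 10) is NaN -> 8 MB default
// MAX_JSON_REQUEST_SIZE=16777216 -> 16 MB limit
const fromEnv = parseInt(process.env.MAX_JSON_REQUEST_SIZE, 10)
const maxJsonRequestSize = fromEnv || 8 * 1024 * 1024
console.log(maxJsonRequestSize)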
@ -1,23 +1,9 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const sinon = require('sinon')
|
||||
const chai = require('chai')
|
||||
chai.should()
|
||||
const { expect } = require('chai')
|
||||
const Settings = require('settings-sharelatex')
|
||||
const rclient_du = require('redis-sharelatex').createClient(
|
||||
const docUpdaterRedis = require('redis-sharelatex').createClient(
|
||||
Settings.redis.documentupdater
|
||||
)
|
||||
const Keys = Settings.redis.documentupdater.key_schema
|
||||
|
@ -50,39 +36,37 @@ describe('Setting a document', function () {
|
|||
sinon.spy(MockTrackChangesApi, 'flushDoc')
|
||||
sinon.spy(MockProjectHistoryApi, 'flushProject')
|
||||
sinon.spy(MockWebApi, 'setDocument')
|
||||
return DocUpdaterApp.ensureRunning(done)
|
||||
DocUpdaterApp.ensureRunning(done)
|
||||
})
|
||||
|
||||
after(function () {
|
||||
MockTrackChangesApi.flushDoc.restore()
|
||||
MockProjectHistoryApi.flushProject.restore()
|
||||
return MockWebApi.setDocument.restore()
|
||||
MockWebApi.setDocument.restore()
|
||||
})
|
||||
|
||||
describe('when the updated doc exists in the doc updater', function () {
|
||||
before(function (done) {
|
||||
;[this.project_id, this.doc_id] = Array.from([
|
||||
DocUpdaterClient.randomId(),
|
||||
DocUpdaterClient.randomId()
|
||||
])
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
MockWebApi.insertDoc(this.project_id, this.doc_id, {
|
||||
lines: this.lines,
|
||||
version: this.version
|
||||
})
|
||||
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
return DocUpdaterClient.sendUpdate(
|
||||
DocUpdaterClient.sendUpdate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.update,
|
||||
(error) => {
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
return setTimeout(() => {
|
||||
return DocUpdaterClient.setDocLines(
|
||||
setTimeout(() => {
|
||||
DocUpdaterClient.setDocLines(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.newLines,
|
||||
|
@ -90,29 +74,31 @@ describe('Setting a document', function () {
|
|||
this.user_id,
|
||||
false,
|
||||
(error, res, body) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
this.statusCode = res.statusCode
|
||||
return done()
|
||||
done()
|
||||
}
|
||||
)
|
||||
}, 200)
|
||||
}
|
||||
)
|
||||
})
|
||||
return null
|
||||
})
|
||||
|
||||
after(function () {
|
||||
MockTrackChangesApi.flushDoc.reset()
|
||||
MockProjectHistoryApi.flushProject.reset()
|
||||
return MockWebApi.setDocument.reset()
|
||||
MockWebApi.setDocument.reset()
|
||||
})
|
||||
|
||||
it('should return a 204 status code', function () {
|
||||
return this.statusCode.should.equal(204)
|
||||
this.statusCode.should.equal(204)
|
||||
})
|
||||
|
||||
it('should send the updated doc lines and version to the web api', function () {
|
||||
return MockWebApi.setDocument
|
||||
MockWebApi.setDocument
|
||||
.calledWith(this.project_id, this.doc_id, this.newLines)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
@ -122,11 +108,13 @@ describe('Setting a document', function () {
|
|||
this.project_id,
|
||||
this.doc_id,
|
||||
(error, res, doc) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
doc.lines.should.deep.equal(this.newLines)
|
||||
return done()
|
||||
done()
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
|
||||
it('should bump the version in the doc updater', function (done) {
|
||||
|
@ -134,31 +122,33 @@ describe('Setting a document', function () {
|
|||
this.project_id,
|
||||
this.doc_id,
|
||||
(error, res, doc) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
doc.version.should.equal(this.version + 2)
|
||||
return done()
|
||||
done()
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
|
||||
return it('should leave the document in redis', function (done) {
|
||||
rclient_du.get(Keys.docLines({ doc_id: this.doc_id }), (error, lines) => {
|
||||
if (error != null) {
|
||||
it('should leave the document in redis', function (done) {
|
||||
docUpdaterRedis.get(
|
||||
Keys.docLines({ doc_id: this.doc_id }),
|
||||
(error, lines) => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
expect(JSON.parse(lines)).to.deep.equal(this.newLines)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the updated doc does not exist in the doc updater', function () {
|
||||
before(function (done) {
|
||||
;[this.project_id, this.doc_id] = Array.from([
|
||||
DocUpdaterClient.randomId(),
|
||||
DocUpdaterClient.randomId()
|
||||
])
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
MockWebApi.insertDoc(this.project_id, this.doc_id, {
|
||||
lines: this.lines,
|
||||
version: this.version
|
||||
|
@ -171,68 +161,79 @@ describe('Setting a document', function () {
|
|||
this.user_id,
|
||||
false,
|
||||
(error, res, body) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
this.statusCode = res.statusCode
|
||||
return setTimeout(done, 200)
|
||||
setTimeout(done, 200)
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
|
||||
after(function () {
|
||||
MockTrackChangesApi.flushDoc.reset()
|
||||
MockProjectHistoryApi.flushProject.reset()
|
||||
return MockWebApi.setDocument.reset()
|
||||
MockWebApi.setDocument.reset()
|
||||
})
|
||||
|
||||
it('should return a 204 status code', function () {
|
||||
return this.statusCode.should.equal(204)
|
||||
this.statusCode.should.equal(204)
|
||||
})
|
||||
|
||||
it('should send the updated doc lines to the web api', function () {
|
||||
return MockWebApi.setDocument
|
||||
MockWebApi.setDocument
|
||||
.calledWith(this.project_id, this.doc_id, this.newLines)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should flush track changes', function () {
|
||||
return MockTrackChangesApi.flushDoc
|
||||
.calledWith(this.doc_id)
|
||||
.should.equal(true)
|
||||
MockTrackChangesApi.flushDoc.calledWith(this.doc_id).should.equal(true)
|
||||
})
|
||||
|
||||
it('should flush project history', function () {
|
||||
return MockProjectHistoryApi.flushProject
|
||||
MockProjectHistoryApi.flushProject
|
||||
.calledWith(this.project_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should remove the document from redis', function (done) {
|
||||
rclient_du.get(Keys.docLines({ doc_id: this.doc_id }), (error, lines) => {
|
||||
if (error != null) {
|
||||
it('should remove the document from redis', function (done) {
|
||||
docUpdaterRedis.get(
|
||||
Keys.docLines({ doc_id: this.doc_id }),
|
||||
(error, lines) => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
expect(lines).to.not.exist
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
-  describe('when the updated doc is too large for the body parser', function () {
+  const DOC_TOO_LARGE_TEST_CASES = [
+    {
+      desc: 'when the updated doc is too large for the body parser',
+      size: Settings.maxJsonRequestSize,
+      expectedStatusCode: 413
+    },
+    {
+      desc: 'when the updated doc is larger than the HTTP controller limit',
+      size: Settings.max_doc_length,
+      expectedStatusCode: 406
+    }
+  ]
+
+  DOC_TOO_LARGE_TEST_CASES.forEach((testCase) => {
+    describe(testCase.desc, function () {
       before(function (done) {
-        ;[this.project_id, this.doc_id] = Array.from([
-          DocUpdaterClient.randomId(),
-          DocUpdaterClient.randomId()
-        ])
+        this.project_id = DocUpdaterClient.randomId()
+        this.doc_id = DocUpdaterClient.randomId()
         MockWebApi.insertDoc(this.project_id, this.doc_id, {
           lines: this.lines,
           version: this.version
         })
         this.newLines = []
-        while (
-          JSON.stringify(this.newLines).length <
-          Settings.max_doc_length + 64 * 1024
-        ) {
+        while (JSON.stringify(this.newLines).length <= testCase.size) {
           this.newLines.push('(a long line of text)'.repeat(10000))
         }
         DocUpdaterClient.setDocLines(
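Note that the test sizes its payload with JSON.stringify(this.newLines).length, which approximates what the body parser sees on the wire, while the controller's 406 check sums line.length + 1 per line; the serialized form is always somewhat larger because of the quotes, commas and brackets JSON adds. A tiny illustration of the difference (values are illustrative):

// Serialized JSON is larger than the raw sum of line lengths.
const lines = ['(a long line of text)'.repeat(10000)]
const controllerSize = lines.reduce((size, line) => size + line.length + 1, 0)
const bodySize = JSON.stringify(lines).length // adds quotes, commas, brackets
console.log(controllerSize, bodySize) // bodySize > controllerSize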
@ -243,42 +244,43 @@ describe('Setting a document', function () {
|
|||
this.user_id,
|
||||
false,
|
||||
(error, res, body) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
this.statusCode = res.statusCode
|
||||
return setTimeout(done, 200)
|
||||
setTimeout(done, 200)
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
|
||||
after(function () {
|
||||
MockTrackChangesApi.flushDoc.reset()
|
||||
MockProjectHistoryApi.flushProject.reset()
|
||||
return MockWebApi.setDocument.reset()
|
||||
MockWebApi.setDocument.reset()
|
||||
})
|
||||
|
||||
it('should return a 413 status code', function () {
|
||||
return this.statusCode.should.equal(413)
|
||||
it(`should return a ${testCase.expectedStatusCode} status code`, function () {
|
||||
this.statusCode.should.equal(testCase.expectedStatusCode)
|
||||
})
|
||||
|
||||
it('should not send the updated doc lines to the web api', function () {
|
||||
return MockWebApi.setDocument.called.should.equal(false)
|
||||
MockWebApi.setDocument.called.should.equal(false)
|
||||
})
|
||||
|
||||
it('should not flush track changes', function () {
|
||||
return MockTrackChangesApi.flushDoc.called.should.equal(false)
|
||||
MockTrackChangesApi.flushDoc.called.should.equal(false)
|
||||
})
|
||||
|
||||
return it('should not flush project history', function () {
|
||||
return MockProjectHistoryApi.flushProject.called.should.equal(false)
|
||||
it('should not flush project history', function () {
|
||||
MockProjectHistoryApi.flushProject.called.should.equal(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the updated doc is large but under the bodyParser and HTTPController size limit', function () {
|
||||
before(function (done) {
|
||||
;[this.project_id, this.doc_id] = Array.from([
|
||||
DocUpdaterClient.randomId(),
|
||||
DocUpdaterClient.randomId()
|
||||
])
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
MockWebApi.insertDoc(this.project_id, this.doc_id, {
|
||||
lines: this.lines,
|
||||
version: this.version
|
||||
|
@ -298,35 +300,37 @@ describe('Setting a document', function () {
|
|||
this.user_id,
|
||||
false,
|
||||
(error, res, body) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
this.statusCode = res.statusCode
|
||||
return setTimeout(done, 200)
|
||||
setTimeout(done, 200)
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
|
||||
after(function () {
|
||||
MockTrackChangesApi.flushDoc.reset()
|
||||
MockProjectHistoryApi.flushProject.reset()
|
||||
return MockWebApi.setDocument.reset()
|
||||
MockWebApi.setDocument.reset()
|
||||
})
|
||||
|
||||
it('should return a 204 status code', function () {
|
||||
return this.statusCode.should.equal(204)
|
||||
this.statusCode.should.equal(204)
|
||||
})
|
||||
|
||||
return it('should send the updated doc lines to the web api', function () {
|
||||
return MockWebApi.setDocument
|
||||
it('should send the updated doc lines to the web api', function () {
|
||||
MockWebApi.setDocument
|
||||
.calledWith(this.project_id, this.doc_id, this.newLines)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('with track changes', function () {
|
||||
describe('with track changes', function () {
|
||||
before(function () {
|
||||
this.lines = ['one', 'one and a half', 'two', 'three']
|
||||
this.id_seed = '587357bd35e64f6157'
|
||||
return (this.update = {
|
||||
this.update = {
|
||||
doc: this.doc_id,
|
||||
op: [
|
||||
{
|
||||
|
@ -339,33 +343,31 @@ describe('Setting a document', function () {
|
|||
user_id: this.user_id
|
||||
},
|
||||
v: this.version
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
describe('with the undo flag', function () {
|
||||
before(function (done) {
|
||||
;[this.project_id, this.doc_id] = Array.from([
|
||||
DocUpdaterClient.randomId(),
|
||||
DocUpdaterClient.randomId()
|
||||
])
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
MockWebApi.insertDoc(this.project_id, this.doc_id, {
|
||||
lines: this.lines,
|
||||
version: this.version
|
||||
})
|
||||
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
return DocUpdaterClient.sendUpdate(
|
||||
DocUpdaterClient.sendUpdate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.update,
|
||||
(error) => {
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
// Go back to old lines, with undo flag
|
||||
return DocUpdaterClient.setDocLines(
|
||||
DocUpdaterClient.setDocLines(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.lines,
|
||||
|
@ -373,63 +375,62 @@ describe('Setting a document', function () {
|
|||
this.user_id,
|
||||
true,
|
||||
(error, res, body) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
this.statusCode = res.statusCode
|
||||
return setTimeout(done, 200)
|
||||
setTimeout(done, 200)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
return null
|
||||
})
|
||||
|
||||
after(function () {
|
||||
MockTrackChangesApi.flushDoc.reset()
|
||||
MockProjectHistoryApi.flushProject.reset()
|
||||
return MockWebApi.setDocument.reset()
|
||||
MockWebApi.setDocument.reset()
|
||||
})
|
||||
|
||||
return it('should undo the tracked changes', function (done) {
|
||||
it('should undo the tracked changes', function (done) {
|
||||
DocUpdaterClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
(error, res, data) => {
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
const { ranges } = data
|
||||
expect(ranges.changes).to.be.undefined
|
||||
return done()
|
||||
done()
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
})
|
||||
|
||||
return describe('without the undo flag', function () {
|
||||
describe('without the undo flag', function () {
|
||||
before(function (done) {
|
||||
;[this.project_id, this.doc_id] = Array.from([
|
||||
DocUpdaterClient.randomId(),
|
||||
DocUpdaterClient.randomId()
|
||||
])
|
||||
this.project_id = DocUpdaterClient.randomId()
|
||||
this.doc_id = DocUpdaterClient.randomId()
|
||||
MockWebApi.insertDoc(this.project_id, this.doc_id, {
|
||||
lines: this.lines,
|
||||
version: this.version
|
||||
})
|
||||
DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, (error) => {
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
return DocUpdaterClient.sendUpdate(
|
||||
DocUpdaterClient.sendUpdate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.update,
|
||||
(error) => {
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
// Go back to old lines, without undo flag
|
||||
return DocUpdaterClient.setDocLines(
|
||||
DocUpdaterClient.setDocLines(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.lines,
|
||||
|
@ -437,36 +438,37 @@ describe('Setting a document', function () {
|
|||
this.user_id,
|
||||
false,
|
||||
(error, res, body) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
this.statusCode = res.statusCode
|
||||
return setTimeout(done, 200)
|
||||
setTimeout(done, 200)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
return null
|
||||
})
|
||||
|
||||
after(function () {
|
||||
MockTrackChangesApi.flushDoc.reset()
|
||||
MockProjectHistoryApi.flushProject.reset()
|
||||
return MockWebApi.setDocument.reset()
|
||||
MockWebApi.setDocument.reset()
|
||||
})
|
||||
|
||||
return it('should not undo the tracked changes', function (done) {
|
||||
it('should not undo the tracked changes', function (done) {
|
||||
DocUpdaterClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
(error, res, data) => {
|
||||
if (error != null) {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
const { ranges } = data
|
||||
expect(ranges.changes.length).to.equal(1)
|
||||
return done()
|
||||
done()
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
File diff suppressed because it is too large.