diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js
index 73d85f60d5..418e3ec6d4 100644
--- a/services/document-updater/app/js/RedisManager.js
+++ b/services/document-updater/app/js/RedisManager.js
@@ -72,6 +72,13 @@ module.exports = RedisManager = {
       logger.error({ err: error, doc_id, docLines }, error.message)
       return callback(error)
     }
+    // Do a cheap size check on the serialized blob.
+    if (docLines.length > Settings.max_doc_length) {
+      const docSize = docLines.length
+      const err = new Error('blocking doc insert into redis: doc is too large')
+      logger.error({ project_id, doc_id, err, docSize }, err.message)
+      return callback(err)
+    }
     const docHash = RedisManager._computeHash(docLines)
     // record bytes sent to redis
     metrics.summary('redis.docLines', docLines.length, { status: 'set' })
@@ -461,6 +468,13 @@ module.exports = RedisManager = {
         logger.error({ err: error, doc_id, newDocLines }, error.message)
         return callback(error)
       }
+      // Do a cheap size check on the serialized blob.
+      if (newDocLines.length > Settings.max_doc_length) {
+        const err = new Error('blocking doc update: doc is too large')
+        const docSize = newDocLines.length
+        logger.error({ project_id, doc_id, err, docSize }, err.message)
+        return callback(err)
+      }
       const newHash = RedisManager._computeHash(newDocLines)
 
       const opVersions = appliedOps.map((op) => (op != null ? op.v : undefined))
diff --git a/services/document-updater/app/js/ShareJsUpdateManager.js b/services/document-updater/app/js/ShareJsUpdateManager.js
index 607ae2d9fa..8ae91df32c 100644
--- a/services/document-updater/app/js/ShareJsUpdateManager.js
+++ b/services/document-updater/app/js/ShareJsUpdateManager.js
@@ -87,6 +87,20 @@ module.exports = ShareJsUpdateManager = {
       if (error != null) {
         return callback(error)
       }
+      const docSizeAfter = data.snapshot.length
+      if (docSizeAfter > Settings.max_doc_length) {
+        const docSizeBefore = lines.join('\n').length
+        const err = new Error(
+          'blocking persistence of ShareJs update: doc size exceeds limits'
+        )
+        logger.error(
+          { project_id, doc_id, err, docSizeBefore, docSizeAfter },
+          err.message
+        )
+        metrics.inc('sharejs.other-error')
+        const publicError = 'Update takes doc over max doc size'
+        return callback(publicError)
+      }
       // only check hash when present and no other updates have been applied
       if (update.hash != null && incomingUpdateVersion === version) {
         const ourHash = ShareJsUpdateManager._computeHash(data.snapshot)
diff --git a/services/document-updater/test/acceptance/js/SizeCheckTests.js b/services/document-updater/test/acceptance/js/SizeCheckTests.js
new file mode 100644
index 0000000000..288cc485e1
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/SizeCheckTests.js
@@ -0,0 +1,129 @@
+const { expect } = require('chai')
+const Settings = require('settings-sharelatex')
+
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
+describe('SizeChecks', function () {
+  before(function (done) {
+    DocUpdaterApp.ensureRunning(done)
+  })
+  beforeEach(function () {
+    this.version = 0
+    this.project_id = DocUpdaterClient.randomId()
+    this.doc_id = DocUpdaterClient.randomId()
+    this.update = {
+      doc: this.doc_id,
+      op: [
+        {
+          i: 'insert some more lines that will bring it above the limit\n',
+          p: 42
+        }
+      ],
+      v: this.version
+    }
+  })
+
+  describe('when a doc is above the doc size limit already', function () {
+    beforeEach(function () {
+      this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 + 1)]
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines,
+        v: this.version
+      })
+    })
+
+    it('should error when fetching the doc', function (done) {
+      DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res) => {
+        if (error) return done(error)
+        expect(res.statusCode).to.equal(500)
+        done()
+      })
+    })
+
+    describe('when trying to update', function () {
+      beforeEach(function (done) {
+        const update = {
+          doc: this.doc_id,
+          op: this.update.op,
+          v: this.version
+        }
+        DocUpdaterClient.sendUpdate(
+          this.project_id,
+          this.doc_id,
+          update,
+          (error) => {
+            if (error != null) {
+              throw error
+            }
+            setTimeout(done, 200)
+          }
+        )
+      })
+
+      it('should still error when fetching the doc', function (done) {
+        DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res) => {
+          if (error) return done(error)
+          expect(res.statusCode).to.equal(500)
+          done()
+        })
+      })
+    })
+  })
+
+  describe('when a doc is just below the doc size limit', function () {
+    beforeEach(function () {
+      this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 - 1)]
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines,
+        v: this.version
+      })
+    })
+
+    it('should be able to fetch the doc', function (done) {
+      DocUpdaterClient.getDoc(
+        this.project_id,
+        this.doc_id,
+        (error, res, doc) => {
+          if (error) return done(error)
+          expect(doc.lines).to.deep.equal(this.lines)
+          done()
+        }
+      )
+    })
+
+    describe('when trying to update', function () {
+      beforeEach(function (done) {
+        const update = {
+          doc: this.doc_id,
+          op: this.update.op,
+          v: this.version
+        }
+        DocUpdaterClient.sendUpdate(
+          this.project_id,
+          this.doc_id,
+          update,
+          (error) => {
+            if (error != null) {
+              throw error
+            }
+            setTimeout(done, 200)
+          }
+        )
+      })
+
+      it('should not update the doc', function (done) {
+        DocUpdaterClient.getDoc(
+          this.project_id,
+          this.doc_id,
+          (error, res, doc) => {
+            if (error) return done(error)
+            expect(doc.lines).to.deep.equal(this.lines)
+            done()
+          }
+        )
+      })
+    })
+  })
+})
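
Reviewer note: the three guards above apply one and the same size check at each point where document content is written into Redis: putDocInMemory (initial load of a doc), updateDocument (write-back after ops are applied), and the ShareJs applyUpdate callback (before an applied op is persisted). Each compares the length of the serialized content against Settings.max_doc_length (2 MB in the default document-updater settings, though deployments can override it). The sketch below is for illustration only; the helper name checkDocSize is hypothetical, and the diff inlines the check at each call site rather than sharing a helper.

    // Sketch only, not part of the change: the diff inlines this check.
    const Settings = require('settings-sharelatex')

    // `serializedDoc` is the JSON-stringified lines array (RedisManager)
    // or the ShareJs snapshot string (ShareJsUpdateManager).
    function checkDocSize(serializedDoc, message) {
      if (serializedDoc.length > Settings.max_doc_length) {
        const err = new Error(message)
        err.docSize = serializedDoc.length // surfaced as `docSize` in the logs above
        return err
      }
      return null // within limits; the caller proceeds
    }

    // Usage mirroring the putDocInMemory guard:
    //   const err = checkDocSize(docLines, 'blocking doc insert into redis: doc is too large')
    //   if (err) return callback(err)

Checking the serialized blob keeps the guard cheap (a single length comparison on a string that already exists at that point), which is why the same pattern is repeated at all three choke points instead of re-measuring the doc line by line.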