Merge pull request #175 from overleaf/jpa-double-down-doc-size-limit

[misc] double down on enforcing doc size limits

Commit f97117a6ba, 3 changed files with 157 additions and 0 deletions
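The same guard is added in three places: before a freshly loaded doc is written to redis, before an updated doc is written back, and before a ShareJs update is persisted. In each case the serialized length of the doc is compared against Settings.max_doc_length and the write is rejected through the callback if it exceeds the limit. A minimal sketch of that shared guard as a standalone helper (the name checkDocSize is illustrative only, not part of the diff); the actual hunks follow below:

const Settings = require('settings-sharelatex')

// Sketch of the guard pattern used throughout this PR: reject any write whose
// serialized doc blob is longer than the configured maximum.
function checkDocSize(serializedDoc, callback) {
  if (serializedDoc.length > Settings.max_doc_length) {
    return callback(new Error('doc is too large'))
  }
  return callback(null)
}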
@@ -72,6 +72,13 @@ module.exports = RedisManager = {
       logger.error({ err: error, doc_id, docLines }, error.message)
       return callback(error)
     }
+    // Do a cheap size check on the serialized blob.
+    if (docLines.length > Settings.max_doc_length) {
+      const docSize = docLines.length
+      const err = new Error('blocking doc insert into redis: doc is too large')
+      logger.error({ project_id, doc_id, err, docSize }, err.message)
+      return callback(err)
+    }
     const docHash = RedisManager._computeHash(docLines)
     // record bytes sent to redis
     metrics.summary('redis.docLines', docLines.length, { status: 'set' })
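As far as the surrounding RedisManager code indicates, docLines here is the JSON-serialized array of lines (the logger call above sits in the serialization error path), so .length measures the exact string that would be written to redis rather than the raw text. A small standalone illustration, not part of the diff:

// Illustrative only: the size check runs on the serialized blob.
const lines = ['hello world', 'second line']
const docLines = JSON.stringify(lines) // '["hello world","second line"]'
console.log(docLines.length) // 29, the value compared against Settings.max_doc_length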
@@ -461,6 +468,13 @@ module.exports = RedisManager = {
       logger.error({ err: error, doc_id, newDocLines }, error.message)
       return callback(error)
     }
+    // Do a cheap size check on the serialized blob.
+    if (newDocLines.length > Settings.max_doc_length) {
+      const err = new Error('blocking doc update: doc is too large')
+      const docSize = newDocLines.length
+      logger.error({ project_id, doc_id, err, docSize }, err.message)
+      return callback(err)
+    }
     const newHash = RedisManager._computeHash(newDocLines)

     const opVersions = appliedOps.map((op) => (op != null ? op.v : undefined))
@@ -87,6 +87,20 @@ module.exports = ShareJsUpdateManager = {
       if (error != null) {
         return callback(error)
       }
+      const docSizeAfter = data.snapshot.length
+      if (docSizeAfter > Settings.max_doc_length) {
+        const docSizeBefore = lines.join('\n').length
+        const err = new Error(
+          'blocking persistence of ShareJs update: doc size exceeds limits'
+        )
+        logger.error(
+          { project_id, doc_id, err, docSizeBefore, docSizeAfter },
+          err.message
+        )
+        metrics.inc('sharejs.other-error')
+        const publicError = 'Update takes doc over max doc size'
+        return callback(publicError)
+      }
       // only check hash when present and no other updates have been applied
       if (update.hash != null && incomingUpdateVersion === version) {
         const ourHash = ShareJsUpdateManager._computeHash(data.snapshot)
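Unlike the two RedisManager guards, this check runs after ShareJs has applied the incoming op to the snapshot, so it catches an update that would push an under-limit doc over the limit; note that it passes a plain string (publicError) to the callback rather than an Error object. A standalone illustration of the before/after sizes (values are made up; maxDocLength stands in for Settings.max_doc_length):

// Illustrative only: an op applied to an under-limit doc can cross the limit.
const maxDocLength = 100
const docBefore = 'x'.repeat(95)
const insertedText = 'insert some more lines that will bring it above the limit\n'
const docAfter = docBefore + insertedText // the ShareJs snapshot after applying the op
console.log(docBefore.length <= maxDocLength) // true: the doc loaded and passed the redis checks
console.log(docAfter.length > maxDocLength) // true: persistence of this update is blocked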
services/document-updater/test/acceptance/js/SizeCheckTests.js (new file, 129 lines)

@@ -0,0 +1,129 @@
const { expect } = require('chai')
const Settings = require('settings-sharelatex')

const MockWebApi = require('./helpers/MockWebApi')
const DocUpdaterClient = require('./helpers/DocUpdaterClient')
const DocUpdaterApp = require('./helpers/DocUpdaterApp')

describe('SizeChecks', function () {
  before(function (done) {
    DocUpdaterApp.ensureRunning(done)
  })
  beforeEach(function () {
    this.version = 0
    this.update = {
      doc: this.doc_id,
      op: [
        {
          i: 'insert some more lines that will bring it above the limit\n',
          p: 42
        }
      ],
      v: this.version
    }
    this.project_id = DocUpdaterClient.randomId()
    this.doc_id = DocUpdaterClient.randomId()
  })

  describe('when a doc is above the doc size limit already', function () {
    beforeEach(function () {
      this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 + 1)]
      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: this.lines,
        v: this.version
      })
    })

    it('should error when fetching the doc', function (done) {
      DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res) => {
        if (error) return done(error)
        expect(res.statusCode).to.equal(500)
        done()
      })
    })

    describe('when trying to update', function () {
      beforeEach(function (done) {
        const update = {
          doc: this.doc_id,
          op: this.update.op,
          v: this.version
        }
        DocUpdaterClient.sendUpdate(
          this.project_id,
          this.doc_id,
          update,
          (error) => {
            if (error != null) {
              throw error
            }
            setTimeout(done, 200)
          }
        )
      })

      it('should still error when fetching the doc', function (done) {
        DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res) => {
          if (error) return done(error)
          expect(res.statusCode).to.equal(500)
          done()
        })
      })
    })
  })

  describe('when a doc is just below the doc size limit', function () {
    beforeEach(function () {
      this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 - 1)]
      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: this.lines,
        v: this.version
      })
    })

    it('should be able to fetch the doc', function (done) {
      DocUpdaterClient.getDoc(
        this.project_id,
        this.doc_id,
        (error, res, doc) => {
          if (error) return done(error)
          expect(doc.lines).to.deep.equal(this.lines)
          done()
        }
      )
    })

    describe('when trying to update', function () {
      beforeEach(function (done) {
        const update = {
          doc: this.doc_id,
          op: this.update.op,
          v: this.version
        }
        DocUpdaterClient.sendUpdate(
          this.project_id,
          this.doc_id,
          update,
          (error) => {
            if (error != null) {
              throw error
            }
            setTimeout(done, 200)
          }
        )
      })

      it('should not update the doc', function (done) {
        DocUpdaterClient.getDoc(
          this.project_id,
          this.doc_id,
          (error, res, doc) => {
            if (error) return done(error)
            expect(doc.lines).to.deep.equal(this.lines)
            done()
          }
        )
      })
    })
  })
})
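The fixtures lean on simple arithmetic: repeating a 10-character string max_doc_length / 10 + 1 times yields a doc 10 characters over the limit, and max_doc_length / 10 - 1 times yields one 10 characters under it (assuming max_doc_length is divisible by 10), which also leaves headroom for the few characters of JSON overhead the serialized blob adds. A standalone illustration, assuming for concreteness a round 2,000,000-character limit (the real Settings.max_doc_length is not shown in this diff):

// Illustrative arithmetic behind the test fixtures.
const maxDocLength = 2 * 1000 * 1000 // assumed round value; not taken from the diff
const aboveLimit = '0123456789'.repeat(maxDocLength / 10 + 1) // maxDocLength + 10 chars
const belowLimit = '0123456789'.repeat(maxDocLength / 10 - 1) // maxDocLength - 10 chars
console.log(aboveLimit.length > maxDocLength) // true: such a doc is rejected with a 500
console.log(belowLimit.length < maxDocLength) // true: such a doc loads, but oversizing updates are blocked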