Add in limit on all JSON parsing

Author: James Allen
Date: 2015-12-01 11:05:49 +00:00
Parent: 92d18d7e2e
Commit: 830d676f4f
7 changed files with 89 additions and 14 deletions

DocumentUpdaterController (modified)

@@ -2,6 +2,7 @@ logger = require "logger-sharelatex"
settings = require 'settings-sharelatex'
redis = require("redis-sharelatex")
rclient = redis.createClient(settings.redis.web)
SafeJsonParse = require "./SafeJsonParse"
MESSAGE_SIZE_LOG_LIMIT = 1024 * 1024 # 1Mb
@@ -15,9 +16,10 @@ module.exports = DocumentUpdaterController =
DocumentUpdaterController._processMessageFromDocumentUpdater(io, channel, message)
_processMessageFromDocumentUpdater: (io, channel, message) ->
if message.length > MESSAGE_SIZE_LOG_LIMIT
logger.log {length: message.length, head: message.slice(0,200)}, "large message from doc updater"
message = JSON.parse message
SafeJsonParse.parse message, (error, message) ->
if error?
logger.error {err: error, channel}, "error parsing JSON"
return
if message.op?
DocumentUpdaterController._applyUpdateFromDocumentUpdater(io, message.doc_id, message.op)
else if message.error?
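
The net effect of this hunk: the raw JSON.parse on messages from the applied-ops channel is replaced by the new SafeJsonParse helper, so oversized or malformed payloads are logged and dropped instead of throwing out of the Redis subscriber. A condensed before/after sketch of the call site (assembled from the hunk above; surrounding code omitted):

# Before: the payload is parsed directly; large messages are only logged,
# and invalid JSON throws inside the Redis message handler.
message = JSON.parse message

# After: SafeJsonParse enforces a size cap and reports failures via the callback.
SafeJsonParse.parse message, (error, message) ->
	if error?
		logger.error {err: error, channel}, "error parsing JSON"
		return
	# ...continue with the message.op / message.error handling shown above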

SafeJsonParse (new file)

@@ -0,0 +1,13 @@
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"

module.exports =
	parse: (data, callback = (error, parsed) ->) ->
		if data.length > (Settings.max_doc_length or 2 * 1024 * 1024)
			logger.error {head: data.slice(0,1024)}, "data too large to parse"
			return callback new Error("data too large to parse")
		try
			parsed = JSON.parse(data)
		catch e
			return callback e
		callback null, parsed
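
For reference, a minimal usage sketch of the new helper (illustration only, not part of the commit; handleRawMessage and the example message are made up). Both oversized and malformed payloads come back as an error through the callback rather than as an exception:

SafeJsonParse = require "./SafeJsonParse"
logger = require "logger-sharelatex"

# Hypothetical caller, mirroring how the controllers in this commit use the helper.
handleRawMessage = (channel, raw) ->
	SafeJsonParse.parse raw, (error, message) ->
		if error?
			# reached when raw exceeds Settings.max_doc_length (2mb by default) or is not valid JSON
			logger.error {err: error, channel}, "error parsing JSON"
			return
		console.log "parsed message on", channel, message

handleRawMessage "editor-events", '{"room_id": "all", "message": "ping", "payload": []}'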

WebsocketLoadBalancer (modified)

@@ -1,6 +1,7 @@
Settings = require 'settings-sharelatex'
logger = require 'logger-sharelatex'
redis = require("redis-sharelatex")
SafeJsonParse = require "./SafeJsonParse"
rclientPub = redis.createClient(Settings.redis.web)
rclientSub = redis.createClient(Settings.redis.web)
@@ -28,7 +29,10 @@ module.exports = WebsocketLoadBalancer =
WebsocketLoadBalancer._processEditorEvent io, channel, message
_processEditorEvent: (io, channel, message) ->
message = JSON.parse(message)
SafeJsonParse.parse message, (error, message) ->
if error?
logger.error {err: error, channel}, "error parsing JSON"
return
if message.room_id == "all"
io.sockets.emit(message.message, message.payload...)
else if message.room_id?
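
These messages arrive over Redis pub/sub, so the publisher controls the payload size. For context, a sketch of the publishing side whose output the subscriber above now parses defensively (the event name is made up; the room_id / message / payload shape matches the fields used in the hunk):

rclientPub.publish "editor-events", JSON.stringify {
	room_id: "all"
	message: "some-editor-event"
	payload: []
}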

settings defaults (modified)

@@ -26,3 +26,5 @@ module.exports =
sessionSecret: "secret-please-change"
cookieName:"sharelatex.sid"
max_doc_length: 2 * 1024 * 1024 # 2mb
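
SafeJsonParse reads this value as its size cap and falls back to 2mb when the setting is absent, so this default just makes the limit explicit. A sketch of raising it in a deployment-specific settings override (the override file and the 4mb figure are illustrative, not part of the commit):

# example settings override (illustrative only)
module.exports =
	# allow JSON payloads up to 4mb instead of the 2mb default
	max_doc_length: 4 * 1024 * 1024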

DocumentUpdaterController tests (modified)

@@ -17,6 +17,8 @@ describe "DocumentUpdaterController", ->
"redis-sharelatex" :
createClient: ()=>
@rclient = {auth:->}
"./SafeJsonParse": @SafeJsonParse =
parse: (data, cb) => cb null, JSON.parse(data)
describe "listenForUpdatesFromDocumentUpdater", ->
beforeEach ->
@@ -31,6 +33,14 @@ describe "DocumentUpdaterController", ->
@rclient.on.calledWith("message").should.equal true
describe "_processMessageFromDocumentUpdater", ->
describe "with bad JSON", ->
beforeEach ->
@SafeJsonParse.parse = sinon.stub().callsArgWith 1, new Error("oops")
@EditorUpdatesController._processMessageFromDocumentUpdater @io, "applied-ops", "blah"
it "should log an error", ->
@logger.error.called.should.equal true
describe "with update", ->
beforeEach ->
@message =

SafeJsonParse tests (new file)

@@ -0,0 +1,34 @@
require('chai').should()
expect = require("chai").expect
SandboxedModule = require('sandboxed-module')
modulePath = '../../../app/js/SafeJsonParse'
sinon = require("sinon")

describe 'SafeJsonParse', ->
	beforeEach ->
		@SafeJsonParse = SandboxedModule.require modulePath, requires:
			"settings-sharelatex": @Settings = {
				max_doc_length: 16 * 1024
			}
			"logger-sharelatex": @logger = {error: sinon.stub()}

	describe "parse", ->
		it "should parse documents correctly", (done) ->
			@SafeJsonParse.parse '{"foo": "bar"}', (error, parsed) ->
				expect(parsed).to.deep.equal {foo: "bar"}
				done()

		it "should return an error on bad data", (done) ->
			@SafeJsonParse.parse 'blah', (error, parsed) ->
				expect(error).to.exist
				done()

		it "should return an error on oversized data", (done) ->
			# we have a 2k overhead on top of max size
			big_blob = Array(16*1024).join("A")
			data = "{\"foo\": \"#{big_blob}\"}"
			@Settings.max_doc_length = 2 * 1024
			@SafeJsonParse.parse data, (error, parsed) =>
				@logger.error.called.should.equal true
				expect(error).to.exist
				done()

WebsocketLoadBalancer tests (modified)

@@ -9,7 +9,9 @@ describe "WebsocketLoadBalancer", ->
"redis-sharelatex":
createClient: () ->
auth:->
"logger-sharelatex": { log: sinon.stub(), err: sinon.stub() }
"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
"./SafeJsonParse": @SafeJsonParse =
parse: (data, cb) => cb null, JSON.parse(data)
@io = {}
@WebsocketLoadBalancer.rclientPub = publish: sinon.stub()
@WebsocketLoadBalancer.rclientSub =
@@ -59,6 +61,14 @@ describe "WebsocketLoadBalancer", ->
.should.equal true
describe "_processEditorEvent", ->
describe "with bad JSON", ->
beforeEach ->
@SafeJsonParse.parse = sinon.stub().callsArgWith 1, new Error("oops")
@WebsocketLoadBalancer._processEditorEvent(@io, "editor-events", "blah")
it "should log an error", ->
@logger.error.called.should.equal true
describe "with a designated room", ->
beforeEach ->
@io.sockets =