Add in limit on all JSON parsing

James Allen 2015-12-01 11:05:49 +00:00
parent 92d18d7e2e
commit 830d676f4f
7 changed files with 89 additions and 14 deletions

View file

@@ -2,6 +2,7 @@ logger = require "logger-sharelatex"
 settings = require 'settings-sharelatex'
 redis = require("redis-sharelatex")
 rclient = redis.createClient(settings.redis.web)
+SafeJsonParse = require "./SafeJsonParse"
 
 MESSAGE_SIZE_LOG_LIMIT = 1024 * 1024 # 1Mb
@@ -15,13 +16,14 @@ module.exports = DocumentUpdaterController =
 			DocumentUpdaterController._processMessageFromDocumentUpdater(io, channel, message)
 
 	_processMessageFromDocumentUpdater: (io, channel, message) ->
-		if message.length > MESSAGE_SIZE_LOG_LIMIT
-			logger.log {length: message.length, head: message.slice(0,200)}, "large message from doc updater"
-		message = JSON.parse message
-		if message.op?
-			DocumentUpdaterController._applyUpdateFromDocumentUpdater(io, message.doc_id, message.op)
-		else if message.error?
-			DocumentUpdaterController._processErrorFromDocumentUpdater(io, message.doc_id, message.error, message)
+		SafeJsonParse.parse message, (error, message) ->
+			if error?
+				logger.error {err: error, channel}, "error parsing JSON"
+				return
+			if message.op?
+				DocumentUpdaterController._applyUpdateFromDocumentUpdater(io, message.doc_id, message.op)
+			else if message.error?
+				DocumentUpdaterController._processErrorFromDocumentUpdater(io, message.doc_id, message.error, message)
 
 	_applyUpdateFromDocumentUpdater: (io, doc_id, update) ->
 		for client in io.sockets.clients(doc_id)

View file

@@ -0,0 +1,13 @@
+Settings = require "settings-sharelatex"
+logger = require "logger-sharelatex"
+
+module.exports =
+	parse: (data, callback = (error, parsed) ->) ->
+		if data.length > (Settings.max_doc_length or 2 * 1024 * 1024)
+			logger.error {head: data.slice(0,1024)}, "data too large to parse"
+			return callback new Error("data too large to parse")
+		try
+			parsed = JSON.parse(data)
+		catch e
+			return callback e
+		callback null, parsed
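
Usage sketch (illustrative only): callers hand SafeJsonParse the raw JSON string from Redis plus a Node-style callback, and only touch the parsed object when no error comes back. The handleRawMessage wrapper below is hypothetical.

	SafeJsonParse = require "./SafeJsonParse"
	logger = require "logger-sharelatex"

	# "data" is a raw JSON string, e.g. a Redis pub/sub payload
	handleRawMessage = (data) ->
		SafeJsonParse.parse data, (error, message) ->
			if error?  # oversized or malformed input is rejected with an error
				logger.error {err: error}, "error parsing JSON"
				return
			console.log "parsed message", message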

View file

@@ -1,6 +1,7 @@
 Settings = require 'settings-sharelatex'
 logger = require 'logger-sharelatex'
 redis = require("redis-sharelatex")
+SafeJsonParse = require "./SafeJsonParse"
 rclientPub = redis.createClient(Settings.redis.web)
 rclientSub = redis.createClient(Settings.redis.web)
@@ -28,9 +29,12 @@ module.exports = WebsocketLoadBalancer =
 			WebsocketLoadBalancer._processEditorEvent io, channel, message
 
 	_processEditorEvent: (io, channel, message) ->
-		message = JSON.parse(message)
-		if message.room_id == "all"
-			io.sockets.emit(message.message, message.payload...)
-		else if message.room_id?
-			io.sockets.in(message.room_id).emit(message.message, message.payload...)
+		SafeJsonParse.parse message, (error, message) ->
+			if error?
+				logger.error {err: error, channel}, "error parsing JSON"
+				return
+			if message.room_id == "all"
+				io.sockets.emit(message.message, message.payload...)
+			else if message.room_id?
+				io.sockets.in(message.room_id).emit(message.message, message.payload...)

View file

@@ -25,4 +25,6 @@ module.exports =
 	security:
 		sessionSecret: "secret-please-change"
 		cookieName:"sharelatex.sid"
+
+	max_doc_length: 2 * 1024 * 1024 # 2mb
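
The new max_doc_length setting is what SafeJsonParse consults, falling back to 2mb when it is unset, so a deployment can tighten or relax the parse limit from its own settings file. A minimal override sketch (the rest of the settings object is elided; the 5mb value is only an example):

	# settings override -- illustrative sketch
	module.exports =
		# ...existing settings...
		max_doc_length: 5 * 1024 * 1024  # refuse to JSON.parse payloads larger than 5mb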

View file

@@ -17,6 +17,8 @@ describe "DocumentUpdaterController", ->
 			"redis-sharelatex" :
 				createClient: ()=>
 					@rclient = {auth:->}
+			"./SafeJsonParse": @SafeJsonParse =
+				parse: (data, cb) => cb null, JSON.parse(data)
 
 	describe "listenForUpdatesFromDocumentUpdater", ->
 		beforeEach ->
@@ -31,6 +33,14 @@ describe "DocumentUpdaterController", ->
 			@rclient.on.calledWith("message").should.equal true
 
 	describe "_processMessageFromDocumentUpdater", ->
+		describe "with bad JSON", ->
+			beforeEach ->
+				@SafeJsonParse.parse = sinon.stub().callsArgWith 1, new Error("oops")
+				@EditorUpdatesController._processMessageFromDocumentUpdater @io, "applied-ops", "blah"
+
+			it "should log an error", ->
+				@logger.error.called.should.equal true
+
 		describe "with update", ->
 			beforeEach ->
 				@message =

View file

@@ -0,0 +1,34 @@
+require('chai').should()
+expect = require("chai").expect
+SandboxedModule = require('sandboxed-module')
+modulePath = '../../../app/js/SafeJsonParse'
+sinon = require("sinon")
+
+describe 'SafeJsonParse', ->
+	beforeEach ->
+		@SafeJsonParse = SandboxedModule.require modulePath, requires:
+			"settings-sharelatex": @Settings = {
+				max_doc_length: 16 * 1024
+			}
+			"logger-sharelatex": @logger = {error: sinon.stub()}
+
+	describe "parse", ->
+		it "should parse documents correctly", (done) ->
+			@SafeJsonParse.parse '{"foo": "bar"}', (error, parsed) ->
+				expect(parsed).to.deep.equal {foo: "bar"}
+				done()
+
+		it "should return an error on bad data", (done) ->
+			@SafeJsonParse.parse 'blah', (error, parsed) ->
+				expect(error).to.exist
+				done()
+
+		it "should return an error on oversized data", (done) ->
+			# we have a 2k overhead on top of max size
+			big_blob = Array(16*1024).join("A")
+			data = "{\"foo\": \"#{big_blob}\"}"
+			@Settings.max_doc_length = 2 * 1024
+			@SafeJsonParse.parse data, (error, parsed) =>
+				@logger.error.called.should.equal true
+				expect(error).to.exist
+				done()

View file

@@ -9,7 +9,9 @@ describe "WebsocketLoadBalancer", ->
 			"redis-sharelatex":
 				createClient: () ->
 					auth:->
-			"logger-sharelatex": { log: sinon.stub(), err: sinon.stub() }
+			"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
+			"./SafeJsonParse": @SafeJsonParse =
+				parse: (data, cb) => cb null, JSON.parse(data)
 		@io = {}
 		@WebsocketLoadBalancer.rclientPub = publish: sinon.stub()
 		@WebsocketLoadBalancer.rclientSub =
@@ -59,6 +61,14 @@ describe "WebsocketLoadBalancer", ->
 				.should.equal true
 
 	describe "_processEditorEvent", ->
+		describe "with bad JSON", ->
+			beforeEach ->
+				@SafeJsonParse.parse = sinon.stub().callsArgWith 1, new Error("oops")
+				@WebsocketLoadBalancer._processEditorEvent(@io, "editor-events", "blah")
+
+			it "should log an error", ->
+				@logger.error.called.should.equal true
+
 		describe "with a designated room", ->
 			beforeEach ->
 				@io.sockets =