Mirror of https://github.com/overleaf/overleaf.git
Merge pull request #22 from sharelatex/ja-increase-limits-on-changes

Increase limits on changes and set absolute limit on JSON data size

Commit a9b7949828. 4 changed files with 113 additions and 42 deletions.
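In short: RangesManager now accepts up to 2000 tracked changes per doc (up from 1200), and RedisManager refuses to store serialized ranges larger than 3 MB. A condensed sketch of the new size guard, with names taken from the diff below:

	MEGABYTES = 1024 * 1024
	MAX_RANGES_SIZE = 3 * MEGABYTES

	# Condensed from RedisManager._serializeRanges below: serialization now
	# reports failure through a callback rather than a return value, so an
	# oversized ranges object aborts the Redis write instead of being stored.
	serializeRanges = (ranges, callback) ->
		jsonRanges = JSON.stringify(ranges)
		if jsonRanges? and jsonRanges.length > MAX_RANGES_SIZE
			return callback new Error("ranges are too large")
		jsonRanges = null if jsonRanges == '{}'
		callback null, jsonRanges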
RangesManager.coffee

@@ -3,7 +3,7 @@ logger = require "logger-sharelatex"
 
 module.exports = RangesManager =
 	MAX_COMMENTS: 500
-	MAX_CHANGES: 1200
+	MAX_CHANGES: 2000
 
 	applyUpdate: (project_id, doc_id, entries = {}, updates = [], callback = (error, new_entries) ->) ->
 		{changes, comments} = entries
RedisManager.coffee

@@ -22,6 +22,9 @@ logHashErrors = Settings.documentupdater?.logHashErrors
 logHashReadErrors = logHashErrors?.read
 logHashWriteErrors = logHashErrors?.write
 
+MEGABYTES = 1024 * 1024
+MAX_RANGES_SIZE = 3 * MEGABYTES
+
 module.exports = RedisManager =
 	rclient: rclient
 
@@ -37,24 +40,27 @@ module.exports = RedisManager =
 			return callback(error)
 		docHash = RedisManager._computeHash(docLines)
 		logger.log project_id:project_id, doc_id:doc_id, version: version, hash:docHash, "putting doc in redis"
-		ranges = RedisManager._serializeRanges(ranges)
-		multi = rclient.multi()
-		multi.eval setScript, 1, keys.docLines(doc_id:doc_id), docLines
-		multi.set keys.projectKey({doc_id:doc_id}), project_id
-		multi.set keys.docVersion(doc_id:doc_id), version
-		multi.set keys.docHash(doc_id:doc_id), docHash
-		if ranges?
-			multi.set keys.ranges(doc_id:doc_id), ranges
-		else
-			multi.del keys.ranges(doc_id:doc_id)
-		multi.exec (error, result) ->
-			return callback(error) if error?
-			# check the hash computed on the redis server
-			writeHash = result?[0]
-			if logHashWriteErrors and writeHash? and writeHash isnt docHash
-				logger.error project_id: project_id, doc_id: doc_id, writeHash: writeHash, origHash: docHash, docLines:docLines, "hash mismatch on putDocInMemory"
-			# update docsInProject set
-			rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback
+		RedisManager._serializeRanges ranges, (error, ranges) ->
+			if error?
+				logger.error {err: error, doc_id, project_id}, error.message
+				return callback(error)
+			multi = rclient.multi()
+			multi.eval setScript, 1, keys.docLines(doc_id:doc_id), docLines
+			multi.set keys.projectKey({doc_id:doc_id}), project_id
+			multi.set keys.docVersion(doc_id:doc_id), version
+			multi.set keys.docHash(doc_id:doc_id), docHash
+			if ranges?
+				multi.set keys.ranges(doc_id:doc_id), ranges
+			else
+				multi.del keys.ranges(doc_id:doc_id)
+			multi.exec (error, result) ->
+				return callback(error) if error?
+				# check the hash computed on the redis server
+				writeHash = result?[0]
+				if logHashWriteErrors and writeHash? and writeHash isnt docHash
+					logger.error project_id: project_id, doc_id: doc_id, writeHash: writeHash, origHash: docHash, docLines:docLines, "hash mismatch on putDocInMemory"
+				# update docsInProject set
+				rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback
 
 	removeDocFromMemory : (project_id, doc_id, _callback)->
 		logger.log project_id:project_id, doc_id:doc_id, "removing doc from redis"
@@ -163,36 +169,41 @@ module.exports = RedisManager =
 
 		logger.log doc_id: doc_id, version: newVersion, hash: newHash, "updating doc in redis"
 
-		multi = rclient.multi()
-		multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines
-		multi.set keys.docVersion(doc_id:doc_id), newVersion
-		multi.set keys.docHash(doc_id:doc_id), newHash
-		if jsonOps.length > 0
-			multi.rpush keys.docOps(doc_id: doc_id), jsonOps...
-			multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL
-			multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1
-		ranges = RedisManager._serializeRanges(ranges)
-		if ranges?
-			multi.set keys.ranges(doc_id:doc_id), ranges
-		else
-			multi.del keys.ranges(doc_id:doc_id)
-		multi.exec (error, result) ->
-			return callback(error) if error?
-			# check the hash computed on the redis server
-			writeHash = result?[0]
-			if logHashWriteErrors and writeHash? and writeHash isnt newHash
-				logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, docLines:newDocLines, "hash mismatch on updateDocument"
-			return callback()
+		RedisManager._serializeRanges ranges, (error, ranges) ->
+			if error?
+				logger.error {err: error, doc_id}, error.message
+				return callback(error)
+			multi = rclient.multi()
+			multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines
+			multi.set keys.docVersion(doc_id:doc_id), newVersion
+			multi.set keys.docHash(doc_id:doc_id), newHash
+			if jsonOps.length > 0
+				multi.rpush keys.docOps(doc_id: doc_id), jsonOps...
+				multi.expire keys.docOps(doc_id: doc_id), RedisManager.DOC_OPS_TTL
+				multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1
+			if ranges?
+				multi.set keys.ranges(doc_id:doc_id), ranges
+			else
+				multi.del keys.ranges(doc_id:doc_id)
+			multi.exec (error, result) ->
+				return callback(error) if error?
+				# check the hash computed on the redis server
+				writeHash = result?[0]
+				if logHashWriteErrors and writeHash? and writeHash isnt newHash
+					logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, docLines:newDocLines, "hash mismatch on updateDocument"
+				return callback()
 
 	getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) ->
 		rclient.smembers keys.docsInProject(project_id: project_id), callback
 
-	_serializeRanges: (ranges) ->
+	_serializeRanges: (ranges, callback = (error, serializedRanges) ->) ->
 		jsonRanges = JSON.stringify(ranges)
+		if jsonRanges? and jsonRanges.length > MAX_RANGES_SIZE
+			return callback new Error("ranges are too large")
 		if jsonRanges == '{}'
			# Most doc will have empty ranges so don't fill redis with lots of '{}' keys
 			jsonRanges = null
-		return jsonRanges
+		return callback null, jsonRanges
 
 	_deserializeRanges: (ranges) ->
 		if !ranges? or ranges == ""
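JSON.stringify is synchronous, so _serializeRanges did not strictly need a callback; routing the result through one lets putDocInMemory and updateDocument reuse their existing error paths unchanged. A hypothetical way to trip the cap, assuming the RedisManager module from the diff above (the ranges shape here is illustrative, not the real tracked-changes schema):

	# Just over 3 MB of "a"s: Array(n).join("a") yields n - 1 characters,
	# and the JSON wrapping pushes the total past MAX_RANGES_SIZE.
	hugeInsert = new Array(3 * 1024 * 1024 + 2).join("a")
	RedisManager._serializeRanges {changes: [{op: {i: hugeInsert, p: 0}}]}, (error) ->
		console.log error?.message   # => "ranges are too large"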
RangesTests.coffee

@@ -264,4 +264,41 @@ describe "Ranges", ->
 			DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) =>
 				throw error if error?
 				expect(data.ranges.comments).to.be.undefined
 				done()
+
+	describe "tripping range size limit", ->
+		before (done) ->
+			@project_id = DocUpdaterClient.randomId()
+			@user_id = DocUpdaterClient.randomId()
+			@id_seed = DocUpdaterClient.randomId()
+			@doc = {
+				id: DocUpdaterClient.randomId()
+				lines: ["aaa"]
+			}
+			@i = new Array(3 * 1024 * 1024).join("a")
+			@updates = [{
+				doc: @doc.id
+				op: [{ i: @i, p: 1 }]
+				v: 0
+				meta: { user_id: @user_id, tc: @id_seed }
+			}]
+			MockWebApi.insertDoc @project_id, @doc.id, {
+				lines: @doc.lines
+				version: 0
+			}
+			jobs = []
+			for update in @updates
+				do (update) =>
+					jobs.push (callback) => DocUpdaterClient.sendUpdate @project_id, @doc.id, update, callback
+			DocUpdaterClient.preloadDoc @project_id, @doc.id, (error) =>
+				throw error if error?
+				async.series jobs, (error) ->
+					throw error if error?
+					setTimeout done, 200
+
+		it "should not update the ranges", (done) ->
+			DocUpdaterClient.getDoc @project_id, @doc.id, (error, res, data) =>
+				throw error if error?
+				ranges = data.ranges
+				expect(ranges.changes).to.be.undefined
+				done()
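Note the size of the test insertion: joining Array(n) with "a" yields n - 1 characters, so @i is one character under 3 MiB on its own; the JSON wrapping of the tracked-change entry (ids, metadata, quotes) is what pushes the serialized ranges past MAX_RANGES_SIZE, after which updateDocument returns the "ranges are too large" error before queueing any Redis writes and ranges.changes stays unset.

	# Length check for the string built in the acceptance test above.
	console.log (new Array(3 * 1024 * 1024).join("a")).length   # => 3145727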
RedisManagerTests.coffee

@@ -337,6 +337,18 @@ describe "RedisManager", ->
 
 			it "should call the callback with an error", ->
 				@callback.calledWith(new Error("null bytes found in doc lines")).should.equal true
 
+		describe "with ranges that are too big", ->
+			beforeEach ->
+				@RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length)
+				@RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large"))
+				@RedisManager.updateDocument @doc_id, @lines, @version, @ops, @ranges, @callback
+
+			it 'should log an error', ->
+				@logger.error.called.should.equal true
+
+			it "should call the callback with the error", ->
+				@callback.calledWith(new Error("ranges are too large")).should.equal true
+
 	describe "putDocInMemory", ->
 		beforeEach ->
@@ -425,6 +437,17 @@ describe "RedisManager", ->
 
 			it "should call the callback with an error", ->
 				@callback.calledWith(new Error("null bytes found in doc lines")).should.equal true
 
+		describe "with ranges that are too big", ->
+			beforeEach ->
+				@RedisManager._serializeRanges = sinon.stub().yields(new Error("ranges are too large"))
+				@RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @callback
+
+			it 'should log an error', ->
+				@logger.error.called.should.equal true
+
+			it "should call the callback with the error", ->
+				@callback.calledWith(new Error("ranges are too large")).should.equal true
+
 	describe "removeDocFromMemory", ->
 		beforeEach (done) ->
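The unit tests lean on sinon's yields: a stub configured with .yields(err) invokes the first callback it is passed with that error, so replacing _serializeRanges this way drives the new failure path synchronously. A minimal standalone illustration:

	sinon = require "sinon"

	# stub.yields(args...) makes the stub call the first function argument
	# it receives with those args, which is how the tests above force
	# _serializeRanges to fail.
	stub = sinon.stub().yields(new Error("ranges are too large"))
	stub (error) -> console.log error.message   # => "ranges are too large"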