Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-21 20:47:08 -05:00
remove redis server-side hashing for performance
We still compute the document hash in Node and check it on retrieval, but we no longer check the hash at the point of writing it to Redis, which was previously done with a Redis Lua script.
parent c64cae2248
commit 912a3a7753
2 changed files with 8 additions and 49 deletions
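In outline, the write path changes from an EVAL of the hash-returning Lua script to a plain SET, while the document hash is still computed in Node (the same SHA-1 over the serialised doc lines that the tests build with crypto). A minimal before/after sketch follows; setScript, redis.sha1hex and the crypto call come from the diff below, while computeHash, writeWithServerHashCheck and writeWithoutServerHashCheck are illustrative names, not the real RedisManager methods, which wire this through a multi inside putDocInMemory and updateDocument:

```coffee
crypto = require "crypto"

# SHA-1 of the serialised doc lines, computed in Node both before and after this change
computeHash = (docLines) ->
  crypto.createHash('sha1').update(docLines, 'utf8').digest('hex')

# Before: the Lua script wrote the value and made Redis return its own SHA-1 of it,
# so a corrupted write could be detected by comparing that against the Node-side hash.
setScript = """
  redis.call('set', KEYS[1], ARGV[1])
  return redis.sha1hex(ARGV[1])
"""

# Illustrative stand-in for the old write path (the real code does this inside a multi)
writeWithServerHashCheck = (rclient, key, docLines, callback) ->
  docHash = computeHash(docLines)
  rclient.eval setScript, 1, key, docLines, (error, writeHash) ->
    return callback(error) if error?
    if writeHash isnt docHash
      console.error {writeHash, docHash}, "hash mismatch on write"
    callback()

# Illustrative stand-in for the new write path: a plain SET with no write-time check;
# the Node-computed hash is still stored under a separate key and verified on retrieval.
writeWithoutServerHashCheck = (rclient, key, docLines, callback) ->
  rclient.set key, docLines, callback
```

The performance motivation named in the commit title comes from no longer asking Redis to hash the full document body on every write (Lua scripts run on Redis's single main thread); the trade-off is that a corrupted write is now only caught when the document is next read back.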
@@ -15,16 +15,8 @@ MAX_REDIS_REQUEST_LENGTH = 5000 # 5 seconds
 # Make times easy to read
 minutes = 60 # seconds for Redis expire
-
-# LUA script to write document and return hash
-# arguments: docLinesKey docLines
-setScript = """
-redis.call('set', KEYS[1], ARGV[1])
-return redis.sha1hex(ARGV[1])
-"""
 
 logHashErrors = Settings.documentupdater?.logHashErrors
 logHashReadErrors = logHashErrors?.read
-logHashWriteErrors = logHashErrors?.write
 
 MEGABYTES = 1024 * 1024
 MAX_RANGES_SIZE = 3 * MEGABYTES
@@ -52,7 +44,7 @@ module.exports = RedisManager =
 logger.error {err: error, doc_id, project_id}, error.message
 return callback(error)
 multi = rclient.multi()
-multi.eval setScript, 1, keys.docLines(doc_id:doc_id), docLines
+multi.set keys.docLines(doc_id:doc_id), docLines
 multi.set keys.projectKey({doc_id:doc_id}), project_id
 multi.set keys.docVersion(doc_id:doc_id), version
 multi.set keys.docHash(doc_id:doc_id), docHash
@@ -64,10 +56,6 @@ module.exports = RedisManager =
 multi.set keys.projectHistoryId(doc_id:doc_id), projectHistoryId
 multi.exec (error, result) ->
 return callback(error) if error?
-# check the hash computed on the redis server
-writeHash = result?[0]
-if logHashWriteErrors and writeHash? and writeHash isnt docHash
-logger.error project_id: project_id, doc_id: doc_id, writeHash: writeHash, origHash: docHash, docLines:docLines, "hash mismatch on putDocInMemory"
 # update docsInProject set
 rclient.sadd keys.docsInProject(project_id:project_id), doc_id, callback
 
@@ -247,7 +235,7 @@ module.exports = RedisManager =
 logger.error err: error, doc_id: doc_id, ranges: ranges, error.message
 return callback(error)
 multi = rclient.multi()
-multi.eval setScript, 1, keys.docLines(doc_id:doc_id), newDocLines # index 0
+multi.set keys.docLines(doc_id:doc_id), newDocLines # index 0
 multi.set keys.docVersion(doc_id:doc_id), newVersion # index 1
 multi.set keys.docHash(doc_id:doc_id), newHash # index 2
 multi.ltrim keys.docOps(doc_id: doc_id), -RedisManager.DOC_OPS_MAX_LENGTH, -1 # index 3
@@ -272,10 +260,6 @@ module.exports = RedisManager =
 multi.del keys.lastUpdatedBy(doc_id: doc_id) # index 9
 multi.exec (error, result) ->
 return callback(error) if error?
-# check the hash computed on the redis server
-writeHash = result?[0]
-if logHashWriteErrors and writeHash? and writeHash isnt newHash
-logger.error doc_id: doc_id, writeHash: writeHash, origHash: newHash, docLines:newDocLines, "hash mismatch on updateDocument"
 
 # length of uncompressedHistoryOps queue (index 7)
 docUpdateCount = result[7]
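The read-side check referred to in the commit message (and the surviving logHashReadErrors setting in the first hunk) is unchanged by this commit. Roughly, it works like the following sketch, where the hash stored next to the doclines is compared against one recomputed in Node when the document is read back; checkDocLinesOnRead is an illustrative name, not the function in the file:

```coffee
crypto = require "crypto"

# Sketch of the retained retrieval-time check: recompute the SHA-1 in Node and
# compare it with the hash that was stored alongside the document at write time.
checkDocLinesOnRead = (storedDocLines, storedHash, logHashReadErrors, logger) ->
  computedHash = crypto.createHash('sha1').update(storedDocLines, 'utf8').digest('hex')
  if logHashReadErrors and storedHash? and computedHash isnt storedHash
    logger.error {computedHash, storedHash}, "hash mismatch on retrieval"
  computedHash
```

The remaining hunks below are from the RedisManager unit tests, which drop the eval stub and the "corrupted write" cases and assert on multi.set instead.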
@@ -351,7 +351,6 @@ describe "RedisManager", ->
 @multi.expire = sinon.stub()
 @multi.ltrim = sinon.stub()
 @multi.del = sinon.stub()
-@multi.eval = sinon.stub()
 @multi.exec = sinon.stub().callsArgWith(0, null,
 [@hash, null, null, null, null, null, null, @doc_update_list_length, null, null]
 )
@@ -374,8 +373,8 @@ describe "RedisManager", ->
 .should.equal true
 
 it "should set the doclines", ->
-@multi.eval
-.calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines))
+@multi.set
+.calledWith("doclines:#{@doc_id}", JSON.stringify(@lines))
 .should.equal true
 
 it "should set the version", ->
@@ -486,8 +485,8 @@ describe "RedisManager", ->
 .should.equal false
 
 it "should still set the doclines", ->
-@multi.eval
-.calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines))
+@multi.set
+.calledWith("doclines:#{@doc_id}", JSON.stringify(@lines))
 .should.equal true
 
 describe "with empty ranges", ->
@@ -505,20 +504,6 @@ describe "RedisManager", ->
 .calledWith("Ranges:#{@doc_id}")
 .should.equal true
 
-describe "with a corrupted write", ->
-beforeEach ->
-@badHash = "INVALID-HASH-VALUE"
-@multi.exec = sinon.stub().callsArgWith(0, null, [@badHash])
-@RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length)
-@RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback
-
-it 'should log a hash error', ->
-@logger.error.calledWith()
-.should.equal true
-
-it "should call the callback", ->
-@callback.called.should.equal true
-
 describe "with null bytes in the serialized doc lines", ->
 beforeEach ->
 @RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length)
@@ -567,7 +552,6 @@ describe "RedisManager", ->
 @multi.set = sinon.stub()
 @rclient.sadd = sinon.stub().yields()
 @multi.del = sinon.stub()
-@multi.eval = sinon.stub()
 @lines = ["one", "two", "three", "これは"]
 @version = 42
 @hash = crypto.createHash('sha1').update(JSON.stringify(@lines),'utf8').digest('hex')
@@ -580,8 +564,8 @@ describe "RedisManager", ->
 @RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, done
 
 it "should set the lines", ->
-@multi.eval
-.calledWith(sinon.match(/redis.call/), 1, "doclines:#{@doc_id}", JSON.stringify(@lines))
+@multi.set
+.calledWith("doclines:#{@doc_id}", JSON.stringify(@lines))
 .should.equal true
 
 it "should set the version", ->
@@ -637,15 +621,6 @@ describe "RedisManager", ->
 .calledWith("Ranges:#{@doc_id}", JSON.stringify(@ranges))
 .should.equal false
 
-describe "with a corrupted write", ->
-beforeEach (done) ->
-@multi.exec = sinon.stub().callsArgWith(0, null, ["INVALID-HASH-VALUE"])
-@RedisManager.putDocInMemory @project_id, @doc_id, @lines, @version, @ranges, @pathname, @projectHistoryId, done
-
-it 'should log a hash error', ->
-@logger.error.calledWith()
-.should.equal true
-
 describe "with null bytes in the serialized doc lines", ->
 beforeEach ->
 @_stringify = JSON.stringify