mirror of
https://github.com/overleaf/overleaf.git
synced 2025-01-23 05:32:48 +00:00
add gzip support for large documents to reduce load on redis
This commit is contained in:
parent
4942038a23
commit
e3d73d4456
8 changed files with 75 additions and 17 deletions
|
@ -6,6 +6,11 @@ _ = require('underscore')
|
|||
keys = require('./RedisKeyBuilder')
|
||||
logger = require('logger-sharelatex')
|
||||
metrics = require('./Metrics')
|
||||
ZipManager = require('./ZipManager')
|
||||
|
||||
redisOptions = _.clone(Settings.redis.web)
|
||||
redisOptions.return_buffers = true
|
||||
rclientBuffer = redis.createClient(redisOptions)
|
||||
|
||||
# Make times easy to read
|
||||
minutes = 60 # seconds for Redis expire
|
||||
|
@ -44,19 +49,21 @@ module.exports = RedisManager =
|
|||
|
||||
getDoc : (doc_id, callback = (error, lines, version) ->)->
|
||||
timer = new metrics.Timer("redis.get-doc")
|
||||
multi = rclient.multi()
|
||||
# use Buffer when retrieving data as it may be gzipped
|
||||
multi = rclientBuffer.multi()
|
||||
linesKey = keys.docLines(doc_id:doc_id)
|
||||
multi.get linesKey
|
||||
multi.get keys.docVersion(doc_id:doc_id)
|
||||
multi.exec (error, result)->
|
||||
timer.done()
|
||||
return callback(error) if error?
|
||||
try
|
||||
docLines = JSON.parse result[0]
|
||||
catch e
|
||||
return callback(e)
|
||||
version = parseInt(result[1] or 0, 10)
|
||||
callback null, docLines, version
|
||||
ZipManager.uncompressIfNeeded doc_id, result, (error, result) ->
|
||||
try
|
||||
docLines = JSON.parse result[0]
|
||||
catch e
|
||||
return callback(e)
|
||||
version = parseInt(result[1] or 0, 10)
|
||||
callback null, docLines, version
|
||||
|
||||
getDocVersion: (doc_id, callback = (error, version) ->) ->
|
||||
rclient.get keys.docVersion(doc_id: doc_id), (error, version) ->
|
||||
|
@ -70,11 +77,12 @@ module.exports = RedisManager =
|
|||
callback null, len
|
||||
|
||||
setDocument : (doc_id, docLines, version, callback = (error) ->)->
|
||||
multi = rclient.multi()
|
||||
multi.set keys.docLines(doc_id:doc_id), JSON.stringify(docLines)
|
||||
multi.set keys.docVersion(doc_id:doc_id), version
|
||||
multi.incr keys.now("docsets")
|
||||
multi.exec (error, replys) -> callback(error)
|
||||
ZipManager.compressIfNeeded doc_id, JSON.stringify(docLines), (err, result) ->
|
||||
multi = rclient.multi()
|
||||
multi.set keys.docLines(doc_id:doc_id), result
|
||||
multi.set keys.docVersion(doc_id:doc_id), version
|
||||
multi.incr keys.now("docsets")
|
||||
multi.exec (error, replys) -> callback(error)
|
||||
|
||||
getPendingUpdatesForDoc : (doc_id, callback)->
|
||||
multi = rclient.multi()
|
||||
|
|
47
services/document-updater/app/coffee/ZipManager.coffee
Normal file
47
services/document-updater/app/coffee/ZipManager.coffee
Normal file
|
@ -0,0 +1,47 @@
|
|||
Settings = require('settings-sharelatex')
logger = require('logger-sharelatex')
metrics = require('./Metrics')
zlib = require('zlib')

# ZipManager transparently gzips large documents before they are written
# to redis, and gunzips them on the way back out.  Compressed entries are
# detected on read by their gzip magic bytes, so reads keep working
# whether or not compression is currently enabled for writes.

# Use the boolean value of writesEnabled.  The previous existence check
# (`writesEnabled?`) treated any present value -- including an explicit
# `false` -- as enabled.
ZIP_WRITES_ENABLED = Settings.redis.zip?.writesEnabled is true
# The settings file declares this key as camelCase `minSize`; reading the
# lowercase `minsize` (as before) always fell through to the 64kb default
# and silently ignored the configured threshold.
ZIP_MINSIZE = Settings.redis.zip?.minSize || 64*1024

module.exports = ZipManager =
	# Convert a redis multi-get result of [docLines, version] Buffers into
	# strings, gunzipping docLines when it starts with the gzip magic
	# header.  Calls back with (error, [text, version]).
	uncompressIfNeeded: (doc_id, result, callback) ->
		# result is an array of [text, version]. Each entry is a node
		# Buffer object which we need to convert to strings on output

		# first make sure the version (result[1]) is returned as a string
		if result?[1]?.toString?
			result[1] = result[1].toString()

		# now uncompress the text (result[0]) if needed
		buf = result?[0]

		# Check if we have a GZIP file (magic bytes 0x1F 0x8B)
		if buf? and buf[0] == 0x1F and buf[1] == 0x8B
			zlib.gunzip buf, (err, newbuf) ->
				if err?
					logger.err doc_id:doc_id, err:err, "error uncompressing doc"
					callback(err, null)
				else
					logger.log doc_id:doc_id, fromBytes: buf.length, toChars: newbuf.length, factor: buf.length/newbuf.length, "uncompressed successfully"
					result[0] = newbuf.toString()
					callback(null, result)
		else
			# if we don't have a GZIP file it's just a buffer of text, convert it back to a string
			if buf?.toString?
				result[0] = buf.toString()
			callback(null, result)

	# Gzip `text` before it is written to redis when compression is
	# enabled and the document exceeds the size threshold; otherwise pass
	# the text through unchanged.  Calls back with (error, bufferOrText).
	compressIfNeeded: (doc_id, text, callback) ->
		if ZIP_WRITES_ENABLED and ZIP_MINSIZE > 0 and text.length > ZIP_MINSIZE
			zlib.gzip text, (err, buf) ->
				if err?
					logger.err doc_id:doc_id, err:err, "error compressing doc"
					callback(err, null)
				else
					logger.log doc_id:doc_id, fromChars: text.length, toBytes: buf.length, factor: buf.length/text.length , "compressed successfully"
					callback(null, buf)
		else
			callback(null, text)
|
|
@ -20,6 +20,9 @@ module.exports =
|
|||
port:"6379"
|
||||
host:"localhost"
|
||||
password:""
|
||||
zip:
|
||||
minSize: 8*1024
|
||||
writesEnabled: true
|
||||
|
||||
mongo:
|
||||
url: 'mongodb://127.0.0.1/sharelatex'
|
||||
|
|
|
@ -11,7 +11,7 @@ describe "RedisManager.clearDocFromPendingUpdatesSet", ->
|
|||
@callback = sinon.stub()
|
||||
@RedisManager = SandboxedModule.require modulePath, requires:
|
||||
"redis-sharelatex" : createClient: () =>
|
||||
@rclient = auth:->
|
||||
@rclient ?= auth:-> # only assign one rclient
|
||||
"logger-sharelatex": {}
|
||||
|
||||
@rclient.srem = sinon.stub().callsArg(2)
|
||||
|
|
|
@ -9,7 +9,7 @@ describe "RedisManager.getDocsWithPendingUpdates", ->
|
|||
@callback = sinon.stub()
|
||||
@RedisManager = SandboxedModule.require modulePath, requires:
|
||||
"redis-sharelatex" : createClient: () =>
|
||||
@rclient = auth:->
|
||||
@rclient ?= auth:->
|
||||
"logger-sharelatex": {}
|
||||
|
||||
@docs = [{
|
||||
|
|
|
@ -9,7 +9,7 @@ describe "RedisManager.getPreviousDocOpsTests", ->
|
|||
@callback = sinon.stub()
|
||||
@RedisManager = SandboxedModule.require modulePath, requires:
|
||||
"redis-sharelatex" : createClient: () =>
|
||||
@rclient =
|
||||
@rclient ?=
|
||||
auth: ->
|
||||
multi: => @rclient
|
||||
"logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub() }
|
||||
|
|
|
@ -8,7 +8,7 @@ describe "RedisManager.pushDocOp", ->
|
|||
beforeEach ->
|
||||
@RedisManager = SandboxedModule.require modulePath, requires:
|
||||
"redis-sharelatex": createClient: () =>
|
||||
@rclient =
|
||||
@rclient ?=
|
||||
auth: () ->
|
||||
multi: () => @rclient
|
||||
"logger-sharelatex": @logger = {log: sinon.stub()}
|
||||
|
|
|
@ -8,7 +8,7 @@ describe "RedisManager.pushUncompressedHistoryOp", ->
|
|||
beforeEach ->
|
||||
@RedisManager = SandboxedModule.require modulePath, requires:
|
||||
"redis-sharelatex": createClient: () =>
|
||||
@rclient =
|
||||
@rclient ?=
|
||||
auth: () ->
|
||||
multi: () => @rclient
|
||||
"logger-sharelatex": @logger = {log: sinon.stub()}
|
||||
|
|
Loading…
Reference in a new issue