Merge branch 'master' into bg-use-separate-redis-for-project-history

Commit ac68f59487: 15 changed files with 1644 additions and 1189 deletions.
app.coffee:

@@ -54,7 +54,7 @@ app.post '/project/:project_id/get_and_flush_if_old', HttpCont
 app.post '/project/:project_id/clearState', HttpController.clearProjectState
 app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc
 app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded
-app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc
+app.delete '/project/:project_id/doc/:doc_id', HttpController.deleteDoc
 app.delete '/project/:project_id', HttpController.deleteProject
 app.delete '/project', HttpController.deleteMultipleProjects
 app.post '/project/:project_id', HttpController.updateProject
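For orientation, a minimal sketch of how a client might exercise the renamed route; the localhost URL, the port, and the use of the request library here are illustrative assumptions, not part of this commit:

    request = require "request"

    project_id = "project-id-123"
    doc_id = "doc-id-123"

    # DELETE the doc via the renamed route; the optional ignore_flush_errors
    # query parameter is parsed by HttpController.deleteDoc (see below).
    url = "http://localhost:3003/project/#{project_id}/doc/#{doc_id}?ignore_flush_errors=true"
    request.del url, (error, res) ->
      throw error if error?
      console.log "delete returned status", res.statusCode # expect 204 No Content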
DocumentManager.coffee:

@@ -30,7 +30,7 @@ module.exports = DocumentManager =
           return callback(error) if error?
           RedisManager.setHistoryType doc_id, projectHistoryType, (error) ->
             return callback(error) if error?
-            callback null, lines, version, ranges, pathname, projectHistoryId, null, false
+            callback null, lines, version, ranges || {}, pathname, projectHistoryId, null, false
       else
         callback null, lines, version, ranges, pathname, projectHistoryId, unflushedTime, true
 
@@ -91,7 +91,7 @@ module.exports = DocumentManager =
         return callback(error) if error?
         callback null
     else
-      DocumentManager.flushAndDeleteDoc project_id, doc_id, (error) ->
+      DocumentManager.flushAndDeleteDoc project_id, doc_id, {}, (error) ->
         # There is no harm in flushing project history if the previous
         # call failed and sometimes it is required
         HistoryManager.flushProjectChangesAsync project_id
@@ -115,14 +115,18 @@ module.exports = DocumentManager =
       return callback(error) if error?
       RedisManager.clearUnflushedTime doc_id, callback
 
-  flushAndDeleteDoc: (project_id, doc_id, _callback = (error) ->) ->
+  flushAndDeleteDoc: (project_id, doc_id, options, _callback) ->
     timer = new Metrics.Timer("docManager.flushAndDeleteDoc")
     callback = (args...) ->
       timer.done()
       _callback(args...)
 
     DocumentManager.flushDocIfLoaded project_id, doc_id, (error) ->
-      return callback(error) if error?
+      if error?
+        if options.ignoreFlushErrors
+          logger.warn {project_id: project_id, doc_id: doc_id, err: error}, "ignoring flush error while deleting document"
+        else
+          return callback(error)
 
       # Flush in the background since it requires a http request
       HistoryManager.flushDocChangesAsync project_id, doc_id
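A hedged usage sketch of the new flushAndDeleteDoc signature; callers now pass an options object before the callback, and ignoreFlushErrors downgrades a failed flush to a warning so the delete can still proceed:

    DocumentManager = require "./DocumentManager"

    project_id = "project-id-123"
    doc_id = "doc-id-123"

    # With ignoreFlushErrors set, a flush failure is logged via logger.warn
    # and the doc is still removed from redis.
    DocumentManager.flushAndDeleteDoc project_id, doc_id, {ignoreFlushErrors: true}, (error) ->
      return console.error(error) if error?
      console.log "doc flushed and deleted"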
@@ -218,9 +222,9 @@ module.exports = DocumentManager =
     UpdateManager = require "./UpdateManager"
     UpdateManager.lockUpdatesAndDo DocumentManager.flushDocIfLoaded, project_id, doc_id, callback
 
-  flushAndDeleteDocWithLock: (project_id, doc_id, callback = (error) ->) ->
+  flushAndDeleteDocWithLock: (project_id, doc_id, options, callback) ->
     UpdateManager = require "./UpdateManager"
-    UpdateManager.lockUpdatesAndDo DocumentManager.flushAndDeleteDoc, project_id, doc_id, callback
+    UpdateManager.lockUpdatesAndDo DocumentManager.flushAndDeleteDoc, project_id, doc_id, options, callback
 
   acceptChangesWithLock: (project_id, doc_id, change_ids, callback = (error) ->) ->
     UpdateManager = require "./UpdateManager"
HttpController.coffee:

@@ -103,12 +103,13 @@ module.exports = HttpController =
       logger.log project_id: project_id, doc_id: doc_id, "flushed doc via http"
       res.send 204 # No Content
 
-  flushAndDeleteDoc: (req, res, next = (error) ->) ->
+  deleteDoc: (req, res, next = (error) ->) ->
     doc_id = req.params.doc_id
     project_id = req.params.project_id
-    logger.log project_id: project_id, doc_id: doc_id, "deleting doc via http"
+    ignoreFlushErrors = req.query.ignore_flush_errors == 'true'
     timer = new Metrics.Timer("http.deleteDoc")
-    DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, (error) ->
+    logger.log project_id: project_id, doc_id: doc_id, "deleting doc via http"
+    DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, { ignoreFlushErrors: ignoreFlushErrors }, (error) ->
       timer.done()
       # There is no harm in flushing project history if the previous call
       # failed and sometimes it is required
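One detail worth calling out: Express query parameters arrive as strings, so the handler compares against the literal 'true' rather than relying on truthiness. A small illustration of that parsing rule:

    # Only the exact string 'true' enables the flag; 'false', '0' and a
    # missing parameter all leave it disabled.
    parseIgnoreFlushErrors = (query) -> query.ignore_flush_errors == 'true'

    console.log parseIgnoreFlushErrors({ignore_flush_errors: 'true'})  # true
    console.log parseIgnoreFlushErrors({ignore_flush_errors: 'false'}) # false
    console.log parseIgnoreFlushErrors({})                             # false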
ProjectHistoryRedisManager.coffee:

@@ -3,9 +3,13 @@ projectHistoryKeys = Settings.redis?.project_history?.key_schema
 #rclient = require("redis-sharelatex").createClient(Settings.redis.project_history)
 rclient = require("./RedisMigrationManager").createClient(Settings.redis.project_history, Settings.redis.new_project_history)
 logger = require('logger-sharelatex')
+metrics = require('./Metrics')
 
 module.exports = ProjectHistoryRedisManager =
   queueOps: (project_id, ops..., callback = (error, projectUpdateCount) ->) ->
+    # Record metric for ops pushed onto queue
+    for op in ops
+      metrics.summary "redis.projectHistoryOps", op.length, {status: "push"}
     multi = rclient.multi()
     # Push the ops onto the project history queue
     multi.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops...
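A hedged sketch of calling queueOps with the splat signature above; the op payload here is illustrative only:

    op = JSON.stringify({doc: "doc-id-123", op: [{i: "hello", p: 0}]})

    # Each serialized op's length is recorded under status "push" before the
    # ops are rpush'd onto the project history queue.
    ProjectHistoryRedisManager.queueOps "project-id-123", op, (error, projectUpdateCount) ->
      return console.error(error) if error?
      console.log "project history queue length is now", projectUpdateCount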
ProjectManager.coffee:

@@ -52,7 +52,7 @@ module.exports = ProjectManager =
     for doc_id in (doc_ids or [])
       do (doc_id) ->
         jobs.push (callback) ->
-          DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, (error) ->
+          DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, {}, (error) ->
             if error?
               logger.error err: error, project_id: project_id, doc_id: doc_id, "error deleting doc"
               errors.push(error)
RealTimeRedisManager.coffee:

@@ -5,6 +5,7 @@ Keys = Settings.redis.documentupdater.key_schema
 logger = require('logger-sharelatex')
 os = require "os"
 crypto = require "crypto"
+metrics = require('./Metrics')
 
 HOST = os.hostname()
 RND = crypto.randomBytes(4).toString('hex') # generate a random key for this process
@@ -20,6 +21,9 @@ module.exports = RealTimeRedisManager =
     multi.exec (error, replys) ->
       return callback(error) if error?
       jsonUpdates = replys[0]
+      for jsonUpdate in jsonUpdates
+        # record metric for each update removed from queue
+        metrics.summary "redis.pendingUpdates", jsonUpdate.length, {status: "pop"}
       updates = []
       for jsonUpdate in jsonUpdates
         try
@@ -36,9 +40,13 @@ module.exports = RealTimeRedisManager =
     # create a unique message id using a counter
     message_id = "doc:#{HOST}:#{RND}-#{COUNT++}"
     data?._id = message_id
+
+    blob = JSON.stringify(data)
+    metrics.summary "redis.publish.applied-ops", blob.length
+
     # publish on separate channels for individual projects and docs when
     # configured (needs realtime to be configured for this too).
     if Settings.publishOnIndividualChannels
-      pubsubClient.publish "applied-ops:#{data.doc_id}", JSON.stringify(data)
+      pubsubClient.publish "applied-ops:#{data.doc_id}", blob
     else
-      pubsubClient.publish "applied-ops", JSON.stringify(data)
+      pubsubClient.publish "applied-ops", blob
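The refactor stringifies the message once and reuses the blob for both the size metric and the publish, instead of calling JSON.stringify twice. A sketch of the pattern in isolation, assuming metrics and a pubsub client are in scope:

    publishWithMetric = (pubsubClient, channel, data) ->
      blob = JSON.stringify(data) # serialize exactly once
      metrics.summary "redis.publish.applied-ops", blob.length
      pubsubClient.publish channel, blob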
RedisManager.coffee:

@@ -41,6 +41,8 @@ module.exports = RedisManager =
         logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message
         return callback(error)
       docHash = RedisManager._computeHash(docLines)
+      # record bytes sent to redis
+      metrics.summary "redis.docLines", docLines.length, {status: "set"}
       logger.log {project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis"
       RedisManager._serializeRanges ranges, (error, ranges) ->
         if error?
@@ -73,6 +75,7 @@ module.exports = RedisManager =
       _callback()
 
     multi = rclient.multi()
+    multi.strlen keys.docLines(doc_id:doc_id)
     multi.del keys.docLines(doc_id:doc_id)
     multi.del keys.projectKey(doc_id:doc_id)
     multi.del keys.docVersion(doc_id:doc_id)
@@ -84,8 +87,12 @@ module.exports = RedisManager =
     multi.del keys.unflushedTime(doc_id:doc_id)
     multi.del keys.lastUpdatedAt(doc_id: doc_id)
     multi.del keys.lastUpdatedBy(doc_id: doc_id)
-    multi.exec (error) ->
+    multi.exec (error, response) ->
       return callback(error) if error?
+      length = response?[0]
+      if length > 0
+        # record bytes freed in redis
+        metrics.summary "redis.docLines", length, {status: "del"}
       multi = rclient.multi()
       multi.srem keys.docsInProject(project_id:project_id), doc_id
       multi.del keys.projectState(project_id:project_id)
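Because MULTI/EXEC replies come back positionally, the strlen queued first arrives as response[0]. A minimal sketch of that pattern, assuming rclient, metrics and callback are in scope as above:

    multi = rclient.multi()
    multi.strlen "doclines:doc-id-123" # reply lands at response[0]
    multi.del "doclines:doc-id-123"    # reply lands at response[1]
    multi.exec (error, response) ->
      return callback(error) if error?
      length = response?[0]
      # a zero length means the doc was not in redis, so nothing to record
      metrics.summary "redis.docLines", length, {status: "del"} if length > 0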
@@ -125,6 +132,9 @@ module.exports = RedisManager =
         if timeSpan > MAX_REDIS_REQUEST_LENGTH
           error = new Error("redis getDoc exceeded timeout")
           return callback(error)
+        # record bytes loaded from redis
+        if docLines?
+          metrics.summary "redis.docLines", docLines.length, {status: "get"}
         # check sha1 hash value if present
         if docLines? and storedHash?
           computedHash = RedisManager._computeHash(docLines)
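The diff only shows call sites for RedisManager._computeHash; here is a hedged sketch of what such a helper plausibly computes, assuming a sha1 hex digest over the serialized lines:

    crypto = require "crypto"

    # Assumption: sha1 over the serialized doc lines, so a stored hash can be
    # compared against a hash recomputed from the loaded content.
    computeHash = (docLines) ->
      crypto.createHash('sha1').update(docLines, 'utf8').digest('hex')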
@@ -240,7 +250,8 @@ module.exports = RedisManager =
 
       opVersions = appliedOps.map (op) -> op?.v
       logger.log doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis"
-
+      # record bytes sent to redis in update
+      metrics.summary "redis.docLines", newDocLines.length, {status: "update"}
       RedisManager._serializeRanges ranges, (error, ranges) ->
         if error?
           logger.error {err: error, doc_id}, error.message
services/document-updater/package-lock.json (generated):
2644 changed lines; diff suppressed because it is too large.
services/document-updater/package.json:

@@ -21,26 +21,25 @@
   },
   "dependencies": {
     "async": "^2.5.0",
-    "bunyan": "~0.22.1",
     "coffee-script": "~1.7.0",
     "express": "3.11.0",
     "lodash": "^4.17.13",
-    "logger-sharelatex": "^1.7.0",
-    "lynx": "0.0.11",
-    "metrics-sharelatex": "^2.4.0",
+    "logger-sharelatex": "^1.9.1",
+    "metrics-sharelatex": "^2.6.2",
     "mongojs": "^2.6.0",
-    "redis-sharelatex": "^1.0.11",
-    "request": "2.47.0",
-    "requestretry": "^1.12.0",
-    "sandboxed-module": "~0.2.0",
-    "settings-sharelatex": "^1.1.0",
-    "sinon": "~1.5.2"
+    "redis-sharelatex": "^1.0.12",
+    "request": "^2.47.0",
+    "requestretry": "^4.1.0",
+    "settings-sharelatex": "^1.1.0"
   },
   "devDependencies": {
+    "bunyan": "~0.22.1",
     "chai": "^3.5.0",
     "chai-spies": "^0.7.1",
     "cluster-key-slot": "^1.0.5",
     "mocha": "^5.0.1",
+    "sandboxed-module": "~0.2.0",
+    "sinon": "~1.5.2",
     "timekeeper": "^2.0.0"
   }
 }
DocumentManagerTests.coffee:

@@ -16,7 +16,7 @@ describe "DocumentManager", ->
       "./HistoryManager": @HistoryManager =
         flushDocChangesAsync: sinon.stub()
         flushProjectChangesAsync: sinon.stub()
-      "logger-sharelatex": @logger = {log: sinon.stub()}
+      "logger-sharelatex": @logger = {log: sinon.stub(), warn: sinon.stub()}
       "./DocOpsManager": @DocOpsManager = {}
       "./Metrics": @Metrics =
         Timer: class Timer
@@ -47,7 +47,7 @@ describe "DocumentManager", ->
     beforeEach ->
       @RedisManager.removeDocFromMemory = sinon.stub().callsArg(2)
       @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2)
-      @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, @callback
+      @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, {}, @callback
 
     it "should flush the doc", ->
       @DocumentManager.flushDocIfLoaded
@@ -70,6 +70,25 @@ describe "DocumentManager", ->
         .calledWithExactly(@project_id, @doc_id)
         .should.equal true
 
+    describe "when a flush error occurs", ->
+      beforeEach ->
+        @DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2, new Error("boom!"))
+        @RedisManager.removeDocFromMemory = sinon.stub().callsArg(2)
+
+      it "should not remove the doc from redis", (done) ->
+        @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, {}, (error) =>
+          error.should.exist
+          @RedisManager.removeDocFromMemory.called.should.equal false
+          done()
+
+      describe "when ignoring flush errors", ->
+        it "should remove the doc from redis", (done) ->
+          @DocumentManager.flushAndDeleteDoc @project_id, @doc_id, { ignoreFlushErrors: true }, (error) =>
+            if error?
+              return done(error)
+            @RedisManager.removeDocFromMemory.called.should.equal true
+            done()
+
   describe "flushDocIfLoaded", ->
     describe "when the doc is in Redis", ->
       beforeEach ->
@@ -220,7 +239,7 @@ describe "DocumentManager", ->
         @DiffCodec.diffAsShareJsOp = sinon.stub().callsArgWith(2, null, @ops)
         @UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null)
         @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2)
-        @DocumentManager.flushAndDeleteDoc = sinon.stub().callsArg(2)
+        @DocumentManager.flushAndDeleteDoc = sinon.stub().callsArg(3)
 
       describe "when already loaded", ->
         beforeEach ->
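The stub index changes because sinon's callsArg(n) invokes the zero-based n-th argument as a callback; inserting the options argument before the callback shifts that index from 2 to 3. A tiny illustration:

    sinon = require "sinon"

    stub = sinon.stub().callsArg(3)
    # argument index:  0                1            2    3 (the callback)
    stub "project-id-123", "doc-id-123", {}, -> console.log "callback fired"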
@@ -276,7 +295,7 @@ describe "DocumentManager", ->
 
       it "should flush and delete the doc from the doc updater", ->
         @DocumentManager.flushAndDeleteDoc
-          .calledWith(@project_id, @doc_id)
+          .calledWith(@project_id, @doc_id, {})
           .should.equal true
 
       it "should not flush the project history", ->
HttpControllerTests.coffee:

@@ -265,21 +265,22 @@ describe "HttpController", ->
           .calledWith(new Error("oops"))
           .should.equal true
 
-  describe "flushAndDeleteDoc", ->
+  describe "deleteDoc", ->
     beforeEach ->
       @req =
         params:
           project_id: @project_id
           doc_id: @doc_id
+        query: {}
 
     describe "successfully", ->
       beforeEach ->
-        @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2)
-        @HttpController.flushAndDeleteDoc(@req, @res, @next)
+        @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(3)
+        @HttpController.deleteDoc(@req, @res, @next)
 
       it "should flush and delete the doc", ->
         @DocumentManager.flushAndDeleteDocWithLock
-          .calledWith(@project_id, @doc_id)
+          .calledWith(@project_id, @doc_id, { ignoreFlushErrors: false })
           .should.equal true
 
       it "should flush project history", ->
@@ -300,10 +301,24 @@ describe "HttpController", ->
       it "should time the request", ->
         @Metrics.Timer::done.called.should.equal true
 
+    describe "ignoring errors", ->
+      beforeEach ->
+        @req.query.ignore_flush_errors = 'true'
+        @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().yields()
+        @HttpController.deleteDoc(@req, @res, @next)
+
+      it "should delete the doc", ->
+        @DocumentManager.flushAndDeleteDocWithLock
+          .calledWith(@project_id, @doc_id, { ignoreFlushErrors: true })
+          .should.equal true
+
+      it "should return a successful No Content response", ->
+        @res.send.calledWith(204).should.equal true
+
     describe "when an errors occurs", ->
       beforeEach ->
-        @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(2, new Error("oops"))
-        @HttpController.flushAndDeleteDoc(@req, @res, @next)
+        @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArgWith(3, new Error("oops"))
+        @HttpController.deleteDoc(@req, @res, @next)
 
       it "should flush project history", ->
         @HistoryManager.flushProjectChangesAsync
ProjectHistoryRedisManagerTests.coffee:

@@ -28,6 +28,7 @@ describe "ProjectHistoryRedisManager", ->
         createClient: () => @rclient
       "logger-sharelatex":
         log:->
+      "./Metrics": @metrics = { summary: sinon.stub()}
     globals:
       JSON: @JSON = JSON
 
flushAndDeleteProjectTests.coffee:

@@ -23,7 +23,7 @@ describe "ProjectManager - flushAndDeleteProject", ->
     beforeEach (done) ->
       @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]
       @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids)
-      @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArg(2)
+      @DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArg(3)
       @ProjectManager.flushAndDeleteProjectWithLocks @project_id, {}, (error) =>
         @callback(error)
         done()
@@ -36,7 +36,7 @@ describe "ProjectManager - flushAndDeleteProject", ->
     it "should delete each doc in the project", ->
       for doc_id in @doc_ids
         @DocumentManager.flushAndDeleteDocWithLock
-          .calledWith(@project_id, doc_id)
+          .calledWith(@project_id, doc_id, {})
           .should.equal true
 
     it "should flush project history", ->
@@ -54,7 +54,7 @@ describe "ProjectManager - flushAndDeleteProject", ->
     beforeEach (done) ->
       @doc_ids = ["doc-id-1", "doc-id-2", "doc-id-3"]
       @RedisManager.getDocIdsInProject = sinon.stub().callsArgWith(1, null, @doc_ids)
-      @DocumentManager.flushAndDeleteDocWithLock = sinon.spy (project_id, doc_id, callback = (error) ->) =>
+      @DocumentManager.flushAndDeleteDocWithLock = sinon.spy (project_id, doc_id, options, callback) =>
         if doc_id == "doc-id-1"
           callback(@error = new Error("oops, something went wrong"))
         else
@@ -66,7 +66,7 @@ describe "ProjectManager - flushAndDeleteProject", ->
     it "should still flush each doc in the project", ->
       for doc_id in @doc_ids
         @DocumentManager.flushAndDeleteDocWithLock
-          .calledWith(@project_id, doc_id)
+          .calledWith(@project_id, doc_id, {})
           .should.equal true
 
     it "should still flush project history", ->
RealTimeRedisManagerTests.coffee:

@@ -25,6 +25,7 @@ describe "RealTimeRedisManager", ->
       "logger-sharelatex": { log: () -> }
       "crypto": @crypto = { randomBytes: sinon.stub().withArgs(4).returns(Buffer.from([0x1, 0x2, 0x3, 0x4])) }
       "os": @os = {hostname: sinon.stub().returns("somehost")}
+      "./Metrics": @metrics = { summary: sinon.stub()}
 
     @doc_id = "doc-id-123"
     @project_id = "project-id-123"
@@ -89,3 +90,6 @@ describe "RealTimeRedisManager", ->
 
     it "should send the op with a message id", ->
       @pubsubClient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:@message_id})).should.equal true
+
+    it "should track the payload size", ->
+      @metrics.summary.calledWith("redis.publish.applied-ops", JSON.stringify({op:"thisop",_id:@message_id}).length).should.equal true
RedisManagerTests.coffee:

@@ -48,6 +48,7 @@ describe "RedisManager", ->
         createClient: () => @rclient
       "./Metrics": @metrics =
         inc: sinon.stub()
+        summary: sinon.stub()
         Timer: class Timer
           constructor: () ->
             this.start = new Date()
@@ -670,11 +671,17 @@ describe "RedisManager", ->
 
   describe "removeDocFromMemory", ->
     beforeEach (done) ->
+      @multi.strlen = sinon.stub()
       @multi.del = sinon.stub()
       @multi.srem = sinon.stub()
       @multi.exec.yields()
       @RedisManager.removeDocFromMemory @project_id, @doc_id, done
 
+    it "should check the length of the current doclines", ->
+      @multi.strlen
+        .calledWith("doclines:#{@doc_id}")
+        .should.equal true
+
     it "should delete the lines", ->
       @multi.del
         .calledWith("doclines:#{@doc_id}")