Mirror of https://github.com/overleaf/overleaf.git (synced 2025-04-11 13:34:00 +00:00)

Commit a11d6dcc48: Merge branch 'master' into ho-docker
8 changed files with 68 additions and 18 deletions
@@ -181,14 +181,19 @@ module.exports = DocumentManager =
       callback(null, lines, version)
 
   resyncDocContents: (project_id, doc_id, callback) ->
     logger.log {project_id: project_id, doc_id: doc_id}, "start resyncing doc contents"
     RedisManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) ->
       return callback(error) if error?
 
       if !lines? or !version?
         logger.log {project_id: project_id, doc_id: doc_id}, "resyncing doc contents - not found in redis - retrieving from web"
         PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) ->
-          return callback(error) if error?
+          if error?
+            logger.error {project_id: project_id, doc_id: doc_id, getDocError: error}, "resyncing doc contents - error retrieving from web"
+            return callback(error)
           ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback
       else
         logger.log {project_id: project_id, doc_id: doc_id}, "resyncing doc contents - doc in redis - will queue in redis"
         ProjectHistoryRedisManager.queueResyncDocContent project_id, projectHistoryId, doc_id, lines, version, pathname, callback
 
   getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) ->
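resyncDocContents pushes the full current content of a doc onto the project-history queue: it prefers the copy already in Redis and only falls back to fetching from web (PersistenceManager) when the doc is not loaded; either branch ends in ProjectHistoryRedisManager.queueResyncDocContent. The change in this hunk replaces the bare early return on a web fetch error with an explicit logger.error before returning. A minimal, self-contained sketch of that fallback shape, using stand-in functions rather than the real managers:

getFromCache = (id, cb) -> cb(null, null)                       # stand-in for RedisManager.getDoc: pretend "not in redis"
getFromWeb   = (id, cb) -> cb(null, ["line 1", "line 2"], 42)   # stand-in for PersistenceManager.getDoc
queueContent = (id, lines, version, cb) -> cb()                 # stand-in for queueResyncDocContent

resyncDocContents = (doc_id, callback) ->
  getFromCache doc_id, (error, lines, version) ->
    return callback(error) if error?
    if !lines? or !version?
      getFromWeb doc_id, (error, lines, version) ->
        if error?
          console.error "error retrieving doc from web", error
          return callback(error)
        queueContent doc_id, lines, version, callback
    else
      queueContent doc_id, lines, version, callback

resyncDocContents "doc-1", (error) -> console.log "queued resync", error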
@@ -65,10 +65,12 @@ module.exports = HistoryManager =
     newBlock = Math.floor(length / threshold)
     return newBlock != prevBlock
 
+  MAX_PARALLEL_REQUESTS: 4
+
   resyncProjectHistory: (project_id, projectHistoryId, docs, files, callback) ->
     ProjectHistoryRedisManager.queueResyncProjectStructure project_id, projectHistoryId, docs, files, (error) ->
       return callback(error) if error?
       DocumentManager = require "./DocumentManager"
       resyncDoc = (doc, cb) ->
         DocumentManager.resyncDocContentsWithLock project_id, doc.doc, cb
-      async.each docs, resyncDoc, callback
+      async.eachLimit docs, HistoryManager.MAX_PARALLEL_REQUESTS, resyncDoc, callback
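The switch from async.each to async.eachLimit, together with the new MAX_PARALLEL_REQUESTS constant, caps how many docs are resynced concurrently instead of starting a request per doc all at once. A standalone sketch of the pattern (the doc list and the 10 ms of "work" are made up for illustration):

async = require "async"

MAX_PARALLEL_REQUESTS = 4
docs = ({doc: "doc-#{i}"} for i in [1..10])

resyncDoc = (doc, cb) ->
  # stand-in for DocumentManager.resyncDocContentsWithLock
  setTimeout (-> console.log "resynced #{doc.doc}"; cb()), 10

# at most MAX_PARALLEL_REQUESTS calls to resyncDoc are in flight at any time;
# the final callback fires once every doc has finished (or on the first error)
async.eachLimit docs, MAX_PARALLEL_REQUESTS, resyncDoc, (error) ->
  console.log "all docs resynced", error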
@@ -4,8 +4,18 @@ rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdate
 logger = require('logger-sharelatex')
 
 module.exports = ProjectHistoryRedisManager =
-  queueOps: (project_id, ops..., callback) ->
-    rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops..., callback
+  queueOps: (project_id, ops..., callback = (error, projectUpdateCount) ->) ->
+    multi = rclient.multi()
+    # Push the ops onto the project history queue
+    multi.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops...
+    # To record the age of the oldest op on the queue set a timestamp if not
+    # already present (SETNX).
+    multi.setnx projectHistoryKeys.projectHistoryFirstOpTimestamp({project_id}), Date.now()
+    multi.exec (error, result) ->
+      return callback(error) if error?
+      # return the number of entries pushed onto the project history queue
+      callback null, result[0]
 
+
   queueRenameEntity: (project_id, projectHistoryId, entity_type, entity_id, user_id, projectUpdate, callback) ->
     projectUpdate =
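queueOps now does two things in one Redis MULTI/EXEC transaction: RPUSH appends the ops to the project's history queue, and SETNX records a timestamp only if the key does not exist yet, so the value always marks the oldest un-flushed op and is untouched by later pushes. A rough sketch of the same pattern against a plain node redis client (key names taken from the diff; client setup and error handling kept minimal, not the project's own wiring):

redis = require "redis"
rclient = redis.createClient()

queueOps = (project_id, ops, callback) ->
  multi = rclient.multi()
  # append the ops to this project's history queue
  multi.rpush "ProjectHistory:Ops:#{project_id}", ops...
  # record when the queue first became non-empty; SETNX is a no-op if the key already exists
  multi.setnx "ProjectHistory:FirstOpTimestamp:#{project_id}", Date.now()
  multi.exec (error, results) ->
    return callback(error) if error?
    # results[0] is the RPUSH reply: the queue length after the push
    callback null, results[0]

queueOps "project-1", ['{"op": "example"}'], (error, count) ->
  console.log "ops on queue:", count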
@@ -10,7 +10,7 @@ module.exports =
 
   apis:
     web:
-      url: "http://#{process.env["WEB_HOST"] or "localhost"}:3000"
+      url: "http://#{process.env["WEB_HOST"] or "localhost"}:#{process.env['WEB_PORT'] or 3000}"
       user: "sharelatex"
       pass: "password"
     trackchanges:
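The web API URL now honours a WEB_PORT environment variable rather than hard-coding port 3000, which matters when the web service is exposed on a non-default port, as in the Docker setup this branch targets. Purely illustrative values:

# with WEB_HOST=web and WEB_PORT=4000 in the environment:
url = "http://#{process.env["WEB_HOST"] or "localhost"}:#{process.env['WEB_PORT'] or 3000}"
console.log url   # http://web:4000  (falls back to http://localhost:3000 if both are unset)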
@@ -74,6 +74,7 @@ module.exports =
     project_history:
       key_schema:
         projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}"
+        projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:#{project_id}"
       # cluster: [{
       #   port: "7000"
       #   host: "localhost"
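Each key_schema entry is a small function that maps ids onto a concrete Redis key, so the rest of the code never builds key strings by hand. For example (the project id here is made up):

keys =
  projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}"
  projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:#{project_id}"

console.log keys.projectHistoryFirstOpTimestamp(project_id: "507f191e810c19729de860ea")
# ProjectHistory:FirstOpTimestamp:507f191e810c19729de860ea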
@@ -33,7 +33,7 @@ describe "Applying updates to a doc", ->
     before (done) ->
       [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
       sinon.spy MockWebApi, "getDocument"
-
+      @startTime = Date.now()
       MockWebApi.insertDoc @project_id, @doc_id, {lines: @lines, version: @version}
       DocUpdaterClient.sendUpdate @project_id, @doc_id, @update, (error) ->
         throw error if error?
@@ -67,6 +67,27 @@ describe "Applying updates to a doc", ->
         JSON.parse(updates[0]).op.should.deep.equal @update.op
         done()
 
+    it "should set the first op timestamp", (done) ->
+      rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) =>
+        throw error if error?
+        result.should.be.within(@startTime, Date.now())
+        @firstOpTimestamp = result
+        done()
+
+    describe "when sending another update", ->
+      before (done) ->
+        @second_update = Object.create(@update)
+        @second_update.v = @version + 1
+        DocUpdaterClient.sendUpdate @project_id, @doc_id, @second_update, (error) ->
+          throw error if error?
+          setTimeout done, 200
+
+      it "should not change the first op timestamp", (done) ->
+        rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) =>
+          throw error if error?
+          result.should.equal @firstOpTimestamp
+          done()
+
   describe "when the document is loaded", ->
     before (done) ->
       [@project_id, @doc_id] = [DocUpdaterClient.randomId(), DocUpdaterClient.randomId()]
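The first new acceptance test reads the timestamp written by SETNX and checks it falls between the time the update was sent (@startTime) and now; the second sends a follow-up update and checks the timestamp is unchanged, since SETNX never overwrites an existing key. Note that Object.create(@update) builds the second update as a prototype-linked view of the first, so only the reassigned version field differs. A small illustration with made-up values:

update = {doc: "doc-id", op: [{i: "hello ", p: 0}], v: 0}

second_update = Object.create(update)
second_update.v = update.v + 1

console.log second_update.op is update.op   # true: op is inherited, not copied
console.log second_update.v                 # 1
console.log update.v                        # 0, the original is untouched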
@@ -8,6 +8,7 @@ tk = require "timekeeper"
 
 describe "DocumentManager", ->
   beforeEach ->
+    tk.freeze(new Date())
     @DocumentManager = SandboxedModule.require modulePath, requires:
       "./RedisManager": @RedisManager = {}
       "./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {}
@@ -35,6 +36,9 @@ describe "DocumentManager", ->
     @pathname = '/a/b/c.tex'
     @unflushedTime = Date.now()
 
+  afterEach ->
+    tk.reset()
+
   describe "flushAndDeleteDoc", ->
     describe "successfully", ->
       beforeEach ->
@@ -394,12 +398,8 @@ describe "DocumentManager", ->
 
   describe "getDocAndFlushIfOld", ->
     beforeEach ->
-      tk.freeze(new Date())
       @DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2)
 
-    afterEach ->
-      tk.reset()
-
     describe "when the doc is in Redis", ->
       describe "and has changes to be flushed", ->
         beforeEach ->
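The DocumentManager unit tests previously froze time only inside the getDocAndFlushIfOld block; the freeze/reset pair now moves to the suite-level beforeEach/afterEach so every test that touches Date.now() (for example the @unflushedTime assignment above) sees the same frozen clock. tk.freeze pins Date.now() to a fixed value and tk.reset restores real time. A standalone illustration of the timekeeper API, assuming only the timekeeper package:

tk = require "timekeeper"

tk.freeze(new Date())
first = Date.now()
# ...any amount of work happens here...
second = Date.now()
console.log first is second        # true: time is frozen
tk.reset()
console.log Date.now() is first    # almost certainly false: real time again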
@@ -20,28 +20,39 @@ describe "ProjectHistoryRedisManager", ->
             project_history:
               key_schema:
                 projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}"
+                projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:#{project_id}"
         }
         "redis-sharelatex":
           createClient: () => @rclient
       globals:
         JSON: @JSON = JSON
 
-  afterEach ->
-    tk.reset()
+  afterEach ->
+    tk.reset()
 
   describe "queueOps", ->
     beforeEach ->
       @ops = ["mock-op-1", "mock-op-2"]
-      @rclient.rpush = sinon.stub()
+      @multi = exec: sinon.stub()
+      @multi.rpush = sinon.stub()
+      @multi.setnx = sinon.stub()
+      @rclient.multi = () => @multi
+      # @rclient = multi: () => @multi
       @ProjectHistoryRedisManager.queueOps @project_id, @ops..., @callback
 
     it "should queue an update", ->
-      @rclient.rpush
+      @multi.rpush
         .calledWithExactly(
           "ProjectHistory:Ops:#{@project_id}"
           @ops[0]
           @ops[1]
           @callback
         ).should.equal true
 
+    it "should set the queue timestamp if not present", ->
+      @multi.setnx
+        .calledWithExactly(
+          "ProjectHistory:FirstOpTimestamp:#{@project_id}"
+          Date.now()
+        ).should.equal true
+
   describe "queueRenameEntity", ->
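Because queueOps now goes through rclient.multi(), the unit test swaps the multi object for a bag of sinon stubs and has rclient.multi() return it, so the assertions can inspect exactly what rpush and setnx were called with; asserting an exact Date.now() argument only works because the suite freezes time with timekeeper (hence the tk.reset in the afterEach above). A minimal sketch of that stubbing pattern outside the test harness (sinon assumed, names illustrative):

sinon = require "sinon"

multi =
  rpush: sinon.stub()
  setnx: sinon.stub()
  exec: sinon.stub().yields(null, [2])   # pretend two entries ended up on the queue
rclient = multi: -> multi

# code under test would do something like:
m = rclient.multi()
m.rpush "ProjectHistory:Ops:project-1", "op-1", "op-2"
m.setnx "ProjectHistory:FirstOpTimestamp:project-1", Date.now()
m.exec (error, result) -> console.log "queued:", result[0]

# and the test can then assert on the recorded calls:
console.log multi.rpush.calledWith("ProjectHistory:Ops:project-1", "op-1", "op-2")   # true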
@@ -55,14 +55,14 @@ describe "RedisManager", ->
       globals:
         JSON: @JSON = JSON
 
-  afterEach ->
-    tk.reset()
-
     @doc_id = "doc-id-123"
     @project_id = "project-id-123"
     @projectHistoryId = 123
     @callback = sinon.stub()
+
+  afterEach ->
+    tk.reset()
 
   describe "getDoc", ->
     beforeEach ->
       @lines = ["one", "two", "three", "これは"] # include some utf8