Mirror of https://github.com/overleaf/overleaf.git
extract ProjectHistoryRedisManager
Commit 1226f96fc3 (parent fc6ef6ea7a)
13 changed files with 210 additions and 152 deletions
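
In short, the commit pulls every write to the project-history Redis queue out of RedisManager into a new ProjectHistoryRedisManager module, and switches DocumentManager, HistoryManager, ProjectManager and RedisManager over to it. A condensed sketch of the new module's public interface, signatures taken from the new file added below, bodies omitted:

# Sketch only: signatures copied from the new file in this diff; bodies omitted.
module.exports = ProjectHistoryRedisManager =
	# RPUSH one or more already-serialised updates onto the project's history ops list
	queueOps: (project_id, ops..., callback) ->

	# build and queue a rename update for a doc or file
	queueRenameEntity: (project_id, entity_type, entity_id, user_id, update, callback) ->

	# build and queue an add update for a doc or file
	queueAddEntity: (project_id, entity_type, entity_id, user_id, update, callback = (error) ->) ->

	# queue a full project structure resync
	queueResyncProjectStructure: (project_id, docs, files, callback) ->

	# queue a resync of a single doc's content
	queueResyncDocContent: (project_id, doc_id, lines, version, pathname, callback) ->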
@@ -1,4 +1,5 @@
 RedisManager = require "./RedisManager"
+ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager"
 PersistenceManager = require "./PersistenceManager"
 DiffCodec = require "./DiffCodec"
 logger = require "logger-sharelatex"
@@ -186,9 +187,9 @@ module.exports = DocumentManager =
 			if !lines? or !version?
 				PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname) ->
 					return callback(error) if error?
-					RedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback
+					ProjectHistoryRedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback
 			else
-				RedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback
+				ProjectHistoryRedisManager.queueResyncDocContent project_id, doc_id, lines, version, pathname, callback
 
	getDocWithLock: (project_id, doc_id, callback = (error, lines, version) ->) ->
		UpdateManager = require "./UpdateManager"
@@ -3,6 +3,7 @@ logger = require "logger-sharelatex"
 request = require "request"
 Settings = require "settings-sharelatex"
 HistoryRedisManager = require "./HistoryRedisManager"
+ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager"
 RedisManager = require "./RedisManager"
 
 module.exports = HistoryManager =
@@ -65,7 +66,7 @@ module.exports = HistoryManager =
 		return newBlock != prevBlock
 
	resyncProjectHistory: (project_id, docs, files, callback) ->
-		RedisManager.queueResyncProjectStructure project_id, docs, files, (error) ->
+		ProjectHistoryRedisManager.queueResyncProjectStructure project_id, docs, files, (error) ->
 			return callback(error) if error?
 			DocumentManager = require "./DocumentManager"
 			resyncDoc = (doc, cb) ->
@@ -0,0 +1,59 @@
+Settings = require('settings-sharelatex')
+projectHistoryKeys = Settings.redis?.project_history?.key_schema
+rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
+logger = require('logger-sharelatex')
+
+module.exports = ProjectHistoryRedisManager =
+	queueOps: (project_id, ops..., callback) ->
+		rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops..., callback
+
+	queueRenameEntity: (project_id, entity_type, entity_id, user_id, update, callback) ->
+		update =
+			pathname: update.pathname
+			new_pathname: update.newPathname
+			meta:
+				user_id: user_id
+				ts: new Date()
+		update[entity_type] = entity_id
+
+		logger.log {project_id, update}, "queue rename operation to project-history"
+		jsonUpdate = JSON.stringify(update)
+
+		ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback
+
+	queueAddEntity: (project_id, entity_type, entitiy_id, user_id, update, callback = (error) ->) ->
+		update =
+			pathname: update.pathname
+			docLines: update.docLines
+			url: update.url
+			meta:
+				user_id: user_id
+				ts: new Date()
+		update[entity_type] = entitiy_id
+
+		logger.log {project_id, update}, "queue add operation to project-history"
+		jsonUpdate = JSON.stringify(update)
+
+		ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback
+
+	queueResyncProjectStructure: (project_id, docs, files, callback) ->
+		logger.log {project_id, docs, files}, "queue project structure resync"
+		update =
+			resyncProjectStructure: { docs, files }
+			meta:
+				ts: new Date()
+		jsonUpdate = JSON.stringify update
+		ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback
+
+	queueResyncDocContent: (project_id, doc_id, lines, version, pathname, callback) ->
+		logger.log {project_id, doc_id, lines, version, pathname}, "queue doc content resync"
+		update =
+			resyncDocContent:
+				content: lines.join("\n"),
+				version: version
+				path: pathname
+			doc: doc_id
+			meta:
+				ts: new Date()
+		jsonUpdate = JSON.stringify update
+		ProjectHistoryRedisManager.queueOps project_id, jsonUpdate, callback
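
As a usage note: every helper above ends up RPUSHing a JSON-serialised update onto the per-project list named by projectHistoryKeys.projectHistoryOps (rendered as ProjectHistory:Ops:<project_id> in the tests further down). A minimal caller sketch with made-up IDs and paths, shown only to illustrate the flow; the real call sites are in the DocumentManager, HistoryManager, ProjectManager and RedisManager hunks of this diff:

ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager"

# Hypothetical values, for illustration only
project_id = "project-id-123"
doc_id     = "doc-id-456"
user_id    = "user-id-789"
update     = { pathname: "/old.tex", newPathname: "/new.tex" }

ProjectHistoryRedisManager.queueRenameEntity project_id, 'doc', doc_id, user_id, update, (error) ->
	return console.error(error) if error?
	# The rename is now queued as JSON on ProjectHistory:Ops:project-id-123
	# for the project-history service to consume.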
@@ -1,4 +1,5 @@
 RedisManager = require "./RedisManager"
+ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager"
 DocumentManager = require "./DocumentManager"
 HistoryManager = require "./HistoryManager"
 async = require "async"
@@ -115,22 +116,22 @@ module.exports = ProjectManager =
 		handleDocUpdate = (update, cb) ->
 			doc_id = update.id
 			if update.docLines?
-				RedisManager.addEntity project_id, 'doc', doc_id, user_id, update, (error, count) =>
+				ProjectHistoryRedisManager.queueAddEntity project_id, 'doc', doc_id, user_id, update, (error, count) ->
 					project_ops_length = count
 					cb(error)
 			else
-				DocumentManager.renameDocWithLock project_id, doc_id, user_id, update, (error, count) =>
+				DocumentManager.renameDocWithLock project_id, doc_id, user_id, update, (error, count) ->
 					project_ops_length = count
 					cb(error)
 
 		handleFileUpdate = (update, cb) ->
 			file_id = update.id
 			if update.url?
-				RedisManager.addEntity project_id, 'file', file_id, user_id, update, (error, count) =>
+				ProjectHistoryRedisManager.queueAddEntity project_id, 'file', file_id, user_id, update, (error, count) ->
 					project_ops_length = count
 					cb(error)
 			else
-				RedisManager.renameFile project_id, file_id, user_id, update, (error, count) =>
+				ProjectHistoryRedisManager.queueRenameEntity project_id, 'file', file_id, user_id, update, (error, count) ->
 					project_ops_length = count
 					cb(error)
 
@@ -5,6 +5,7 @@ logger = require('logger-sharelatex')
 metrics = require('./Metrics')
 Errors = require "./Errors"
 crypto = require "crypto"
+ProjectHistoryRedisManager = require "./ProjectHistoryRedisManager"
 
 # Sometimes Redis calls take an unexpectedly long time. We have to be
 # quick with Redis calls because we're holding a lock that expires
@@ -31,7 +32,6 @@ MAX_RANGES_SIZE = 3 * MEGABYTES
 
 keys = Settings.redis.documentupdater.key_schema
 historyKeys = Settings.redis.history.key_schema
-projectHistoryKeys = Settings.redis?.project_history?.key_schema
 
 module.exports = RedisManager =
	rclient: rclient
@@ -267,7 +267,7 @@ module.exports = RedisManager =
 				docUpdateCount = result[7]
 
 				if jsonOps.length > 0 && Settings.apis?.project_history?.enabled
-					rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonOps..., (error, projectUpdateCount) ->
+					ProjectHistoryRedisManager.queueOps project_id, jsonOps..., (error, projectUpdateCount) ->
 						callback null, docUpdateCount, projectUpdateCount
 				else
 					callback null, docUpdateCount
@@ -279,41 +279,9 @@ module.exports = RedisManager =
 			if lines? and version?
 				rclient.set keys.pathname(doc_id:doc_id), update.newPathname, (error) ->
 					return callback(error) if error?
-					RedisManager._renameEntity project_id, 'doc', doc_id, user_id, update, callback
+					ProjectHistoryRedisManager.queueRenameEntity project_id, 'doc', doc_id, user_id, update, callback
 			else
-				RedisManager._renameEntity project_id, 'doc', doc_id, user_id, update, callback
+				ProjectHistoryRedisManager.queueRenameEntity project_id, 'doc', doc_id, user_id, update, callback
-
-	renameFile: (project_id, file_id, user_id, update, callback = (error) ->) ->
-		RedisManager._renameEntity project_id, 'file', file_id, user_id, update, callback
-
-	_renameEntity: (project_id, entity_type, entity_id, user_id, update, callback = (error) ->) ->
-		update =
-			pathname: update.pathname
-			new_pathname: update.newPathname
-			meta:
-				user_id: user_id
-				ts: new Date()
-		update[entity_type] = entity_id
-
-		logger.log {project_id, update}, "queue rename operation to project-history"
-		jsonUpdate = JSON.stringify(update)
-
-		rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback
-
-	addEntity: (project_id, entity_type, entitiy_id, user_id, update, callback = (error) ->) ->
-		update =
-			pathname: update.pathname
-			docLines: update.docLines
-			url: update.url
-			meta:
-				user_id: user_id
-				ts: new Date()
-		update[entity_type] = entitiy_id
-
-		logger.log {project_id, update}, "queue add operation to project-history"
-		jsonUpdate = JSON.stringify(update)
-
-		rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback
 
	clearUnflushedTime: (doc_id, callback = (error) ->) ->
		rclient.del keys.unflushedTime(doc_id:doc_id), callback
@@ -321,28 +289,6 @@ module.exports = RedisManager =
	getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) ->
		rclient.smembers keys.docsInProject(project_id: project_id), callback
 
-	queueResyncProjectStructure: (project_id, docs, files, callback) ->
-		logger.log {project_id, docs, files}, "queue project structure resync"
-		update =
-			resyncProjectStructure: { docs, files }
-			meta:
-				ts: new Date()
-		jsonUpdate = JSON.stringify update
-		rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback
-
-	queueResyncDocContent: (project_id, doc_id, lines, version, pathname, callback) ->
-		logger.log {project_id, doc_id, lines, version, pathname}, "queue doc content resync"
-		update =
-			resyncDocContent:
-				content: lines.join("\n"),
-				version: version
-				path: pathname
-			doc: doc_id
-			meta:
-				ts: new Date()
-		jsonUpdate = JSON.stringify update
-		rclient.rpush projectHistoryKeys.projectHistoryOps({project_id}), jsonUpdate, callback
-
	_serializeRanges: (ranges, callback = (error, serializedRanges) ->) ->
		jsonRanges = JSON.stringify(ranges)
		if jsonRanges? and jsonRanges.length > MAX_RANGES_SIZE
@@ -10,6 +10,7 @@ describe "DocumentManager", ->
	beforeEach ->
		@DocumentManager = SandboxedModule.require modulePath, requires:
 			"./RedisManager": @RedisManager = {}
+			"./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {}
 			"./PersistenceManager": @PersistenceManager = {}
 			"./HistoryManager": @HistoryManager =
 				flushDocChangesAsync: sinon.stub()
@@ -472,7 +473,7 @@ describe "DocumentManager", ->
 		describe "when doc is loaded in redis", ->
 			beforeEach ->
 				@RedisManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname)
-				@RedisManager.queueResyncDocContents = sinon.stub()
+				@ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub()
 				@DocumentManager.resyncDocContents @project_id, @doc_id, @callback
 
 			it "gets the doc contents from redis", ->
@@ -481,7 +482,7 @@ describe "DocumentManager", ->
 					.should.equal true
 
 			it "queues a resync doc content update", ->
-				@RedisManager.queueResyncDocContents
+				@ProjectHistoryRedisManager.queueResyncDocContent
 					.calledWith(@project_id, @doc_id, @lines, @version, @pathname, @callback)
 					.should.equal true
 
@@ -489,7 +490,7 @@ describe "DocumentManager", ->
 			beforeEach ->
 				@RedisManager.getDoc = sinon.stub().callsArgWith(2, null)
 				@PersistenceManager.getDoc = sinon.stub().callsArgWith(2, null, @lines, @version, @ranges, @pathname)
-				@RedisManager.queueResyncDocContents = sinon.stub()
+				@ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub()
 				@DocumentManager.resyncDocContents @project_id, @doc_id, @callback
 
 			it "tries to get the doc contents from redis", ->
@@ -503,6 +504,6 @@ describe "DocumentManager", ->
 					.should.equal true
 
 			it "queues a resync doc content update", ->
-				@RedisManager.queueResyncDocContents
+				@ProjectHistoryRedisManager.queueResyncDocContent
 					.calledWith(@project_id, @doc_id, @lines, @version, @pathname, @callback)
 					.should.equal true
@@ -19,6 +19,7 @@ describe "HistoryManager", ->
 			"./DocumentManager": @DocumentManager = {}
 			"./HistoryRedisManager": @HistoryRedisManager = {}
 			"./RedisManager": @RedisManager = {}
+			"./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {}
 		@project_id = "mock-project-id"
 		@doc_id = "mock-doc-id"
 		@callback = sinon.stub()
@@ -172,12 +173,12 @@ describe "HistoryManager", ->
 					path: 'universe.png'
 					url: "www.filestore.test/#{@project_id}/mock-file-id"
 			]
-			@RedisManager.queueResyncProjectStructure = sinon.stub().yields()
+			@ProjectHistoryRedisManager.queueResyncProjectStructure = sinon.stub().yields()
 			@DocumentManager.resyncDocContentsWithLock = sinon.stub().yields()
 			@HistoryManager.resyncProjectHistory @project_id, @docs, @files, @callback
 
 		it "should queue a project structure reync", ->
-			@RedisManager.queueResyncProjectStructure
+			@ProjectHistoryRedisManager.queueResyncProjectStructure
 				.calledWith(@project_id, @docs, @files)
 				.should.equal true
 
@@ -0,0 +1,101 @@
+sinon = require('sinon')
+chai = require('chai')
+should = chai.should()
+modulePath = "../../../../app/js/ProjectHistoryRedisManager.js"
+SandboxedModule = require('sandboxed-module')
+tk = require "timekeeper"
+
+describe "ProjectHistoryRedisManager", ->
+	beforeEach ->
+		@project_id = "project-id-123"
+		@user_id = "user-id-123"
+		@callback = sinon.stub()
+		@rclient = {}
+		tk.freeze(new Date())
+		@ProjectHistoryRedisManager = SandboxedModule.require modulePath,
+			requires:
+				"settings-sharelatex": @settings = {
+					redis:
+						project_history:
+							key_schema:
+								projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}"
+					}
+				"redis-sharelatex":
+					createClient: () => @rclient
+			globals:
+				JSON: @JSON = JSON
+
+	afterEach ->
+		tk.reset()
+
+	describe "queueOps", ->
+		beforeEach ->
+			@ops = ["mock-op-1", "mock-op-2"]
+			@rclient.rpush = sinon.stub()
+			@ProjectHistoryRedisManager.queueOps @project_id, @ops..., @callback
+
+		it "should queue an update", ->
+			@rclient.rpush
+				.calledWithExactly(
+					"ProjectHistory:Ops:#{@project_id}"
+					@ops[0]
+					@ops[1]
+					@callback
+				).should.equal true
+
+	describe "queueRenameEntity", ->
+		beforeEach () ->
+			@file_id = 1234
+
+			@rawUpdate =
+				pathname: @pathname = '/old'
+				newPathname: @newPathname = '/new'
+
+			@ProjectHistoryRedisManager.queueOps = sinon.stub()
+			@ProjectHistoryRedisManager.queueRenameEntity @project_id, 'file', @file_id, @user_id, @rawUpdate, @callback
+
+		it "should queue an update", ->
+			update =
+				pathname: @pathname
+				new_pathname: @newPathname
+				meta:
+					user_id: @user_id
+					ts: new Date()
+				file: @file_id
+
+			@ProjectHistoryRedisManager.queueOps
+				.calledWithExactly(@project_id, @JSON.stringify(update), @callback)
+				.should.equal true
+
+	describe "queueAddEntity", ->
+		beforeEach () ->
+			@rclient.rpush = sinon.stub().yields()
+			@doc_id = 1234
+
+			@rawUpdate =
+				pathname: @pathname = '/old'
+				docLines: @docLines = 'a\nb'
+				url: @url = 'filestore.example.com'
+
+			@ProjectHistoryRedisManager.queueOps = sinon.stub()
+			@ProjectHistoryRedisManager.queueAddEntity @project_id, 'doc', @doc_id, @user_id, @rawUpdate, @callback
+
+		it "should queue an update", ->
+			update =
+				pathname: @pathname
+				docLines: @docLines
+				url: @url
+				meta:
+					user_id: @user_id
+					ts: new Date()
+				doc: @doc_id
+
+			@ProjectHistoryRedisManager.queueOps
+				.calledWithExactly(@project_id, @JSON.stringify(update), @callback)
+				.should.equal true
+
+	describe "queueResyncProjectStructure", ->
+		it "should queue an update", ->
+
+	describe "queueResyncDocContent", ->
+		it "should queue an update", ->
@@ -8,6 +8,7 @@ describe "ProjectManager - flushAndDeleteProject", ->
	beforeEach ->
		@ProjectManager = SandboxedModule.require modulePath, requires:
 			"./RedisManager": @RedisManager = {}
+			"./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {}
 			"./DocumentManager": @DocumentManager = {}
 			"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
 			"./HistoryManager": @HistoryManager =
@@ -8,6 +8,7 @@ describe "ProjectManager - flushProject", ->
	beforeEach ->
		@ProjectManager = SandboxedModule.require modulePath, requires:
 			"./RedisManager": @RedisManager = {}
+			"./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {}
 			"./DocumentManager": @DocumentManager = {}
 			"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
 			"./HistoryManager": @HistoryManager = {}
@@ -72,5 +73,3 @@ describe "ProjectManager - flushProject", ->
 
 		it "should time the execution", ->
 			@Metrics.Timer::done.called.should.equal true
-
-
@@ -9,6 +9,7 @@ describe "ProjectManager - getProjectDocsAndFlushIfOld", ->
	beforeEach ->
		@ProjectManager = SandboxedModule.require modulePath, requires:
 			"./RedisManager": @RedisManager = {}
+			"./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {}
 			"./DocumentManager": @DocumentManager = {}
 			"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
 			"./HistoryManager": @HistoryManager = {}
@@ -8,6 +8,7 @@ describe "ProjectManager", ->
	beforeEach ->
		@ProjectManager = SandboxedModule.require modulePath, requires:
 			"./RedisManager": @RedisManager = {}
+			"./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {}
 			"./DocumentManager": @DocumentManager = {}
 			"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
 			"./HistoryManager": @HistoryManager = {}
@@ -40,7 +41,7 @@ describe "ProjectManager", ->
 				newPathname: 'bar2'
 			@fileUpdates = [ @firstFileUpdate ]
 			@DocumentManager.renameDocWithLock = sinon.stub().yields()
-			@RedisManager.renameFile = sinon.stub().yields()
+			@ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields()
 
 		describe "successfully", ->
 			beforeEach ->
@@ -55,8 +56,8 @@ describe "ProjectManager", ->
 					.should.equal true
 
 			it "should rename the files in the updates", ->
-				@RedisManager.renameFile
-					.calledWith(@project_id, @firstFileUpdate.id, @user_id, @firstFileUpdate)
+				@ProjectHistoryRedisManager.queueRenameEntity
+					.calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, @firstFileUpdate)
 					.should.equal true
 
 			it "should not flush the history", ->
@@ -79,7 +80,7 @@ describe "ProjectManager", ->
 		describe "when renaming a file fails", ->
 			beforeEach ->
 				@error = new Error('error')
-				@RedisManager.renameFile = sinon.stub().yields(@error)
+				@ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields(@error)
 				@ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback
 
 			it "should call the callback with the error", ->
@@ -108,22 +109,22 @@ describe "ProjectManager", ->
 				id: 2
 				url: 'filestore.example.com/2'
 			@fileUpdates = [ @firstFileUpdate ]
-			@RedisManager.addEntity = sinon.stub().yields()
+			@ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields()
 
 		describe "successfully", ->
 			beforeEach ->
 				@ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback
 
 			it "should add the docs in the updates", ->
-				@RedisManager.addEntity
+				@ProjectHistoryRedisManager.queueAddEntity
 					.calledWith(@project_id, 'doc', @firstDocUpdate.id, @user_id, @firstDocUpdate)
 					.should.equal true
-				@RedisManager.addEntity
+				@ProjectHistoryRedisManager.queueAddEntity
 					.calledWith(@project_id, 'doc', @secondDocUpdate.id, @user_id, @secondDocUpdate)
 					.should.equal true
 
 			it "should add the files in the updates", ->
-				@RedisManager.addEntity
+				@ProjectHistoryRedisManager.queueAddEntity
 					.calledWith(@project_id, 'file', @firstFileUpdate.id, @user_id, @firstFileUpdate)
 					.should.equal true
 
@@ -138,7 +139,7 @@ describe "ProjectManager", ->
 		describe "when adding a doc fails", ->
 			beforeEach ->
 				@error = new Error('error')
-				@RedisManager.addEntity = sinon.stub().yields(@error)
+				@ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error)
 				@ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback
 
 			it "should call the callback with the error", ->
@@ -147,7 +148,7 @@ describe "ProjectManager", ->
 		describe "when adding a file fails", ->
 			beforeEach ->
 				@error = new Error('error')
-				@RedisManager.addEntity = sinon.stub().yields(@error)
+				@ProjectHistoryRedisManager.queueAddEntity = sinon.stub().yields(@error)
 				@ProjectManager.updateProjectWithLocks @project_id, @user_id, @docUpdates, @fileUpdates, @callback
 
 			it "should call the callback with the error", ->
@@ -162,4 +163,3 @@ describe "ProjectManager", ->
 			@HistoryManager.flushProjectChangesAsync
 				.calledWith(@project_id)
 				.should.equal true
-
@@ -15,6 +15,7 @@ describe "RedisManager", ->
		@RedisManager = SandboxedModule.require modulePath,
			requires:
 				"logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() }
+				"./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {}
 				"settings-sharelatex": @settings = {
 					documentupdater: {logHashErrors: {write:true, read:true}}
 					apis:
@@ -38,9 +39,6 @@ describe "RedisManager", ->
 						key_schema:
 							uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
 							docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
-					project_history:
-						key_schema:
-							projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:#{project_id}"
 				}
 				"redis-sharelatex":
 					createClient: () => @rclient
@@ -337,7 +335,9 @@ describe "RedisManager", ->
 			@multi.exec = sinon.stub().callsArgWith(0, null,
 				[@hash, null, null, null, null, null, null, @doc_update_list_length]
 			)
-			@rclient.rpush = sinon.stub().callsArgWith(@ops.length + 1, null, @project_update_list_length)
+			@ProjectHistoryRedisManager.queueOps = sinon.stub().callsArgWith(
+				@ops.length + 1, null, @project_update_list_length
+			)
 
 		describe "with a consistent version", ->
 			beforeEach ->
@@ -399,8 +399,8 @@ describe "RedisManager", ->
 					.should.equal true
 
 			it "should push the updates into the project history ops list", ->
-				@rclient.rpush
-					.calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(@ops[0]), JSON.stringify(@ops[1]))
+				@ProjectHistoryRedisManager.queueOps
+					.calledWith(@project_id, JSON.stringify(@ops[0]))
 					.should.equal true
 
 			it "should call the callback", ->
@@ -686,6 +686,7 @@ describe "RedisManager", ->
 		describe "the document is cached in redis", ->
 			beforeEach ->
 				@RedisManager.getDoc = sinon.stub().callsArgWith(2, null, 'lines', 'version')
+				@ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields()
 				@RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @callback
 
 			it "update the cached pathname", ->
@@ -694,75 +695,20 @@ describe "RedisManager", ->
 					.should.equal true
 
 			it "should queue an update", ->
-				update =
-					pathname: @pathname
-					new_pathname: @newPathname
-					meta:
-						user_id: @userId
-						ts: new Date()
-					doc: @doc_id
-				@rclient.rpush
-					.calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(update))
+				@ProjectHistoryRedisManager.queueRenameEntity
+					.calledWithExactly(@project_id, 'doc', @doc_id, @userId, @update, @callback)
 					.should.equal true
 
-			it "should call the callback", ->
-				@callback.calledWith().should.equal true
-
 		describe "the document is not cached in redis", ->
 			beforeEach ->
 				@RedisManager.getDoc = sinon.stub().callsArgWith(2, null, null, null)
+				@ProjectHistoryRedisManager.queueRenameEntity = sinon.stub().yields()
 				@RedisManager.renameDoc @project_id, @doc_id, @userId, @update, @callback
 
 			it "does not update the cached pathname", ->
 				@rclient.set.called.should.equal false
 
-		describe "renameFile", ->
-			beforeEach () ->
-				@rclient.rpush = sinon.stub().yields()
-				@file_id = 1234
-
-				@update =
-					pathname: @pathname = '/old'
-					newPathname: @newPathname = '/new'
-
-				@RedisManager.renameFile @project_id, @file_id, @userId, @update
-
-			it "should queue an update", ->
-				update =
-					pathname: @pathname
-					new_pathname: @newPathname
-					meta:
-						user_id: @userId
-						ts: new Date()
-					file: @file_id
-
-				@rclient.rpush
-					.calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(update))
-					.should.equal true
-
-		describe "addEntity", ->
-			beforeEach (done) ->
-				@rclient.rpush = sinon.stub().yields()
-				@entity_id = 1234
-				@entity_type = 'type'
-
-				@update =
-					pathname: @pathname = '/old'
-					docLines: @docLines = 'a\nb'
-					url: @url = 'filestore.example.com'
-
-				@RedisManager.addEntity @project_id, @entity_type, @entity_id, @userId, @update, done
-
-			it "should queue an update", ->
-				update =
-					pathname: @pathname
-					docLines: @docLines
-					url: @url
-					meta:
-						user_id: @user_id
-						ts: new Date()
-				update[@entity_type] = @entity_id
-
-				@rclient.rpush
-					.calledWith("ProjectHistory:Ops:#{@project_id}", JSON.stringify(update))
-					.should.equal true
+			it "should queue an update", ->
+				@ProjectHistoryRedisManager.queueRenameEntity
+					.calledWithExactly(@project_id, 'doc', @doc_id, @userId, @update, @callback)
+					.should.equal true