Big refactor to use better names and separation of concerns

This commit is contained in:
James Allen 2014-01-27 16:26:58 +00:00
parent 8a0aa55c91
commit 533b8e59a3
8 changed files with 435 additions and 525 deletions

View file

@ -1,48 +0,0 @@
{db, ObjectId} = require "./mongojs"
HistoryBuilder = require "./HistoryBuilder"
logger = require "logger-sharelatex"
# Stores per-document operation history in the docHistory collection,
# compressing raw ops via HistoryBuilder before they are appended.
module.exports = ConversionManager =
  # Number of most recent ops to leave uncompressed. Not referenced in this
  # module; presumably consumed by callers — TODO confirm.
  OPS_TO_LEAVE: 100

  # Atomically read and remove the newest compressed op for a doc.
  # Calls back with (error, op); op is undefined when there is no history.
  popLastCompressedOp: (doc_id, callback = (error, op) ->) ->
    db.docHistory.findAndModify
      query: { doc_id: ObjectId(doc_id) }
      fields: { docOps: { $slice: -1 } }
      update: { $pop: { docOps: 1 } }
    , (error, history = { docOps: [] }) ->
      if error?
        return callback(error)
      callback null, history.docOps[0]

  # Append compressed ops to the doc's history, creating the history
  # document on first write (upsert).
  insertCompressedOps: (doc_id, docOps, callback = (error) ->) ->
    query = doc_id: ObjectId(doc_id)
    pushOps = $push: docOps: $each: docOps
    db.docHistory.update query, pushOps, { upsert: true }, callback

  # Compress rawOps (together with the last already-compressed op, if any)
  # and write the result back to the history. No-op for an empty batch.
  convertAndSaveRawOps: (doc_id, rawOps, callback = (error) ->) ->
    opCount = rawOps.length
    return callback() if opCount == 0
    ConversionManager.popLastCompressedOp doc_id, (error, lastCompressedOp) ->
      return callback(error) if error?
      # With no existing history, seed the compressor with the first raw op.
      if not lastCompressedOp?
        rawOps = rawOps.slice(0) # clone so the caller's array is untouched by shift
        lastCompressedOp = rawOps.shift()
      compressedOps = HistoryBuilder.compressOps [lastCompressedOp].concat(rawOps)
      ConversionManager.insertCompressedOps doc_id, compressedOps, (error) ->
        return callback(error) if error?
        logger.log doc_id: doc_id, rawOpsLength: opCount, compressedOpsLength: compressedOps.length, "compressed doc ops"
        callback()

View file

@ -1,150 +0,0 @@
# Return s1 with s2 inserted at index pos.
strInject = (s1, pos, s2) -> s1.slice(0, pos) + s2 + s1.slice(pos)

# Return s1 with `length` characters removed starting at index pos.
strRemove = (s1, pos, length) -> s1.slice(0, pos) + s1.slice(pos + length)
# Builds compressed history entries out of raw editor updates.
module.exports = HistoryBuilder =
  # Split a raw multi-op update into one single-op update per op, stamping
  # each with start_ts/end_ts taken from the raw update's ts.
  # An update that already carries start_ts is returned unchanged.
  normalizeUpdate: (update) ->
    return [update] if update.meta.start_ts? # already normalized
    normalized = []
    for rawOp in update.op
      normalized.push
        op: [rawOp]
        meta:
          start_ts: update.meta.ts
          end_ts: update.meta.ts
          user_id: update.meta.user_id
    return normalized
# Compress a list of raw updates into as few updates as possible.
# Pass 1 merges insert/insert and delete/delete pairs; pass 2 additionally
# merges insert/delete pairs so that exact undos cancel out.
compressUpdates: (rawUpdates) ->
  return [] if rawUpdates.length == 0
  normalizedUpdates = []
  for rawUpdate in rawUpdates
    normalizedUpdates = normalizedUpdates.concat HistoryBuilder.normalizeUpdate(rawUpdate)
  return [] if normalizedUpdates.length == 0
  # Fold each update into the tail of the compressed list;
  # _concatTwoUpdates returns 0, 1 or 2 updates per adjacent pair.
  compressPass = (updates, mergeInsertsAndDeletes) ->
    return [] if updates.length == 0
    result = [updates[0]]
    for update in updates.slice(1)
      lastCompressedUpdate = result.pop()
      if lastCompressedUpdate?
        result = result.concat HistoryBuilder._concatTwoUpdates lastCompressedUpdate, update, mergeInsertsAndDeletes
      else
        # Previous pair cancelled out entirely; restart from this update.
        # (Bug fix: the old code pushed the stale `rawUpdate` loop variable
        # from the normalization loop above instead of `update` here.)
        result.push update
    return result
  return compressPass(compressPass(normalizedUpdates, false), true)
# Updates separated by more than this many milliseconds are never merged.
MAX_TIME_BETWEEN_UPDATES: oneMinute = 60 * 1000
# Try to merge two adjacent single-op updates into fewer updates.
# Returns an array of 0, 1 or 2 updates. Only op[0] of each update is
# inspected, so callers must pass updates produced by normalizeUpdate.
# mergeInsertsAndDeletes additionally allows an insert to merge with a
# following delete (and a delete with a following insert).
_concatTwoUpdates: (firstUpdate, secondUpdate, mergeInsertsAndDeletes) ->
# Normalize both updates' metadata so ts-only (raw) updates and
# start_ts/end_ts (compressed) updates are treated uniformly.
firstUpdate =
op: firstUpdate.op
meta:
user_id: firstUpdate.meta.user_id or null
start_ts: firstUpdate.meta.start_ts or firstUpdate.meta.ts
end_ts: firstUpdate.meta.end_ts or firstUpdate.meta.ts
secondUpdate =
op: secondUpdate.op
meta:
user_id: secondUpdate.meta.user_id or null
start_ts: secondUpdate.meta.start_ts or secondUpdate.meta.ts
end_ts: secondUpdate.meta.end_ts or secondUpdate.meta.ts
# Never merge updates from different users, or updates far apart in time.
if firstUpdate.meta.user_id != secondUpdate.meta.user_id
return [firstUpdate, secondUpdate]
if secondUpdate.meta.start_ts - firstUpdate.meta.end_ts > HistoryBuilder.MAX_TIME_BETWEEN_UPDATES
return [firstUpdate, secondUpdate]
firstOp = firstUpdate.op[0]
secondOp = secondUpdate.op[0]
# Two inserts
# Merge when the second insert falls inside, or directly abuts, the first.
if firstOp.i? and secondOp.i? and firstOp.p <= secondOp.p <= (firstOp.p + firstOp.i.length)
return [
meta:
start_ts: firstUpdate.meta.start_ts
end_ts: secondUpdate.meta.end_ts
user_id: firstUpdate.meta.user_id
op: [
p: firstOp.p
i: strInject(firstOp.i, secondOp.p - firstOp.p, secondOp.i)
]
]
# Two deletes
# Merge when the first delete happened within the range of the second.
else if firstOp.d? and secondOp.d? and secondOp.p <= firstOp.p <= (secondOp.p + secondOp.d.length)
return [
meta:
start_ts: firstUpdate.meta.start_ts
end_ts: secondUpdate.meta.end_ts
user_id: firstUpdate.meta.user_id
op: [
p: secondOp.p
d: strInject(secondOp.d, firstOp.p - secondOp.p, firstOp.d)
]
]
# An insert and then a delete
# Only merge when the deleted text is exactly what the insert put there;
# the delete then shrinks (or fully cancels) the insert.
if mergeInsertsAndDeletes and firstOp.i? and secondOp.d? and firstOp.p <= secondOp.p <= (firstOp.p + firstOp.i.length)
offset = secondOp.p - firstOp.p
insertedText = firstOp.i.slice(offset, offset + secondOp.d.length)
if insertedText == secondOp.d
insert = strRemove(firstOp.i, offset, secondOp.d.length)
return [] if insert == ""
return [
meta:
start_ts: firstUpdate.meta.start_ts
end_ts: secondUpdate.meta.end_ts
user_id: firstUpdate.meta.user_id
op: [
p: firstOp.p
i: insert
]
]
else
# This shouldn't be possible!
return [firstUpdate, secondUpdate]
# A delete and then an insert at the same position: if the inserted text
# re-creates part of the deleted text, split the delete around it.
else if mergeInsertsAndDeletes and firstOp.d? and secondOp.i? and firstOp.p == secondOp.p
offset = firstOp.d.indexOf(secondOp.i)
if offset == -1
return [firstUpdate, secondUpdate]
headD = firstOp.d.slice(0, offset)
# NOTE(review): the end index below looks like it should be
# offset + secondOp.i.length, but `inserted` is never used afterwards,
# so this has no behavioural effect.
inserted = firstOp.d.slice(offset, secondOp.i.length)
tailD = firstOp.d.slice(offset + secondOp.i.length)
headP = firstOp.p
tailP = firstOp.p + secondOp.i.length
updates = []
if headD != ""
updates.push
meta:
start_ts: firstUpdate.meta.start_ts
end_ts: secondUpdate.meta.end_ts
user_id: firstUpdate.meta.user_id
op: [
p: headP
d: headD
]
if tailD != ""
updates.push
meta:
start_ts: firstUpdate.meta.start_ts
end_ts: secondUpdate.meta.end_ts
user_id: firstUpdate.meta.user_id
op: [
p: tailP
d: tailD
]
# When the delete splits in two, give each half a point-in-time stamp so
# the halves keep distinct, ordered timestamps.
if updates.length == 2
updates[0].meta.start_ts = updates[0].meta.end_ts = firstUpdate.meta.start_ts
updates[1].meta.start_ts = updates[1].meta.end_ts = secondUpdate.meta.end_ts
return updates
else
return [firstUpdate, secondUpdate]

View file

@ -0,0 +1,54 @@
{db, ObjectId} = require "./mongojs"
UpdateCompressor = require "./UpdateCompressor"
logger = require "logger-sharelatex"
# Manages reading and writing compressed document history in Mongo.
module.exports = HistoryManager =
  # Fetch the most recent compressed update for a doc.
  # Calls back with (error, update); update is null when no history exists.
  getLastCompressedUpdate: (doc_id, callback = (error, update) ->) ->
    cursor = db.docHistory
      .find(doc_id: ObjectId(doc_id.toString()))
      .sort(timestamp: -1)
      .limit(1)
    cursor.toArray (error, compressedUpdates) ->
      return callback(error) if error?
      callback null, (compressedUpdates[0] or null)
# Remove a single compressed update document by its _id.
# Bug fix: mongojs collections expose remove(), not delete() — the previous
# db.docHistory.delete(...) call was not a function and would throw.
deleteCompressedUpdate: (id, callback = (error) ->) ->
  db.docHistory.remove({ _id: ObjectId(id.toString()) }, callback)
# Fetch and remove the newest compressed update for a doc.
# Calls back with (error, update); update is null when there is no history.
popLastCompressedUpdate: (doc_id, callback = (error, update) ->) ->
  HistoryManager.getLastCompressedUpdate doc_id, (error, update) ->
    return callback(error) if error?
    return callback(null, null) if not update?
    HistoryManager.deleteCompressedUpdate update._id, (error) ->
      return callback(error) if error?
      callback null, update
# Append compressed updates to the doc's history, creating the history
# document on first write (upsert).
# doc_id is normalised via toString() for consistency with
# getLastCompressedUpdate, so both strings and ObjectIds are accepted.
# NOTE(review): this pushes into a docUpdates array inside a single
# document, while getLastCompressedUpdate/popLastCompressedUpdate treat
# each docHistory document as one update — the two paths look
# inconsistent; confirm the intended schema.
insertCompressedUpdates: (doc_id, docUpdates, callback = (error) ->) ->
  db.docHistory.update {
    doc_id: ObjectId(doc_id.toString())
  }, {
    $push:
      docUpdates:
        $each: docUpdates
  }, {
    upsert: true
  }, callback
# Compress a batch of raw updates (merging them into the last stored
# compressed update, if any) and persist the result. No-op on an empty batch.
compressAndSaveRawUpdates: (doc_id, rawUpdates, callback = (error) ->) ->
  rawCount = rawUpdates.length
  return callback() if rawCount == 0
  HistoryManager.popLastCompressedUpdate doc_id, (error, lastCompressedUpdate) ->
    return callback(error) if error?
    compressedUpdates = UpdateCompressor.compressRawUpdates(lastCompressedUpdate, rawUpdates)
    HistoryManager.insertCompressedUpdates doc_id, compressedUpdates, (error) ->
      return callback(error) if error?
      logger.log doc_id: doc_id, rawUpdatesLength: rawCount, compressedUpdatesLength: compressedUpdates.length, "compressed doc updates"
      callback()

View file

@ -0,0 +1,118 @@
# Return s1 with s2 inserted at index pos.
strInject = (s1, pos, s2) -> s1.slice(0, pos) + s2 + s1.slice(pos)

# Return s1 with `length` characters removed starting at index pos.
strRemove = (s1, pos, length) -> s1.slice(0, pos) + s1.slice(pos + length)
# Compresses sequences of document updates into a minimal equivalent history.
module.exports = UpdateCompressor =
  # Updates come from the doc updater in the format
  #   { op: [op1, op2, ...], meta: { ts: ..., user_id: ... } }
  # but one op per update is easier to work with, so convert to our
  # compressed format:
  #   [{ op: op1, meta: { start_ts: ..., end_ts: ..., user_id: ... } },
  #    { op: op2, meta: { start_ts: ..., end_ts: ..., user_id: ... } }, ...]
  # Updates that already carry start_ts/end_ts keep their own timestamps.
  convertRawUpdatesToCompressedFormat: (updates) ->
    singleOpUpdates = []
    for update in updates
      {meta} = update
      for op in update.op
        singleOpUpdates.push
          op: op
          meta:
            start_ts: meta.start_ts or meta.ts
            end_ts: meta.end_ts or meta.ts
            user_id: meta.user_id
    return singleOpUpdates
# Compress rawUpdates, seeding the compressor with the last previously
# stored compressed update (if any) so new updates can merge into it.
compressRawUpdates: (lastPreviousUpdate, rawUpdates) ->
  updates = UpdateCompressor.convertRawUpdatesToCompressedFormat(rawUpdates)
  updates = [lastPreviousUpdate].concat(updates) if lastPreviousUpdate?
  return UpdateCompressor.compressUpdates(updates)
# Compress a list of single-op updates into as few updates as possible.
# Folds each update into the tail of the compressed list; _concatTwoUpdates
# returns 0, 1 or 2 updates per adjacent pair, so the tail is popped and
# replaced on every step.
# Fix: the previous implementation called updates.shift(), destructively
# removing the first element from the caller's array; this version leaves
# the input unmodified while returning the same result.
compressUpdates: (updates) ->
  return [] if updates.length == 0
  compressedUpdates = [updates[0]]
  for update in updates.slice(1)
    lastCompressedUpdate = compressedUpdates.pop()
    if lastCompressedUpdate?
      compressedUpdates = compressedUpdates.concat UpdateCompressor._concatTwoUpdates(lastCompressedUpdate, update)
    else
      # The previous pair cancelled out entirely; restart from this update.
      compressedUpdates.push update
  return compressedUpdates

# Updates separated by more than this many milliseconds are never merged.
MAX_TIME_BETWEEN_UPDATES: 60 * 1000 # one minute
# Try to merge two adjacent single-op updates. Returns an array of 0, 1 or
# 2 updates. Note: unlike the old HistoryBuilder, `op` here is a single op
# object (not an array), and insert/delete merging is always enabled.
_concatTwoUpdates: (firstUpdate, secondUpdate) ->
# Normalize both updates' metadata so ts-only (raw) updates and
# start_ts/end_ts (compressed) updates are treated uniformly.
firstUpdate =
op: firstUpdate.op
meta:
user_id: firstUpdate.meta.user_id or null
start_ts: firstUpdate.meta.start_ts or firstUpdate.meta.ts
end_ts: firstUpdate.meta.end_ts or firstUpdate.meta.ts
secondUpdate =
op: secondUpdate.op
meta:
user_id: secondUpdate.meta.user_id or null
start_ts: secondUpdate.meta.start_ts or secondUpdate.meta.ts
end_ts: secondUpdate.meta.end_ts or secondUpdate.meta.ts
# Never merge updates from different users, or updates far apart in time.
if firstUpdate.meta.user_id != secondUpdate.meta.user_id
return [firstUpdate, secondUpdate]
if secondUpdate.meta.start_ts - firstUpdate.meta.end_ts > UpdateCompressor.MAX_TIME_BETWEEN_UPDATES
return [firstUpdate, secondUpdate]
firstOp = firstUpdate.op
secondOp = secondUpdate.op
# Two inserts
# Merge when the second insert falls inside, or directly abuts, the first.
if firstOp.i? and secondOp.i? and firstOp.p <= secondOp.p <= (firstOp.p + firstOp.i.length)
return [
meta:
start_ts: firstUpdate.meta.start_ts
end_ts: secondUpdate.meta.end_ts
user_id: firstUpdate.meta.user_id
op:
p: firstOp.p
i: strInject(firstOp.i, secondOp.p - firstOp.p, secondOp.i)
]
# Two deletes
# Merge when the first delete happened within the range of the second.
else if firstOp.d? and secondOp.d? and secondOp.p <= firstOp.p <= (secondOp.p + secondOp.d.length)
return [
meta:
start_ts: firstUpdate.meta.start_ts
end_ts: secondUpdate.meta.end_ts
user_id: firstUpdate.meta.user_id
op:
p: secondOp.p
d: strInject(secondOp.d, firstOp.p - secondOp.p, firstOp.d)
]
# An insert and then a delete
# Only merge when the deleted text is exactly what the insert put there;
# the delete then shrinks (or fully cancels) the insert.
else if firstOp.i? and secondOp.d? and firstOp.p <= secondOp.p <= (firstOp.p + firstOp.i.length)
offset = secondOp.p - firstOp.p
insertedText = firstOp.i.slice(offset, offset + secondOp.d.length)
if insertedText == secondOp.d
insert = strRemove(firstOp.i, offset, secondOp.d.length)
return [] if insert == ""
return [
meta:
start_ts: firstUpdate.meta.start_ts
end_ts: secondUpdate.meta.end_ts
user_id: firstUpdate.meta.user_id
op:
p: firstOp.p
i: insert
]
else
# This shouldn't be possible!
return [firstUpdate, secondUpdate]
# Anything else (including delete-then-insert, which the old
# HistoryBuilder handled) is left unmerged.
else
return [firstUpdate, secondUpdate]

View file

@ -1,87 +0,0 @@
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../../app/js/ConversionManager.js"
SandboxedModule = require('sandboxed-module')
# Unit tests for ConversionManager.convertAndSaveRawOps, with the database
# layer and HistoryBuilder stubbed out via sandboxed-module and sinon.
describe "ConversionManager", ->
beforeEach ->
@ConversionManager = SandboxedModule.require modulePath, requires:
"./HistoryBuilder": @HistoryBuilder = {}
"./mongojs" : {}
"logger-sharelatex": { log: sinon.stub() }
@doc_id = "doc-id-123"
@callback = sinon.stub()
# An empty batch of raw ops should short-circuit before touching Mongo.
describe "when there are no raw ops", ->
beforeEach ->
@ConversionManager.popLastCompressedOp = sinon.stub()
@ConversionManager.insertCompressedOps = sinon.stub()
@ConversionManager.convertAndSaveRawOps @doc_id, [], @callback
it "should not need to access the database", ->
@ConversionManager.popLastCompressedOp.called.should.equal false
@ConversionManager.insertCompressedOps.called.should.equal false
it "should call the callback", ->
@callback.called.should.equal true
# With no existing history, the raw ops alone are compressed and stored.
describe "when there is no compressed history to begin with", ->
beforeEach ->
@rawOps = ["mock-raw-op-1", "mock-raw-op-2"]
@compressedOps = ["mock-compressed-op"]
@ConversionManager.popLastCompressedOp = sinon.stub().callsArgWith(1, null, null)
@ConversionManager.insertCompressedOps = sinon.stub().callsArg(2)
@HistoryBuilder.compressOps = sinon.stub().returns(@compressedOps)
@ConversionManager.convertAndSaveRawOps @doc_id, @rawOps, @callback
it "should try to pop the last compressed op", ->
@ConversionManager.popLastCompressedOp
.calledWith(@doc_id)
.should.equal true
it "should compress the raw ops", ->
@HistoryBuilder.compressOps
.calledWith(@rawOps)
.should.equal true
it "should save the compressed ops", ->
@ConversionManager.insertCompressedOps
.calledWith(@doc_id, @compressedOps)
.should.equal true
it "should call the callback", ->
@callback.called.should.equal true
# With existing history, the popped last compressed op is recompressed
# together with the new raw ops.
describe "when the raw ops need appending to existing history", ->
beforeEach ->
@rawOps = ["mock-raw-op-1", "mock-raw-op-2"]
@lastCompressedOp = "mock-last-compressed-op-0"
@compressedOps = ["mock-compressed-op-1"]
@ConversionManager.popLastCompressedOp = sinon.stub().callsArgWith(1, null, @lastCompressedOp)
@ConversionManager.insertCompressedOps = sinon.stub().callsArg(2)
@HistoryBuilder.compressOps = sinon.stub().returns(@compressedOps)
@ConversionManager.convertAndSaveRawOps @doc_id, @rawOps, @callback
it "should try to pop the last compressed op", ->
@ConversionManager.popLastCompressedOp
.calledWith(@doc_id)
.should.equal true
it "should compress the last compressed op and the raw ops", ->
@HistoryBuilder.compressOps
.calledWith([@lastCompressedOp].concat(@rawOps))
.should.equal true
it "should save the compressed ops", ->
@ConversionManager.insertCompressedOps
.calledWith(@doc_id, @compressedOps)
.should.equal true
it "should call the callback", ->
@callback.called.should.equal true

View file

@ -1,240 +0,0 @@
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../../app/js/HistoryBuilder.js"
SandboxedModule = require('sandboxed-module')
# Unit tests for HistoryBuilder.compressUpdates, covering every pairing of
# insert and delete ops. Note ops here are length-one arrays (op: [ ... ]),
# matching the normalized format HistoryBuilder works with.
describe "HistoryBuilder", ->
beforeEach ->
@HistoryBuilder = SandboxedModule.require modulePath
@user_id = "user-id-1"
@other_user_id = "user-id-2"
@ts1 = Date.now()
@ts2 = Date.now() + 1000
describe "compress", ->
# Adjacent or overlapping inserts by the same user merge into one insert.
describe "insert - insert", ->
it "should append one insert to the other", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, i: "foo" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 6, i: "bar" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 3, i: "foobar" ]
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should insert one insert inside the other", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, i: "foo" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 5, i: "bar" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 3, i: "fobaro" ]
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should not append separated inserts", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, i: "foo" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 9, i: "bar" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 3, i: "foo" ]
meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
}, {
op: [ p: 9, i: "bar" ]
meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
}]
# Adjacent or overlapping deletes merge into a single delete.
describe "delete - delete", ->
it "should append one delete to the other", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, d: "foo" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 3, d: "bar" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 3, d: "foobar" ]
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should insert one delete inside the other", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, d: "foo" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 1, d: "bar" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 1, d: "bafoor" ]
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should not append separated deletes", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, d: "foo" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 9, d: "bar" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 3, d: "foo" ]
meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
}, {
op: [ p: 9, d: "bar" ]
meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
}]
# A delete of just-inserted text shrinks or cancels the insert.
describe "insert - delete", ->
it "should undo a previous insert", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, i: "foo" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 5, d: "o" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 3, i: "fo" ]
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should remove part of an insert from the middle", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, i: "fobaro" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 5, d: "bar" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 3, i: "foo" ]
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should cancel out two opposite updates", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, i: "foo" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 3, d: "foo" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal []
it "should not combine separated updates", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, i: "foo" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 9, d: "bar" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 3, i: "foo" ]
meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
}, {
op: [ p: 9, d: "bar" ]
meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
}]
# An insert re-creating part of deleted text splits the delete around it.
describe "delete - insert", ->
it "should redo a previous delete at the beginning", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, d: "foo" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 3, i: "f" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 4, d: "oo" ]
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should redo a previous delete from halfway through", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, d: "foobar" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 3, i: "oo" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 3, d: "f" ]
meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
}, {
op: [ p: 5, d: "bar" ]
meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
}]
# Retyping a deleted word character-by-character collapses into a
# single remaining delete (the two-pass compression at work).
it "should keep words together", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, d: "abcdefghijklmnopqrstuvwxyz hello world" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 3, i: "w" ]
meta: ts: @ts2, user_id: @user_id
}, {
op: [ p: 4, i: "o" ]
meta: ts: @ts2, user_id: @user_id
}, {
op: [ p: 5, i: "r" ]
meta: ts: @ts2, user_id: @user_id
}, {
op: [ p: 6, i: "l" ]
meta: ts: @ts2, user_id: @user_id
}, {
op: [ p: 7, i: "d" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 3, d: "abcdefghijklmnopqrstuvwxyz hello " ]
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should not combine the ops if the insert text does not match the delete text", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, d: "foobar" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 3, i: "xy" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: [ p: 3, d: "foobar" ]
meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
}, {
op: [ p: 3, i: "xy" ]
meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
}]
it "should cancel two equal updates", ->
expect(@HistoryBuilder.compressUpdates [{
op: [ p: 3, d: "foo" ]
meta: ts: @ts1, user_id: @user_id
}, {
op: [ p: 3, i: "foo" ]
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal []

View file

@ -0,0 +1,87 @@
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../../app/js/HistoryManager.js"
SandboxedModule = require('sandboxed-module')
# Unit tests for HistoryManager.compressAndSaveRawUpdates, with the database
# layer and UpdateCompressor stubbed out via sandboxed-module and sinon.
describe "HistoryManager", ->
beforeEach ->
@HistoryManager = SandboxedModule.require modulePath, requires:
"./UpdateCompressor": @UpdateCompressor = {}
"./mongojs" : {}
"logger-sharelatex": { log: sinon.stub() }
@doc_id = "doc-id-123"
@callback = sinon.stub()
# An empty batch of raw updates should short-circuit before touching Mongo.
describe "when there are no raw ops", ->
beforeEach ->
@HistoryManager.popLastCompressedUpdate = sinon.stub()
@HistoryManager.insertCompressedUpdates = sinon.stub()
@HistoryManager.compressAndSaveRawUpdates @doc_id, [], @callback
it "should not need to access the database", ->
@HistoryManager.popLastCompressedUpdate.called.should.equal false
@HistoryManager.insertCompressedUpdates.called.should.equal false
it "should call the callback", ->
@callback.called.should.equal true
# With no existing history, compressRawUpdates is seeded with null.
describe "when there is no compressed history to begin with", ->
beforeEach ->
@rawUpdates = ["mock-raw-op-1", "mock-raw-op-2"]
@compressedUpdates = ["mock-compressed-op"]
@HistoryManager.popLastCompressedUpdate = sinon.stub().callsArgWith(1, null, null)
@HistoryManager.insertCompressedUpdates = sinon.stub().callsArg(2)
@UpdateCompressor.compressRawUpdates = sinon.stub().returns(@compressedUpdates)
@HistoryManager.compressAndSaveRawUpdates @doc_id, @rawUpdates, @callback
it "should try to pop the last compressed op", ->
@HistoryManager.popLastCompressedUpdate
.calledWith(@doc_id)
.should.equal true
it "should compress the raw ops", ->
@UpdateCompressor.compressRawUpdates
.calledWith(null, @rawUpdates)
.should.equal true
it "should save the compressed ops", ->
@HistoryManager.insertCompressedUpdates
.calledWith(@doc_id, @compressedUpdates)
.should.equal true
it "should call the callback", ->
@callback.called.should.equal true
# With existing history, the popped last compressed update seeds the
# compressor so new updates can merge into it.
describe "when the raw ops need appending to existing history", ->
beforeEach ->
@rawUpdates = ["mock-raw-op-1", "mock-raw-op-2"]
@lastCompressedUpdate = "mock-last-compressed-op-0"
@compressedUpdates = ["mock-compressed-op-1"]
@HistoryManager.popLastCompressedUpdate = sinon.stub().callsArgWith(1, null, @lastCompressedUpdate)
@HistoryManager.insertCompressedUpdates = sinon.stub().callsArg(2)
@UpdateCompressor.compressRawUpdates = sinon.stub().returns(@compressedUpdates)
@HistoryManager.compressAndSaveRawUpdates @doc_id, @rawUpdates, @callback
it "should try to pop the last compressed op", ->
@HistoryManager.popLastCompressedUpdate
.calledWith(@doc_id)
.should.equal true
it "should compress the last compressed op and the raw ops", ->
@UpdateCompressor.compressRawUpdates
.calledWith(@lastCompressedUpdate, @rawUpdates)
.should.equal true
it "should save the compressed ops", ->
@HistoryManager.insertCompressedUpdates
.calledWith(@doc_id, @compressedUpdates)
.should.equal true
it "should call the callback", ->
@callback.called.should.equal true

View file

@ -0,0 +1,176 @@
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../../app/js/UpdateCompressor.js"
SandboxedModule = require('sandboxed-module')
# Unit tests for UpdateCompressor. Ops here are plain objects (op: { ... }),
# matching the new single-op compressed format.
# NOTE(review): the diff header declares 176 lines for this file; only the
# portion up to the insert/delete cases is visible here — the remainder may
# be truncated by the diff viewer.
describe "UpdateCompressor", ->
beforeEach ->
@UpdateCompressor = SandboxedModule.require modulePath
@user_id = "user-id-1"
@other_user_id = "user-id-2"
@ts1 = Date.now()
@ts2 = Date.now() + 1000
# Raw multi-op updates are split into one single-op update per op.
describe "convertRawUpdatesToCompressedFormat", ->
it "should split grouped updates into individual updates", ->
expect(@UpdateCompressor.convertRawUpdatesToCompressedFormat [{
op: [ @op1 = { p: 0, i: "Foo" }, @op2 = { p: 6, i: "bar"} ]
meta: { ts: @ts1, user_id: @user_id }
}, {
op: [ @op3 = { p: 10, i: "baz" } ]
meta: { ts: @ts2, user_id: @other_user_id }
}])
.to.deep.equal [{
op: @op1,
meta: { start_ts: @ts1, end_ts: @ts1, user_id: @user_id }
}, {
op: @op2,
meta: { start_ts: @ts1, end_ts: @ts1, user_id: @user_id }
}, {
op: @op3,
meta: { start_ts: @ts2, end_ts: @ts2, user_id: @other_user_id }
}]
describe "compress", ->
# Adjacent or overlapping inserts by the same user merge into one insert.
describe "insert - insert", ->
it "should append one insert to the other", ->
expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "foo" }
meta: ts: @ts1, user_id: @user_id
}, {
op: { p: 6, i: "bar" }
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: { p: 3, i: "foobar" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should insert one insert inside the other", ->
expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "foo" }
meta: ts: @ts1, user_id: @user_id
}, {
op: { p: 5, i: "bar" }
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: { p: 3, i: "fobaro" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should not append separated inserts", ->
expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "foo" }
meta: ts: @ts1, user_id: @user_id
}, {
op: { p: 9, i: "bar" }
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: { p: 3, i: "foo" }
meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
}, {
op: { p: 9, i: "bar" }
meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
}]
# Adjacent or overlapping deletes merge into a single delete.
describe "delete - delete", ->
it "should append one delete to the other", ->
expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, d: "foo" }
meta: ts: @ts1, user_id: @user_id
}, {
op: { p: 3, d: "bar" }
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: { p: 3, d: "foobar" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should insert one delete inside the other", ->
expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, d: "foo" }
meta: ts: @ts1, user_id: @user_id
}, {
op: { p: 1, d: "bar" }
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: { p: 1, d: "bafoor" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should not append separated deletes", ->
expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, d: "foo" }
meta: ts: @ts1, user_id: @user_id
}, {
op: { p: 9, d: "bar" }
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: { p: 3, d: "foo" }
meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
}, {
op: { p: 9, d: "bar" }
meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
}]
# A delete of just-inserted text shrinks or cancels the insert.
describe "insert - delete", ->
it "should undo a previous insert", ->
expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "foo" }
meta: ts: @ts1, user_id: @user_id
}, {
op: { p: 5, d: "o" }
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: { p: 3, i: "fo" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should remove part of an insert from the middle", ->
expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "fobaro" }
meta: ts: @ts1, user_id: @user_id
}, {
op: { p: 5, d: "bar" }
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: { p: 3, i: "foo" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
}]
it "should cancel out two opposite updates", ->
expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "foo" }
meta: ts: @ts1, user_id: @user_id
}, {
op: { p: 3, d: "foo" }
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal []
it "should not combine separated updates", ->
expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "foo" }
meta: ts: @ts1, user_id: @user_id
}, {
op: { p: 9, d: "bar" }
meta: ts: @ts2, user_id: @user_id
}])
.to.deep.equal [{
op: { p: 3, i: "foo" }
meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
}, {
op: { p: 9, d: "bar" }
meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
}]