Merge branch 'master' into sk-upgrade-metrics

Shane Kilkelly 2017-03-16 15:17:57 +00:00
commit c30e672549
13 changed files with 2554 additions and 30 deletions

View file

@@ -18,7 +18,7 @@ module.exports = DiffGenerator =
       if e instanceof ConsistencyError and i == update.op.length - 1
         # catch known case where the last op in an array has been
         # merged into a later op
-        logger.error {update, op: JSON.stringify(op)}, "marking op as broken"
+        logger.error {err: e, update, op: JSON.stringify(op)}, "marking op as broken"
         op.broken = true
       else
         throw e # rethrow the exception
@@ -47,6 +47,9 @@ module.exports = DiffGenerator =
     else if op.d?
       return content.slice(0, op.p) + op.d + content.slice(op.p)
+    else
+      return content
+
   rewindUpdates: (content, updates) ->
     for update in updates.reverse()
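
The new fallthrough in rewindOp means ops that carry neither an insert nor a delete (notably comment ops) leave the text untouched when a document is rewound. A minimal standalone sketch of that behaviour, using the same op shape; this rewindOp is an illustrative restatement, not the module's exact code:

    rewindOp = (content, op) ->
      if op.i?
        # undo an insert by cutting the inserted text back out
        content.slice(0, op.p) + content.slice(op.p + op.i.length)
      else if op.d?
        # undo a delete by splicing the deleted text back in
        content.slice(0, op.p) + op.d + content.slice(op.p)
      else
        # anything else (e.g. a comment op { c, p }) is a no-op for rewinding
        content

    console.log rewindOp("hello brave world", { i: "brave ", p: 6 })  # "hello world"
    console.log rewindOp("hello world", { d: "brave ", p: 6 })        # "hello brave world"
    console.log rewindOp("hello world", { c: "hello", p: 0 })         # "hello world"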

View file

@@ -5,8 +5,8 @@ logger = require "logger-sharelatex"
 module.exports = DiffManager =
   getLatestDocAndUpdates: (project_id, doc_id, fromVersion, toVersion, callback = (error, content, version, updates) ->) ->
-    # retrieve the document before retrieving the updates,
-    # because updates are written to mongo after the document
+    # Get updates last, since then they must be ahead and it
+    # might be possible to rewind to the same version as the doc.
     DocumentUpdaterManager.getDocument project_id, doc_id, (error, content, version) ->
       return callback(error) if error?
       UpdatesManager.getDocUpdatesWithUserInfo project_id, doc_id, from: fromVersion, to: toVersion, (error, updates) ->
@@ -33,7 +33,28 @@ module.exports = DiffManager =
         callback(null, diff)

-  getDocumentBeforeVersion: (project_id, doc_id, version, callback = (error, document, rewoundUpdates) ->) ->
+  getDocumentBeforeVersion: (project_id, doc_id, version, _callback = (error, document, rewoundUpdates) ->) ->
+    # Whichever order we get the latest document and the latest updates,
+    # there is potential for updates to be applied between them so that
+    # they do not return the same 'latest' versions.
+    # If this happens, we just retry and hopefully get them at the compatible
+    # versions.
+    retries = 3
+    callback = (error, args...) ->
+      if error?
+        if error.retry and retries > 0
+          logger.warn {error, project_id, doc_id, version, retries}, "retrying getDocumentBeforeVersion"
+          retry()
+        else
+          _callback(error)
+      else
+        _callback(null, args...)
+
+    do retry = () ->
+      retries--
+      DiffManager._tryGetDocumentBeforeVersion(project_id, doc_id, version, callback)
+
+  _tryGetDocumentBeforeVersion: (project_id, doc_id, version, callback = (error, document, rewoundUpdates) ->) ->
     logger.log project_id: project_id, doc_id: doc_id, version: version, "getting document before version"
     DiffManager.getLatestDocAndUpdates project_id, doc_id, version, null, (error, content, version, updates) ->
       return callback(error) if error?
@@ -48,7 +69,11 @@ module.exports = DiffManager =
       lastUpdate = updates[0]
       if lastUpdate? and lastUpdate.v != version - 1
-        return callback new Error("latest update version, #{lastUpdate.v}, does not match doc version, #{version}")
+        error = new Error("latest update version, #{lastUpdate.v}, does not match doc version, #{version}")
+        error.retry = true
+        return callback error
+
+      logger.log {docVersion: version, lastUpdateVersion: lastUpdate?.v, updateCount: updates.length}, "rewinding updates"
       tryUpdates = updates.slice().reverse()
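
The retry scheme introduced here wraps the caller's callback: any error flagged with error.retry triggers another attempt, up to three in total, before the error is surfaced. A generic sketch of the same pattern (withRetries and attempt are hypothetical names, not from this codebase):

    # Hypothetical standalone version of the retry wrapper above. `attempt` is
    # any function taking a node-style callback; errors marked with `.retry`
    # cause another attempt until `retries` is used up (3 => three attempts total).
    withRetries = (attempt, retries, _callback) ->
      callback = (error, args...) ->
        if error?
          if error.retry and retries > 0
            retry()
          else
            _callback(error)
        else
          _callback(null, args...)
      do retry = ->
        retries--
        attempt(callback)

    # usage sketch:
    # withRetries ((cb) -> DiffManager._tryGetDocumentBeforeVersion(project_id, doc_id, version, cb)), 3, callback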

View file

@@ -14,6 +14,7 @@ module.exports = DocumentUpdaterManager =
           body = JSON.parse(body)
         catch error
           return callback(error)
+        logger.log {project_id, doc_id, version: body.version}, "got doc from document updater"
         callback null, body.lines.join("\n"), body.version
       else
         error = new Error("doc updater returned a non-success status code: #{res.statusCode}")

View file

@@ -82,7 +82,7 @@ module.exports = MongoManager =
   # For finding all updates that go into a diff for a doc
   db.docHistory.ensureIndex { doc_id: 1, v: 1 }, { background: true }
   # For finding all updates that affect a project
-  db.docHistory.ensureIndex { project_id: 1, "meta.end_ts": 1, "meta.start_ts": -1 }, { background: true }
+  db.docHistory.ensureIndex { project_id: 1, "meta.end_ts": 1 }, { background: true }
   # For finding updates that don't yet have a project_id and need it inserting
   db.docHistory.ensureIndex { doc_id: 1, project_id: 1 }, { background: true }
   # For finding project meta-data
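
Dropping "meta.start_ts": -1 leaves a compound index of { project_id: 1, "meta.end_ts": 1 }, which still covers the usual project-history lookup: an equality match on project_id plus a range or sort on meta.end_ts. A hedged sketch of that query shape (variable names are illustrative):

    # Illustrative only: this query can be answered from the slimmed-down index;
    # what the index can no longer help with is additionally ordering by meta.start_ts.
    db.docHistory
      .find { project_id: project_id, "meta.end_ts": { "$lt": before_timestamp } }
      .sort { "meta.end_ts": -1 }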

View file

@@ -530,3 +530,9 @@ module.exports = PackManager =
     }, (err) ->
       logger.log {project_id, doc_id, pack_id}, "set expiry on pack"
       callback()
+
+  # _getOneDayInFutureWithRandomDelay: ->
+  #   thirtyMins = 1000 * 60 * 30
+  #   randomThirtyMinMax = Math.ceil(Math.random() * thirtyMins)
+  #   return new Date(Date.now() + randomThirtyMinMax + 1*DAYS)
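
The commented-out helper above (kept for reference) would spread pack expiry over a thirty-minute window, so archived packs do not all hit Mongo's TTL sweep at the same instant. A runnable sketch of the same calculation, assuming DAYS is the usual milliseconds-per-day constant used elsewhere in this codebase:

    # Sketch under that assumption: one day out, plus 0-30 minutes of random jitter.
    DAYS = 24 * 60 * 60 * 1000
    getOneDayInFutureWithRandomDelay = ->
      thirtyMins = 1000 * 60 * 30
      randomThirtyMinMax = Math.ceil(Math.random() * thirtyMins)
      new Date(Date.now() + 1 * DAYS + randomThirtyMinMax)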

View file

@@ -1,6 +1,9 @@
 strInject = (s1, pos, s2) -> s1[...pos] + s2 + s1[pos..]
 strRemove = (s1, pos, length) -> s1[...pos] + s1[(pos + length)..]

+diff_match_patch = require("../lib/diff_match_patch").diff_match_patch
+dmp = new diff_match_patch()
+
 module.exports = UpdateCompressor =
   NOOP: "noop"
@@ -21,7 +24,9 @@ module.exports = UpdateCompressor =
   convertToSingleOpUpdates: (updates) ->
     splitUpdates = []
     for update in updates
-      if update.op.length == 0
+      # Reject any non-insert or delete ops, i.e. comments
+      ops = update.op.filter (o) -> o.i? or o.d?
+      if ops.length == 0
         splitUpdates.push
           op: UpdateCompressor.NOOP
           meta:
@@ -30,7 +35,7 @@ module.exports = UpdateCompressor =
             user_id: update.meta.user_id
           v: update.v
       else
-        for op in update.op
+        for op in ops
           splitUpdates.push
             op: op
             meta:
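
The filter added above strips ShareJS comment ops ({ c, p }) before updates are split, so a comment-only update becomes a NOOP and a mixed update keeps only its text ops. For example:

    # Illustrative: only insert (i) and delete (d) ops survive the filter.
    update = op: [{ c: "foo", p: 3 }, { d: "bar", p: 6 }]
    ops = update.op.filter (o) -> o.i? or o.d?
    console.log ops  # [ { d: 'bar', p: 6 } ]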
@@ -155,6 +160,59 @@ module.exports = UpdateCompressor =
       # This will only happen if the delete extends outside the insert
       return [firstUpdate, secondUpdate]

+    # A delete then an insert at the same place, likely a copy-paste of a chunk of content
+    else if firstOp.d? and secondOp.i? and firstOp.p == secondOp.p
+      offset = firstOp.p
+      diff_ops = @diffAsShareJsOps(firstOp.d, secondOp.i)
+      if diff_ops.length == 0
+        return [{ # Noop
+          meta:
+            start_ts: firstUpdate.meta.start_ts
+            end_ts: secondUpdate.meta.end_ts
+            user_id: firstUpdate.meta.user_id
+          op:
+            p: firstOp.p
+            i: ""
+          v: secondUpdate.v
+        }]
+      else
+        return diff_ops.map (op) ->
+          op.p += offset
+          return {
+            meta:
+              start_ts: firstUpdate.meta.start_ts
+              end_ts: secondUpdate.meta.end_ts
+              user_id: firstUpdate.meta.user_id
+            op: op
+            v: secondUpdate.v
+          }
+
     else
       return [firstUpdate, secondUpdate]

+  ADDED: 1
+  REMOVED: -1
+  UNCHANGED: 0
+
+  diffAsShareJsOps: (before, after, callback = (error, ops) ->) ->
+    diffs = dmp.diff_main(before, after)
+    dmp.diff_cleanupSemantic(diffs)
+
+    ops = []
+    position = 0
+    for diff in diffs
+      type = diff[0]
+      content = diff[1]
+      if type == @ADDED
+        ops.push
+          i: content
+          p: position
+        position += content.length
+      else if type == @REMOVED
+        ops.push
+          d: content
+          p: position
+      else if type == @UNCHANGED
+        position += content.length
+      else
+        throw "Unknown type"
+    return ops
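
diffAsShareJsOps converts a diff-match-patch diff into minimal ShareJS ops: ADDED runs become inserts, REMOVED runs become deletes, and only ADDED and UNCHANGED runs advance the position, since deleted text is no longer in the document. A worked example, assuming the semantic cleanup keeps the word-level split shown:

    # Illustrative input/output for the method above:
    # dmp.diff_main("one two three", "one 2 three")
    #   ~> [[0, "one "], [-1, "two"], [1, "2"], [0, " three"]]
    ops = UpdateCompressor.diffAsShareJsOps("one two three", "one 2 three")
    # => [ { d: "two", p: 4 }, { i: "2", p: 4 } ]
    # Both ops share p: 4: the delete removes "two" first, so the insert of "2"
    # lands at the same offset.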

View file

@@ -219,11 +219,35 @@ module.exports = UpdatesManager =
     return !!user_id.match(/^[a-f0-9]{24}$/)

   TIME_BETWEEN_DISTINCT_UPDATES: fiveMinutes = 5 * 60 * 1000
+  SPLIT_ON_DELETE_SIZE: 16 # characters
+
   _summarizeUpdates: (updates, existingSummarizedUpdates = []) ->
     summarizedUpdates = existingSummarizedUpdates.slice()
+    previousUpdateWasBigDelete = false
     for update in updates
       earliestUpdate = summarizedUpdates[summarizedUpdates.length - 1]
-      if earliestUpdate and earliestUpdate.meta.start_ts - update.meta.end_ts < @TIME_BETWEEN_DISTINCT_UPDATES
+      shouldConcat = false
+      # If a user inserts some text, then deletes a big chunk including that text,
+      # the update we show might concat the insert and delete, and there will be no sign
+      # of that insert having happened, or any way to restore to it (restoring after a big delete is common).
+      # So, we split the summary on 'big' deletes. However, we're stepping backwards in time with
+      # most recent changes considered first, so if this update is a big delete, we want to start
+      # a new summarized update next time, hence we monitor the previous update.
+      if previousUpdateWasBigDelete
+        shouldConcat = false
+      else if earliestUpdate and earliestUpdate.meta.end_ts - update.meta.start_ts < @TIME_BETWEEN_DISTINCT_UPDATES
+        # We're going backwards in time through the updates, so only combine if this update starts
+        # less than 5 minutes before the end of the current summarized block, so no block spans
+        # more than 5 minutes.
+        shouldConcat = true
+
+      isBigDelete = false
+      for op in update.op or []
+        if op.d? and op.d.length > @SPLIT_ON_DELETE_SIZE
+          isBigDelete = true
+      previousUpdateWasBigDelete = isBigDelete
+
+      if shouldConcat
         # check if the user in this update is already present in the earliest update,
         # if not, add them to the users list of the earliest update
         earliestUpdate.meta.user_ids = _.union earliestUpdate.meta.user_ids, [update.meta.user_id]
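
Because _summarizeUpdates walks the history newest-first, the big-delete flag takes effect one iteration late: the update processed after a big delete (i.e. the one made before it in wall-clock time) opens a fresh summary block. The predicate itself, restated standalone for illustration:

    # Illustrative restatement of the check above, with the same 16-character threshold.
    SPLIT_ON_DELETE_SIZE = 16
    isBigDelete = (update) ->
      for op in update.op or []
        return true if op.d? and op.d.length > SPLIT_ON_DELETE_SIZE
      false

    console.log isBigDelete(op: [{ d: "this is a long long long long long delete", p: 34 }])  # true
    console.log isBigDelete(op: [{ d: "short", p: 0 }])                                       # false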

File diff suppressed because it is too large

View file

@@ -233,6 +233,28 @@ describe "Appending doc ops to the history", ->
       expect(@updates[0].pack[0].v).to.equal 3
       expect(@updates[0].pack[1].v).to.equal 4

+  describe "when there is a comment update", ->
+    before (done) ->
+      @project_id = ObjectId().toString()
+      @doc_id = ObjectId().toString()
+      @user_id = ObjectId().toString()
+      MockWebApi.projects[@project_id] = features: versioning: false
+      TrackChangesClient.pushRawUpdates @project_id, @doc_id, [{
+        op: [{ c: "foo", p: 3 }, { d: "bar", p: 6 }]
+        meta: { ts: Date.now(), user_id: @user_id }
+        v: 3
+      }], (error) =>
+        throw error if error?
+        TrackChangesClient.flushAndGetCompressedUpdates @project_id, @doc_id, (error, @updates) =>
+          throw error if error?
+          done()
+
+    it "should ignore the comment op", ->
+      expect(@updates[0].pack[0].op).to.deep.equal [{ d: "bar", p: 6 }]
+
+    it "should insert the correct version numbers into mongo", ->
+      expect(@updates[0].pack[0].v).to.equal 3
+
 describe "when the project has versioning enabled", ->
   before (done) ->
     @project_id = ObjectId().toString()

View file

@@ -8,7 +8,7 @@ SandboxedModule = require('sandboxed-module')
 describe "DiffManager", ->
   beforeEach ->
     @DiffManager = SandboxedModule.require modulePath, requires:
-      "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
+      "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), warn: sinon.stub() }
       "./UpdatesManager": @UpdatesManager = {}
       "./DocumentUpdaterManager": @DocumentUpdaterManager = {}
      "./DiffGenerator": @DiffGenerator = {}
@@ -90,6 +90,76 @@ describe "DiffManager", ->
         .should.equal true

   describe "getDocumentBeforeVersion", ->
+    beforeEach ->
+      @DiffManager._tryGetDocumentBeforeVersion = sinon.stub()
+      @document = "mock-documents"
+      @rewound_updates = "mock-rewound-updates"
+
+    describe "successfully", ->
+      beforeEach ->
+        @DiffManager._tryGetDocumentBeforeVersion.yields(null, @document, @rewound_updates)
+        @DiffManager.getDocumentBeforeVersion @project_id, @doc_id, @version, @callback
+
+      it "should call _tryGetDocumentBeforeVersion", ->
+        @DiffManager._tryGetDocumentBeforeVersion
+          .calledWith(@project_id, @doc_id, @version)
+          .should.equal true
+
+      it "should call the callback with the response", ->
+        @callback.calledWith(null, @document, @rewound_updates).should.equal true
+
+    describe "with a retry needed", ->
+      beforeEach ->
+        retried = false
+        @DiffManager._tryGetDocumentBeforeVersion = (project_id, doc_id, version, callback) =>
+          if !retried
+            retried = true
+            error = new Error()
+            error.retry = true
+            callback error
+          else
+            callback(null, @document, @rewound_updates)
+        sinon.spy @DiffManager, "_tryGetDocumentBeforeVersion"
+        @DiffManager.getDocumentBeforeVersion @project_id, @doc_id, @version, @callback
+
+      it "should call _tryGetDocumentBeforeVersion twice", ->
+        @DiffManager._tryGetDocumentBeforeVersion
+          .calledTwice
+          .should.equal true
+
+      it "should call the callback with the response", ->
+        @callback.calledWith(null, @document, @rewound_updates).should.equal true
+
+    describe "with a non-retriable error", ->
+      beforeEach ->
+        @error = new Error("oops")
+        @DiffManager._tryGetDocumentBeforeVersion.yields(@error)
+        @DiffManager.getDocumentBeforeVersion @project_id, @doc_id, @version, @callback
+
+      it "should call _tryGetDocumentBeforeVersion once", ->
+        @DiffManager._tryGetDocumentBeforeVersion
+          .calledOnce
+          .should.equal true
+
+      it "should call the callback with the error", ->
+        @callback.calledWith(@error).should.equal true
+
+    describe "when the retry limit is reached", ->
+      beforeEach ->
+        @error = new Error("oops")
+        @error.retry = true
+        @DiffManager._tryGetDocumentBeforeVersion.yields(@error)
+        @DiffManager.getDocumentBeforeVersion @project_id, @doc_id, @version, @callback
+
+      it "should call _tryGetDocumentBeforeVersion three times (max retries)", ->
+        @DiffManager._tryGetDocumentBeforeVersion
+          .calledThrice
+          .should.equal true
+
+      it "should call the callback with the error", ->
+        @callback.calledWith(@error).should.equal true
+
+  describe "_tryGetDocumentBeforeVersion", ->
     beforeEach ->
       @content = "hello world"
       # Op versions are the version they were applied to, so doc is always one version
@@ -113,7 +183,7 @@ describe "DiffManager", ->
           updates.reverse()
           return @rewound_content
         @rewindUpdatesWithArgs = @DiffGenerator.rewindUpdates.withArgs(@content, @updates.slice().reverse())
-        @DiffManager.getDocumentBeforeVersion @project_id, @doc_id, @fromVersion, @callback
+        @DiffManager._tryGetDocumentBeforeVersion @project_id, @doc_id, @fromVersion, @callback

       it "should get the latest doc and version with all recent updates", ->
         @DiffManager.getLatestDocAndUpdates
@@ -131,12 +201,12 @@ describe "DiffManager", ->
         @version = 50
         @updates = [ { op: "mock-1", v: 40 }, { op: "mock-1", v: 39 } ]
         @DiffManager.getLatestDocAndUpdates = sinon.stub().callsArgWith(4, null, @content, @version, @updates)
-        @DiffManager.getDocumentBeforeVersion @project_id, @doc_id, @fromVersion, @callback
+        @DiffManager._tryGetDocumentBeforeVersion @project_id, @doc_id, @fromVersion, @callback

-      it "should call the callback with an error", ->
-        @callback
-          .calledWith(new Error("latest update version, 40, does not match doc version, 42"))
-          .should.equal true
+      it "should call the callback with an error with retry = true set", ->
+        @callback.calledOnce.should.equal true
+        error = @callback.args[0][0]
+        expect(error.retry).to.equal true

     describe "when the updates are inconsistent", ->
       beforeEach ->

View file

@@ -25,7 +25,6 @@ describe "PackManager", ->
     @project_id = ObjectId().toString()
     @PackManager.MAX_COUNT = 512
-
   afterEach ->
     tk.reset()
@@ -334,3 +333,33 @@ describe "PackManager", ->
       @callback.called.should.equal true

     it "should return with no error", ->
       @callback.calledWith(undefined).should.equal true
+
+  # describe "setTTLOnArchivedPack", ->
+  #   beforeEach ->
+  #     @pack_id = "somepackid"
+  #     @onedayinms = 86400000
+  #     @db.docHistory =
+  #       findAndModify : sinon.stub().callsArgWith(1)
+
+  #   it "should set expires to 1 day", (done)->
+  #     #@PackManager._getOneDayInFutureWithRandomDelay = sinon.stub().returns(@onedayinms)
+  #     @PackManager.setTTLOnArchivedPack @project_id, @doc_id, @pack_id, =>
+  #       args = @db.docHistory.findAndModify.args[0][0]
+  #       args.query._id.should.equal @pack_id
+  #       args.update['$set'].expiresAt.should.equal @onedayinms
+  #       done()
+
+  # describe "_getOneDayInFutureWithRandomDelay", ->
+  #   beforeEach ->
+  #     @onedayinms = 86400000
+  #     @thirtyMins = 1000 * 60 * 30
+
+  #   it "should give 1 day + 30 mins random time", (done)->
+  #     loops = 10000
+  #     while --loops > 0
+  #       randomDelay = @PackManager._getOneDayInFutureWithRandomDelay() - new Date(Date.now() + @onedayinms)
+  #       randomDelay.should.be.above(0)
+  #       randomDelay.should.be.below(@thirtyMins + 1)
+  #     done()

View file

@@ -5,13 +5,15 @@ expect = chai.expect
 modulePath = "../../../../app/js/UpdateCompressor.js"
 SandboxedModule = require('sandboxed-module')

+bigstring = ("a" for [0 .. 2*1024*1024]).join("")
+mediumstring = ("a" for [0 .. 1024*1024]).join("")
+
 describe "UpdateCompressor", ->
   beforeEach ->
-    @UpdateCompressor = SandboxedModule.require modulePath
+    @UpdateCompressor = SandboxedModule.require modulePath, requires:
+      "../lib/diff_match_patch": require("../../../../app/lib/diff_match_patch")
     @user_id = "user-id-1"
     @other_user_id = "user-id-2"
-    @bigstring = ("a" for [0 .. 2*1024*1024]).join("")
-    @mediumstring = ("a" for [0 .. 1024*1024]).join("")
     @ts1 = Date.now()
     @ts2 = Date.now() + 1000
@@ -52,6 +54,22 @@ describe "UpdateCompressor", ->
         v: 42
       }]

+    it "should ignore comment ops", ->
+      expect(@UpdateCompressor.convertToSingleOpUpdates [{
+        op: [ @op1 = { p: 0, i: "Foo" }, @op2 = { p: 9, c: "baz" }, @op3 = { p: 6, i: "bar" } ]
+        meta: { ts: @ts1, user_id: @user_id }
+        v: 42
+      }])
+      .to.deep.equal [{
+        op: @op1,
+        meta: { start_ts: @ts1, end_ts: @ts1, user_id: @user_id },
+        v: 42
+      }, {
+        op: @op3,
+        meta: { start_ts: @ts1, end_ts: @ts1, user_id: @user_id },
+        v: 42
+      }]
+
   describe "concatUpdatesWithSameVersion", ->
     it "should concat updates with the same version", ->
       expect(@UpdateCompressor.concatUpdatesWithSameVersion [{
@@ -149,7 +167,7 @@ describe "UpdateCompressor", ->
         meta: ts: @ts1, user_id: @user_id
         v: 42
       }, {
-        op: { p: 6, i: @bigstring }
+        op: { p: 6, i: bigstring }
         meta: ts: @ts2, user_id: @user_id
         v: 43
       }])
@@ -158,47 +176,47 @@ describe "UpdateCompressor", ->
         meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
         v: 42
       }, {
-        op: { p: 6, i: @bigstring }
+        op: { p: 6, i: bigstring }
         meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
         v: 43
       }]

     it "should not append inserts that are too big (first op)", ->
       expect(@UpdateCompressor.compressUpdates [{
-        op: { p: 3, i: @bigstring }
+        op: { p: 3, i: bigstring }
         meta: ts: @ts1, user_id: @user_id
         v: 42
       }, {
-        op: { p: 3 + @bigstring.length, i: "bar" }
+        op: { p: 3 + bigstring.length, i: "bar" }
         meta: ts: @ts2, user_id: @user_id
         v: 43
       }])
       .to.deep.equal [{
-        op: { p: 3, i: @bigstring }
+        op: { p: 3, i: bigstring }
         meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
         v: 42
       }, {
-        op: { p: 3 + @bigstring.length, i: "bar" }
+        op: { p: 3 + bigstring.length, i: "bar" }
         meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
         v: 43
       }]

     it "should not append inserts that are too big (first and second op)", ->
       expect(@UpdateCompressor.compressUpdates [{
-        op: { p: 3, i: @mediumstring }
+        op: { p: 3, i: mediumstring }
         meta: ts: @ts1, user_id: @user_id
         v: 42
       }, {
-        op: { p: 3 + @mediumstring.length, i: @mediumstring }
+        op: { p: 3 + mediumstring.length, i: mediumstring }
         meta: ts: @ts2, user_id: @user_id
         v: 43
       }])
       .to.deep.equal [{
-        op: { p: 3, i: @mediumstring }
+        op: { p: 3, i: mediumstring }
         meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
         v: 42
       }, {
-        op: { p: 3 + @mediumstring.length, i: @mediumstring }
+        op: { p: 3 + mediumstring.length, i: mediumstring }
         meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
         v: 43
       }]
@@ -345,6 +363,43 @@ describe "UpdateCompressor", ->
         meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
         v: 43
       }]

+    describe "delete - insert", ->
+      it "should do a diff of the content", ->
+        expect(@UpdateCompressor.compressUpdates [{
+          op: { p: 3, d: "one two three four five six seven eight" }
+          meta: ts: @ts1, user_id: @user_id
+          v: 42
+        }, {
+          op: { p: 3, i: "one 2 three four five six seven eight" }
+          meta: ts: @ts2, user_id: @user_id
+          v: 43
+        }])
+        .to.deep.equal [{
+          op: { p: 7, d: "two" }
+          meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
+          v: 43
+        }, {
+          op: { p: 7, i: "2" }
+          meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
+          v: 43
+        }]
+
+      it "should return a no-op if the delete and insert are the same", ->
+        expect(@UpdateCompressor.compressUpdates [{
+          op: { p: 3, d: "one two three four five six seven eight" }
+          meta: ts: @ts1, user_id: @user_id
+          v: 42
+        }, {
+          op: { p: 3, i: "one two three four five six seven eight" }
+          meta: ts: @ts2, user_id: @user_id
+          v: 43
+        }])
+        .to.deep.equal [{
+          op: { p: 3, i: "" }
+          meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
+          v: 43
+        }]
+
   describe "noop - insert", ->
     it "should leave them untouched", ->
describe "noop - insert", -> describe "noop - insert", ->
it "should leave them untouched", -> it "should leave them untouched", ->

View file

@@ -761,3 +761,41 @@ describe "UpdatesManager", ->
             start_ts: @now
             end_ts: @now + 30
         }]
+
+      it "should split updates before a big delete", ->
+        result = @UpdatesManager._summarizeUpdates [{
+          doc_id: "doc-id-1"
+          op: [{ d: "this is a long long long long long delete", p: 34 }]
+          meta:
+            user_id: @user_1.id
+            start_ts: @now + 20
+            end_ts: @now + 30
+          v: 5
+        }, {
+          doc_id: "doc-id-1"
+          meta:
+            user_id: @user_2.id
+            start_ts: @now
+            end_ts: @now + 10
+          v: 4
+        }]
+        expect(result).to.deep.equal [{
+          docs:
+            "doc-id-1":
+              fromV: 5
+              toV: 5
+          meta:
+            user_ids: [@user_1.id]
+            start_ts: @now + 20
+            end_ts: @now + 30
+        }, {
+          docs:
+            "doc-id-1":
+              fromV: 4
+              toV: 4
+          meta:
+            user_ids: [@user_2.id]
+            start_ts: @now
+            end_ts: @now + 10
+        }]