mirror of
https://github.com/overleaf/overleaf.git
synced 2025-04-27 14:32:37 +00:00
Merge pull request #18 from sharelatex/ja-split-on-deletes
Split update summary on big deletes
This commit is contained in:
commit
a197c0d148
2 changed files with 61 additions and 1 deletions
|
@ -219,11 +219,33 @@ module.exports = UpdatesManager =
|
||||||
return !!user_id.match(/^[a-f0-9]{24}$/)
|
return !!user_id.match(/^[a-f0-9]{24}$/)
|
||||||
|
|
||||||
TIME_BETWEEN_DISTINCT_UPDATES: fiveMinutes = 5 * 60 * 1000
|
TIME_BETWEEN_DISTINCT_UPDATES: fiveMinutes = 5 * 60 * 1000
|
||||||
|
SPLIT_ON_DELETE_SIZE: 16 # characters
|
||||||
_summarizeUpdates: (updates, existingSummarizedUpdates = []) ->
|
_summarizeUpdates: (updates, existingSummarizedUpdates = []) ->
|
||||||
summarizedUpdates = existingSummarizedUpdates.slice()
|
summarizedUpdates = existingSummarizedUpdates.slice()
|
||||||
|
previousUpdateWasBigDelete = false
|
||||||
for update in updates
|
for update in updates
|
||||||
earliestUpdate = summarizedUpdates[summarizedUpdates.length - 1]
|
earliestUpdate = summarizedUpdates[summarizedUpdates.length - 1]
|
||||||
if earliestUpdate and earliestUpdate.meta.start_ts - update.meta.end_ts < @TIME_BETWEEN_DISTINCT_UPDATES
|
shouldConcat = false
|
||||||
|
|
||||||
|
# If a user inserts some text, then deletes a big chunk including that text,
|
||||||
|
# the update we show might concat the insert and delete, and there will be no sign
|
||||||
|
# of that insert having happened, or be able to restore to it (restoring after a big delete is common).
|
||||||
|
# So, we split the summary on 'big' deletes. However, we're stepping backwards in time with
|
||||||
|
# most recent changes considered first, so if this update is a big delete, we want to start
|
||||||
|
# a new summarized update next time, hence we monitor the previous update.
|
||||||
|
if previousUpdateWasBigDelete
|
||||||
|
shouldConcat = false
|
||||||
|
else if earliestUpdate and earliestUpdate.meta.start_ts - update.meta.end_ts < @TIME_BETWEEN_DISTINCT_UPDATES
|
||||||
|
shouldConcat = true
|
||||||
|
|
||||||
|
isBigDelete = false
|
||||||
|
for op in update.op or []
|
||||||
|
if op.d? and op.d.length > @SPLIT_ON_DELETE_SIZE
|
||||||
|
isBigDelete = true
|
||||||
|
|
||||||
|
previousUpdateWasBigDelete = isBigDelete
|
||||||
|
|
||||||
|
if shouldConcat
|
||||||
# check if the user in this update is already present in the earliest update,
|
# check if the user in this update is already present in the earliest update,
|
||||||
# if not, add them to the users list of the earliest update
|
# if not, add them to the users list of the earliest update
|
||||||
earliestUpdate.meta.user_ids = _.union earliestUpdate.meta.user_ids, [update.meta.user_id]
|
earliestUpdate.meta.user_ids = _.union earliestUpdate.meta.user_ids, [update.meta.user_id]
|
||||||
|
|
|
@ -761,3 +761,41 @@ describe "UpdatesManager", ->
|
||||||
start_ts: @now
|
start_ts: @now
|
||||||
end_ts: @now + 30
|
end_ts: @now + 30
|
||||||
}]
|
}]
|
||||||
|
|
||||||
|
it "should split updates before a big delete", ->
|
||||||
|
result = @UpdatesManager._summarizeUpdates [{
|
||||||
|
doc_id: "doc-id-1"
|
||||||
|
op: [{ d: "this is a long long long long long delete", p: 34 }]
|
||||||
|
meta:
|
||||||
|
user_id: @user_1.id
|
||||||
|
start_ts: @now + 20
|
||||||
|
end_ts: @now + 30
|
||||||
|
v: 5
|
||||||
|
}, {
|
||||||
|
doc_id: "doc-id-1"
|
||||||
|
meta:
|
||||||
|
user_id: @user_2.id
|
||||||
|
start_ts: @now
|
||||||
|
end_ts: @now + 10
|
||||||
|
v: 4
|
||||||
|
}]
|
||||||
|
|
||||||
|
expect(result).to.deep.equal [{
|
||||||
|
docs:
|
||||||
|
"doc-id-1":
|
||||||
|
fromV: 5
|
||||||
|
toV: 5
|
||||||
|
meta:
|
||||||
|
user_ids: [@user_1.id]
|
||||||
|
start_ts: @now + 20
|
||||||
|
end_ts: @now + 30
|
||||||
|
}, {
|
||||||
|
docs:
|
||||||
|
"doc-id-1":
|
||||||
|
fromV: 4
|
||||||
|
toV: 4
|
||||||
|
meta:
|
||||||
|
user_ids: [@user_2.id]
|
||||||
|
start_ts: @now
|
||||||
|
end_ts: @now + 10
|
||||||
|
}]
|
Loading…
Add table
Reference in a new issue