Merge pull request #17662 from overleaf/em-handle-tcs-history

Reintroduce tracked changes handling in project-history

GitOrigin-RevId: f6d72fbffc9b2d2328edc35ffb3f328a31d95e6a
Eric Mc Sween 2024-03-27 12:19:15 -04:00 committed by Copybot
parent 9c9ec23689
commit 59703ef745
5 changed files with 1008 additions and 73 deletions

View file

@@ -1,6 +1,15 @@
// @ts-check
import OError from '@overleaf/o-error'
import DMP from 'diff-match-patch'
/**
* @typedef {import('./types').DeleteOp} DeleteOp
* @typedef {import('./types').InsertOp} InsertOp
* @typedef {import('./types').Op} Op
* @typedef {import('./types').Update} Update
*/
const MAX_TIME_BETWEEN_UPDATES = 60 * 1000 // one minute
const MAX_UPDATE_SIZE = 2 * 1024 * 1024 // 2 MB
const ADDED = 1
@@ -31,47 +40,87 @@ const mergeUpdatesWithOp = function (firstUpdate, secondUpdate, op) {
return update
}
const adjustLengthByOp = function (length, op) {
if (op.i != null) {
return length + op.i.length
} else if (op.d != null) {
return length - op.d.length
} else if (op.c != null) {
/**
* Adjust the given length to account for the given op
*
* The resulting length is the new length of the doc after the op is applied.
*
* @param {number} length
* @param {Op} op
* @param {object} opts
* @param {boolean} [opts.tracked] - whether or not the update is a tracked change
* @returns {number} the adjusted length
*/
function adjustLengthByOp(length, op, opts = {}) {
if ('i' in op && op.i != null) {
if (op.trackedDeleteRejection) {
// Tracked delete rejection: will be translated into a retain
return length
} else {
return length + op.i.length
}
} else if ('d' in op && op.d != null) {
if (opts.tracked && op.u == null) {
// Tracked delete: will be translated into a retain, except where it overlaps tracked inserts.
for (const change of op.trackedChanges ?? []) {
if (change.type === 'insert') {
length -= change.length
}
}
return length
} else {
return length - op.d.length
}
} else if ('c' in op && op.c != null) {
return length
} else {
throw new OError('unexpected op type')
}
}
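A quick sketch of how this module-internal helper behaves, using illustrative ops (opts.tracked mirrors update.meta.tc being set):
adjustLengthByOp(10, { p: 0, i: 'abc' })                               // => 13
adjustLengthByOp(10, { p: 0, d: 'abc' })                               // => 7
adjustLengthByOp(10, { p: 0, d: 'abc' }, { tracked: true })            // => 10 (kept as a tracked delete)
adjustLengthByOp(10, { p: 0, i: 'abc', trackedDeleteRejection: true }) // => 10 (translated into a retain)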
// Updates come from the doc updater in format
// {
// op: [ { ... op1 ... }, { ... op2 ... } ]
// meta: { ts: ..., user_id: ... }
// }
// but it's easier to work with one op per update, so convert these updates to
// our compressed format
// [{
// op: op1
// meta: { ts: ..., user_id: ... }
// }, {
// op: op2
// meta: { ts: ..., user_id: ... }
// }]
/**
* Updates come from the doc updater in format
* {
* op: [ { ... op1 ... }, { ... op2 ... } ]
* meta: { ts: ..., user_id: ... }
* }
* but it's easier to work with one op per update, so convert these updates to
* our compressed format
* [{
* op: op1
* meta: { ts: ..., user_id: ... }
* }, {
* op: op2
* meta: { ts: ..., user_id: ... }
* }]
*
* @param {Update[]} updates
* @returns {Update[]} single op updates
*/
export function convertToSingleOpUpdates(updates) {
const splitUpdates = []
for (const update of updates) {
if (update.op == null) {
if (!('op' in update)) {
// Not a text op, likely a project structure op
splitUpdates.push(update)
continue
}
const ops = update.op
let { doc_length: docLength } = update.meta
let docLength = update.meta.history_doc_length ?? update.meta.doc_length
// Temporary fix for document-updater sending a length of -1 for empty
// documents. This can be removed after all queues have been flushed.
if (docLength === -1) {
docLength = 0
}
for (const op of ops) {
const splitUpdate = cloneWithOp(update, op)
if (docLength != null) {
splitUpdate.meta.doc_length = docLength
docLength = adjustLengthByOp(docLength, op)
docLength = adjustLengthByOp(docLength, op, {
tracked: update.meta.tc != null,
})
delete splitUpdate.meta.history_doc_length
}
splitUpdates.push(splitUpdate)
}
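A minimal sketch of the conversion, with made-up ops and lengths: each op becomes its own update, and meta.doc_length records the document length just before that op applies.
const split = convertToSingleOpUpdates([
  {
    op: [
      { p: 0, i: 'hello' }, // grows the doc by 5
      { p: 3, d: 'xyz' },   // shrinks it by 3
    ],
    meta: { ts: 1711555200000, user_id: 'user-1', doc_length: 10 },
    v: 7,
  },
])
// split[0] => { op: { p: 0, i: 'hello' }, meta: { ..., doc_length: 10 }, v: 7 }
// split[1] => { op: { p: 3, d: 'xyz' },   meta: { ..., doc_length: 15 }, v: 7 }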
@@ -146,13 +195,21 @@ export function compressUpdates(updates) {
return compressedUpdates
}
/**
* If possible, merge two updates into a single update that has the same effect.
*
* It's useful to do some of this work at this point while we're dealing with
* document-updater updates. The deletes, in particular, include the deleted
* text. This allows us to find pieces of inserts and deletes that cancel each
* other out because they insert/delete the exact same text. This compression
* makes the diff smaller.
*/
function _concatTwoUpdates(firstUpdate, secondUpdate) {
// Previously we cloned firstUpdate and secondUpdate at this point but we
// can skip this step because whenever they are returned with
// modification there is always a clone at that point via
// mergeUpdatesWithOp.
let offset
if (firstUpdate.op == null || secondUpdate.op == null) {
// Project structure ops
return [firstUpdate, secondUpdate]
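For example (a hedged sketch mirroring the unit tests): two adjacent inserts from the same user, close together in time and not caught by any of the guards below, collapse into a single op, and spreading firstOp keeps history fields such as hpos intact.
compressUpdates([
  { op: { p: 3, i: 'foo', hpos: 13 }, meta: { ts: 1000, user_id: 'user-1' }, v: 42 },
  { op: { p: 6, i: 'bar', hpos: 16 }, meta: { ts: 2000, user_id: 'user-1' }, v: 43 },
])
// => [{ op: { p: 3, i: 'foobar', hpos: 13 }, meta: { ts: 1000, user_id: 'user-1' }, v: 43 }]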
@@ -185,6 +242,45 @@ function _concatTwoUpdates(firstUpdate, secondUpdate) {
return [firstUpdate, secondUpdate]
}
if (
(firstUpdate.meta.tc == null && secondUpdate.meta.tc != null) ||
(firstUpdate.meta.tc != null && secondUpdate.meta.tc == null)
) {
// One update is tracking changes and the other isn't. Tracking changes
// results in different behaviour in the history, so we need to keep these
// two updates separate.
return [firstUpdate, secondUpdate]
}
if (Boolean(firstUpdate.op.u) !== Boolean(secondUpdate.op.u)) {
// One update is an undo and the other isn't. If we were to merge the two
// updates, we would have to choose one value for the flag, which would be
// partially incorrect. Moreover, a tracked delete that is also an undo is
// treated as a tracked insert rejection by the history, so these updates
// need to be well separated.
return [firstUpdate, secondUpdate]
}
if (
firstUpdate.op.trackedDeleteRejection ||
secondUpdate.op.trackedDeleteRejection
) {
// Do not merge tracked delete rejections. Each tracked delete rejection is
// a separate operation.
return [firstUpdate, secondUpdate]
}
if (
firstUpdate.op.trackedChanges != null ||
secondUpdate.op.trackedChanges != null
) {
// Do not merge ops that span tracked changes.
// TODO: This could theoretically be handled, but it would be complex. One
// would need to take tracked deletes into account when merging inserts and
// deletes together.
return [firstUpdate, secondUpdate]
}
const firstOp = firstUpdate.op
const secondOp = secondUpdate.op
const firstSize =
@@ -206,26 +302,38 @@ function _concatTwoUpdates(firstUpdate, secondUpdate) {
) {
return [
mergeUpdatesWithOp(firstUpdate, secondUpdate, {
p: firstOp.p,
...firstOp,
i: strInject(firstOp.i, secondOp.p - firstOp.p, secondOp.i),
}),
]
// Two deletes
} else if (
}
// Two deletes
if (
firstOp.d != null &&
secondOp.d != null &&
firstOpInsideSecondOp &&
combinedLengthUnderLimit
combinedLengthUnderLimit &&
firstUpdate.meta.tc == null &&
secondUpdate.meta.tc == null
) {
return [
mergeUpdatesWithOp(firstUpdate, secondUpdate, {
p: secondOp.p,
...secondOp,
d: strInject(secondOp.d, firstOp.p - secondOp.p, firstOp.d),
}),
]
// An insert and then a delete
} else if (firstOp.i != null && secondOp.d != null && secondOpInsideFirstOp) {
offset = secondOp.p - firstOp.p
}
// An insert and then a delete
if (
firstOp.i != null &&
secondOp.d != null &&
secondOpInsideFirstOp &&
firstUpdate.meta.tc == null &&
secondUpdate.meta.tc == null
) {
const offset = secondOp.p - firstOp.p
const insertedText = firstOp.i.slice(offset, offset + secondOp.d.length)
// Only trim the insert when the delete is fully contained within it
if (insertedText === secondOp.d) {
@@ -235,7 +343,7 @@ function _concatTwoUpdates(firstUpdate, secondUpdate) {
} else {
return [
mergeUpdatesWithOp(firstUpdate, secondUpdate, {
p: firstOp.p,
...firstOp,
i: insert,
}),
]
@@ -244,35 +352,60 @@ function _concatTwoUpdates(firstUpdate, secondUpdate) {
// This will only happen if the delete extends outside the insert
return [firstUpdate, secondUpdate]
}
}
// A delete then an insert at the same place, likely a copy-paste of a chunk of content
} else if (
// A delete then an insert at the same place, likely a copy-paste of a chunk of content
if (
firstOp.d != null &&
secondOp.i != null &&
firstOp.p === secondOp.p
firstOp.p === secondOp.p &&
firstUpdate.meta.tc == null &&
secondUpdate.meta.tc == null
) {
offset = firstOp.p
const offset = firstOp.p
const hoffset = firstOp.hpos
const diffUpdates = diffAsShareJsOps(firstOp.d, secondOp.i).map(
function (op) {
op.p += offset
// diffAsShareJsOps() returns ops with positions relative to the position
// of the copy/paste. We need to adjust these positions so that they
// apply to the whole document instead.
const pos = op.p
op.p = pos + offset
if (hoffset != null) {
op.hpos = pos + hoffset
}
if (firstOp.u && secondOp.u) {
op.u = true
}
return mergeUpdatesWithOp(firstUpdate, secondUpdate, op)
}
)
// Doing a diff like this loses track of the doc lengths for each
// update, so recalculate them
let { doc_length: docLength } = firstUpdate.meta
let docLength =
firstUpdate.meta.history_doc_length ?? firstUpdate.meta.doc_length
for (const update of diffUpdates) {
update.meta.doc_length = docLength
docLength = adjustLengthByOp(docLength, update.op)
docLength = adjustLengthByOp(docLength, update.op, {
tracked: update.meta.tc != null,
})
delete update.meta.history_doc_length
}
return diffUpdates
} else {
return [firstUpdate, secondUpdate]
}
return [firstUpdate, secondUpdate]
}
/**
* Return the diff between two strings
*
* @param {string} before
* @param {string} after
* @returns {(InsertOp | DeleteOp)[]} the ops that generate that diff
*/
export function diffAsShareJsOps(before, after) {
const diffs = dmp.diff_main(before, after)
dmp.diff_cleanupSemantic(diffs)
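As a rough sketch of the expected shape (the exact split depends on diff-match-patch's semantic cleanup): positions are relative to the start of the compared strings, and the caller in _concatTwoUpdates shifts them by the position (and hpos) of the copy/paste.
diffAsShareJsOps('one two three', 'one 2 three')
// => [{ d: 'two', p: 4 }, { i: '2', p: 4 }]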

View file

@@ -6,17 +6,19 @@ import * as Errors from './Errors.js'
import * as OperationsCompressor from './OperationsCompressor.js'
/**
* @typedef {import('./types.ts').AddDocUpdate} AddDocUpdate
* @typedef {import('./types.ts').AddFileUpdate} AddFileUpdate
* @typedef {import('./types.ts').CommentOp} CommentOp
* @typedef {import('./types.ts').DeleteOp} DeleteOp
* @typedef {import('./types.ts').InsertOp} InsertOp
* @typedef {import('./types.ts').Op} Op
* @typedef {import('./types.ts').RenameUpdate} RenameUpdate
* @typedef {import('./types.ts').TextUpdate} TextUpdate
* @typedef {import('./types.ts').DeleteCommentUpdate} DeleteCommentUpdate
* @typedef {import('./types.ts').Update} Update
* @typedef {import('./types.ts').UpdateWithBlob} UpdateWithBlob
* @typedef {import('./types').AddDocUpdate} AddDocUpdate
* @typedef {import('./types').AddFileUpdate} AddFileUpdate
* @typedef {import('./types').CommentOp} CommentOp
* @typedef {import('./types').DeleteCommentUpdate} DeleteCommentUpdate
* @typedef {import('./types').DeleteOp} DeleteOp
* @typedef {import('./types').InsertOp} InsertOp
* @typedef {import('./types').Op} Op
* @typedef {import('./types').RawScanOp} RawScanOp
* @typedef {import('./types').RenameUpdate} RenameUpdate
* @typedef {import('./types').TextUpdate} TextUpdate
* @typedef {import('./types').TrackingProps} TrackingProps
* @typedef {import('./types').Update} Update
* @typedef {import('./types').UpdateWithBlob} UpdateWithBlob
*/
/**
@@ -63,15 +65,14 @@ function _convertToChange(projectId, updateWithBlob) {
]
projectVersion = update.version
} else if (isTextUpdate(update)) {
const docLength = update.meta.doc_length
const docLength = update.meta.history_doc_length ?? update.meta.doc_length
let pathname = update.meta.pathname
pathname = _convertPathname(pathname)
const builder = new OperationsBuilder(docLength, pathname)
// convert ops
for (const op of update.op) {
// if this throws an exception it will be caught in convertToChanges
builder.addOp(op)
builder.addOp(op, update)
}
operations = builder.finish()
// add doc version information if present
@@ -232,7 +233,7 @@ class OperationsBuilder {
/**
* Currently built text operation
*
* @type {(number | string)[]}
* @type {RawScanOp[]}
*/
this.textOperation = []
@@ -247,9 +248,15 @@ class OperationsBuilder {
/**
* @param {Op} op
* @param {Update} update
* @returns {void}
*/
addOp(op) {
addOp(op, update) {
// We sometimes receive operations that operate at positions outside the
// docLength. Document updater coerces the position to the end of the
// document. We do the same here.
const pos = Math.min(op.hpos ?? op.p, this.docLength)
if (isComment(op)) {
// Close the current text operation
this.pushTextOperation()
@@ -260,8 +267,8 @@ class OperationsBuilder {
commentId: op.t,
ranges: [
{
pos: op.p,
length: op.c.length,
pos,
length: op.hlen ?? op.c.length,
},
],
})
@@ -272,11 +279,6 @@ class OperationsBuilder {
throw new Errors.UnexpectedOpTypeError('unexpected op type', { op })
}
// We sometimes receive operations that operate at positions outside the
// docLength. Document updater coerces the position to the end of the
// document. We do the same here.
const pos = Math.min(op.p, this.docLength)
if (pos < this.cursor) {
this.pushTextOperation()
// At this point, this.cursor === 0 and we can continue
@@ -287,26 +289,128 @@
}
if (isInsert(op)) {
this.insert(op.i)
if (op.trackedDeleteRejection) {
this.retain(op.i.length, {
tracking: {
type: 'none',
userId: update.meta.user_id,
ts: new Date(update.meta.ts).toISOString(),
},
})
} else if (update.meta.tc != null) {
this.insert(op.i, {
tracking: {
type: 'insert',
userId: update.meta.user_id,
ts: new Date(update.meta.ts).toISOString(),
},
})
} else {
this.insert(op.i)
}
}
if (isDelete(op)) {
this.delete(op.d.length)
const changes = op.trackedChanges ?? []
// Tracked changes should already be ordered by offset, but let's make
// sure they are.
changes.sort((a, b) => {
const posOrder = a.offset - b.offset
if (posOrder !== 0) {
return posOrder
} else if (a.type === 'insert' && b.type === 'delete') {
return 1
} else if (a.type === 'delete' && b.type === 'insert') {
return -1
} else {
return 0
}
})
let offset = 0
for (const change of changes) {
if (change.offset > offset) {
// Handle the portion before the tracked change
if (update.meta.tc != null && op.u == null) {
// This is a tracked delete
this.retain(change.offset - offset, {
tracking: {
type: 'delete',
userId: update.meta.user_id,
ts: new Date(update.meta.ts).toISOString(),
},
})
} else {
// This is a regular delete
this.delete(change.offset - offset)
}
offset = change.offset
}
// Now, handle the portion inside the tracked change
if (change.type === 'delete') {
// Tracked deletes are skipped over when deleting
this.retain(change.length)
} else if (change.type === 'insert') {
// Deletes inside tracked inserts are always regular deletes
this.delete(change.length)
offset += change.length
}
}
if (offset < op.d.length) {
// Handle the portion after the last tracked change
if (update.meta.tc != null && op.u == null) {
// This is a tracked delete
this.retain(op.d.length - offset, {
tracking: {
type: 'delete',
userId: update.meta.user_id,
ts: new Date(update.meta.ts).toISOString(),
},
})
} else {
// This is a regular delete
this.delete(op.d.length - offset)
}
}
}
}
retain(length) {
this.textOperation.push(length)
/**
* @param {number} length
* @param {object} opts
* @param {TrackingProps} [opts.tracking]
*/
retain(length, opts = {}) {
if (opts.tracking) {
this.textOperation.push({ r: length, ...opts })
} else {
this.textOperation.push(length)
}
this.cursor += length
}
insert(str) {
this.textOperation.push(str)
/**
* @param {string} str
* @param {object} opts
* @param {TrackingProps} [opts.tracking]
*/
insert(str, opts = {}) {
if (opts.tracking) {
this.textOperation.push({ i: str, ...opts })
} else {
this.textOperation.push(str)
}
this.cursor += str.length
this.docLength += str.length
}
delete(length) {
/**
* @param {number} length
* @param {object} opts
*/
delete(length, opts = {}) {
this.textOperation.push(-length)
this.docLength -= length
}
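A worked sketch of the delete handling above, with illustrative values (userId and ts stand in for update.meta.user_id and the ISO timestamp; the trailing retain is what the finished operation pads out to, mirroring the unit tests below):
// Input op, applied with update.meta.tc set, no undo flag, and a history doc
// length of 30:
//   { d: 'abcdef', p: 10, trackedChanges: [{ type: 'insert', offset: 2, length: 3 }] }
// Resulting textOperation:
//   [
//     10,                                                 // retain up to the delete
//     { r: 2, tracking: { type: 'delete', userId, ts } }, // text before the tracked insert becomes a tracked delete
//     -3,                                                 // the previously tracked-inserted characters are hard-deleted
//     { r: 1, tracking: { type: 'delete', userId, ts } }, // text after the tracked insert
//     14,                                                 // retain the remaining 30 - 16 characters
//   ]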

View file

@@ -2,10 +2,11 @@ export type Update = TextUpdate | AddDocUpdate | AddFileUpdate | RenameUpdate |
export type UpdateMeta = {
user_id: string
ts: string
ts: number
source?: string
type?: string
origin?: RawOrigin
tc?: string
}
export type TextUpdate = {
@@ -15,6 +16,7 @@ export type TextUpdate = {
meta: UpdateMeta & {
pathname: string
doc_length: number
history_doc_length?: number
}
}
@@ -52,17 +54,31 @@ export type Op = InsertOp | DeleteOp | CommentOp
export type InsertOp = {
i: string
p: number
u?: boolean
hpos?: number
trackedDeleteRejection?: boolean
}
export type DeleteOp = {
d: string
p: number
u?: boolean
hpos?: number
trackedChanges?: TrackedChangesInsideDelete[]
}
export type TrackedChangesInsideDelete = {
type: 'insert' | 'delete'
offset: number
length: number
}
export type CommentOp = {
c: string
p: number
t: string
hpos?: number
hlen?: number
}
export type UpdateWithBlob = {
@@ -73,3 +89,16 @@ export type UpdateWithBlob = {
export type RawOrigin = {
kind: string
}
export type TrackingProps = {
type: 'insert' | 'delete' | 'none'
userId: string
ts: string
}
export type RawScanOp =
| number
| string
| { r: number; tracking?: TrackingProps }
| { i: string; tracking?: TrackingProps; commentIds?: string[] }
| { d: number; tracking?: TrackingProps }
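For reference, a hypothetical textOperation using these scan ops (values are illustrative): plain numbers retain, strings are plain inserts, negative numbers are plain deletes, and the object forms carry tracking metadata.
const exampleTextOperation = [
  5,                                  // retain 5 characters
  { i: 'added', tracking: { type: 'insert', userId: 'user-1', ts: '2024-03-27T16:19:15.000Z' } },
  { r: 4, tracking: { type: 'delete', userId: 'user-1', ts: '2024-03-27T16:19:15.000Z' } },
  -3,                                 // hard delete 3 characters
  12,                                 // retain the rest
]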

View file

@@ -148,6 +148,61 @@ describe('UpdateCompressor', function () {
},
])
})
it('should take tracked changes into account when calculating the doc length', function () {
const meta = {
ts: this.ts1,
user_id: this.user_id,
tc: 'tracked-change-id',
}
expect(
this.UpdateCompressor.convertToSingleOpUpdates([
{
op: [
{ p: 6, i: 'orange' }, // doc_length += 6
{ p: 22, d: 'apple' }, // doc_length doesn't change
{ p: 12, i: 'melon', u: true }, // doc_length += 5
{ p: 18, i: 'banana', u: true, trackedDeleteRejection: true }, // doc_length doesn't change
{ p: 8, d: 'pineapple', u: true }, // doc_length -= 9
{ p: 11, i: 'fruit salad' },
],
meta: { ...meta, doc_length: 20, history_doc_length: 30 },
v: 42,
},
])
).to.deep.equal([
{
op: { p: 6, i: 'orange' },
meta: { ...meta, doc_length: 30 },
v: 42,
},
{
op: { p: 22, d: 'apple' },
meta: { ...meta, doc_length: 36 },
v: 42,
},
{
op: { p: 12, i: 'melon', u: true },
meta: { ...meta, doc_length: 36 },
v: 42,
},
{
op: { p: 18, i: 'banana', u: true, trackedDeleteRejection: true },
meta: { ...meta, doc_length: 41 },
v: 42,
},
{
op: { p: 8, d: 'pineapple', u: true },
meta: { ...meta, doc_length: 41 },
v: 42,
},
{
op: { p: 11, i: 'fruit salad' },
meta: { ...meta, doc_length: 32 },
v: 42,
},
])
})
})
describe('concatUpdatesWithSameVersion', function () {
@@ -571,6 +626,153 @@ describe('UpdateCompressor', function () {
},
])
})
it("should not merge updates that track changes and updates that don't", function () {
expect(
this.UpdateCompressor.compressUpdates([
{
pathname: 'main.tex',
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
pathname: 'main.tex',
op: { p: 6, i: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id, tc: 'tracking-id' },
v: 43,
},
])
).to.deep.equal([
{
pathname: 'main.tex',
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
pathname: 'main.tex',
op: { p: 6, i: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id, tc: 'tracking-id' },
v: 43,
},
])
})
it('should not merge undos with regular ops', function () {
expect(
this.UpdateCompressor.compressUpdates([
{
pathname: 'main.tex',
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
pathname: 'main.tex',
op: { p: 6, i: 'bar', u: true },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43,
},
])
).to.deep.equal([
{
pathname: 'main.tex',
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
pathname: 'main.tex',
op: { p: 6, i: 'bar', u: true },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43,
},
])
})
it('should not merge tracked delete rejections', function () {
expect(
this.UpdateCompressor.compressUpdates([
{
pathname: 'main.tex',
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
pathname: 'main.tex',
op: { p: 6, i: 'bar', trackedDeleteRejection: true },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43,
},
])
).to.deep.equal([
{
pathname: 'main.tex',
op: { p: 3, i: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
pathname: 'main.tex',
op: { p: 6, i: 'bar', trackedDeleteRejection: true },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43,
},
])
})
it('should preserve history metadata', function () {
expect(
this.UpdateCompressor.compressUpdates([
{
op: { p: 3, i: 'foo', hpos: 13 },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
op: { p: 6, i: 'bar', hpos: 16 },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43,
},
])
).to.deep.equal([
{
op: { p: 3, i: 'foobar', hpos: 13 },
meta: { ts: this.ts1, user_id: this.user_id },
v: 43,
},
])
})
it('should not merge updates from different users', function () {
expect(
this.UpdateCompressor.compressUpdates([
{
op: { p: 3, i: 'foo', hpos: 13 },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
op: { p: 6, i: 'bar', hpos: 16 },
meta: { ts: this.ts2, user_id: this.other_user_id },
v: 43,
},
])
).to.deep.equal([
{
op: { p: 3, i: 'foo', hpos: 13 },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
op: { p: 6, i: 'bar', hpos: 16 },
meta: { ts: this.ts2, user_id: this.other_user_id },
v: 43,
},
])
})
})
describe('delete - delete', function () {
@@ -647,6 +849,95 @@ describe('UpdateCompressor', function () {
},
])
})
it('should not merge deletes over tracked changes', function () {
expect(
this.UpdateCompressor.compressUpdates([
{
op: { p: 3, d: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
op: {
p: 3,
d: 'bar',
trackedChanges: [{ type: 'delete', pos: 2, length: 10 }],
},
meta: { ts: this.ts2, user_id: this.user_id },
v: 43,
},
])
).to.deep.equal([
{
op: { p: 3, d: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
op: {
p: 3,
d: 'bar',
trackedChanges: [{ type: 'delete', pos: 2, length: 10 }],
},
meta: { ts: this.ts2, user_id: this.user_id },
v: 43,
},
])
})
it('should preserve history metadata', function () {
expect(
this.UpdateCompressor.compressUpdates([
{
op: { p: 3, d: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
op: { p: 3, d: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43,
},
])
).to.deep.equal([
{
op: { p: 3, d: 'foobar' },
meta: { ts: this.ts1, user_id: this.user_id },
v: 43,
},
])
})
it('should not merge when the deletes are tracked', function () {
// TODO: We should be able to lift that constraint, but it would
// require recalculating the hpos on the second op.
expect(
this.UpdateCompressor.compressUpdates([
{
op: { p: 3, d: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id, tc: 'tracking-id' },
v: 42,
},
{
op: { p: 3, d: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id, tc: 'tracking-id' },
v: 43,
},
])
).to.deep.equal([
{
op: { p: 3, d: 'foo' },
meta: { ts: this.ts1, user_id: this.user_id, tc: 'tracking-id' },
v: 42,
},
{
op: { p: 3, d: 'bar' },
meta: { ts: this.ts2, user_id: this.user_id, tc: 'tracking-id' },
v: 43,
},
])
})
})
describe('insert - delete', function () {
@@ -768,6 +1059,29 @@ describe('UpdateCompressor', function () {
},
])
})
it('should preserve history metadata', function () {
expect(
this.UpdateCompressor.compressUpdates([
{
op: { p: 3, i: 'foo', hpos: 13 },
meta: { ts: this.ts1, user_id: this.user_id },
v: 42,
},
{
op: { p: 5, d: 'o', hpos: 15 },
meta: { ts: this.ts2, user_id: this.user_id },
v: 43,
},
])
).to.deep.equal([
{
op: { p: 3, i: 'fo', hpos: 13 },
meta: { ts: this.ts1, user_id: this.user_id },
v: 43,
},
])
})
})
describe('delete - insert', function () {
@@ -815,6 +1129,90 @@ describe('UpdateCompressor', function () {
])
).to.deep.equal([])
})
it('should preserve history metadata', function () {
expect(
this.UpdateCompressor.compressUpdates([
{
op: {
p: 3,
d: 'one two three four five six seven eight',
hpos: 13,
},
meta: { ts: this.ts1, user_id: this.user_id, doc_length: 100 },
v: 42,
},
{
op: {
p: 3,
i: 'one 2 three four five six seven eight',
hpos: 13,
},
meta: { ts: this.ts2, user_id: this.user_id, doc_length: 100 },
v: 43,
},
])
).to.deep.equal([
{
op: { p: 7, d: 'two', hpos: 17 },
meta: { ts: this.ts1, user_id: this.user_id, doc_length: 100 },
v: 43,
},
{
op: { p: 7, i: '2', hpos: 17 },
meta: { ts: this.ts1, user_id: this.user_id, doc_length: 97 },
v: 43,
},
])
})
it('should not merge when tracking changes', function () {
expect(
this.UpdateCompressor.compressUpdates([
{
op: { p: 3, d: 'one two three four five six seven eight' },
meta: {
ts: this.ts1,
user_id: this.user_id,
doc_length: 100,
tc: 'tracking-id',
},
v: 42,
},
{
op: { p: 3, i: 'one 2 three four five six seven eight' },
meta: {
ts: this.ts2,
user_id: this.user_id,
doc_length: 100,
tc: 'tracking-id',
},
v: 43,
},
])
).to.deep.equal([
{
op: { p: 3, d: 'one two three four five six seven eight' },
meta: {
ts: this.ts1,
user_id: this.user_id,
doc_length: 100,
tc: 'tracking-id',
},
v: 42,
},
{
op: { p: 3, i: 'one 2 three four five six seven eight' },
meta: {
ts: this.ts2,
user_id: this.user_id,
doc_length: 100,
tc: 'tracking-id',
},
v: 43,
},
])
})
})
describe('a long chain of ops', function () {

View file

@@ -831,5 +831,276 @@ describe('UpdateTranslator', function () {
}).to.throw('unexpected op type')
})
})
describe('text updates with history metadata', function () {
it('handles deletes over tracked deletes', function () {
const updates = [
{
update: {
doc: this.doc_id,
op: [
{ i: 'foo', p: 3, hpos: 5 },
{
d: 'quux',
p: 10,
hpos: 15,
trackedChanges: [
{ type: 'delete', offset: 2, length: 3 },
{ type: 'delete', offset: 3, length: 1 },
],
},
{ c: 'noteworthy', p: 8, t: 'comment-id', hpos: 11, hlen: 14 },
],
v: this.version,
meta: {
user_id: this.user_id,
ts: new Date(this.timestamp).getTime(),
pathname: '/main.tex',
doc_length: 20,
history_doc_length: 30,
source: 'some-editor-id',
},
},
},
]
const changes = this.UpdateTranslator.convertToChanges(
this.project_id,
updates
).map(change => change.toRaw())
expect(changes).to.deep.equal([
{
authors: [],
operations: [
{
pathname: 'main.tex',
textOperation: [5, 'foo', 7, -2, 3, -1, 1, -1, 10],
},
{
pathname: 'main.tex',
commentId: 'comment-id',
ranges: [{ pos: 11, length: 14 }],
resolved: false,
},
],
v2Authors: [this.user_id],
timestamp: this.timestamp,
v2DocVersions: {
'59bfd450e3028c4d40a1e9ab': {
pathname: 'main.tex',
v: 0,
},
},
},
])
})
it('handles tracked delete rejections specially', function () {
const updates = [
{
update: {
doc: this.doc_id,
op: [{ i: 'foo', p: 3, trackedDeleteRejection: true }],
v: this.version,
meta: {
user_id: this.user_id,
ts: new Date(this.timestamp).getTime(),
pathname: '/main.tex',
doc_length: 20,
source: 'some-editor-id',
},
},
},
]
const changes = this.UpdateTranslator.convertToChanges(
this.project_id,
updates
).map(change => change.toRaw())
expect(changes).to.deep.equal([
{
authors: [],
operations: [
{
pathname: 'main.tex',
textOperation: [
3,
{
r: 3,
tracking: {
type: 'none',
userId: this.user_id,
ts: new Date(this.timestamp).toISOString(),
},
},
14,
],
},
],
v2Authors: [this.user_id],
timestamp: this.timestamp,
v2DocVersions: {
'59bfd450e3028c4d40a1e9ab': {
pathname: 'main.tex',
v: 0,
},
},
},
])
})
it('handles tracked changes', function () {
const updates = [
{
update: {
doc: this.doc_id,
op: [
{ i: 'inserted', p: 5 },
{ d: 'deleted', p: 20 },
{ i: 'rejected deletion', p: 30, trackedDeleteRejection: true },
{ d: 'rejected insertion', p: 50, u: true },
],
v: this.version,
meta: {
tc: 'tracked-change-id',
user_id: this.user_id,
ts: new Date(this.timestamp).getTime(),
pathname: '/main.tex',
doc_length: 70,
source: 'some-editor-id',
},
},
},
]
const changes = this.UpdateTranslator.convertToChanges(
this.project_id,
updates
).map(change => change.toRaw())
expect(changes).to.deep.equal([
{
authors: [],
operations: [
{
pathname: 'main.tex',
textOperation: [
5,
{
i: 'inserted',
tracking: {
type: 'insert',
userId: this.user_id,
ts: new Date(this.timestamp).toISOString(),
},
},
7,
{
r: 7,
tracking: {
type: 'delete',
userId: this.user_id,
ts: new Date(this.timestamp).toISOString(),
},
},
3,
{
r: 17,
tracking: {
type: 'none',
userId: this.user_id,
ts: new Date(this.timestamp).toISOString(),
},
},
3,
-18,
10,
],
},
],
v2Authors: [this.user_id],
timestamp: this.timestamp,
v2DocVersions: {
'59bfd450e3028c4d40a1e9ab': {
pathname: 'main.tex',
v: 0,
},
},
},
])
})
it('handles a delete over a mix of tracked inserts and tracked deletes', function () {
const updates = [
{
update: {
doc: this.doc_id,
op: [
{
d: 'abcdef',
p: 10,
trackedChanges: [
{ type: 'insert', offset: 0, length: 3 },
{ type: 'delete', offset: 2, length: 10 },
{ type: 'insert', offset: 2, length: 2 },
],
},
],
v: this.version,
meta: {
tc: 'tracking-id',
user_id: this.user_id,
ts: new Date(this.timestamp).getTime(),
pathname: '/main.tex',
doc_length: 20,
history_doc_length: 30,
source: 'some-editor-id',
},
},
},
]
const changes = this.UpdateTranslator.convertToChanges(
this.project_id,
updates
).map(change => change.toRaw())
expect(changes).to.deep.equal([
{
authors: [],
operations: [
{
pathname: 'main.tex',
textOperation: [
10,
-3,
10,
-2,
{
r: 1,
tracking: {
type: 'delete',
userId: this.user_id,
ts: this.timestamp,
},
},
4,
],
},
],
v2Authors: [this.user_id],
timestamp: this.timestamp,
v2DocVersions: {
'59bfd450e3028c4d40a1e9ab': {
pathname: 'main.tex',
v: 0,
},
},
},
])
})
})
})
})