[overleaf-editor-core] Restructure TextOperation hierarchy (#16582)
* [overleaf-editor-core] Restructure TextOperation hierarchy

  Restructures the hierarchy of TextOperations to include a superclass
  EditOperation. This superclass will later contain other classes used for
  tracked changes and comments.

* [overleaf-editor-core] Update JSON format of LazyStringFileData
* [history-v1+project-history] Fix TextOperation.fromJSON calls
* [overleaf-editor-core] Change EditOperationBuilder.fromRaw to fromJSON
* [overleaf-editor-core] Update apply and invert functions to accept FileData
* [overleaf-editor-core] Pass missing argument to store method
* [overleaf-editor-core] Remove unused method
* [overleaf-editor-core] Use EditOperationTransformer
* [overleaf-editor-core] Clean up JSDoc comments
* [overleaf-editor-core] Add tests for EditOperation
* [overleaf-editor-core] Update JSDoc types

GitOrigin-RevId: 9c22a3a89b8483bdb87b43f329ddbdd887ffed42
This commit is contained in: parent fc90db231c, commit 43b2fe4a3a.
21 changed files with 457 additions and 179 deletions
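
The practical upshot for callers is the change to the raw JSON format of an edit: a bare ops array such as [1, 'b'] becomes an object keyed by operation type, e.g. { textOperation: [1, 'b'] }, which EditOperationBuilder.fromJSON dispatches on. A minimal round-trip sketch based on the diffs below (the relative require paths from the library root are assumptions, not part of the diff):

// Sketch only; paths assume the overleaf-editor-core library root.
const TextOperation = require('./lib/operation/text_operation')
const EditOperationBuilder = require('./lib/operation/edit_operation_builder')

const op = new TextOperation().retain(1).insert('b')
const raw = op.toJSON() // { textOperation: [1, 'b'] }
const restored = EditOperationBuilder.fromJSON(raw) // a TextOperation again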

@@ -20,6 +20,7 @@ exports.RestoreOrigin = require('./lib/origin/restore_origin')
 exports.Origin = require('./lib/origin')
 exports.OtClient = require('./lib/ot_client')
 exports.TextOperation = require('./lib/operation/text_operation')
+exports.EditOperation = require('./lib/operation/edit_operation')
 exports.safePathname = require('./lib/safe_pathname')
 exports.Snapshot = require('./lib/snapshot')
 exports.util = require('./lib/util')

@@ -1,5 +1,9 @@
+// @ts-check
 'use strict'

+/**
+ * @typedef {import('../operation/edit_operation')} EditOperation
+ */
 const assert = require('check-types').assert

 const FileData = require('./')

@@ -44,9 +48,12 @@ class HollowStringFileData extends FileData {
     return this
   }

-  /** @inheritdoc */
-  edit(textOperation) {
-    this.stringLength = textOperation.applyToLength(this.stringLength)
+  /**
+   * @inheritdoc
+   * @param {EditOperation} operation
+   */
+  edit(operation) {
+    this.stringLength = operation.applyToLength(this.stringLength)
   }
 }

@@ -15,6 +15,7 @@ let StringFileData = null

 /**
  * @typedef {import("../types").BlobStore} BlobStore
+ * @typedef {import("../operation/edit_operation")} EditOperation
  * @typedef {import("../types").CommentRawData} CommentRawData
  */

@@ -62,33 +63,52 @@ class FileData {
     throw new Error('FileData: toRaw not implemented')
   }

-  /** @see File#getHash */
+  /**
+   * @see File#getHash
+   * @return {string | null | undefined}
+   */
   getHash() {
     return null
   }

-  /** @see File#getContent */
+  /**
+   * @see File#getContent
+   * @return {string | null | undefined}
+   */
   getContent() {
     return null
   }

-  /** @see File#isEditable */
+  /**
+   * @see File#isEditable
+   * @return {boolean | null | undefined} null if it is not currently known
+   */
   isEditable() {
     return null
   }

-  /** @see File#getByteLength */
+  /**
+   * @see File#getByteLength
+   * @return {number | null | undefined}
+   */
   getByteLength() {
     return null
   }

-  /** @see File#getStringLength */
+  /**
+   * @see File#getStringLength
+   * @return {number | null | undefined}
+   */
   getStringLength() {
     return null
   }

-  /** @see File#edit */
-  edit(textOperation) {
+  /**
+   * @see File#edit
+   * @param {EditOperation} editOperation
+   */
+  edit(editOperation) {
     throw new Error('edit not implemented for ' + JSON.stringify(this))
   }

@@ -1,3 +1,4 @@
+// @ts-check
 'use strict'

 const _ = require('lodash')

@@ -6,40 +7,41 @@ const assert = require('check-types').assert
 const Blob = require('../blob')
 const FileData = require('./')
 const EagerStringFileData = require('./string_file_data')
-const TextOperation = require('../operation/text_operation')
+const EditOperation = require('../operation/edit_operation')
+const EditOperationBuilder = require('../operation/edit_operation_builder')

 class LazyStringFileData extends FileData {
   /**
    * @param {string} hash
    * @param {number} stringLength
-   * @param {Array.<TextOperation>} [textOperations]
+   * @param {Array.<EditOperation>} [operations]
    * @see FileData
    */
-  constructor(hash, stringLength, textOperations) {
+  constructor(hash, stringLength, operations) {
     super()
     assert.match(hash, Blob.HEX_HASH_RX)
     assert.greaterOrEqual(stringLength, 0)
-    assert.maybe.array.of.instance(textOperations, TextOperation)
+    assert.maybe.array.of.instance(operations, EditOperation)

     this.hash = hash
     this.stringLength = stringLength
-    this.textOperations = textOperations || []
+    this.operations = operations || []
   }

   static fromRaw(raw) {
     return new LazyStringFileData(
       raw.hash,
       raw.stringLength,
-      raw.textOperations && _.map(raw.textOperations, TextOperation.fromJSON)
+      raw.operations && _.map(raw.operations, EditOperationBuilder.fromJSON)
     )
   }

   /** @inheritdoc */
   toRaw() {
     const raw = { hash: this.hash, stringLength: this.stringLength }
-    if (this.textOperations.length) {
-      raw.textOperations = _.map(this.textOperations, function (textOperation) {
-        return textOperation.toJSON()
+    if (this.operations.length) {
+      raw.operations = _.map(this.operations, function (operation) {
+        return operation.toJSON()
       })
     }
     return raw

@@ -47,7 +49,7 @@ class LazyStringFileData extends FileData {

   /** @inheritdoc */
   getHash() {
-    if (this.textOperations.length) return null
+    if (this.operations.length) return null
     return this.hash
   }

@@ -76,16 +78,21 @@ class LazyStringFileData extends FileData {
    * Get the cached text operations that are to be applied to this file to get
    * from the content with its last known hash to its latest content.
    *
-   * @return {Array.<TextOperation>}
+   * @return {Array.<EditOperation>}
    */
-  getTextOperations() {
-    return this.textOperations
+  getOperations() {
+    return this.operations
   }

-  /** @inheritdoc */
+  /**
+   * @inheritdoc
+   * @returns {Promise<EagerStringFileData>}
+   */
   async toEager(blobStore) {
     const content = await blobStore.getString(this.hash)
-    return new EagerStringFileData(computeContent(this.textOperations, content))
+    const file = new EagerStringFileData(content)
+    applyOperations(this.operations, file)
+    return file
   }

   /** @inheritdoc */

@@ -99,33 +106,30 @@ class LazyStringFileData extends FileData {
   }

   /** @inheritdoc */
-  edit(textOperation) {
-    this.stringLength = textOperation.applyToLength(this.stringLength)
-    this.textOperations.push(textOperation)
+  edit(operation) {
+    this.stringLength = operation.applyToLength(this.stringLength)
+    this.operations.push(operation)
   }

   /** @inheritdoc */
   async store(blobStore) {
-    if (this.textOperations.length === 0) {
+    if (this.operations.length === 0) {
       return { hash: this.hash }
     }
-    const content = await blobStore.getString(this.hash)
-    const blob = await blobStore.putString(
-      computeContent(this.textOperations, content)
-    )
-    this.hash = blob.getHash()
-    this.stringLength = blob.getStringLength()
-    this.textOperations.length = 0
-    return { hash: this.hash }
+    const eager = await this.toEager(blobStore)
+    this.operations.length = 0
+    return eager.store(blobStore)
   }
 }

-function computeContent(textOperations, initialFile) {
-  function applyTextOperation(content, textOperation) {
-    return textOperation.apply(content)
-  }
-  return _.reduce(textOperations, applyTextOperation, initialFile)
-}
+/**
+ *
+ * @param {EditOperation[]} operations
+ * @param {EagerStringFileData} file
+ * @returns {void}
+ */
+function applyOperations(operations, file) {
+  _.each(operations, operation => operation.apply(file))
+}

 module.exports = LazyStringFileData
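
For reference, this changes the serialized shape of a lazy file that still has pending edits. A sketch of the round trip under the new format, mirroring the updated tests further down (the testHash value is a placeholder, and the require paths from the library root are assumptions):

// Sketch only.
const LazyStringFileData = require('./lib/file_data/lazy_string_file_data')
const TextOperation = require('./lib/operation/text_operation')

const testHash = 'a'.repeat(40) // placeholder hex blob hash
const fileData = new LazyStringFileData(testHash, 0)
fileData.edit(new TextOperation().insert('a'))
fileData.edit(new TextOperation().retain(1).insert('b'))
fileData.toRaw()
// old format: { hash: testHash, stringLength: 2, textOperations: [['a'], [1, 'b']] }
// new format: { hash: testHash, stringLength: 2, operations: [{ textOperation: ['a'] }, { textOperation: [1, 'b'] }] }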

@@ -1,3 +1,4 @@
+// @ts-check
 'use strict'

 const assert = require('check-types').assert

@@ -7,6 +8,8 @@ const CommentList = require('./comment_list')

 /**
  * @typedef {import("../types").StringFileRawData} StringFileRawData
+ * @typedef {import("../operation/edit_operation")} EditOperation
+ * @typedef {import("../types").BlobStore} BlobStore
  * @typedef {import("../types").CommentRawData} CommentRawData
  */

@@ -65,9 +68,11 @@ class StringFileData extends FileData {
     return this.content.length
   }

-  /** @inheritdoc */
-  edit(textOperation) {
-    this.content = textOperation.apply(this.content)
+  /**
+   * @inheritdoc
+   * @param {EditOperation} operation */
+  edit(operation) {
+    operation.apply(this)
   }

   /** @inheritdoc */

@@ -75,7 +80,10 @@ class StringFileData extends FileData {
     return this.comments.getComments()
   }

-  /** @inheritdoc */
+  /**
+   * @inheritdoc
+   * @returns {Promise<StringFileData>}
+   */
   async toEager() {
     return this
   }

@@ -85,7 +93,10 @@ class StringFileData extends FileData {
     return FileData.createHollow(this.getByteLength(), this.getStringLength())
   }

-  /** @inheritdoc */
+  /**
+   * @inheritdoc
+   * @param {BlobStore} blobStore
+   */
   async store(blobStore) {
     const blob = await blobStore.putString(this.content)
     return { hash: blob.getHash() }

@@ -1,20 +1,22 @@
+// @ts-check
 'use strict'
+/** @typedef {import('./edit_operation')} EditOperation */

 const Operation = require('./')
-const TextOperation = require('./text_operation')
+const EditOperationBuilder = require('./edit_operation_builder')

 /**
- * Edit a file in place. It is a wrapper around a single TextOperation.
+ * Edit a file in place. It is a wrapper around a single EditOperation.
  */
 class EditFileOperation extends Operation {
   /**
    * @param {string} pathname
-   * @param {TextOperation} textOperation
+   * @param {EditOperation} operation
    */
-  constructor(pathname, textOperation) {
+  constructor(pathname, operation) {
     super()
     this.pathname = pathname
-    this.textOperation = textOperation
+    this.operation = operation
   }

   /**

@@ -23,7 +25,7 @@ class EditFileOperation extends Operation {
   toRaw() {
     return {
       pathname: this.pathname,
-      textOperation: this.textOperation.toJSON(),
+      ...this.operation.toJSON(),
     }
   }

@@ -36,7 +38,7 @@ class EditFileOperation extends Operation {
   static fromRaw(raw) {
     return new EditFileOperation(
       raw.pathname,
-      TextOperation.fromJSON(raw.textOperation)
+      EditOperationBuilder.fromJSON(raw)
     )
   }

@@ -44,15 +46,15 @@ class EditFileOperation extends Operation {
     return this.pathname
   }

-  getTextOperation() {
-    return this.textOperation
+  getOperation() {
+    return this.operation
   }

   /**
    * @inheritdoc
    */
   applyTo(snapshot) {
-    snapshot.editFile(this.pathname, this.textOperation)
+    snapshot.editFile(this.pathname, this.operation)
   }

   /**

@@ -61,7 +63,7 @@ class EditFileOperation extends Operation {
   canBeComposedWithForUndo(other) {
     return (
       this.canBeComposedWith(other) &&
-      this.textOperation.canBeComposedWithForUndo(other.textOperation)
+      this.operation.canBeComposedWithForUndo(other.operation)
     )
   }

@@ -74,7 +76,7 @@ class EditFileOperation extends Operation {
     // Ensure that both operations are editing the same file
     if (this.getPathname() !== other.getPathname()) return false

-    return this.textOperation.canBeComposedWith(other.textOperation)
+    return this.operation.canBeComposedWith(other.operation)
   }

   /**

@@ -83,7 +85,7 @@ class EditFileOperation extends Operation {
   compose(other) {
     return new EditFileOperation(
       this.pathname,
-      this.textOperation.compose(other.textOperation)
+      this.operation.compose(other.operation)
     )
   }
 }
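
Because toRaw now spreads the wrapped operation's JSON into the parent object, the wire format of an edit-file operation is unchanged for text edits; only the construction path differs. A sketch (require paths from the library root are assumptions):

// Sketch only.
const Operation = require('./lib/operation')
const TextOperation = require('./lib/operation/text_operation')

const editOp = Operation.editFile('main.tex', new TextOperation().insert('hello'))
editOp.toRaw()
// => { pathname: 'main.tex', textOperation: ['hello'] }
// fromRaw passes the whole raw object to EditOperationBuilder.fromJSON,
// which picks the operation type from the textOperation key.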

@@ -0,0 +1,90 @@
+// @ts-check
+/**
+ * @typedef {import('../file_data')} FileData
+ */
+
+class EditOperation {
+  constructor() {
+    if (this.constructor === EditOperation) {
+      throw new Error('Cannot instantiate abstract class')
+    }
+  }
+
+  /**
+   * Converts operation into a JSON value.
+   * @returns {object}
+   */
+  toJSON() {
+    throw new Error('Abstract method not implemented')
+  }
+
+  /**
+   * @abstract
+   * @param {FileData} fileData
+   */
+  apply(fileData) {
+    throw new Error('Abstract method not implemented')
+  }
+
+  /**
+   * Determine the effect of this operation on the length of the text.
+   *
+   * NB: This is an Overleaf addition to the original OT system.
+   *
+   * @param {number} length of the original string; non-negative
+   * @return {number} length of the new string; non-negative
+   */
+  applyToLength(length) {
+    return length
+  }
+
+  /**
+   * Computes the inverse of an operation. The inverse of an operation is the
+   * operation that reverts the effects of the operation, e.g. when you have an
+   * operation 'insert("hello "); skip(6);' then the inverse is 'remove("hello ");
+   * skip(6);'. The inverse should be used for implementing undo.
+   * @param {FileData} previousState
+   * @returns {EditOperation}
+   */
+  invert(previousState) {
+    throw new Error('Abstract method not implemented')
+  }
+
+  /**
+   *
+   * @param {EditOperation} other
+   * @returns {boolean}
+   */
+  canBeComposedWith(other) {
+    return false
+  }
+
+  /**
+   * When you use ctrl-z to undo your latest changes, you expect the program not
+   * to undo every single keystroke but to undo your last sentence you wrote at
+   * a stretch or the deletion you did by holding the backspace key down. This
+   * can be implemented by composing operations on the undo stack. This
+   * method can help decide whether two operations should be composed. It
+   * returns true if the operations are consecutive insert operations or both
+   * operations delete text at the same position. You may want to include other
+   * factors like the time since the last change in your decision.
+   * @param {EditOperation} other
+   */
+  canBeComposedWithForUndo(other) {
+    return false
+  }
+
+  /**
+   * Compose merges two consecutive operations into one operation, that
+   * preserves the changes of both. Or, in other words, for each input string S
+   * and a pair of consecutive operations A and B,
+   * apply(apply(S, A), B) = apply(S, compose(A, B)) must hold.
+   * @param {EditOperation} other
+   * @returns {EditOperation}
+   */
+  compose(other) {
+    throw new Error('Abstract method not implemented')
+  }
+}
+
+module.exports = EditOperation
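
The abstract base above is the extension point the commit message refers to: tracked changes and comments are expected to become further EditOperation subclasses. A purely hypothetical subclass sketch to illustrate the contract (the class name and behaviour are illustrative only, not part of this commit):

// Hypothetical example, not in this commit.
const EditOperation = require('./lib/operation/edit_operation')

class NoopOperation extends EditOperation {
  toJSON() {
    return { noop: true }
  }

  apply(fileData) {
    // leaves the file data untouched
  }

  invert(previousState) {
    return new NoopOperation()
  }
}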

@@ -0,0 +1,19 @@
+// @ts-check
+/** @typedef {import('./edit_operation')} EditOperation */
+const TextOperation = require('./text_operation')
+
+class EditOperationBuilder {
+  /**
+   *
+   * @param {object} raw
+   * @returns {EditOperation}
+   */
+  static fromJSON(raw) {
+    if (raw.textOperation) {
+      return TextOperation.fromJSON(raw)
+    }
+    throw new Error('Unsupported operation in EditOperationBuilder.fromJSON')
+  }
+}
+
+module.exports = EditOperationBuilder
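
Usage, as exercised by the new edit_operation.test.js below (require path from the library root is an assumption): any raw object with a textOperation key is routed to TextOperation.fromJSON; everything else throws.

const EditOperationBuilder = require('./lib/operation/edit_operation_builder')

const op = EditOperationBuilder.fromJSON({ textOperation: [1, 'foo', 3] })
// op is a TextOperation with ops [1, 'foo', 3]
EditOperationBuilder.fromJSON({ unsupportedOperation: { op: 'foo' } })
// throws 'Unsupported operation in EditOperationBuilder.fromJSON'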

@@ -0,0 +1,21 @@
+// @ts-check
+const TextOperation = require('./text_operation')
+/** @typedef {import('./edit_operation')} EditOperation */
+
+class EditOperationTransformer {
+  /**
+   * Transform two edit operations against each other.
+   * @param {EditOperation} a
+   * @param {EditOperation} b
+   */
+  static transform(a, b) {
+    if (a instanceof TextOperation && b instanceof TextOperation) {
+      return TextOperation.transform(a, b)
+    }
+    throw new Error(
+      `Transform not implemented for ${a.constructor.name}○${b.constructor.name}`
+    )
+  }
+}
+
+module.exports = EditOperationTransformer
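
Usage, again taken from the new tests (require paths assumed): for now only pairs of TextOperations can be transformed, and mixed operation types throw.

const TextOperation = require('./lib/operation/text_operation')
const EditOperationTransformer = require('./lib/operation/edit_operation_transformer')

const a = new TextOperation().insert('foo')
const b = new TextOperation().insert('bar')
const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
// both primes are TextOperations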

@@ -2,8 +2,7 @@

 const _ = require('lodash')
 const assert = require('check-types').assert
+const EditOperationTransformer = require('./edit_operation_transformer')
-const TextOperation = require('./text_operation')

 // Dependencies are loaded at the bottom of the file to mitigate circular
 // dependency

@@ -224,8 +223,8 @@ class Operation {
     return new AddFileOperation(pathname, file)
   }

-  static editFile(pathname, textOperation) {
-    return new EditFileOperation(pathname, textOperation)
+  static editFile(pathname, editOperation) {
+    return new EditFileOperation(pathname, editOperation)
   }

   static moveFile(pathname, newPathname) {

@@ -390,7 +389,7 @@ function transformMoveFileEditFile(move, edit) {
   }
   return [
     move,
-    Operation.editFile(move.getNewPathname(), edit.getTextOperation()),
+    Operation.editFile(move.getNewPathname(), edit.getOperation()),
   ]
 }

@@ -422,13 +421,13 @@ function transformMoveFileSetFileMetadata(move, set) {

 function transformEditFileEditFile(edit1, edit2) {
   if (edit1.getPathname() === edit2.getPathname()) {
-    const primeTextOps = TextOperation.transform(
-      edit1.getTextOperation(),
-      edit2.getTextOperation()
+    const primeOps = EditOperationTransformer.transform(
+      edit1.getOperation(),
+      edit2.getOperation()
     )
     return [
-      Operation.editFile(edit1.getPathname(), primeTextOps[0]),
-      Operation.editFile(edit2.getPathname(), primeTextOps[1]),
+      Operation.editFile(edit1.getPathname(), primeOps[0]),
+      Operation.editFile(edit2.getPathname(), primeOps[1]),
     ]
   }

@@ -1,3 +1,4 @@
+// @ts-check
 /**
  * The text operation from OT.js with some minor cosmetic changes.
  *

@@ -8,10 +9,10 @@
  */

 'use strict'

 const containsNonBmpChars = require('../util').containsNonBmpChars

 const OError = require('@overleaf/o-error')
+const EditOperation = require('./edit_operation')
+/** @typedef {import('../file_data/string_file_data')} StringFileData */

 class UnprocessableError extends OError {}

@@ -44,8 +45,9 @@ class TooLongError extends UnprocessableError {

 /**
  * Create an empty text operation.
+ * @extends EditOperation
  */
-class TextOperation {
+class TextOperation extends EditOperation {
   /**
    * Length of the longest file that we'll attempt to edit, in characters.
    *

@@ -61,6 +63,7 @@ class TextOperation {
   static isRemove = isRemove

   constructor() {
+    super()
     // When an operation is applied to an input string, you can think of this as
     // if an imaginary cursor runs over the entire string and skips over some
     // parts, removes some parts and inserts characters at some positions. These

@@ -206,16 +209,16 @@ class TextOperation {
   }

   /**
-   * Converts operation into a JSON value.
+   * @inheritdoc
    */
   toJSON() {
-    return this.ops
+    return { textOperation: this.ops }
   }

   /**
    * Converts a plain JS object into an operation and validates it.
    */
-  static fromJSON = function (ops) {
+  static fromJSON = function ({ textOperation: ops }) {
     const o = new TextOperation()
     for (let i = 0, l = ops.length; i < l; i++) {
       const op = ops[i]

@@ -240,8 +243,12 @@ class TextOperation {
   /**
    * Apply an operation to a string, returning a new string. Throws an error if
    * there's a mismatch between the input string and the operation.
+   * @override
+   * @inheritdoc
+   * @param {StringFileData} file
    */
-  apply(str) {
+  apply(file) {
+    const str = file.getContent()
     const operation = this
     if (containsNonBmpChars(str)) {
       throw new TextOperation.ApplyError(

@@ -298,16 +305,12 @@ class TextOperation {
     if (result.length > TextOperation.MAX_STRING_LENGTH) {
       throw new TextOperation.TooLongError(operation, result.length)
     }
-    return result
+    file.content = result
   }

   /**
-   * Determine the effect of this operation on the length of the text.
-   *
-   * NB: This is an Overleaf addition to the original TextOperation.
-   *
-   * @param {number} length of the original string; non-negative
-   * @return {number} length of the new string; non-negative
+   * @inheritdoc
    */
   applyToLength(length) {
     const operation = this

@@ -356,12 +359,11 @@ class TextOperation {
   }

   /**
-   * Computes the inverse of an operation. The inverse of an operation is the
-   * operation that reverts the effects of the operation, e.g. when you have an
-   * operation 'insert("hello "); skip(6);' then the inverse is 'remove("hello ");
-   * skip(6);'. The inverse should be used for implementing undo.
+   * @inheritdoc
+   * @param {StringFileData} previousState
    */
-  invert(str) {
+  invert(previousState) {
+    const str = previousState.getContent()
     let strIndex = 0
     const inverse = new TextOperation()
     const ops = this.ops

@@ -382,16 +384,14 @@ class TextOperation {
   }

   /**
-   * When you use ctrl-z to undo your latest changes, you expect the program not
-   * to undo every single keystroke but to undo your last sentence you wrote at
-   * a stretch or the deletion you did by holding the backspace key down. This
-   * This can be implemented by composing operations on the undo stack. This
-   * method can help decide whether two operations should be composed. It
-   * returns true if the operations are consecutive insert operations or both
-   * operations delete text at the same position. You may want to include other
-   * factors like the time since the last change in your decision.
+   * @inheritdoc
+   * @param {EditOperation} other
    */
   canBeComposedWithForUndo(other) {
+    if (!(other instanceof TextOperation)) {
+      return false
+    }
+
     if (this.isNoop() || other.isNoop()) {
       return true
     }

@@ -419,16 +419,25 @@ class TextOperation {

   /**
    * @inheritdoc
+   * @param {EditOperation} other
    */
   canBeComposedWith(other) {
+    if (!(other instanceof TextOperation)) {
+      return false
+    }
     return this.targetLength === other.baseLength
   }

-  // Compose merges two consecutive operations into one operation, that
-  // preserves the changes of both. Or, in other words, for each input string S
-  // and a pair of consecutive operations A and B,
-  // apply(apply(S, A), B) = apply(S, compose(A, B)) must hold.
+  /**
+   * @inheritdoc
+   * @param {EditOperation} operation2
+   */
   compose(operation2) {
+    if (!(operation2 instanceof TextOperation)) {
+      throw new Error(
+        `Trying to compose TextOperation with ${operation2?.constructor?.name}.`
+      )
+    }
     const operation1 = this
     if (operation1.targetLength !== operation2.baseLength) {
       throw new Error(

@@ -543,6 +552,8 @@ class TextOperation {
    * produces two operations A' and B' (in an array) such that
    * `apply(apply(S, A), B') = apply(apply(S, B), A')`. This function is the
    * heart of OT.
+   * @param {TextOperation} operation1
+   * @param {TextOperation} operation2
    */
   static transform(operation1, operation2) {
     if (operation1.baseLength !== operation2.baseLength) {
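
To summarise the TextOperation API change: toJSON/fromJSON now wrap the ops array in { textOperation: [...] }, and apply/invert operate on a StringFileData rather than on a plain string. A small sketch using values from the existing tests (require paths assumed):

const TextOperation = require('./lib/operation/text_operation')
const StringFileData = require('./lib/file_data/string_file_data')

const op = TextOperation.fromJSON({ textOperation: [2, -1, -1, 'cde'] })

// before this commit: const result = op.apply('abcd')
const file = new StringFileData('abcd')
op.apply(file) // mutates the file data in place
file.getContent() // 'abcde'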

@@ -1,17 +1,18 @@
+// @ts-check
 'use strict'

 const { expect } = require('chai')

 const ot = require('..')
+const EditOperationBuilder = require('../lib/operation/edit_operation_builder')
 const File = ot.File
 const Operation = ot.Operation
-const TextOperation = ot.TextOperation

 describe('EditFileOperation', function () {
   function edit(pathname, textOperationJsonObject) {
     return Operation.editFile(
       pathname,
-      TextOperation.fromJSON(textOperationJsonObject)
+      EditOperationBuilder.fromJSON({ textOperation: textOperationJsonObject })
     )
   }

libraries/overleaf-editor-core/test/edit_operation.test.js (new file, 55 lines)

@@ -0,0 +1,55 @@
+const { expect } = require('chai')
+const EditOperationBuilder = require('../lib/operation/edit_operation_builder')
+const TextOperation = require('../lib/operation/text_operation')
+const EditOperationTransformer = require('../lib/operation/edit_operation_transformer')
+const EditOperation = require('../lib/operation/edit_operation')
+const randomTextOperation = require('./support/random_text_operation')
+const random = require('./support/random')
+
+describe('EditOperation', function () {
+  it('Cannot be instantiated', function () {
+    expect(() => new EditOperation()).to.throw(
+      'Cannot instantiate abstract class'
+    )
+  })
+})
+
+describe('EditOperationTransformer', function () {
+  it('Transforms two TextOperations', function () {
+    const a = new TextOperation().insert('foo')
+    const b = new TextOperation().insert('bar')
+    const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)
+    expect(aPrime).to.be.an.instanceof(TextOperation)
+    expect(bPrime).to.be.an.instanceof(TextOperation)
+  })
+})
+
+describe('EditOperationBuilder', function () {
+  it('Constructs TextOperation from JSON', function () {
+    const raw = {
+      textOperation: [1, 'foo', 3],
+    }
+    const op = EditOperationBuilder.fromJSON(raw)
+    expect(op).to.be.an.instanceof(TextOperation)
+    expect(op.ops).to.deep.equal([1, 'foo', 3])
+  })
+
+  it('Throws error for unsupported operation', function () {
+    const raw = {
+      unsupportedOperation: {
+        op: 'foo',
+      },
+    }
+    expect(() => EditOperationBuilder.fromJSON(raw)).to.throw(
+      'Unsupported operation in EditOperationBuilder.fromJSON'
+    )
+  })
+
+  it('Constructs TextOperation from JSON (randomised)', function () {
+    const str = random.string(50)
+    const randomOperation = randomTextOperation(str)
+    const op = EditOperationBuilder.fromJSON(randomOperation.toJSON())
+    expect(op).to.be.an.instanceof(TextOperation)
+    expect(op.equals(randomOperation)).to.be.true
+  })
+})

@@ -21,32 +21,32 @@ describe('LazyStringFileData', function () {
     roundTripFileData = LazyStringFileData.fromRaw(fileData.toRaw())
     expect(roundTripFileData.getHash()).to.equal(testHash)
     expect(roundTripFileData.getStringLength()).to.equal(0)
-    expect(roundTripFileData.getTextOperations()).to.have.length(0)
+    expect(roundTripFileData.getOperations()).to.have.length(0)

     fileData.edit(new TextOperation().insert('a'))
     expect(fileData.toRaw()).to.eql({
       hash: testHash,
       stringLength: 1,
-      textOperations: [['a']],
+      operations: [{ textOperation: ['a'] }],
     })
     roundTripFileData = LazyStringFileData.fromRaw(fileData.toRaw())
     expect(roundTripFileData.getHash()).not.to.exist // file has changed
     expect(roundTripFileData.getStringLength()).to.equal(1)
-    expect(roundTripFileData.getTextOperations()).to.have.length(1)
-    expect(roundTripFileData.getTextOperations()[0].ops).to.have.length(1)
+    expect(roundTripFileData.getOperations()).to.have.length(1)
+    expect(roundTripFileData.getOperations()[0].ops).to.have.length(1)

     fileData.edit(new TextOperation().retain(1).insert('b'))
     expect(fileData.toRaw()).to.eql({
       hash: testHash,
       stringLength: 2,
-      textOperations: [['a'], [1, 'b']],
+      operations: [{ textOperation: ['a'] }, { textOperation: [1, 'b'] }],
     })
     roundTripFileData = LazyStringFileData.fromRaw(fileData.toRaw())
     expect(roundTripFileData.getHash()).not.to.exist // file has changed
     expect(roundTripFileData.getStringLength()).to.equal(2)
-    expect(roundTripFileData.getTextOperations()).to.have.length(2)
-    expect(roundTripFileData.getTextOperations()[0].ops).to.have.length(1)
-    expect(roundTripFileData.getTextOperations()[1].ops).to.have.length(2)
+    expect(roundTripFileData.getOperations()).to.have.length(2)
+    expect(roundTripFileData.getOperations()[0].ops).to.have.length(1)
+    expect(roundTripFileData.getOperations()[1].ops).to.have.length(2)
   })

   it('validates operations when edited', function () {

@@ -55,13 +55,13 @@ describe('LazyStringFileData', function () {
     expect(fileData.getHash()).equal(testHash)
     expect(fileData.getByteLength()).to.equal(0) // approximately
     expect(fileData.getStringLength()).to.equal(0)
-    expect(fileData.getTextOperations()).to.have.length(0)
+    expect(fileData.getOperations()).to.have.length(0)

     fileData.edit(new TextOperation().insert('a'))
     expect(fileData.getHash()).not.to.exist
     expect(fileData.getByteLength()).to.equal(1) // approximately
     expect(fileData.getStringLength()).to.equal(1)
-    expect(fileData.getTextOperations()).to.have.length(1)
+    expect(fileData.getOperations()).to.have.length(1)

     expect(() => {
       fileData.edit(new TextOperation().retain(10))

@@ -69,7 +69,7 @@ describe('LazyStringFileData', function () {
     expect(fileData.getHash()).not.to.exist
     expect(fileData.getByteLength()).to.equal(1) // approximately
     expect(fileData.getStringLength()).to.equal(1)
-    expect(fileData.getTextOperations()).to.have.length(1)
+    expect(fileData.getOperations()).to.have.length(1)
   })

   it('validates string length when edited', function () {

@@ -78,14 +78,14 @@ describe('LazyStringFileData', function () {
     expect(fileData.getHash()).equal(testHash)
     expect(fileData.getByteLength()).to.equal(0) // approximately
     expect(fileData.getStringLength()).to.equal(0)
-    expect(fileData.getTextOperations()).to.have.length(0)
+    expect(fileData.getOperations()).to.have.length(0)

     const longString = _.repeat('a', TextOperation.MAX_STRING_LENGTH)
     fileData.edit(new TextOperation().insert(longString))
     expect(fileData.getHash()).not.to.exist
     expect(fileData.getByteLength()).to.equal(longString.length) // approximate
     expect(fileData.getStringLength()).to.equal(longString.length)
-    expect(fileData.getTextOperations()).to.have.length(1)
+    expect(fileData.getOperations()).to.have.length(1)

     expect(() => {
       fileData.edit(new TextOperation().retain(longString.length).insert('x'))

@@ -93,6 +93,6 @@ describe('LazyStringFileData', function () {
     expect(fileData.getHash()).not.to.exist
     expect(fileData.getByteLength()).to.equal(longString.length) // approximate
     expect(fileData.getStringLength()).to.equal(longString.length)
-    expect(fileData.getTextOperations()).to.have.length(1)
+    expect(fileData.getOperations()).to.have.length(1)
   })
 })

@@ -114,10 +114,10 @@ describe('Operation', function () {
   }

   // shorthand for creating an edit operation
-  function edit(pathname, textOperationJsonObject) {
+  function edit(pathname, textOperationOps) {
     return Operation.editFile(
       pathname,
-      TextOperation.fromJSON(textOperationJsonObject)
+      TextOperation.fromJSON({ textOperation: textOperationOps })
     )
   }

@@ -633,14 +633,12 @@ describe('Operation', function () {
         .expectPrime(1, EditFileOperation)
         .expectPrime(0, EditFileOperation)
         .tap(function () {
-          expect(this.primeOperations[1].getTextOperation().toJSON()).to.eql([
-            1,
-            'y',
-          ])
-          expect(this.primeOperations[0].getTextOperation().toJSON()).to.eql([
-            'x',
-            1,
-          ])
+          expect(this.primeOperations[1].getOperation().toJSON()).to.eql({
+            textOperation: [1, 'y'],
+          })
+          expect(this.primeOperations[0].getOperation().toJSON()).to.eql({
+            textOperation: ['x', 1],
+          })
         })
         .swap()
         .expectFiles({ foo: 'yx' })

@@ -0,0 +1,31 @@
+const TextOperation = require('../../lib/operation/text_operation')
+const random = require('./random')
+
+/**
+ *
+ * @param {string} str
+ * @returns {TextOperation}
+ */
+function randomTextOperation(str) {
+  const operation = new TextOperation()
+  let left
+  while (true) {
+    left = str.length - operation.baseLength
+    if (left === 0) break
+    const r = Math.random()
+    const l = 1 + random.int(Math.min(left - 1, 20))
+    if (r < 0.2) {
+      operation.insert(random.string(l))
+    } else if (r < 0.4) {
+      operation.remove(l)
+    } else {
+      operation.retain(l)
+    }
+  }
+  if (Math.random() < 0.3) {
+    operation.insert(1 + random.string(10))
+  }
+  return operation
+}
+
+module.exports = randomTextOperation

@@ -7,31 +7,11 @@
 const { expect } = require('chai')
 const random = require('./support/random')
+const randomOperation = require('./support/random_text_operation')

 const ot = require('..')
 const TextOperation = ot.TextOperation
+const StringFileData = require('../lib/file_data/string_file_data')

-function randomOperation(str) {
-  const operation = new TextOperation()
-  let left
-  while (true) {
-    left = str.length - operation.baseLength
-    if (left === 0) break
-    const r = Math.random()
-    const l = 1 + random.int(Math.min(left - 1, 20))
-    if (r < 0.2) {
-      operation.insert(random.string(l))
-    } else if (r < 0.4) {
-      operation.remove(l)
-    } else {
-      operation.retain(l)
-    }
-  }
-  if (Math.random() < 0.3) {
-    operation.insert(1 + random.string(10))
-  }
-  return operation
-}

 describe('TextOperation', function () {
   const numTrials = 500

@@ -134,7 +114,7 @@ describe('TextOperation', function () {
   it('converts from JSON', function () {
     const ops = [2, -1, -1, 'cde']
-    const o = TextOperation.fromJSON(ops)
+    const o = TextOperation.fromJSON({ textOperation: ops })
     expect(o.ops.length).to.equal(3)
     expect(o.baseLength).to.equal(4)
     expect(o.targetLength).to.equal(5)

@@ -143,7 +123,7 @@ describe('TextOperation', function () {
       const ops2 = ops.slice(0)
       fn(ops2)
       expect(() => {
-        TextOperation.fromJSON(ops2)
+        TextOperation.fromJSON({ textOperations: ops2 })
       }).to.throw
     }

@@ -161,7 +141,10 @@ describe('TextOperation', function () {
       const str = random.string(50)
       const o = randomOperation(str)
       expect(str.length).to.equal(o.baseLength)
-      expect(o.apply(str).length).to.equal(o.targetLength)
+      const file = new StringFileData(str)
+      o.apply(file)
+      const result = file.getContent()
+      expect(result.length).to.equal(o.targetLength)
     })
   )

@@ -170,10 +153,14 @@ describe('TextOperation', function () {
     random.test(numTrials, () => {
       const str = random.string(50)
      const o = randomOperation(str)
-      const p = o.invert(str)
+      const p = o.invert(new StringFileData(str))
       expect(o.baseLength).to.equal(p.targetLength)
       expect(o.targetLength).to.equal(p.baseLength)
-      expect(p.apply(o.apply(str))).to.equal(str)
+      const file = new StringFileData(str)
+      o.apply(file)
+      p.apply(file)
+      const result = file.getContent()
+      expect(result).to.equal(str)
     })
   )

@@ -193,14 +180,18 @@ describe('TextOperation', function () {
       // invariant: apply(str, compose(a, b)) === apply(apply(str, a), b)
       const str = random.string(20)
       const a = randomOperation(str)
-      const afterA = a.apply(str)
+      const file = new StringFileData(str)
+      a.apply(file)
+      const afterA = file.getContent()
       expect(afterA.length).to.equal(a.targetLength)
       const b = randomOperation(afterA)
-      const afterB = b.apply(afterA)
+      b.apply(file)
+      const afterB = file.getContent()
       expect(afterB.length).to.equal(b.targetLength)
       const ab = a.compose(b)
       expect(ab.targetLength).to.equal(b.targetLength)
-      const afterAB = ab.apply(str)
+      ab.apply(new StringFileData(str))
+      const afterAB = file.getContent()
       expect(afterAB).to.equal(afterB)
     })
   )

@@ -218,20 +209,22 @@ describe('TextOperation', function () {
       const bPrime = primes[1]
       const abPrime = a.compose(bPrime)
       const baPrime = b.compose(aPrime)
-      const afterAbPrime = abPrime.apply(str)
-      const afterBaPrime = baPrime.apply(str)
+      const abFile = new StringFileData(str)
+      const baFile = new StringFileData(str)
+      abPrime.apply(abFile)
+      baPrime.apply(baFile)
       expect(abPrime.equals(baPrime)).to.be.true
-      expect(afterAbPrime).to.equal(afterBaPrime)
+      expect(abFile.getContent()).to.equal(baFile.getContent())
     })
   )

   it('throws when invalid operations are applied', function () {
     const operation = new TextOperation().retain(1)
     expect(() => {
-      operation.apply('')
+      operation.apply(new StringFileData(''))
     }).to.throw(TextOperation.ApplyError)
     expect(() => {
-      operation.apply(' ')
+      operation.apply(new StringFileData(' '))
     }).not.to.throw
   })

@@ -250,7 +243,7 @@ describe('TextOperation', function () {
     const operation = new TextOperation()
     const str = '𝌆\n'
     expect(() => {
-      operation.apply(str)
+      operation.apply(new StringFileData(str))
     }).to.throw(
       TextOperation.UnprocessableError,
       /string contains non BMP characters/

@@ -260,7 +253,7 @@ describe('TextOperation', function () {
   it('throws at from JSON when it contains non BMP chars', function () {
     const operation = ['𝌆\n']
     expect(() => {
-      TextOperation.fromJSON(operation)
+      TextOperation.fromJSON({ textOperation: operation })
     }).to.throw(
       TextOperation.UnprocessableError,
       /inserted text contains non BMP characters/

@@ -212,7 +212,12 @@ describe('overleaf ot', function () {
       // edit the main file
       .then(projectId => {
         const change = new Change(
-          [Operation.editFile('main.tex', TextOperation.fromJSON(['hello']))],
+          [
+            Operation.editFile(
+              'main.tex',
+              TextOperation.fromJSON({ textOperation: ['hello'] })
+            ),
+          ],
           new Date()
         )
         return basicAuthClient.apis.ProjectImport.importChanges1({

@@ -263,7 +268,7 @@ describe('overleaf ot', function () {
           [
             Operation.editFile(
               'main.tex',
-              TextOperation.fromJSON([1, -4, 'i world'])
+              TextOperation.fromJSON({ textOperation: [1, -4, 'i world'] })
             ),
           ],
           new Date()

@@ -49,8 +49,10 @@ describe('history import', function () {
     const testProjectId = '1'
     const testFilePathname = 'main.tex'
     const testAuthors = [123, null]
-    const testTextOperation0 = TextOperation.fromJSON(['a'])
-    const testTextOperation1 = TextOperation.fromJSON([1, 'b'])
+    const testTextOperation0 = TextOperation.fromJSON({ textOperation: ['a'] })
+    const testTextOperation1 = TextOperation.fromJSON({
+      textOperation: [1, 'b'],
+    })

     let testSnapshot

@@ -188,7 +190,9 @@ describe('history import', function () {
   it('rejects invalid changes in history', function () {
     const testProjectId = '1'
     const testFilePathname = 'main.tex'
-    const testTextOperation = TextOperation.fromJSON(['a', 10])
+    const testTextOperation = TextOperation.fromJSON({
+      textOperation: ['a', 10],
+    })

     let testSnapshot

@@ -286,7 +290,7 @@ describe('history import', function () {
     const testProjectId = '1'
     const mainFilePathname = 'main.tex'
     const testFilePathname = 'test.tex'
-    const testTextOperation = TextOperation.fromJSON(['a'])
+    const testTextOperation = TextOperation.fromJSON({ textOperation: ['a'] })
     const inexistentAuthors = [1234, 5678]
     const projectVersion = '12345.0'
     const v2DocVersions = new V2DocVersions({

@@ -447,7 +451,7 @@ describe('history import', function () {
   it('rejects text operations on binary files', function () {
     const testProjectId = '1'
     const testFilePathname = 'main.tex'
-    const testTextOperation = TextOperation.fromJSON(['bb'])
+    const testTextOperation = TextOperation.fromJSON({ textOperation: ['bb'] })

     let testSnapshot

@@ -517,7 +521,9 @@ describe('history import', function () {
   it('accepts text operation on files with null characters if stringLength is present', function () {
     const testProjectId = '1'
     const mainFilePathname = 'main.tex'
-    const testTextOperation = TextOperation.fromJSON([3, 'a'])
+    const testTextOperation = TextOperation.fromJSON({
+      textOperation: [3, 'a'],
+    })

     let testSnapshot

@@ -626,7 +632,7 @@ describe('history import', function () {

   it('creates and returns changes with v2 author ids', function () {
     const testFilePathname = 'test.tex'
-    const testTextOperation = TextOperation.fromJSON(['a'])
+    const testTextOperation = TextOperation.fromJSON({ textOperation: ['a'] })
     const v2Authors = ['5a296963ad5e82432674c839', null]

     let testProjectId
|
@ -52,7 +52,9 @@ describe('chunkStore', function () {
|
||||||
|
|
||||||
describe('adding and editing a blank file', function () {
|
describe('adding and editing a blank file', function () {
|
||||||
const testPathname = 'foo.txt'
|
const testPathname = 'foo.txt'
|
||||||
const testTextOperation = TextOperation.fromJSON(['a']) // insert an a
|
const testTextOperation = TextOperation.fromJSON({
|
||||||
|
textOperation: ['a'],
|
||||||
|
}) // insert an a
|
||||||
let lastChangeTimestamp
|
let lastChangeTimestamp
|
||||||
|
|
||||||
beforeEach(async function () {
|
beforeEach(async function () {
|
||||||
|
@ -259,7 +261,9 @@ describe('chunkStore', function () {
|
||||||
it('does not create chunks', async function () {
|
it('does not create chunks', async function () {
|
||||||
const oldEndVersion = 0
|
const oldEndVersion = 0
|
||||||
const testPathname = 'foo.txt'
|
const testPathname = 'foo.txt'
|
||||||
const testTextOperation = TextOperation.fromJSON(['a']) // insert an a
|
const testTextOperation = TextOperation.fromJSON({
|
||||||
|
textOperation: ['a'],
|
||||||
|
}) // insert an a
|
||||||
|
|
||||||
let chunk = await chunkStore.loadLatest(projectId)
|
let chunk = await chunkStore.loadLatest(projectId)
|
||||||
expect(chunk.getEndVersion()).to.equal(oldEndVersion)
|
expect(chunk.getEndVersion()).to.equal(oldEndVersion)
|
||||||
|
@ -287,13 +291,13 @@ describe('chunkStore', function () {
|
||||||
makeChange(
|
makeChange(
|
||||||
Operation.editFile(
|
Operation.editFile(
|
||||||
'main.tex',
|
'main.tex',
|
||||||
TextOperation.fromJSON([3, 'def'])
|
TextOperation.fromJSON({ textOperation: [3, 'def'] })
|
||||||
)
|
)
|
||||||
),
|
),
|
||||||
makeChange(
|
makeChange(
|
||||||
Operation.editFile(
|
Operation.editFile(
|
||||||
'main.tex',
|
'main.tex',
|
||||||
TextOperation.fromJSON([6, 'ghi'])
|
TextOperation.fromJSON({ textOperation: [6, 'ghi'] })
|
||||||
)
|
)
|
||||||
),
|
),
|
||||||
],
|
],
|
||||||
|
|
|

@@ -6,7 +6,7 @@ describe('OperationsCompressor', function () {
   function edit(pathname, textOperationJsonObject) {
     return Core.Operation.editFile(
       pathname,
-      Core.TextOperation.fromJSON(textOperationJsonObject)
+      Core.TextOperation.fromJSON({ textOperation: textOperationJsonObject })
     )
   }