Merge pull request #18114 from overleaf/em-resync-comments

Handle comments when resyncing history

GitOrigin-RevId: cd848fbd91f017a0a66e46df5c009bd16498d154

parent 0c20c86878
commit b75ba32774

10 changed files with 640 additions and 107 deletions

@@ -7,6 +7,7 @@ const ChangeRequest = require('./lib/change_request')
 const ChangeNote = require('./lib/change_note')
 const Chunk = require('./lib/chunk')
 const ChunkResponse = require('./lib/chunk_response')
+const Comment = require('./lib/comment')
 const DeleteCommentOperation = require('./lib/operation/delete_comment_operation')
 const File = require('./lib/file')
 const FileMap = require('./lib/file_map')

@@ -48,6 +49,7 @@ exports.ChangeRequest = ChangeRequest
 exports.ChangeNote = ChangeNote
 exports.Chunk = Chunk
 exports.ChunkResponse = ChunkResponse
+exports.Comment = Comment
 exports.DeleteCommentOperation = DeleteCommentOperation
 exports.File = File
 exports.FileMap = FileMap

@@ -21,6 +21,15 @@ class CommentList {
     return this.comments.values()
   }
 
+  /**
+   * Returns the contents of this list in an array
+   *
+   * @returns {Comment[]}
+   */
+  toArray() {
+    return Array.from(this)
+  }
+
   /**
    * Return the length of the comment list
    *

@@ -25,6 +25,16 @@ class Range {
     return this.pos + this.length
   }
 
+  /**
+   * Is this range equal to the given range?
+   *
+   * @param {Range} other
+   * @returns {boolean}
+   */
+  equals(other) {
+    return this.pos === other.pos && this.length === other.length
+  }
+
   /**
    * @param {Range} range
    * @returns {boolean}

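The two hunks above add small conveniences to overleaf-editor-core: CommentList gains toArray() and Range gains equals(). A minimal usage sketch follows (not part of the diff); Comment and Range are imported exactly this way in the SyncManager unit tests further down, while the require form and the logged values are illustrative assumptions.

// Usage sketch only, not part of the change.
const { Comment, Range } = require('overleaf-editor-core')

const a = new Range(5, 10) // pos 5, length 10
const b = new Range(5, 10)
console.log(a.equals(b)) // true: same pos and same length

const comment = new Comment('comm1', [a])
console.log(comment.ranges.length) // 1

// CommentList is iterable, and the new toArray() materialises that iterator,
// so Array.from(commentList) and commentList.toArray() return the same
// Comment objects (this is what the commentList unit test below asserts).
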
@@ -128,6 +128,12 @@ describe('commentList', function () {
     ])
   })
 
+  it('should be iterable', function () {
+    const comment = new Comment('comm1', [new Range(5, 10)])
+    const commentList = new CommentList([comment])
+    expect(Array.from(commentList)).to.deep.equal([comment])
+  })
+
   describe('inserting a comment between ranges', function () {
     it('should expand comment on the left', function () {
       const commentList = CommentList.fromRaw([

@@ -82,7 +82,7 @@ async function getRangesSnapshot(projectId, version, pathname) {
     throw new Error('Unable to read file contents')
   }
   const trackedChanges = file.getTrackedChanges().asSorted()
-  const comments = file.getComments()
+  const comments = file.getComments().toArray()
   const docUpdaterCompatibleTrackedChanges = []
 
   let trackedDeletionOffset = 0

@@ -1,3 +1,5 @@
+// @ts-check
+
 import _ from 'lodash'
 import { callbackify, promisify } from 'util'
 import { callbackifyMultiResult } from '@overleaf/promise-utils'

@@ -18,6 +20,13 @@ import * as RedisManager from './RedisManager.js'
 import * as HistoryStoreManager from './HistoryStoreManager.js'
 import * as HashManager from './HashManager.js'
 
+/**
+ * @typedef {import('overleaf-editor-core').Comment} HistoryComment
+ * @typedef {import('./types').Comment} Comment
+ * @typedef {import('./types').Entity} Entity
+ * @typedef {import('./types').ResyncDocContentUpdate} ResyncDocContentUpdate
+ * @typedef {import('./types').Update} Update
+ */
 const MAX_RESYNC_HISTORY_RECORDS = 100 // keep this many records of previous resyncs
 const EXPIRE_RESYNC_HISTORY_INTERVAL_MS = 90 * 24 * 3600 * 1000 // 90 days
 

@@ -347,6 +356,13 @@ class SyncState {
 }
 
 class SyncUpdateExpander {
+  /**
+   * Build a SyncUpdateExpander
+   *
+   * @param {string} projectId
+   * @param {Record<string, File>} snapshotFiles
+   * @param {string} origin
+   */
   constructor(projectId, snapshotFiles, origin) {
     this.projectId = projectId
     this.files = snapshotFiles

@@ -374,8 +390,11 @@ class SyncUpdateExpander {
     return !matchedExpectedFile
   }
 
+  /**
+   * @param {Update} update
+   */
   async expandUpdate(update) {
-    if (update.resyncProjectStructure != null) {
+    if ('resyncProjectStructure' in update) {
       logger.debug(
         { projectId: this.projectId, update },
         'expanding resyncProjectStructure update'

@@ -441,12 +460,12 @@ class SyncUpdateExpander {
         expectedBinaryFiles,
         persistedBinaryFiles
       )
-    } else if (update.resyncDocContent != null) {
+    } else if ('resyncDocContent' in update) {
       logger.debug(
         { projectId: this.projectId, update },
         'expanding resyncDocContent update'
       )
-      await this.queueTextOpForOutOfSyncContents(update)
+      await this.expandResyncDocContentUpdate(update)
     } else {
       this.expandedUpdates.push(update)
     }

@@ -456,6 +475,10 @@ class SyncUpdateExpander {
     return this.expandedUpdates
   }
 
+  /**
+   * @param {Entity[]} expectedFiles
+   * @param {{ path: string }[]} persistedFiles
+   */
   queueRemoveOpsForUnexpectedFiles(update, expectedFiles, persistedFiles) {
     const unexpectedFiles = _.differenceBy(
       persistedFiles,

@@ -479,6 +502,10 @@ class SyncUpdateExpander {
     }
   }
 
+  /**
+   * @param {Entity[]} expectedFiles
+   * @param {{ path: string }[]} persistedFiles
+   */
   queueAddOpsForMissingFiles(update, expectedFiles, persistedFiles) {
     const missingFiles = _.differenceBy(expectedFiles, persistedFiles, 'path')
     for (const entity of missingFiles) {

@@ -491,7 +518,7 @@ class SyncUpdateExpander {
         },
       }
 
-      if (entity.doc != null) {
+      if ('doc' in entity) {
         update.doc = entity.doc
         update.docLines = ''
         // we have to create a dummy entry here because later we will need the content in the diff computation

@@ -553,10 +580,16 @@ class SyncUpdateExpander {
     }
   }
 
-  async queueTextOpForOutOfSyncContents(update) {
+  /**
+   * Expand a resyncDocContentUpdate
+   *
+   * @param {ResyncDocContentUpdate} update
+   */
+  async expandResyncDocContentUpdate(update) {
     const pathname = UpdateTranslator._convertPathname(update.path)
     const snapshotFile = this.files[pathname]
     const expectedFile = update.resyncDocContent
+    const expectedContent = expectedFile.content
 
     if (!snapshotFile) {
       throw new OError('unrecognised file: not in snapshot')

@@ -567,63 +600,79 @@ class SyncUpdateExpander {
     // Note getHash() returns the hash only when the persisted file has
     // no changes in the snapshot, the hash is null if there are changes
     // that apply to it.
-    const persistedHash =
-      typeof snapshotFile.getHash === 'function'
-        ? snapshotFile.getHash()
-        : undefined
+    let hashesMatch = false
+    const persistedHash = snapshotFile.getHash()
     if (persistedHash != null) {
-      const expectedHash = HashManager._getBlobHashFromString(
-        expectedFile.content
-      )
+      const expectedHash = HashManager._getBlobHashFromString(expectedContent)
       if (persistedHash === expectedHash) {
         logger.debug(
           { projectId: this.projectId, persistedHash, expectedHash },
           'skipping diff because hashes match and persisted file has no ops'
         )
-        return
+        hashesMatch = true
       }
     } else {
       logger.debug('cannot compare hashes, will retrieve content')
     }
 
-    const expectedContent = update.resyncDocContent.content
-
-    let persistedContent
     // compute the difference between the expected and persisted content
-    if (snapshotFile.load != null) {
-      const historyId = await WebApiManager.promises.getHistoryId(
-        this.projectId
-      )
+    const historyId = await WebApiManager.promises.getHistoryId(this.projectId)
     const file = await snapshotFile.load(
       'eager',
       HistoryStoreManager.getBlobStore(historyId)
     )
-      persistedContent = file.getContent()
-    } else if (snapshotFile.content != null) {
-      // use dummy content from queueAddOpsForMissingFiles for added missing files
-      persistedContent = snapshotFile.content
-    } else {
-      throw new OError('unrecognised file')
+    const persistedContent = file.getContent()
+    if (persistedContent == null) {
+      // This should not happen given that we loaded the file eagerly. We could
+      // probably refine the types in overleaf-editor-core so that this check
+      // wouldn't be necessary.
+      throw new Error('File was not properly loaded')
     }
 
-    let op
+    if (!hashesMatch) {
+      await this.queueUpdateForOutOfSyncContent(
+        update,
+        pathname,
+        persistedContent,
+        expectedContent
+      )
+    }
+
+    const persistedComments = file.getComments().toArray()
+    await this.queueUpdateForOutOfSyncComments(
+      update,
+      pathname,
+      persistedContent,
+      persistedComments
+    )
+  }
+
+  /**
+   * Queue update for out of sync content
+   *
+   * @param {ResyncDocContentUpdate} update
+   * @param {string} pathname
+   * @param {string} persistedContent
+   * @param {string} expectedContent
+   */
+  async queueUpdateForOutOfSyncContent(
+    update,
+    pathname,
+    persistedContent,
+    expectedContent
+  ) {
     logger.debug(
       { projectId: this.projectId, persistedContent, expectedContent },
       'diffing doc contents'
     )
-    try {
-      op = UpdateCompressor.diffAsShareJsOps(persistedContent, expectedContent)
-    } catch (error) {
-      throw OError.tag(error, 'error from diffAsShareJsOps', {
-        projectId: this.projectId,
+    const op = UpdateCompressor.diffAsShareJsOps(
       persistedContent,
-        expectedContent,
-      })
-    }
+      expectedContent
+    )
     if (op.length === 0) {
       return
     }
-    update = {
+    const expandedUpdate = {
       doc: update.doc,
       op,
       meta: {

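The rewritten expandResyncDocContentUpdate above changes the flow: a matching content hash now only skips the text diff, while the snapshot file is always loaded so comments can still be compared and fixed. The following self-contained sketch (not part of the diff) illustrates that flow; hashOf, the fake snapshot file and the queue are stand-ins for HashManager, the loaded history file and this.expandedUpdates.

const crypto = require('crypto')

// Stand-in for HashManager._getBlobHashFromString; only here so the example
// is self-consistent, it is not the real blob hash algorithm.
const hashOf = content =>
  crypto.createHash('sha1').update(content).digest('hex')

async function resyncDocContent(snapshotFile, expectedContent, queue) {
  // A non-null persisted hash that matches the expected content means the
  // text is already in sync, so no content diff needs to be queued.
  const persistedHash = snapshotFile.getHash()
  const hashesMatch =
    persistedHash != null && persistedHash === hashOf(expectedContent)

  // The file is loaded either way, because comments are compared regardless
  // of whether the text matched (the real call also passes a blob store).
  const file = await snapshotFile.load('eager')
  const persistedContent = file.getContent()

  if (!hashesMatch) {
    queue.push({ kind: 'content-diff', persistedContent, expectedContent })
  }
  queue.push({ kind: 'comment-check', comments: file.getComments().toArray() })
}

// Example: the text already matches, so only the comment check is queued.
const fakeFile = {
  getContent: () => 'a\nb',
  getComments: () => ({ toArray: () => [] }),
}
const fakeSnapshotFile = {
  getHash: () => hashOf('a\nb'),
  load: async () => fakeFile,
}
const queue = []
resyncDocContent(fakeSnapshotFile, 'a\nb', queue).then(() => console.log(queue))
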
@@ -638,11 +687,105 @@ class SyncUpdateExpander {
       { projectId: this.projectId, diffCount: op.length },
       'doc contents differ'
     )
-    this.expandedUpdates.push(update)
+    this.expandedUpdates.push(expandedUpdate)
     Metrics.inc('project_history_resync_operation', 1, {
       status: 'update text file contents',
     })
   }
+
+  /**
+   * Queue update for out of sync comments
+   *
+   * @param {ResyncDocContentUpdate} update
+   * @param {string} pathname
+   * @param {string} persistedContent
+   * @param {HistoryComment[]} persistedComments
+   */
+  async queueUpdateForOutOfSyncComments(
+    update,
+    pathname,
+    persistedContent,
+    persistedComments
+  ) {
+    const expectedComments = update.resyncDocContent.ranges?.comments ?? []
+    const resolvedComments = new Set(
+      update.resyncDocContent.resolvedComments ?? []
+    )
+    const expectedCommentsById = new Map(
+      expectedComments.map(comment => [comment.id, comment])
+    )
+    const persistedCommentsById = new Map(
+      persistedComments.map(comment => [comment.id, comment])
+    )
+
+    // Delete any persisted comment that is not in the expected comment list.
+    for (const persistedComment of persistedComments) {
+      if (!expectedCommentsById.has(persistedComment.id)) {
+        this.expandedUpdates.push({
+          pathname,
+          deleteComment: persistedComment.id,
+          meta: {
+            resync: true,
+            origin: this.origin,
+            ts: update.meta.ts,
+          },
+        })
+      }
+    }
+
+    for (const expectedComment of expectedComments) {
+      const persistedComment = persistedCommentsById.get(expectedComment.id)
+      if (
+        persistedComment != null &&
+        commentRangesAreInSync(persistedComment, expectedComment)
+      ) {
+        const expectedCommentResolved = resolvedComments.has(expectedComment.id)
+        if (expectedCommentResolved === persistedComment.resolved) {
+          // Both comments are identical; do nothing
+        } else {
+          // Only the resolved state differs
+          this.expandedUpdates.push({
+            pathname,
+            commentId: expectedComment.id,
+            resolved: expectedCommentResolved,
+          })
+        }
+      } else {
+        // New comment or ranges differ
+        this.expandedUpdates.push({
+          doc: update.doc,
+          op: [expectedComment.op],
+          meta: {
+            resync: true,
+            origin: this.origin,
+            ts: update.meta.ts,
+            pathname,
+            doc_length: persistedContent.length,
+          },
+        })
+      }
+    }
+  }
+}
+
+/**
+ * Compares the ranges in the persisted and expected comments
+ *
+ * @param {HistoryComment} persistedComment
+ * @param {Comment} expectedComment
+ */
+function commentRangesAreInSync(persistedComment, expectedComment) {
+  if (persistedComment.ranges.length !== 1) {
+    // The editor only supports single range comments
+    return false
+  }
+  const persistedRange = persistedComment.ranges[0]
+  const expectedPos = expectedComment.op.hpos ?? expectedComment.op.p
+  const expectedLength = expectedComment.op.hlen ?? expectedComment.op.c.length
+  return (
+    persistedRange.pos === expectedPos &&
+    persistedRange.length === expectedLength
+  )
 }
 
 // EXPORTS

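commentRangesAreInSync, added above, decides whether a persisted history comment can be left untouched. Below is a standalone restatement with sample data (not part of the diff); the sample objects mirror the acceptance test later in this change, and hpos/hlen are taken to be the history-side position and length that override p and c.length when present.

// Restatement of the helper above, with illustrative inputs.
function commentRangesAreInSync(persistedComment, expectedComment) {
  if (persistedComment.ranges.length !== 1) {
    return false // the editor only supports single-range comments
  }
  const persistedRange = persistedComment.ranges[0]
  const expectedPos = expectedComment.op.hpos ?? expectedComment.op.p
  const expectedLength = expectedComment.op.hlen ?? expectedComment.op.c.length
  return (
    persistedRange.pos === expectedPos &&
    persistedRange.length === expectedLength
  )
}

const persisted = { id: 'comment-id', ranges: [{ pos: 1, length: 10 }] }
const expected = { id: 'comment-id', op: { c: 'a', p: 0, hpos: 1, hlen: 10 } }
console.log(commentRangesAreInSync(persisted, expected)) // true: comment left alone

const moved = { id: 'comment-id', op: { c: 'fox', p: 16 } }
console.log(commentRangesAreInSync(persisted, moved)) // false: comment op is re-queued
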
@@ -9,7 +9,7 @@ import * as OperationsCompressor from './OperationsCompressor.js'
  * @typedef {import('./types').AddDocUpdate} AddDocUpdate
  * @typedef {import('./types').AddFileUpdate} AddFileUpdate
  * @typedef {import('./types').CommentOp} CommentOp
- * @typedef {import('./types').DeleteOp} DeleteCommentUpdate
+ * @typedef {import('./types').DeleteCommentUpdate} DeleteCommentUpdate
  * @typedef {import('./types').DeleteOp} DeleteOp
  * @typedef {import('./types').InsertOp} InsertOp
  * @typedef {import('./types').RetainOp} RetainOp

@@ -266,7 +266,7 @@ class OperationsBuilder {
 
   /**
    * @param {Op} op
-   * @param {Update} update
+   * @param {TextUpdate} update
    * @returns {void}
    */
   addOp(op, update) {

@@ -5,6 +5,8 @@ export type Update =
   | RenameUpdate
   | DeleteCommentUpdate
   | SetCommentStateUpdate
+  | ResyncProjectStructureUpdate
+  | ResyncDocContentUpdate
 
 export type UpdateMeta = {
   user_id: string

@@ -13,6 +15,7 @@ export type UpdateMeta = {
   type?: string
   origin?: RawOrigin
   tc?: string
+  resync?: boolean
 }
 
 export type TextUpdate = {

@@ -62,6 +65,32 @@ export type RenameUpdate = ProjectUpdateBase & {
   new_pathname: string
 }
 
+export type ResyncProjectStructureUpdate = {
+  resyncProjectStructure: {
+    docs: Doc[]
+    files: File[]
+  }
+  projectHistoryId: string
+  meta: {
+    ts: string
+  }
+}
+
+export type ResyncDocContentUpdate = {
+  resyncDocContent: {
+    content: string
+    version: number
+    ranges?: Ranges
+    resolvedComments?: string[]
+  }
+  projectHistoryId: string
+  path: string
+  doc: string
+  meta: {
+    ts: string
+  }
+}
+
 export type Op = RetainOp | InsertOp | DeleteOp | CommentOp
 
 export type RetainOp = {

@@ -146,3 +175,39 @@ export type RangesSnapshot = {
   changes: TrackedChangeSnapshot[]
   comments: CommentSnapshot[]
 }
+
+export type Doc = {
+  doc: string
+  path: string
+}
+
+export type File = {
+  file: string
+  url: string
+  path: string
+}
+
+export type Entity = Doc | File
+
+export type Ranges = {
+  comments?: Comment[]
+  changes?: TrackedChange[]
+}
+
+export type Comment = {
+  id: string
+  op: CommentOp
+  metadata: {
+    user_id: string
+    ts: string
+  }
+}
+
+export type TrackedChange = {
+  id: string
+  op: Op
+  metadata: {
+    user_id: string
+    ts: string
+  }
+}

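For reference, here is an object shaped like the new ResyncDocContentUpdate type (not part of the diff). Field names follow the declarations above; the comment op fields and most values are modelled on the acceptance test later in this change, and version, projectHistoryId and the timestamps are illustrative placeholders.

// Illustrative ResyncDocContentUpdate; values are placeholders.
const resyncDocContentUpdate = {
  path: '/main.tex',
  doc: 'doc-id',
  projectHistoryId: 'history-id',
  resyncDocContent: {
    content: 'a\nb',
    version: 1,
    ranges: {
      comments: [
        {
          id: 'comment-id',
          op: { c: 'a', p: 0, hpos: 1, hlen: 10, t: 'comment-id' },
          metadata: { user_id: 'user-id', ts: new Date().toJSON() },
        },
      ],
    },
    resolvedComments: ['comment-id'],
  },
  meta: { ts: new Date().toJSON() },
}
console.log(JSON.stringify(resyncDocContentUpdate, null, 2))
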
@@ -360,7 +360,7 @@ describe('Syncing with web and doc-updater', function () {
     })
 
     describe("when a doc's contents is not up to date", function () {
-      it('should send test updates to the history store', function (done) {
+      beforeEach(function () {
        MockHistoryStore()
          .get(`/api/projects/${historyId}/latest/history`)
          .reply(200, {

@@ -385,7 +385,9 @@ describe('Syncing with web and doc-updater', function () {
            `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb`
          )
          .reply(200, 'a\nb')
+      })
 
+      it('should send test updates to the history store', function (done) {
        const addFile = MockHistoryStore()
          .post(`/api/projects/${historyId}/legacy_changes`, body => {
            expect(body).to.deep.equal([

@@ -457,31 +459,6 @@ describe('Syncing with web and doc-updater', function () {
      })
 
      it('should strip non-BMP characters in updates before sending to the history store', function (done) {
-        MockHistoryStore()
-          .get(`/api/projects/${historyId}/latest/history`)
-          .reply(200, {
-            chunk: {
-              history: {
-                snapshot: {
-                  files: {
-                    'main.tex': {
-                      hash: '0a207c060e61f3b88eaee0a8cd0696f46fb155eb',
-                      stringLength: 3,
-                    },
-                  },
-                },
-                changes: [],
-              },
-              startVersion: 0,
-            },
-          })
-
-        MockHistoryStore()
-          .get(
-            `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb`
-          )
-          .reply(200, 'a\nb')
-
        const addFile = MockHistoryStore()
          .post(`/api/projects/${historyId}/legacy_changes`, body => {
            expect(body).to.deep.equal([

@@ -551,6 +528,98 @@ describe('Syncing with web and doc-updater', function () {
          }
        )
      })
+
+      it('should fix comments in the history store', function (done) {
+        const commentId = 'comment-id'
+        const addComment = MockHistoryStore()
+          .post(`/api/projects/${historyId}/legacy_changes`, body => {
+            expect(body).to.deep.equal([
+              {
+                v2Authors: [],
+                authors: [],
+                timestamp: this.timestamp.toJSON(),
+                operations: [
+                  {
+                    pathname: 'main.tex',
+                    commentId,
+                    ranges: [{ pos: 1, length: 10 }],
+                    resolved: false,
+                  },
+                ],
+                origin: { kind: 'test-origin' },
+              },
+            ])
+            return true
+          })
+          .query({ end_version: 0 })
+          .reply(204)
+
+        async.series(
+          [
+            cb => {
+              ProjectHistoryClient.resyncHistory(this.project_id, cb)
+            },
+            cb => {
+              const update = {
+                projectHistoryId: historyId,
+                resyncProjectStructure: {
+                  docs: [{ path: '/main.tex' }],
+                  files: [],
+                },
+                meta: {
+                  ts: this.timestamp,
+                },
+              }
+              ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb)
+            },
+            cb => {
+              const update = {
+                path: '/main.tex',
+                projectHistoryId: historyId,
+                resyncDocContent: {
+                  content: 'a\nb',
+                  ranges: {
+                    comments: [
+                      {
+                        id: commentId,
+                        op: {
+                          c: 'a',
+                          p: 0,
+                          hpos: 1,
+                          hlen: 10,
+                          t: commentId,
+                        },
+                        meta: {
+                          user_id: 'user-id',
+                          ts: this.timestamp,
+                        },
+                      },
+                    ],
+                  },
+                },
+                doc: this.doc_id,
+                meta: {
+                  ts: this.timestamp,
+                },
+              }
+              ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb)
+            },
+            cb => {
+              ProjectHistoryClient.flushProject(this.project_id, cb)
+            },
+          ],
+          error => {
+            if (error) {
+              return done(error)
+            }
+            assert(
+              addComment.isDone(),
+              `/api/projects/${historyId}/changes should have been called`
+            )
+            done()
+          }
+        )
+      })
    })
  })
 })

@@ -1,7 +1,10 @@
+// @ts-check
+
 import sinon from 'sinon'
 import { expect } from 'chai'
 import mongodb from 'mongodb-legacy'
 import tk from 'timekeeper'
+import { Comment, Range } from 'overleaf-editor-core'
 import { strict as esmock } from 'esmock'
 const { ObjectId } = mongodb
 

@@ -9,26 +12,47 @@ const MODULE_PATH = '../../../../app/js/SyncManager.js'
 
 const timestamp = new Date()
 
-const resyncProjectStructureUpdate = (docs, files) => ({
+function resyncProjectStructureUpdate(docs, files) {
+  return {
     resyncProjectStructure: { docs, files },
 
     meta: {
       ts: timestamp,
     },
-})
+  }
+}
 
-const docContentSyncUpdate = (doc, content) => ({
+function docContentSyncUpdate(
+  doc,
+  content,
+  ranges = {},
+  resolvedComments = []
+) {
+  return {
     path: doc.path,
     doc: doc.doc,
 
     resyncDocContent: {
       content,
+      ranges,
+      resolvedComments,
     },
 
     meta: {
       ts: timestamp,
     },
-})
+  }
+}
+
+function makeComment(commentId, pos, text) {
+  return {
+    id: commentId,
+    op: { p: pos, c: text, t: commentId },
+    meta: {
+      ts: timestamp,
+    },
+  }
+}
 
 describe('SyncManager', function () {
   beforeEach(async function () {

|
||||||
}
|
}
|
||||||
|
|
||||||
this.UpdateCompressor = {
|
this.UpdateCompressor = {
|
||||||
diffAsShareJsOps: sinon.stub(),
|
diffAsShareJsOps: sinon.stub().returns([]),
|
||||||
}
|
}
|
||||||
|
|
||||||
this.UpdateTranslator = {
|
this.UpdateTranslator = {
|
||||||
|
@@ -451,22 +475,33 @@ describe('SyncManager', function () {
   describe('expandSyncUpdates', function () {
     beforeEach(function () {
       this.persistedDoc = {
-        doc: { data: { hash: 'abcdef' } },
+        doc: 'doc-id',
         path: 'main.tex',
-        content: 'asdf',
       }
+      this.persistedDocContent = 'the quick brown fox jumps over the lazy fox'
      this.persistedFile = {
-        file: { data: { hash: '123456789a' } },
+        file: 'file-id',
         path: '1.png',
+        _hash: 'abcde',
+      }
+      this.loadedSnapshotDoc = {
+        isEditable: sinon.stub().returns(true),
+        getContent: sinon.stub().returns(this.persistedDocContent),
+        getComments: sinon
+          .stub()
+          .returns({ toArray: sinon.stub().returns([]) }),
+        getHash: sinon.stub().returns(null),
      }
      this.fileMap = {
        'main.tex': {
          isEditable: sinon.stub().returns(true),
-          content: this.persistedDoc.content,
+          getContent: sinon.stub().returns(null),
+          getHash: sinon.stub().returns(null),
+          load: sinon.stub().resolves(this.loadedSnapshotDoc),
        },
        '1.png': {
          isEditable: sinon.stub().returns(false),
-          data: { hash: this.persistedFile.file.data.hash },
+          data: { hash: this.persistedFile._hash },
        },
      }
      this.UpdateTranslator._convertPathname

@@ -777,7 +812,7 @@ describe('SyncManager', function () {
       })
 
       it('preserves other updates', async function () {
-        const update = 'mock-update'
+        const update = { mock: 'update' }
         const updates = [
           update,
           resyncProjectStructureUpdate(

@@ -807,7 +842,7 @@ describe('SyncManager', function () {
             [this.persistedDoc],
             [this.persistedFile]
           ),
-          docContentSyncUpdate(this.persistedDoc, this.persistedDoc.content),
+          docContentSyncUpdate(this.persistedDoc, this.persistedDocContent),
         ]
         await expect(
           this.SyncManager.promises.expandSyncUpdates(

@@ -838,7 +873,7 @@ describe('SyncManager', function () {
             [this.persistedDoc],
             [this.persistedFile]
           ),
-          docContentSyncUpdate(this.persistedDoc, this.persistedDoc.content),
+          docContentSyncUpdate(this.persistedDoc, this.persistedDocContent),
         ]
         this.UpdateCompressor.diffAsShareJsOps.returns([])
         const expandedUpdates =

@@ -859,9 +894,14 @@ describe('SyncManager', function () {
             [this.persistedDoc],
             [this.persistedFile]
           ),
-          docContentSyncUpdate(this.persistedDoc, 'a'),
+          docContentSyncUpdate(
+            this.persistedDoc,
+            'the fox jumps over the lazy dog'
+          ),
         ]
-        this.UpdateCompressor.diffAsShareJsOps.returns([{ d: 'sdf', p: 1 }])
+        this.UpdateCompressor.diffAsShareJsOps.returns([
+          { d: 'quick brown ', p: 4 },
+        ])
         const expandedUpdates =
           await this.SyncManager.promises.expandSyncUpdates(
             this.projectId,

@@ -873,10 +913,10 @@ describe('SyncManager', function () {
         expect(expandedUpdates).to.deep.equal([
           {
             doc: this.persistedDoc.doc,
-            op: [{ d: 'sdf', p: 1 }],
+            op: [{ d: 'quick brown ', p: 4 }],
             meta: {
               pathname: this.persistedDoc.path,
-              doc_length: 4,
+              doc_length: this.persistedDocContent.length,
               resync: true,
               ts: timestamp,
               origin: { kind: 'history-resync' },

@@ -891,14 +931,14 @@ describe('SyncManager', function () {
         const newDoc = {
           path: 'another.tex',
           doc: new ObjectId().toString(),
-          content: 'a',
         }
+        const newDocContent = 'a'
         const updates = [
           resyncProjectStructureUpdate(
             [this.persistedDoc, newDoc],
             [this.persistedFile]
           ),
-          docContentSyncUpdate(newDoc, newDoc.content),
+          docContentSyncUpdate(newDoc, newDocContent),
         ]
         const expandedUpdates =
           await this.SyncManager.promises.expandSyncUpdates(

@@ -935,7 +975,7 @@ describe('SyncManager', function () {
       })
 
       it('skips text updates for docs when hashes match', async function () {
-        this.fileMap['main.tex'].getHash = sinon.stub().returns('special-hash')
+        this.fileMap['main.tex'].getHash.returns('special-hash')
         this.HashManager._getBlobHashFromString.returns('special-hash')
         const updates = [
           resyncProjectStructureUpdate(

@@ -957,7 +997,7 @@ describe('SyncManager', function () {
       })
 
       it('computes text updates for docs when hashes differ', async function () {
-        this.fileMap['main.tex'].getHash = sinon.stub().returns('first-hash')
+        this.fileMap['main.tex'].getHash.returns('first-hash')
         this.HashManager._getBlobHashFromString.returns('second-hash')
         this.UpdateCompressor.diffAsShareJsOps.returns([
           { i: 'test diff', p: 0 },

@@ -983,7 +1023,7 @@ describe('SyncManager', function () {
             op: [{ i: 'test diff', p: 0 }],
             meta: {
               pathname: this.persistedDoc.path,
-              doc_length: 4,
+              doc_length: this.persistedDocContent.length,
               resync: true,
               ts: timestamp,
               origin: { kind: 'history-resync' },

@@ -1002,8 +1042,7 @@ describe('SyncManager', function () {
           ),
           docContentSyncUpdate(this.persistedDoc, 'a'),
         ]
-        const file = { getContent: sinon.stub().returns('stored content') }
-        this.fileMap['main.tex'].load = sinon.stub().resolves(file)
+        this.loadedSnapshotDoc.getContent.returns('stored content')
         this.UpdateCompressor.diffAsShareJsOps.returns([{ d: 'sdf', p: 1 }])
         this.expandedUpdates =
           await this.SyncManager.promises.expandSyncUpdates(

@@ -1032,5 +1071,195 @@ describe('SyncManager', function () {
        })
      })
    })
+
+    describe('syncing comments', function () {
+      beforeEach(function () {
+        this.loadedSnapshotDoc.getComments.returns({
+          toArray: sinon
+            .stub()
+            .returns([
+              new Comment('comment1', [new Range(4, 5)]),
+              new Comment('comment2', [new Range(10, 5)], true),
+            ]),
+        })
+        this.comments = [
+          makeComment('comment1', 4, 'quick'),
+          makeComment('comment2', 10, 'brown'),
+        ]
+        this.resolvedComments = ['comment2']
+      })
+
+      it('does nothing if comments have not changed', async function () {
+        const updates = [
+          docContentSyncUpdate(
+            this.persistedDoc,
+            this.persistedDocContent,
+            {
+              comments: this.comments,
+            },
+            this.resolvedComments
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            updates,
+            this.extendLock
+          )
+        expect(expandedUpdates).to.deep.equal([])
+      })
+
+      it('adds missing comments', async function () {
+        this.comments.push(makeComment('comment3', 20, 'jumps'))
+        const updates = [
+          docContentSyncUpdate(
+            this.persistedDoc,
+            this.persistedDocContent,
+            {
+              comments: this.comments,
+            },
+            this.resolvedComments
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            updates,
+            this.extendLock
+          )
+        expect(expandedUpdates).to.deep.equal([
+          {
+            doc: this.persistedDoc.doc,
+            op: [
+              {
+                c: 'jumps',
+                p: 20,
+                t: 'comment3',
+              },
+            ],
+            meta: {
+              origin: {
+                kind: 'history-resync',
+              },
+              pathname: this.persistedDoc.path,
+              resync: true,
+              ts: timestamp,
+              doc_length: this.persistedDocContent.length,
+            },
+          },
+        ])
+      })
+
+      it('deletes extra comments', async function () {
+        this.comments.splice(0, 1)
+        const updates = [
+          docContentSyncUpdate(
+            this.persistedDoc,
+            this.persistedDocContent,
+            {
+              comments: this.comments,
+            },
+            this.resolvedComments
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            updates,
+            this.extendLock
+          )
+        expect(expandedUpdates).to.deep.equal([
+          {
+            pathname: this.persistedDoc.path,
+            deleteComment: 'comment1',
+            meta: {
+              origin: {
+                kind: 'history-resync',
+              },
+              resync: true,
+              ts: timestamp,
+            },
+          },
+        ])
+      })
+
+      it('updates comments when ranges differ', async function () {
+        this.comments[1] = makeComment('comment2', 16, 'fox')
+        const updates = [
+          docContentSyncUpdate(
+            this.persistedDoc,
+            this.persistedDocContent,
+            {
+              comments: this.comments,
+            },
+            this.resolvedComments
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            updates,
+            this.extendLock
+          )
+        expect(expandedUpdates).to.deep.equal([
+          {
+            doc: 'doc-id',
+            op: [
+              {
+                c: 'fox',
+                p: 16,
+                t: 'comment2',
+              },
+            ],
+            meta: {
+              origin: {
+                kind: 'history-resync',
+              },
+              resync: true,
+              ts: timestamp,
+              pathname: this.persistedDoc.path,
+              doc_length: this.persistedDocContent.length,
+            },
+          },
+        ])
+      })
+
+      it('sets the resolved state when it differs', async function () {
+        this.resolvedComments = ['comment1']
+        const updates = [
+          docContentSyncUpdate(
+            this.persistedDoc,
+            this.persistedDocContent,
+            {
+              comments: this.comments,
+            },
+            this.resolvedComments
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            updates,
+            this.extendLock
+          )
+        expect(expandedUpdates).to.have.deep.members([
+          {
+            pathname: this.persistedDoc.path,
+            commentId: 'comment1',
+            resolved: true,
+          },
+          {
+            pathname: this.persistedDoc.path,
+            commentId: 'comment2',
+            resolved: false,
+          },
+        ])
+      })
+    })
  })
 })