Merge pull request #17958 from overleaf/mj-web-revert-file

[web] Initial version of file reverting

GitOrigin-RevId: ef500b2c3ddf21b0acb6650c2526edd10ab2cc18
This commit is contained in:
Mathias Jakobsen 2024-05-28 14:17:55 +01:00 committed by Copybot
parent edaba14ed1
commit f14fdc7b7c
25 changed files with 1605 additions and 222 deletions

View file

@@ -38,7 +38,9 @@ const {
} = require('./lib/operation/scan_op')
const TrackedChange = require('./lib/file_data/tracked_change')
const TrackedChangeList = require('./lib/file_data/tracked_change_list')
const TrackingProps = require('./lib/file_data/tracking_props')
const Range = require('./lib/range')
const CommentList = require('./lib/file_data/comment_list')
exports.AddCommentOperation = AddCommentOperation
exports.Author = Author
@@ -79,3 +81,5 @@ exports.RemoveOp = RemoveOp
exports.TrackedChangeList = TrackedChangeList
exports.TrackedChange = TrackedChange
exports.Range = Range
exports.CommentList = CommentList
exports.TrackingProps = TrackingProps

View file

@@ -86,9 +86,19 @@ const ProjectHistoryRedisManager = {
projectUpdate,
source
) {
let docLines = projectUpdate.docLines
let ranges
if (projectUpdate.historyRangesSupport && projectUpdate.ranges) {
docLines = addTrackedDeletesToContent(
docLines,
projectUpdate.ranges.changes ?? []
)
ranges = HistoryConversions.toHistoryRanges(projectUpdate.ranges)
}
projectUpdate = {
pathname: projectUpdate.pathname,
docLines: projectUpdate.docLines,
docLines,
url: projectUpdate.url,
meta: {
user_id: userId,
@@ -97,6 +107,9 @@ const ProjectHistoryRedisManager = {
version: projectUpdate.version,
projectHistoryId,
}
if (ranges) {
projectUpdate.ranges = ranges
}
projectUpdate[entityType] = entityId
if (source != null) {
projectUpdate.meta.source = source
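
For illustration, a sketch (not part of the diff) of the add-doc update that queueAddEntity serialises when historyRangesSupport is enabled; field names come from the code above, values are made up:

// Illustrative: what JSON.stringify(projectUpdate) carries for an add-doc
// update when history ranges support is on.
const exampleProjectUpdate = {
  pathname: '/main.tex',
  docLines: 'the quick brown fox', // tracked deletes re-inserted into content
  url: undefined,
  meta: { user_id: 'user-1', ts: 1716900000000, source: 'editor' },
  version: 2,
  projectHistoryId: 42,
  ranges: { changes: [], comments: [] }, // output of toHistoryRanges()
  doc: 'doc-id-1', // projectUpdate[entityType] = entityId
}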

View file

@@ -41,6 +41,8 @@ export type CommentOp = {
p: number
t: string
u?: boolean
// Used by project-history when restoring CommentSnapshots
resolved?: boolean
}
/**
@@ -54,7 +56,7 @@ export type Ranges = {
export type Comment = {
id: string
op: CommentOp
metadata: {
metadata?: {
user_id: string
ts: string
}

View file

@@ -125,19 +125,22 @@ describe('ProjectHistoryRedisManager', function () {
})
describe('queueAddEntity', function () {
beforeEach(async function () {
beforeEach(function () {
this.doc_id = 1234
this.rawUpdate = {
pathname: (this.pathname = '/old'),
docLines: (this.docLines = 'a\nb'),
version: (this.version = 2),
url: (this.url = 'filestore.example.com'),
}
this.ProjectHistoryRedisManager.promises.queueOps = sinon
.stub()
.resolves()
})
it('should queue an update', async function () {
this.rawUpdate.url = this.url = 'filestore.example.com'
await this.ProjectHistoryRedisManager.promises.queueAddEntity(
this.project_id,
this.projectHistoryId,
@@ -147,9 +150,7 @@ describe('ProjectHistoryRedisManager', function () {
this.rawUpdate,
this.source
)
})
it('should queue an update', function () {
const update = {
pathname: this.pathname,
docLines: this.docLines,
@@ -169,136 +170,325 @@ describe('ProjectHistoryRedisManager', function () {
.should.equal(true)
})
describe('queueResyncProjectStructure', function () {
it('should queue an update', function () {})
it('should forward history compatible ranges if history ranges support is enabled', async function () {
this.rawUpdate.historyRangesSupport = true
this.docLines = 'the quick fox jumps over the lazy dog'
const ranges = {
changes: [
{
op: { p: 4, i: 'quick' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 9, d: ' brown' },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 14, i: 'jumps' },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
],
comments: [
{
op: { p: 29, c: 'lazy', t: 'comment-1' },
metadata: { resolved: false },
},
],
}
this.rawUpdate.ranges = ranges
this.rawUpdate.docLines = this.docLines
await this.ProjectHistoryRedisManager.promises.queueAddEntity(
this.project_id,
this.projectHistoryId,
'doc',
this.doc_id,
this.user_id,
this.rawUpdate,
this.source
)
const historyCompatibleRanges = {
comments: [
{
op: { p: 29, c: 'lazy', t: 'comment-1', hpos: 35 },
metadata: { resolved: false },
},
],
changes: [
{
op: { p: 4, i: 'quick' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 9, d: ' brown' },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 14, i: 'jumps', hpos: 20 },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
],
}
const update = {
pathname: this.pathname,
docLines: 'the quick brown fox jumps over the lazy dog',
meta: {
user_id: this.user_id,
ts: new Date(),
source: this.source,
},
version: this.version,
projectHistoryId: this.projectHistoryId,
ranges: historyCompatibleRanges,
doc: this.doc_id,
}
expect(
this.ProjectHistoryRedisManager.promises.queueOps
).to.have.been.calledWithExactly(this.project_id, JSON.stringify(update))
})
describe('queueResyncDocContent', function () {
beforeEach(function () {
this.doc_id = 1234
this.lines = ['one', 'two']
this.ranges = {
changes: [{ op: { i: 'ne', p: 1 } }, { op: { d: 'deleted', p: 3 } }],
it('should not forward ranges if history ranges support is disabled', async function () {
this.rawUpdate.historyRangesSupport = false
const ranges = {
changes: [
{
op: { p: 0, i: 'foo' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 7, d: ' baz' },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
],
comments: [
{
op: { p: 4, c: 'bar', t: 'comment-1' },
metadata: { resolved: false },
},
],
}
this.rawUpdate.ranges = ranges
await this.ProjectHistoryRedisManager.promises.queueAddEntity(
this.project_id,
this.projectHistoryId,
'doc',
this.doc_id,
this.user_id,
this.rawUpdate,
this.source
)
const update = {
pathname: this.pathname,
docLines: this.docLines,
meta: {
user_id: this.user_id,
ts: new Date(),
source: this.source,
},
version: this.version,
projectHistoryId: this.projectHistoryId,
doc: this.doc_id,
}
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(update))
.should.equal(true)
})
it('should not forward ranges if history ranges support is undefined', async function () {
this.rawUpdate.historyRangesSupport = undefined
const ranges = {
changes: [
{
op: { p: 0, i: 'foo' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 7, d: ' baz' },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
],
comments: [
{
op: { p: 4, c: 'bar', t: 'comment-1' },
metadata: { resolved: false },
},
],
}
this.rawUpdate.ranges = ranges
await this.ProjectHistoryRedisManager.promises.queueAddEntity(
this.project_id,
this.projectHistoryId,
'doc',
this.doc_id,
this.user_id,
this.rawUpdate,
this.source
)
const update = {
pathname: this.pathname,
docLines: this.docLines,
meta: {
user_id: this.user_id,
ts: new Date(),
source: this.source,
},
version: this.version,
projectHistoryId: this.projectHistoryId,
doc: this.doc_id,
}
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(update))
.should.equal(true)
})
})
describe('queueResyncProjectStructure', function () {
it('should queue an update', function () {})
})
describe('queueResyncDocContent', function () {
beforeEach(function () {
this.doc_id = 1234
this.lines = ['one', 'two']
this.ranges = {
changes: [{ op: { i: 'ne', p: 1 } }, { op: { d: 'deleted', p: 3 } }],
}
this.resolvedCommentIds = ['comment-1']
this.version = 2
this.pathname = '/path'
this.ProjectHistoryRedisManager.promises.queueOps = sinon
.stub()
.resolves()
})
describe('with a good doc', function () {
beforeEach(async function () {
this.update = {
resyncDocContent: {
content: 'one\ntwo',
version: this.version,
},
projectHistoryId: this.projectHistoryId,
path: this.pathname,
doc: this.doc_id,
meta: { ts: new Date() },
}
this.resolvedCommentIds = ['comment-1']
this.version = 2
this.pathname = '/path'
this.ProjectHistoryRedisManager.promises.queueOps = sinon
.stub()
.resolves()
await this.ProjectHistoryRedisManager.promises.queueResyncDocContent(
this.project_id,
this.projectHistoryId,
this.doc_id,
this.lines,
this.ranges,
this.resolvedCommentIds,
this.version,
this.pathname,
false
)
})
describe('with a good doc', function () {
beforeEach(async function () {
this.update = {
resyncDocContent: {
content: 'one\ntwo',
version: this.version,
},
projectHistoryId: this.projectHistoryId,
path: this.pathname,
doc: this.doc_id,
meta: { ts: new Date() },
}
await this.ProjectHistoryRedisManager.promises.queueResyncDocContent(
this.project_id,
this.projectHistoryId,
this.doc_id,
this.lines,
this.ranges,
this.resolvedCommentIds,
this.version,
this.pathname,
false
)
})
it('should check if the doc is too large', function () {
this.Limits.docIsTooLarge
.calledWith(
JSON.stringify(this.update).length,
this.lines,
this.settings.max_doc_length
)
.should.equal(true)
})
it('should queue an update', function () {
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(this.update))
.should.equal(true)
})
})
describe('with a doc that is too large', function () {
beforeEach(async function () {
this.Limits.docIsTooLarge.returns(true)
await expect(
this.ProjectHistoryRedisManager.promises.queueResyncDocContent(
this.project_id,
this.projectHistoryId,
this.doc_id,
this.lines,
this.ranges,
this.resolvedCommentIds,
this.version,
this.pathname,
false
)
).to.be.rejected
})
it('should not queue an update if the doc is too large', function () {
this.ProjectHistoryRedisManager.promises.queueOps.called.should.equal(
false
)
})
})
describe('when history ranges support is enabled', function () {
beforeEach(async function () {
this.update = {
resyncDocContent: {
content: 'onedeleted\ntwo',
version: this.version,
ranges: this.ranges,
resolvedCommentIds: this.resolvedCommentIds,
},
projectHistoryId: this.projectHistoryId,
path: this.pathname,
doc: this.doc_id,
meta: { ts: new Date() },
}
await this.ProjectHistoryRedisManager.promises.queueResyncDocContent(
this.project_id,
this.projectHistoryId,
this.doc_id,
this.lines,
this.ranges,
this.resolvedCommentIds,
this.version,
this.pathname,
true
)
})
it('should include tracked deletes in the update', function () {
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(this.update)
)
})
it('should check the doc length without tracked deletes', function () {
this.Limits.docIsTooLarge.should.have.been.calledWith(
it('should check if the doc is too large', function () {
this.Limits.docIsTooLarge
.calledWith(
JSON.stringify(this.update).length,
this.lines,
this.settings.max_doc_length
)
})
.should.equal(true)
})
it('should queue an update', function () {
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(this.update))
.should.equal(true)
})
})
describe('with a doc that is too large', function () {
beforeEach(async function () {
this.Limits.docIsTooLarge.returns(true)
await expect(
this.ProjectHistoryRedisManager.promises.queueResyncDocContent(
this.project_id,
this.projectHistoryId,
this.doc_id,
this.lines,
this.ranges,
this.resolvedCommentIds,
this.version,
this.pathname,
false
)
).to.be.rejected
})
it('should not queue an update if the doc is too large', function () {
this.ProjectHistoryRedisManager.promises.queueOps.called.should.equal(
false
)
})
})
describe('when history ranges support is enabled', function () {
beforeEach(async function () {
this.update = {
resyncDocContent: {
content: 'onedeleted\ntwo',
version: this.version,
ranges: this.ranges,
resolvedCommentIds: this.resolvedCommentIds,
},
projectHistoryId: this.projectHistoryId,
path: this.pathname,
doc: this.doc_id,
meta: { ts: new Date() },
}
await this.ProjectHistoryRedisManager.promises.queueResyncDocContent(
this.project_id,
this.projectHistoryId,
this.doc_id,
this.lines,
this.ranges,
this.resolvedCommentIds,
this.version,
this.pathname,
true
)
})
it('should include tracked deletes in the update', function () {
this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
this.project_id,
JSON.stringify(this.update)
)
})
it('should check the doc length without tracked deletes', function () {
this.Limits.docIsTooLarge.should.have.been.calledWith(
JSON.stringify(this.update).length,
this.lines,
this.settings.max_doc_length
)
})
it('should queue an update', function () {
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(this.update))
.should.equal(true)
})
})
})
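
A note on the hpos values asserted in the test above (my reading of the conversion, not stated in the commit): history positions count tracked-deleted text as present. In 'the quick fox jumps over the lazy dog', the tracked deletion of ' brown' (6 characters) at p: 9 precedes both the 'jumps' insertion (p: 14) and the 'lazy' comment (p: 29), so their history positions become 14 + 6 = 20 and 29 + 6 = 35, while 'quick' at p: 4 sits before the deletion and needs no hpos.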

View file

@@ -59,7 +59,7 @@ export function createBlobsForUpdates(
projectId,
historyId,
update,
(err, hash) => {
(err, hashes) => {
if (err) {
OError.tag(err, 'retry: error creating blob', {
projectId,
@@ -68,18 +68,18 @@ export function createBlobsForUpdates(
})
_cb(err)
} else {
_cb(null, hash)
_cb(null, hashes)
}
}
)
})
},
(error, blobHash) => {
(error, blobHashes) => {
if (error) {
if (!firstBlobCreationError) {
firstBlobCreationError = error
}
return cb(null, { update, blobHash })
return cb(null, { update, blobHashes })
}
extendLock(error => {
@@ -88,7 +88,7 @@ export function createBlobsForUpdates(
firstBlobCreationError = error
}
}
cb(null, { update, blobHash })
cb(null, { update, blobHashes })
})
}
)

View file

@@ -0,0 +1,122 @@
// @ts-check
import {
Range,
TrackedChange,
TrackedChangeList,
CommentList,
Comment,
TrackingProps,
} from 'overleaf-editor-core'
import logger from '@overleaf/logger'
import OError from '@overleaf/o-error'
/**
* @typedef {import('./types').AddDocUpdate} AddDocUpdate
* @typedef {import('overleaf-editor-core/lib/types').CommentRawData} CommentRawData
* @typedef {import('overleaf-editor-core/lib/types').TrackedChangeRawData} TrackedChangeRawData
*/
/**
*
* @param {AddDocUpdate} update
* @returns {{trackedChanges: TrackedChangeRawData[], comments: CommentRawData[]} | undefined}
*/
export function createRangeBlobDataFromUpdate(update) {
logger.debug({ update }, 'createRangeBlobDataFromUpdate')
if (update.doc == null || update.docLines == null) {
throw new OError('Not an AddFileUpdate')
}
if (
!update.ranges ||
(update.ranges.changes == null && update.ranges.comments == null)
) {
return undefined
}
if (
(!update.ranges.changes || update.ranges.changes.length === 0) &&
(!update.ranges.comments || update.ranges.comments.length === 0)
) {
return undefined
}
const sortedRanges = [...(update.ranges.changes || [])].sort((a, b) => {
if (a.op.p !== b.op.p) {
return a.op.p - b.op.p
}
if ('i' in a.op && a.op.i != null && 'd' in b.op && b.op.d != null) {
// Move deletes before inserts
return 1
}
return -1
})
const tcList = new TrackedChangeList([])
for (const change of sortedRanges) {
if ('d' in change.op && change.op.d != null) {
const length = change.op.d.length
const range = new Range(change.op.hpos ?? change.op.p, length)
tcList.add(
new TrackedChange(
range,
new TrackingProps(
'delete',
change.metadata.user_id,
new Date(change.metadata.ts)
)
)
)
} else if ('i' in change.op && change.op.i != null) {
const length = change.op.i.length
const range = new Range(change.op.hpos ?? change.op.p, length)
tcList.add(
new TrackedChange(
range,
new TrackingProps(
'insert',
change.metadata.user_id,
new Date(change.metadata.ts)
)
)
)
}
}
const comments = [...(update.ranges.comments || [])].sort((a, b) => {
return a.op.p - b.op.p
})
/** @type {Map<string, {ranges: Range[], resolved: boolean}>} */
const commentMap = new Map()
for (const comment of comments) {
const range = new Range(
comment.op.hpos ?? comment.op.p,
comment.op.hlen ?? comment.op.c.length
)
const id = comment.op.t
if (!commentMap.has(id)) {
commentMap.set(id, {
ranges: [],
resolved: comment.op.resolved ?? false,
})
}
const entry = commentMap.get(id)
if (!entry) {
throw new Error('Comment entry not found')
}
if (entry.resolved !== (comment.op.resolved ?? false)) {
throw new Error('Mismatching resolved status for comment')
}
entry.ranges.push(range)
}
const commentList = new CommentList(
[...commentMap.entries()].map(
([id, commentObj]) =>
new Comment(id, commentObj.ranges, commentObj.resolved)
)
)
return { trackedChanges: tcList.toRaw(), comments: commentList.toRaw() }
}
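
A minimal usage sketch of the translator above; the input is a hypothetical AddDocUpdate, and the expected raw shapes follow the unit tests added later in this commit:

import { createRangeBlobDataFromUpdate } from './HistoryBlobTranslator.js'

const blobData = createRangeBlobDataFromUpdate({
  doc: 'doc-id-1',
  pathname: '/main.tex',
  docLines: 'the quick brown fox',
  version: 'version-1',
  projectHistoryId: 'project-id',
  meta: { user_id: 'user-1', ts: 0 },
  ranges: {
    changes: [
      {
        op: { p: 4, i: 'quick' },
        metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
      },
    ],
    comments: [{ op: { c: 'brown', p: 10, t: 'comment-1', resolved: false } }],
  },
})
// blobData => {
//   trackedChanges: [{ range: { pos: 4, length: 5 },
//     tracking: { type: 'insert', userId: 'user-1', ts: '2024-01-01T00:00:00.000Z' } }],
//   comments: [{ id: 'comment-1', ranges: [{ pos: 10, length: 5 }] }],
// }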

View file

@@ -16,6 +16,7 @@ import * as Versions from './Versions.js'
import * as Errors from './Errors.js'
import * as LocalFileWriter from './LocalFileWriter.js'
import * as HashManager from './HashManager.js'
import * as HistoryBlobTranslator from './HistoryBlobTranslator.js'
const HTTP_REQUEST_TIMEOUT = Settings.apis.history_v1.requestTimeout
@@ -230,22 +231,60 @@ export function sendChanges(
)
}
function createBlobFromString(historyId, data, fileId, callback) {
const stringStream = new StringStream()
stringStream.push(data)
stringStream.push(null)
LocalFileWriter.bufferOnDisk(
stringStream,
'',
fileId,
(fsPath, cb) => {
_createBlob(historyId, fsPath, cb)
},
callback
)
}
export function createBlobForUpdate(projectId, historyId, update, callback) {
callback = _.once(callback)
if (update.doc != null && update.docLines != null) {
const stringStream = new StringStream()
stringStream.push(update.docLines)
stringStream.push(null)
LocalFileWriter.bufferOnDisk(
stringStream,
'',
let ranges
try {
ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update)
} catch (error) {
return callback(error)
}
createBlobFromString(
historyId,
update.docLines,
`project-${projectId}-doc-${update.doc}`,
(fsPath, cb) => {
_createBlob(historyId, fsPath, cb)
},
callback
(err, fileHash) => {
if (err) {
return callback(err)
}
if (ranges) {
createBlobFromString(
historyId,
JSON.stringify(ranges),
`project-${projectId}-doc-${update.doc}-ranges`,
(err, rangesHash) => {
if (err) {
return callback(err)
}
logger.debug(
{ fileHash, rangesHash },
'created blobs for both ranges and content'
)
return callback(null, { file: fileHash, ranges: rangesHash })
}
)
} else {
logger.debug({ fileHash }, 'created blob for content')
return callback(null, { file: fileHash })
}
}
)
} else if (update.file != null && update.url != null) {
// Rewrite the filestore url to point to the location in the local
@@ -274,7 +313,13 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
(fsPath, cb) => {
_createBlob(historyId, fsPath, cb)
},
callback
(err, fileHash) => {
if (err) {
return callback(err)
}
logger.debug({ fileHash }, 'created blob for file')
callback(null, { file: fileHash })
}
)
})
.catch(err => {
@@ -291,7 +336,13 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
(fsPath, cb) => {
_createBlob(historyId, fsPath, cb)
},
callback
(err, fileHash) => {
if (err) {
return callback(err)
}
logger.debug({ fileHash }, 'created empty blob for file')
callback(null, { file: fileHash })
}
)
emptyStream.push(null) // send an EOF signal
} else {
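
With this change, createBlobForUpdate yields an object of hashes rather than a single hash. A hedged caller sketch (handleError is hypothetical):

createBlobForUpdate(projectId, historyId, update, (err, blobHashes) => {
  if (err) return handleError(err)
  console.log(blobHashes.file) // content blob hash, always present
  if (blobHashes.ranges) {
    // ranges blob hash, only present for doc updates that carried ranges
    console.log(blobHashes.ranges)
  }
})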

View file

@@ -185,6 +185,7 @@ async function getRangesSnapshot(projectId, version, pathname) {
p: position,
c: commentRangeContent,
t: comment.id,
resolved: comment.resolved,
},
})
}
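
With the resolved flag added here, a comment entry in the ranges snapshot served by project-history (and fetched by RestoreManager further down) looks roughly like this; the surrounding payload shape is inferred, not shown in this diff:

// Illustrative comment entry in the ranges snapshot
{ op: { p: 29, c: 'lazy', t: 'comment-1', resolved: false } }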

View file

@@ -55,14 +55,16 @@ function _convertToChange(projectId, updateWithBlob) {
]
projectVersion = update.version
} else if (isAddUpdate(update)) {
operations = [
{
pathname: _convertPathname(update.pathname),
file: {
hash: updateWithBlob.blobHash,
},
const op = {
pathname: _convertPathname(update.pathname),
file: {
hash: updateWithBlob.blobHashes.file,
},
]
}
if (_isAddDocUpdate(update)) {
op.file.rangesHash = updateWithBlob.blobHashes.ranges
}
operations = [op]
projectVersion = update.version
} else if (isTextUpdate(update)) {
const docLength = update.meta.history_doc_length ?? update.meta.doc_length
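
For reference, the v1 operation built above for an add-doc update with ranges, using the hashes asserted by the acceptance test later in this commit; rangesHash is only set for doc updates:

const op = {
  pathname: 'main.tex',
  file: {
    hash: '49e886093b3eacbc12b99a1eb5aeaa44a6b9d90e', // content blob
    rangesHash: 'fa9a429ff518bc9e5b2507a96ff0646b566eca65', // ranges blob
  },
}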

View file

@@ -1,3 +1,5 @@
import { HistoryRanges } from '../../../document-updater/app/js/types'
export type Update =
| TextUpdate
| AddDocUpdate
@@ -51,7 +53,8 @@ type ProjectUpdateBase = {
export type AddDocUpdate = ProjectUpdateBase & {
pathname: string
docLines: string[]
docLines: string
ranges?: HistoryRanges
}
export type AddFileUpdate = ProjectUpdateBase & {
@@ -134,7 +137,10 @@ export type CommentOp = {
export type UpdateWithBlob = {
update: Update
blobHash: string
blobHashes: {
file: string
ranges?: string
}
}
export type RawOrigin = {
@ -173,6 +179,7 @@ export type CommentSnapshot = {
p: number
t: string
c: string
resolved: boolean
}
}
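
An illustrative UpdateWithBlob under the new type, showing the blobHash-to-blobHashes change (addDocUpdate is a hypothetical value):

const updateWithBlob = {
  update: addDocUpdate,
  blobHashes: {
    file: '49e886093b3eacbc12b99a1eb5aeaa44a6b9d90e',
    ranges: 'fa9a429ff518bc9e5b2507a96ff0646b566eca65', // optional
  },
}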

View file

@@ -30,10 +30,11 @@ function slTextUpdate(historyId, doc, userId, v, ts, op) {
}
}
function slAddDocUpdate(historyId, doc, userId, ts, docLines) {
function slAddDocUpdate(historyId, doc, userId, ts, docLines, ranges = {}) {
return {
projectHistoryId: historyId,
pathname: doc.pathname,
ranges,
docLines,
doc: doc.id,
meta: { user_id: userId, ts: ts.getTime() },
@@ -46,9 +47,10 @@ function slAddDocUpdateWithVersion(
userId,
ts,
docLines,
projectVersion
projectVersion,
ranges = {}
) {
const result = slAddDocUpdate(historyId, doc, userId, ts, docLines)
const result = slAddDocUpdate(historyId, doc, userId, ts, docLines, ranges)
result.version = projectVersion
return result
}
@@ -59,6 +61,7 @@ function slAddFileUpdate(historyId, file, userId, ts, projectId) {
pathname: file.pathname,
url: `http://127.0.0.1:3009/project/${projectId}/file/${file.id}`,
file: file.id,
ranges: undefined,
meta: { user_id: userId, ts: ts.getTime() },
}
}
@@ -132,8 +135,8 @@ function olRenameUpdate(doc, userId, ts, pathname, newPathname) {
}
}
function olAddDocUpdate(doc, userId, ts, fileHash) {
return {
function olAddDocUpdate(doc, userId, ts, fileHash, rangesHash = undefined) {
const update = {
v2Authors: [userId],
timestamp: ts.toJSON(),
authors: [],
@@ -147,10 +150,21 @@ function olAddDocUpdate(doc, userId, ts, fileHash) {
},
],
}
if (rangesHash) {
update.operations[0].file.rangesHash = rangesHash
}
return update
}
function olAddDocUpdateWithVersion(doc, userId, ts, fileHash, version) {
const result = olAddDocUpdate(doc, userId, ts, fileHash)
function olAddDocUpdateWithVersion(
doc,
userId,
ts,
fileHash,
version,
rangesHash = undefined
) {
const result = olAddDocUpdate(doc, userId, ts, fileHash, rangesHash)
result.projectVersion = version
return result
}
@@ -281,6 +295,115 @@ describe('Sending Updates', function () {
)
})
it('should send ranges to the history store', function (done) {
const fileHash = '49e886093b3eacbc12b99a1eb5aeaa44a6b9d90e'
const rangesHash = 'fa9a429ff518bc9e5b2507a96ff0646b566eca65'
const historyRanges = {
trackedChanges: [
{
range: { pos: 4, length: 3 },
tracking: {
type: 'delete',
userId: 'user-id-1',
ts: '2024-01-01T00:00:00.000Z',
},
},
],
comments: [
{
ranges: [{ pos: 0, length: 3 }],
id: 'comment-id-1',
},
],
}
// We need to set up the ranges mock first, as it will be called last.
const createRangesBlob = MockHistoryStore()
.put(`/api/projects/${historyId}/blobs/${rangesHash}`, historyRanges)
.reply(201)
const createBlob = MockHistoryStore()
.put(`/api/projects/${historyId}/blobs/${fileHash}`, 'foo barbaz')
.reply(201)
const addFile = MockHistoryStore()
.post(`/api/projects/${historyId}/legacy_changes`, body => {
expect(body).to.deep.equal([
olAddDocUpdate(
this.doc,
this.userId,
this.timestamp,
fileHash,
rangesHash
),
])
return true
})
.query({ end_version: 0 })
.reply(204)
async.series(
[
cb => {
ProjectHistoryClient.pushRawUpdate(
this.projectId,
slAddDocUpdate(
historyId,
this.doc,
this.userId,
this.timestamp,
'foo barbaz',
{
changes: [
{
op: { p: 4, d: 'bar' },
metadata: {
ts: 1704067200000,
user_id: 'user-id-1',
},
},
],
comments: [
{
op: {
p: 0,
c: 'foo',
t: 'comment-id-1',
},
metadata: { resolved: false },
},
],
}
),
cb
)
},
cb => {
ProjectHistoryClient.flushProject(this.projectId, cb)
},
],
error => {
if (error) {
return done(error)
}
assert(
createBlob.isDone(),
'/api/projects/:historyId/blobs/:hash should have been called to create content blob'
)
assert(
createRangesBlob.isDone(),
'/api/projects/:historyId/blobs/:hash should have been called to create ranges blob'
)
assert(
addFile.isDone(),
`/api/projects/${historyId}/legacy_changes should have been called`
)
done()
}
)
})
it('should strip non-BMP characters in add doc updates before sending to the history store', function (done) {
const fileHash = '11509fe05a41f9cdc51ea081342b5a4fc7c8d0fc'

View file

@@ -53,7 +53,9 @@ describe('BlobManager', function () {
beforeEach(function (done) {
this.UpdateTranslator.isAddUpdate.returns(true)
this.blobHash = 'test hash'
this.HistoryStoreManager.createBlobForUpdate.yields(null, this.blobHash)
this.HistoryStoreManager.createBlobForUpdate.yields(null, {
file: this.blobHash,
})
this.BlobManager.createBlobsForUpdates(
this.project_id,
this.historyId,
@@ -79,7 +81,7 @@ describe('BlobManager', function () {
it('should call the callback with the updates', function () {
const updatesWithBlobs = this.updates.map(update => ({
update,
blobHash: this.blobHash,
blobHashes: { file: this.blobHash },
}))
this.callback.calledWith(null, updatesWithBlobs).should.equal(true)
})
@@ -92,7 +94,9 @@ describe('BlobManager', function () {
this.HistoryStoreManager.createBlobForUpdate
.onFirstCall()
.yields(new Error('random failure'))
this.HistoryStoreManager.createBlobForUpdate.yields(null, this.blobHash)
this.HistoryStoreManager.createBlobForUpdate.yields(null, {
file: this.blobHash,
})
this.BlobManager.createBlobsForUpdates(
this.project_id,
this.historyId,
@@ -118,7 +122,7 @@ describe('BlobManager', function () {
it('should call the callback with the updates', function () {
const updatesWithBlobs = this.updates.map(update => ({
update,
blobHash: this.blobHash,
blobHashes: { file: this.blobHash },
}))
this.callback.calledWith(null, updatesWithBlobs).should.equal(true)
})

View file

@@ -0,0 +1,476 @@
import { expect } from 'chai'
import { createRangeBlobDataFromUpdate } from '../../../../app/js/HistoryBlobTranslator.js'
/** @typedef {import("../../../../app/js/types").AddDocUpdate} AddDocUpdate */
/**
*
* @param {string} pathname
* @param {string} docLines
* @param {AddDocUpdate["ranges"]} ranges
* @returns {AddDocUpdate}
*/
const update = (pathname, docLines, ranges) => {
return {
pathname,
docLines,
ranges,
version: 'version-1',
projectHistoryId: 'project-id',
doc: 'doc',
meta: {
user_id: 'user-id',
ts: 0,
},
}
}
describe('HistoryBlobTranslator', function () {
describe('createRangeBlobDataFromUpdate', function () {
beforeEach(function () {
this.text = 'the quick brown fox jumps over the lazy dog'
})
describe('for update with no ranges', function () {
beforeEach(function () {
this.result = createRangeBlobDataFromUpdate(
update('pathname', this.text, undefined)
)
})
it('should not return ranges', function () {
expect(this.result).to.be.undefined
})
})
describe('for update with empty ranges object', function () {
beforeEach(function () {
this.result = createRangeBlobDataFromUpdate(
update('pathname', this.text, {})
)
})
it('should not return ranges', function () {
expect(this.result).to.be.undefined
})
})
describe('for update with ranges object with empty lists', function () {
beforeEach(function () {
this.result = createRangeBlobDataFromUpdate(
update('pathname', this.text, { changes: [], comments: [] })
)
})
it('should not return ranges', function () {
expect(this.result).to.be.undefined
})
})
describe('for update with ranges object with only comments', function () {
it('should return unmoved ranges', function () {
const result = createRangeBlobDataFromUpdate(
update('pathname', this.text, {
comments: [
{
op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
},
],
})
)
expect(result).to.deep.equal({
comments: [
{
id: 'comment-1',
ranges: [{ pos: 4, length: 5 }],
},
],
trackedChanges: [],
})
})
it('should merge comments ranges into a single comment by id', function () {
const result = createRangeBlobDataFromUpdate(
update('pathname', this.text, {
comments: [
{
op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
},
{
op: { c: 'jumps', p: 20, t: 'comment-1', resolved: false },
},
],
})
)
expect(result).to.deep.equal({
comments: [
{
id: 'comment-1',
ranges: [
{ pos: 4, length: 5 },
{ pos: 20, length: 5 },
],
},
],
trackedChanges: [],
})
})
it('should not merge ranges into a single comment if id differs', function () {
const result = createRangeBlobDataFromUpdate(
update('pathname', this.text, {
comments: [
{
op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
},
{
op: { c: 'jumps', p: 20, t: 'comment-2', resolved: false },
},
],
})
)
expect(result).to.deep.equal({
comments: [
{
id: 'comment-1',
ranges: [{ pos: 4, length: 5 }],
},
{
id: 'comment-2',
ranges: [{ pos: 20, length: 5 }],
},
],
trackedChanges: [],
})
})
})
describe('for update with ranges object with only tracked insertions', function () {
it('should translate into history tracked insertions', function () {
const result = createRangeBlobDataFromUpdate(
update('pathname', this.text, {
changes: [
{
op: { p: 4, i: 'quick' },
metadata: {
ts: '2024-01-01T00:00:00.000Z',
user_id: 'user-1',
},
},
{
op: { p: 10, i: 'brown' },
metadata: {
ts: '2023-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
],
})
)
expect(result).to.deep.equal({
comments: [],
trackedChanges: [
{
range: { pos: 4, length: 5 },
tracking: {
type: 'insert',
userId: 'user-1',
ts: '2024-01-01T00:00:00.000Z',
},
},
{
range: { pos: 10, length: 5 },
tracking: {
type: 'insert',
userId: 'user-2',
ts: '2023-01-01T00:00:00.000Z',
},
},
],
})
})
})
describe('for update with ranges object with mixed tracked changes', function () {
describe('with tracked deletions before insertions', function () {
it('should insert tracked deletions before insertions', function () {
const text = 'the quickrapid brown fox jumps over the lazy dog'
const result = createRangeBlobDataFromUpdate(
update('pathname', text, {
changes: [
{
op: { p: 4, d: 'quick' },
metadata: {
ts: '2024-01-01T00:00:00.000Z',
user_id: 'user-1',
},
},
{
op: { p: 4, hpos: 9, i: 'rapid' },
metadata: {
ts: '2023-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
],
})
)
expect(result).to.deep.equal({
comments: [],
trackedChanges: [
{
range: { pos: 4, length: 5 },
tracking: {
type: 'delete',
userId: 'user-1',
ts: '2024-01-01T00:00:00.000Z',
},
},
{
range: { pos: 9, length: 5 },
tracking: {
type: 'insert',
userId: 'user-2',
ts: '2023-01-01T00:00:00.000Z',
},
},
],
})
})
})
describe('with tracked insertions before deletions', function () {
it('should insert tracked deletions before insertions', function () {
const text = 'the quickrapid brown fox jumps over the lazy dog'
const result = createRangeBlobDataFromUpdate(
update('pathname', text, {
changes: [
{
op: { p: 4, hpos: 9, i: 'rapid' },
metadata: {
ts: '2023-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
{
op: { p: 4, d: 'quick' },
metadata: {
ts: '2024-01-01T00:00:00.000Z',
user_id: 'user-1',
},
},
],
})
)
expect(result).to.deep.equal({
comments: [],
trackedChanges: [
{
range: { pos: 4, length: 5 },
tracking: {
type: 'delete',
userId: 'user-1',
ts: '2024-01-01T00:00:00.000Z',
},
},
{
range: { pos: 9, length: 5 },
tracking: {
type: 'insert',
userId: 'user-2',
ts: '2023-01-01T00:00:00.000Z',
},
},
],
})
})
})
it('should adjust positions', function () {
const text = 'the quick brown fox jumps over the lazy dog'
const result = createRangeBlobDataFromUpdate(
update('pathname', text, {
changes: [
{
op: { p: 4, i: 'quick' },
metadata: {
ts: '2024-01-01T00:00:00.000Z',
user_id: 'user-1',
},
},
{
op: { p: 10, d: 'brown' },
metadata: {
ts: '2023-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
{
op: { p: 30, hpos: 35, i: 'lazy' },
metadata: {
ts: '2022-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
],
})
)
expect(result).to.deep.equal({
comments: [],
trackedChanges: [
{
range: { pos: 4, length: 5 },
tracking: {
type: 'insert',
userId: 'user-1',
ts: '2024-01-01T00:00:00.000Z',
},
},
{
range: { pos: 10, length: 5 },
tracking: {
type: 'delete',
userId: 'user-2',
ts: '2023-01-01T00:00:00.000Z',
},
},
{
range: { pos: 35, length: 4 },
tracking: {
type: 'insert',
userId: 'user-2',
ts: '2022-01-01T00:00:00.000Z',
},
},
],
})
})
})
describe('for update with ranges object with mixed tracked changes and comments', function () {
it('should adjust positions', function () {
const text = 'the quick brown fox jumps over the lazy dog'
const result = createRangeBlobDataFromUpdate(
update('pathname', text, {
comments: [
{
op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
},
{
op: {
c: 'fox',
p: 11,
hpos: 16,
t: 'comment-2',
resolved: false,
},
},
],
changes: [
{
op: { p: 4, i: 'quick' },
metadata: {
ts: '2024-01-01T00:00:00.000Z',
user_id: 'user-1',
},
},
{
op: { p: 10, d: 'brown' },
metadata: {
ts: '2023-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
{
op: { p: 30, hpos: 35, i: 'lazy' },
metadata: {
ts: '2022-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
],
})
)
expect(result).to.deep.equal({
comments: [
{
ranges: [{ pos: 4, length: 5 }],
id: 'comment-1',
},
{
ranges: [{ pos: 16, length: 3 }],
id: 'comment-2',
},
],
trackedChanges: [
{
range: { pos: 4, length: 5 },
tracking: {
type: 'insert',
userId: 'user-1',
ts: '2024-01-01T00:00:00.000Z',
},
},
{
range: { pos: 10, length: 5 },
tracking: {
type: 'delete',
userId: 'user-2',
ts: '2023-01-01T00:00:00.000Z',
},
},
{
range: { pos: 35, length: 4 },
tracking: {
type: 'insert',
userId: 'user-2',
ts: '2022-01-01T00:00:00.000Z',
},
},
],
})
})
it('should adjust comment length', function () {
const text = 'the quick brown fox jumps over the lazy dog'
const result = createRangeBlobDataFromUpdate(
update('pathname', text, {
comments: [
{
op: { c: 'quick fox', p: 4, t: 'comment-1', resolved: false },
},
],
changes: [
{
op: { p: 10, d: 'brown ' },
metadata: {
ts: '2023-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
],
})
)
expect(result).to.deep.equal({
comments: [
{
ranges: [{ pos: 4, length: 9 }],
id: 'comment-1',
},
],
trackedChanges: [
{
range: { pos: 10, length: 6 },
tracking: {
type: 'delete',
userId: 'user-2',
ts: '2023-01-01T00:00:00.000Z',
},
},
],
})
})
})
})
})

View file

@@ -390,7 +390,7 @@ describe('HistoryStoreManager', function () {
this.projectId,
this.historyId,
this.update,
(err, hash) => {
(err, { file: hash }) => {
if (err) {
return done(err)
}

View file

@@ -780,6 +780,12 @@ Four five six\
expect(this.data.comments[2].op.p).to.eq(20)
expect(this.data.comments[2].op.c).to.eq('ov')
})
it('should put resolved status in op', function () {
expect(this.data.comments[0].op.resolved).to.be.false
expect(this.data.comments[1].op.resolved).to.be.false
expect(this.data.comments[2].op.resolved).to.be.false
})
})
describe('with multiple tracked changes and comments', function () {
@@ -919,6 +925,7 @@ Four five six\
c: '',
p: 4,
t: 'comment-1',
resolved: false,
},
},
{
@@ -926,6 +933,7 @@ Four five six\
c: 'brown',
p: 4,
t: 'comment-1',
resolved: false,
},
},
{
@@ -933,6 +941,7 @@ Four five six\
c: '',
p: 29,
t: 'comment-1',
resolved: false,
},
},
{
@@ -940,6 +949,7 @@ Four five six\
c: 'the',
p: 0,
t: 'comment-2',
resolved: true,
},
},
{
@@ -947,6 +957,7 @@ Four five six\
c: 'the',
p: 25,
t: 'comment-2',
resolved: true,
},
},
],

View file

@@ -34,7 +34,7 @@ describe('UpdateTranslator', function () {
ts: this.timestamp,
},
},
blobHash: this.mockBlobHash,
blobHashes: { file: this.mockBlobHash },
},
]
@@ -72,7 +72,7 @@ describe('UpdateTranslator', function () {
ts: this.timestamp,
},
},
blobHash: this.mockBlobHash,
blobHashes: { file: this.mockBlobHash },
},
]
@@ -180,7 +180,7 @@ describe('UpdateTranslator', function () {
ts: this.timestamp,
},
},
blobHash: this.mockBlobHash,
blobHashes: { file: this.mockBlobHash },
},
{
update: {
@@ -192,7 +192,7 @@ describe('UpdateTranslator', function () {
ts: this.timestamp,
},
},
blobHash: this.mockBlobHash,
blobHashes: { file: this.mockBlobHash },
},
]
@@ -291,7 +291,7 @@ describe('UpdateTranslator', function () {
},
url: 'filestore.example.com/test*test.png',
},
blobHash: this.mockBlobHash,
blobHashes: { file: this.mockBlobHash },
},
]
@@ -329,7 +329,7 @@ describe('UpdateTranslator', function () {
},
url: 'filestore.example.com/test.png',
},
blobHash: this.mockBlobHash,
blobHashes: { file: this.mockBlobHash },
},
]
@@ -367,7 +367,7 @@ describe('UpdateTranslator', function () {
},
url: 'filestore.example.com/folder/test.png',
},
blobHash: this.mockBlobHash,
blobHashes: { file: this.mockBlobHash },
},
]
@@ -405,7 +405,7 @@ describe('UpdateTranslator', function () {
ts: this.timestamp,
},
},
blobHash: this.mockBlobHash,
blobHashes: { file: this.mockBlobHash },
},
]
@@ -442,7 +442,7 @@ describe('UpdateTranslator', function () {
ts: this.timestamp,
},
},
blobHash: this.mockBlobHash,
blobHashes: { file: this.mockBlobHash },
},
]

View file

@@ -7,6 +7,7 @@ const logger = require('@overleaf/logger')
const metrics = require('@overleaf/metrics')
const { promisify } = require('util')
const { promisifyMultiResult } = require('@overleaf/promise-utils')
const ProjectGetter = require('../Project/ProjectGetter')
module.exports = {
flushProjectToMongo,
@@ -296,54 +297,78 @@ function updateProjectStructure(
return callback()
}
const {
deletes: docDeletes,
adds: docAdds,
renames: docRenames,
} = _getUpdates('doc', changes.oldDocs, changes.newDocs)
const {
deletes: fileDeletes,
adds: fileAdds,
renames: fileRenames,
} = _getUpdates('file', changes.oldFiles, changes.newFiles)
const updates = [].concat(
docDeletes,
fileDeletes,
docAdds,
fileAdds,
docRenames,
fileRenames
)
const projectVersion =
changes && changes.newProject && changes.newProject.version
if (updates.length < 1) {
return callback()
}
if (projectVersion == null) {
logger.warn(
{ projectId, changes, projectVersion },
'did not receive project version in changes'
)
return callback(new Error('did not receive project version in changes'))
}
_makeRequest(
{
path: `/project/${projectId}`,
json: {
updates,
userId,
version: projectVersion,
projectHistoryId,
source,
},
method: 'POST',
},
ProjectGetter.getProjectWithoutLock(
projectId,
'update-project-structure',
callback
{ overleaf: true },
(err, project) => {
if (err) {
return callback(err)
}
const historyRangesSupport = _.get(
project,
'overleaf.history.rangesSupportEnabled',
false
)
const {
deletes: docDeletes,
adds: docAdds,
renames: docRenames,
} = _getUpdates(
'doc',
changes.oldDocs,
changes.newDocs,
historyRangesSupport
)
const {
deletes: fileDeletes,
adds: fileAdds,
renames: fileRenames,
} = _getUpdates(
'file',
changes.oldFiles,
changes.newFiles,
historyRangesSupport
)
const updates = [].concat(
docDeletes,
fileDeletes,
docAdds,
fileAdds,
docRenames,
fileRenames
)
const projectVersion =
changes && changes.newProject && changes.newProject.version
if (updates.length < 1) {
return callback()
}
if (projectVersion == null) {
logger.warn(
{ projectId, changes, projectVersion },
'did not receive project version in changes'
)
return callback(new Error('did not receive project version in changes'))
}
_makeRequest(
{
path: `/project/${projectId}`,
json: {
updates,
userId,
version: projectVersion,
projectHistoryId,
source,
},
method: 'POST',
},
projectId,
'update-project-structure',
callback
)
}
)
}
@@ -380,7 +405,12 @@ function _makeRequest(options, projectId, metricsKey, callback) {
)
}
function _getUpdates(entityType, oldEntities, newEntities) {
function _getUpdates(
entityType,
oldEntities,
newEntities,
historyRangesSupport
) {
if (!oldEntities) {
oldEntities = []
}
@@ -431,6 +461,8 @@ function _getUpdates(entityType, oldEntities, newEntities) {
id,
pathname: newEntity.path,
docLines: newEntity.docLines,
ranges: newEntity.ranges,
historyRangesSupport,
url: newEntity.url,
hash: newEntity.file != null ? newEntity.file.hash : undefined,
})
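
Putting the two hunks together, an add-doc entry produced by _getUpdates now carries the ranges and the per-project flag, matching the shape asserted in the unit tests below (values illustrative):

{
  type: 'add-doc',
  id: 'doc-id-1',
  pathname: '/foo',
  docLines: 'foo\nbar',
  ranges: { changes: [], comments: [] }, // forwarded verbatim from the change set
  historyRangesSupport: true, // project.overleaf.history.rangesSupportEnabled
  url: undefined,
  hash: undefined,
}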

View file

@@ -100,6 +100,27 @@ module.exports = HistoryController = {
)
},
revertFile(req, res, next) {
const { project_id: projectId } = req.params
const { version, pathname } = req.body
const userId = SessionManager.getLoggedInUserId(req.session)
RestoreManager.revertFile(
userId,
projectId,
version,
pathname,
function (err, entity) {
if (err) {
return next(err)
}
res.json({
type: entity.type,
id: entity._id,
})
}
)
},
getLabels(req, res, next) {
const projectId = req.params.Project_id
HistoryController._makeRequest(
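
A hedged usage sketch of the new endpoint (the route is registered in the web router below; request and response fields are taken from this controller, CSRF and session handling omitted):

const response = await fetch(`/project/${projectId}/revert_file`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ version: 42, pathname: 'main.tex' }),
})
const { type, id } = await response.json() // e.g. { type: 'doc', id: '...' }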

View file

@@ -6,6 +6,8 @@ const EditorController = require('../Editor/EditorController')
const Errors = require('../Errors/Errors')
const moment = require('moment')
const { callbackifyAll } = require('@overleaf/promise-utils')
const { fetchJson } = require('@overleaf/fetch-utils')
const ProjectLocator = require('../Project/ProjectLocator')
const RestoreManager = {
async restoreFileFromV2(userId, projectId, version, pathname) {
@@ -39,6 +41,62 @@ const RestoreManager = {
)
},
async revertFile(userId, projectId, version, pathname) {
const fsPath = await RestoreManager._writeFileVersionToDisk(
projectId,
version,
pathname
)
const basename = Path.basename(pathname)
let dirname = Path.dirname(pathname)
if (dirname === '.') {
// no directory
dirname = ''
}
const parentFolderId = await RestoreManager._findOrCreateFolder(
projectId,
dirname
)
let fileExists = true
try {
// TODO: Is there a better way of doing this?
await ProjectLocator.promises.findElementByPath({
projectId,
path: pathname,
})
} catch (error) {
fileExists = false
}
if (fileExists) {
throw new Errors.InvalidError('File already exists')
}
const importInfo = await FileSystemImportManager.promises.importFile(
fsPath,
pathname
)
if (importInfo.type !== 'doc') {
// TODO: Handle binary files
throw new Errors.InvalidError('File is not editable')
}
const ranges = await RestoreManager._getRangesFromHistory(
projectId,
version,
pathname
)
return await EditorController.promises.addDocWithRanges(
projectId,
parentFolderId,
basename,
importInfo.lines,
ranges,
'revert',
userId
)
},
async _findOrCreateFolder(projectId, dirname) {
const { lastFolder } = await EditorController.promises.mkdirp(
projectId,
@@ -74,6 +132,13 @@ const RestoreManager = {
}/project/${projectId}/version/${version}/${encodeURIComponent(pathname)}`
return await FileWriter.promises.writeUrlToDisk(projectId, url)
},
async _getRangesFromHistory(projectId, version, pathname) {
const url = `${
Settings.apis.project_history.url
}/project/${projectId}/ranges/version/${version}/${encodeURIComponent(pathname)}`
return await fetchJson(url)
},
}
module.exports = { ...callbackifyAll(RestoreManager), promises: RestoreManager }
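
In short, revertFile writes the historical version to disk, refuses to overwrite an existing path, imports the file, rejects non-doc (binary) imports for now, fetches the ranges snapshot from project-history, and re-adds the doc with those ranges. A minimal caller sketch of the promise API (error handling elided):

const entity = await RestoreManager.promises.revertFile(
  userId,
  projectId,
  version, // project-history version to revert to
  'sections/intro.tex'
)
// resolves to the entity returned by EditorController.addDocWithRanges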

View file

@@ -397,6 +397,7 @@ const ProjectEntityUpdateHandler = {
doc,
path: docPath,
docLines: docLines.join('\n'),
ranges,
},
]
DocumentUpdaterHandler.updateProjectStructure(

View file

@@ -10,9 +10,11 @@ const logger = require('@overleaf/logger')
module.exports = {
addEntity: callbackify(addEntity),
importDir: callbackify(importDir),
importFile: callbackify(importFile),
promises: {
addEntity,
importDir,
importFile,
},
}

View file

@@ -769,6 +769,11 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
AuthorizationMiddleware.ensureUserCanWriteProjectContent,
HistoryController.restoreFileFromV2
)
webRouter.post(
'/project/:project_id/revert_file',
AuthorizationMiddleware.ensureUserCanWriteProjectContent,
HistoryController.revertFile
)
webRouter.get(
'/project/:project_id/version/:version/zip',
RateLimiterMiddleware.rateLimit(rateLimiters.downloadProjectRevision),

View file

@@ -44,6 +44,9 @@ describe('DocumentUpdaterHandler', function () {
'../../models/Project': {
Project: (this.Project = {}),
},
'../Project/ProjectGetter': (this.ProjectGetter = {
getProjectWithoutLock: sinon.stub(),
}),
'../../Features/Project/ProjectLocator': {},
'@overleaf/metrics': {
Timer: class {
@@ -52,6 +55,9 @@ describe('DocumentUpdaterHandler', function () {
},
},
})
this.ProjectGetter.getProjectWithoutLock
.withArgs(this.project_id)
.yields(null, this.project)
})
describe('flushProjectToMongo', function () {
@@ -1116,8 +1122,10 @@ describe('DocumentUpdaterHandler', function () {
id: this.docId.toString(),
pathname: '/foo',
docLines: 'a\nb',
historyRangesSupport: false,
url: undefined,
hash: undefined,
ranges: undefined,
},
]
@@ -1169,7 +1177,9 @@ describe('DocumentUpdaterHandler', function () {
pathname: '/bar',
url: 'filestore.example.com/file',
docLines: undefined,
historyRangesSupport: false,
hash: '12345',
ranges: undefined,
},
]
@@ -1280,8 +1290,10 @@ describe('DocumentUpdaterHandler', function () {
id: this.docId.toString(),
pathname: '/foo.doc',
docLines: 'hello there',
historyRangesSupport: false,
url: undefined,
hash: undefined,
ranges: undefined,
},
]
@@ -1337,6 +1349,128 @@ describe('DocumentUpdaterHandler', function () {
)
})
})
describe('when ranges are present', function () {
beforeEach(function () {
this.docId = new ObjectId()
this.ranges = {
changes: [
{
op: { p: 0, i: 'foo' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 7, d: ' baz' },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
],
comments: [
{
op: { p: 4, c: 'bar', t: 'comment-1' },
metadata: { resolved: false },
},
],
}
this.changes = {
newDocs: [
{
path: '/foo',
docLines: 'foo\nbar',
doc: { _id: this.docId },
ranges: this.ranges,
},
],
newProject: { version: this.version },
}
})
it('should forward ranges', function (done) {
const updates = [
{
type: 'add-doc',
id: this.docId.toString(),
pathname: '/foo',
docLines: 'foo\nbar',
historyRangesSupport: false,
url: undefined,
hash: undefined,
ranges: this.ranges,
},
]
this.handler.updateProjectStructure(
this.project_id,
this.projectHistoryId,
this.user_id,
this.changes,
this.source,
() => {
this.request
.calledWith({
url: this.url,
method: 'POST',
json: {
updates,
userId: this.user_id,
version: this.version,
projectHistoryId: this.projectHistoryId,
source: this.source,
},
timeout: 30 * 1000,
})
.should.equal(true)
done()
}
)
})
it('should include flag when history ranges support is enabled', function (done) {
this.ProjectGetter.getProjectWithoutLock
.withArgs(this.project_id)
.yields(null, {
_id: this.project_id,
overleaf: { history: { rangesSupportEnabled: true } },
})
const updates = [
{
type: 'add-doc',
id: this.docId.toString(),
pathname: '/foo',
docLines: 'foo\nbar',
historyRangesSupport: true,
url: undefined,
hash: undefined,
ranges: this.ranges,
},
]
this.handler.updateProjectStructure(
this.project_id,
this.projectHistoryId,
this.user_id,
this.changes,
this.source,
() => {
this.request
.calledWith({
url: this.url,
method: 'POST',
json: {
updates,
userId: this.user_id,
version: this.version,
projectHistoryId: this.projectHistoryId,
source: this.source,
},
timeout: 30 * 1000,
})
.should.equal(true)
done()
}
)
})
})
})
})
})

View file

@@ -22,6 +22,7 @@ describe('RestoreManager', function () {
'../Editor/EditorController': (this.EditorController = {
promises: {},
}),
'../Project/ProjectLocator': (this.ProjectLocator = { promises: {} }),
},
})
this.user_id = 'mock-user-id'
@@ -196,4 +197,119 @@ describe('RestoreManager', function () {
})
})
})
describe('revertFile', function () {
beforeEach(function () {
this.RestoreManager.promises._writeFileVersionToDisk = sinon
.stub()
.resolves((this.fsPath = '/tmp/path/on/disk'))
this.RestoreManager.promises._findOrCreateFolder = sinon
.stub()
.resolves((this.folder_id = 'mock-folder-id'))
this.FileSystemImportManager.promises.addEntity = sinon
.stub()
.resolves((this.entity = 'mock-entity'))
this.RestoreManager.promises._getRangesFromHistory = sinon
.stub()
.rejects()
})
describe('with an existing file in the current project', function () {
beforeEach(function () {
this.pathname = 'foo.tex'
this.ProjectLocator.promises.findElementByPath = sinon.stub().resolves()
})
it('should reject', function () {
return expect(
this.RestoreManager.promises.revertFile(
this.user_id,
this.project_id,
this.version,
this.pathname
)
)
.to.eventually.be.rejectedWith('File already exists')
.and.be.instanceOf(Errors.InvalidError)
})
})
describe('when reverting a binary file', function () {
beforeEach(function () {
this.pathname = 'foo.png'
this.FileSystemImportManager.promises.importFile = sinon
.stub()
.resolves({ type: 'binary' })
})
it('should reject', function () {
return expect(
this.RestoreManager.promises.revertFile(
this.user_id,
this.project_id,
this.version,
this.pathname
)
)
.to.eventually.be.rejectedWith('File is not editable')
.and.be.instanceOf(Errors.InvalidError)
})
})
describe("when reverting a file that doesn't current exist", function () {
beforeEach(async function () {
this.pathname = 'foo.tex'
this.ProjectLocator.promises.findElementByPath = sinon.stub().rejects()
this.tracked_changes = [
{
op: { pos: 4, i: 'bar' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { pos: 8, d: 'qux' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-2' },
},
]
this.comments = [{ op: { t: 'comment-1', p: 0, c: 'foo' } }]
this.FileSystemImportManager.promises.importFile = sinon
.stub()
.resolves({ type: 'doc', lines: ['foo', 'bar', 'baz'] })
this.RestoreManager.promises._getRangesFromHistory = sinon
.stub()
.resolves({ changes: this.tracked_changes, comments: this.comments })
this.EditorController.promises.addDocWithRanges = sinon
.stub()
.resolves(
(this.addedFile = { doc: 'mock-doc', folderId: 'mock-folder' })
)
this.data = await this.RestoreManager.promises.revertFile(
this.user_id,
this.project_id,
this.version,
this.pathname
)
})
it('should import the file', function () {
expect(
this.EditorController.promises.addDocWithRanges
).to.have.been.calledWith(
this.project_id,
this.folder_id,
'foo.tex',
['foo', 'bar', 'baz'],
{ changes: this.tracked_changes, comments: this.comments }
)
})
it('should return the created entity', function () {
expect(this.data).to.equal(this.addedFile)
})
it('should look up ranges', function () {
expect(
this.RestoreManager.promises._getRangesFromHistory
).to.have.been.calledWith(this.project_id, this.version, this.pathname)
})
})
})
})

View file

@@ -554,6 +554,7 @@ describe('ProjectEntityUpdateHandler', function () {
doc: this.newDoc,
path: this.path,
docLines: this.docLines.join('\n'),
ranges: {},
},
]
this.DocumentUpdaterHandler.updateProjectStructure.should.have.been.calledWith(