Merge pull request #17958 from overleaf/mj-web-revert-file

[web] Initial version of file reverting

GitOrigin-RevId: ef500b2c3ddf21b0acb6650c2526edd10ab2cc18
Mathias Jakobsen 2024-05-28 14:17:55 +01:00 committed by Copybot
parent edaba14ed1
commit f14fdc7b7c
25 changed files with 1605 additions and 222 deletions

View file

@@ -38,7 +38,9 @@ const {
} = require('./lib/operation/scan_op')
const TrackedChange = require('./lib/file_data/tracked_change')
const TrackedChangeList = require('./lib/file_data/tracked_change_list')
+const TrackingProps = require('./lib/file_data/tracking_props')
const Range = require('./lib/range')
+const CommentList = require('./lib/file_data/comment_list')

exports.AddCommentOperation = AddCommentOperation
exports.Author = Author
@@ -79,3 +81,5 @@ exports.RemoveOp = RemoveOp
exports.TrackedChangeList = TrackedChangeList
exports.TrackedChange = TrackedChange
exports.Range = Range
+exports.CommentList = CommentList
+exports.TrackingProps = TrackingProps

View file

@@ -86,9 +86,19 @@ const ProjectHistoryRedisManager = {
projectUpdate,
source
) {
+let docLines = projectUpdate.docLines
+let ranges
+if (projectUpdate.historyRangesSupport && projectUpdate.ranges) {
+docLines = addTrackedDeletesToContent(
+docLines,
+projectUpdate.ranges.changes ?? []
+)
+ranges = HistoryConversions.toHistoryRanges(projectUpdate.ranges)
+}
projectUpdate = {
pathname: projectUpdate.pathname,
-docLines: projectUpdate.docLines,
+docLines,
url: projectUpdate.url,
meta: {
user_id: userId,
@@ -97,6 +107,9 @@
version: projectUpdate.version,
projectHistoryId,
}
+if (ranges) {
+projectUpdate.ranges = ranges
+}
projectUpdate[entityType] = entityId
if (source != null) {
projectUpdate.meta.source = source
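When history ranges support is enabled, the queued update carries docLines with tracked-deleted text re-inserted, plus the ranges converted to history coordinates. A minimal sketch of what the (real) addTrackedDeletesToContent helper has to do; the helper itself is not shown in this hunk, and the sketch assumes a flat content string and changes sorted by position:

```js
// Illustrative only: re-insert tracked-deleted text so the history snapshot
// contains it. Assumes `changes` carry doc-updater ops ({ p, d } for tracked
// deletions) sorted by ascending position.
function addTrackedDeletesToContent(content, changes) {
  let result = content
  let offset = 0
  for (const change of changes) {
    if (change.op.d != null) {
      const pos = change.op.p + offset
      result = result.slice(0, pos) + change.op.d + result.slice(pos)
      offset += change.op.d.length
    }
  }
  return result
}

// From the unit test below: 'the quick fox jumps over the lazy dog' with a
// tracked deletion of ' brown' at p=9 becomes
// 'the quick brown fox jumps over the lazy dog'; later ops gain an hpos
// shifted by the re-inserted length.
```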

View file

@@ -41,6 +41,8 @@ export type CommentOp = {
p: number
t: string
u?: boolean
+// Used by project-history when restoring CommentSnapshots
+resolved?: boolean
}

/**
@@ -54,7 +56,7 @@ export type Ranges = {
export type Comment = {
id: string
op: CommentOp
-metadata: {
+metadata?: {
user_id: string
ts: string
}
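For reference, a comment op that project-history can now restore round-trips with the new flag (values illustrative, field meanings from the type above):

```js
const commentOp = {
  c: 'lazy', // quoted text
  p: 29, // position in the document
  t: 'comment-1', // comment thread id
  resolved: false, // set when restoring from a CommentSnapshot
}
```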

View file

@@ -125,19 +125,22 @@ describe('ProjectHistoryRedisManager', function () {
})

describe('queueAddEntity', function () {
-beforeEach(async function () {
+beforeEach(function () {
this.doc_id = 1234
this.rawUpdate = {
pathname: (this.pathname = '/old'),
docLines: (this.docLines = 'a\nb'),
version: (this.version = 2),
-url: (this.url = 'filestore.example.com'),
}

this.ProjectHistoryRedisManager.promises.queueOps = sinon
.stub()
.resolves()
+})
+
+it('should queue an update', async function () {
+this.rawUpdate.url = this.url = 'filestore.example.com'
+
await this.ProjectHistoryRedisManager.promises.queueAddEntity(
this.project_id,
this.projectHistoryId,
@@ -147,9 +150,7 @@ describe('ProjectHistoryRedisManager', function () {
this.rawUpdate,
this.source
)
-})
-
-it('should queue an update', function () {

const update = {
pathname: this.pathname,
docLines: this.docLines,
@@ -169,6 +170,190 @@ describe('ProjectHistoryRedisManager', function () {
.should.equal(true)
})
it('should forward history compatible ranges if history ranges support is enabled', async function () {
this.rawUpdate.historyRangesSupport = true
this.docLines = 'the quick fox jumps over the lazy dog'
const ranges = {
changes: [
{
op: { p: 4, i: 'quick' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 9, d: ' brown' },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 14, i: 'jumps' },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
],
comments: [
{
op: { p: 29, c: 'lazy', t: 'comment-1' },
metadata: { resolved: false },
},
],
}
this.rawUpdate.ranges = ranges
this.rawUpdate.docLines = this.docLines
await this.ProjectHistoryRedisManager.promises.queueAddEntity(
this.project_id,
this.projectHistoryId,
'doc',
this.doc_id,
this.user_id,
this.rawUpdate,
this.source
)
const historyCompatibleRanges = {
comments: [
{
op: { p: 29, c: 'lazy', t: 'comment-1', hpos: 35 },
metadata: { resolved: false },
},
],
changes: [
{
op: { p: 4, i: 'quick' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 9, d: ' brown' },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 14, i: 'jumps', hpos: 20 },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
],
}
const update = {
pathname: this.pathname,
docLines: 'the quick brown fox jumps over the lazy dog',
meta: {
user_id: this.user_id,
ts: new Date(),
source: this.source,
},
version: this.version,
projectHistoryId: this.projectHistoryId,
ranges: historyCompatibleRanges,
doc: this.doc_id,
}
expect(
this.ProjectHistoryRedisManager.promises.queueOps
).to.have.been.calledWithExactly(this.project_id, JSON.stringify(update))
})
it('should not forward ranges if history ranges support is disabled', async function () {
this.rawUpdate.historyRangesSupport = false
const ranges = {
changes: [
{
op: { p: 0, i: 'foo' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 7, d: ' baz' },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
],
comments: [
{
op: { p: 4, c: 'bar', t: 'comment-1' },
metadata: { resolved: false },
},
],
}
this.rawUpdate.ranges = ranges
await this.ProjectHistoryRedisManager.promises.queueAddEntity(
this.project_id,
this.projectHistoryId,
'doc',
this.doc_id,
this.user_id,
this.rawUpdate,
this.source
)
const update = {
pathname: this.pathname,
docLines: this.docLines,
meta: {
user_id: this.user_id,
ts: new Date(),
source: this.source,
},
version: this.version,
projectHistoryId: this.projectHistoryId,
doc: this.doc_id,
}
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(update))
.should.equal(true)
})
it('should not forward ranges if history ranges support is undefined', async function () {
this.rawUpdate.historyRangesSupport = false
const ranges = {
changes: [
{
op: { p: 0, i: 'foo' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 7, d: ' baz' },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
],
comments: [
{
op: { p: 4, c: 'bar', t: 'comment-1' },
metadata: { resolved: false },
},
],
}
this.rawUpdate.ranges = ranges
await this.ProjectHistoryRedisManager.promises.queueAddEntity(
this.project_id,
this.projectHistoryId,
'doc',
this.doc_id,
this.user_id,
this.rawUpdate,
this.source
)
const update = {
pathname: this.pathname,
docLines: this.docLines,
meta: {
user_id: this.user_id,
ts: new Date(),
source: this.source,
},
version: this.version,
projectHistoryId: this.projectHistoryId,
doc: this.doc_id,
}
this.ProjectHistoryRedisManager.promises.queueOps
.calledWithExactly(this.project_id, JSON.stringify(update))
.should.equal(true)
})
})
describe('queueResyncProjectStructure', function () {
it('should queue an update', function () {})
})

@@ -299,6 +484,11 @@ describe('ProjectHistoryRedisManager', function () {
this.settings.max_doc_length
)
})
+it('should queue an update', function () {
+this.ProjectHistoryRedisManager.promises.queueOps
+.calledWithExactly(this.project_id, JSON.stringify(this.update))
+.should.equal(true)
+})
})
})
})

View file

@@ -59,7 +59,7 @@ export function createBlobsForUpdates(
projectId,
historyId,
update,
-(err, hash) => {
+(err, hashes) => {
if (err) {
OError.tag(err, 'retry: error creating blob', {
projectId,
@@ -68,18 +68,18 @@
})
_cb(err)
} else {
-_cb(null, hash)
+_cb(null, hashes)
}
}
)
})
},
-(error, blobHash) => {
+(error, blobHashes) => {
if (error) {
if (!firstBlobCreationError) {
firstBlobCreationError = error
}
-return cb(null, { update, blobHash })
+return cb(null, { update, blobHashes })
}

extendLock(error => {
@@ -88,7 +88,7 @@
firstBlobCreationError = error
}
}
-cb(null, { update, blobHash })
+cb(null, { update, blobHashes })
})
}
)

View file

@@ -0,0 +1,122 @@
// @ts-check
import {
Range,
TrackedChange,
TrackedChangeList,
CommentList,
Comment,
TrackingProps,
} from 'overleaf-editor-core'
import logger from '@overleaf/logger'
import OError from '@overleaf/o-error'
/**
* @typedef {import('./types').AddDocUpdate} AddDocUpdate
* @typedef {import('overleaf-editor-core/lib/types').CommentRawData} CommentRawData
* @typedef {import('overleaf-editor-core/lib/types').TrackedChangeRawData} TrackedChangeRawData
* */
/**
*
* @param {AddDocUpdate} update
* @returns {{trackedChanges: TrackedChangeRawData[], comments: CommentRawData[]} | undefined}
*/
export function createRangeBlobDataFromUpdate(update) {
logger.debug({ update }, 'createBlobDataFromUpdate')
if (update.doc == null || update.docLines == null) {
throw new OError('Not an AddFileUpdate')
}
if (
!update.ranges ||
(update.ranges.changes == null && update.ranges.comments == null)
) {
return undefined
}
if (
(!update.ranges.changes || update.ranges.changes.length === 0) &&
(!update.ranges.comments || update.ranges.comments.length === 0)
) {
return undefined
}
const sortedRanges = [...(update.ranges.changes || [])].sort((a, b) => {
if (a.op.p !== b.op.p) {
return a.op.p - b.op.p
}
if ('i' in a.op && a.op.i != null && 'd' in b.op && b.op.d != null) {
// Move deletes before inserts
return 1
}
return -1
})
const tcList = new TrackedChangeList([])
for (const change of sortedRanges) {
if ('d' in change.op && change.op.d != null) {
const length = change.op.d.length
const range = new Range(change.op.hpos ?? change.op.p, length)
tcList.add(
new TrackedChange(
range,
new TrackingProps(
'delete',
change.metadata.user_id,
new Date(change.metadata.ts)
)
)
)
} else if ('i' in change.op && change.op.i != null) {
const length = change.op.i.length
const range = new Range(change.op.hpos ?? change.op.p, length)
tcList.add(
new TrackedChange(
range,
new TrackingProps(
'insert',
change.metadata.user_id,
new Date(change.metadata.ts)
)
)
)
}
}
const comments = [...(update.ranges.comments || [])].sort((a, b) => {
return a.op.p - b.op.p
})
/** @type {Map<string, {ranges: Range[], resolved: boolean}>} */
const commentMap = new Map()
for (const comment of comments) {
const range = new Range(
comment.op.hpos ?? comment.op.p,
comment.op.hlen ?? comment.op.c.length
)
const id = comment.op.t
if (!commentMap.has(id)) {
commentMap.set(id, {
ranges: [],
resolved: comment.op.resolved ?? false,
})
}
const entry = commentMap.get(id)
if (!entry) {
throw new Error('Comment entry not found')
}
if (entry.resolved !== (comment.op.resolved ?? false)) {
throw new Error('Mismatching resolved status for comment')
}
entry.ranges.push(range)
}
const commentList = new CommentList(
[...commentMap.entries()].map(
([id, commentObj]) =>
new Comment(id, commentObj.ranges, commentObj.resolved)
)
)
return { trackedChanges: tcList.toRaw(), comments: commentList.toRaw() }
}
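A usage sketch for the new translator, with input and expected output copied from the unit tests later in this commit (the import path is illustrative):

```js
import { createRangeBlobDataFromUpdate } from './HistoryBlobTranslator.js'

// The argument is an AddDocUpdate as defined in types.ts.
const blobData = createRangeBlobDataFromUpdate({
  doc: 'doc-id',
  pathname: 'main.tex',
  docLines: 'the quick brown fox jumps over the lazy dog',
  ranges: {
    comments: [{ op: { c: 'quick', p: 4, t: 'comment-1', resolved: false } }],
    changes: [
      {
        op: { p: 4, i: 'quick' },
        metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
      },
    ],
  },
  meta: { user_id: 'user-1', ts: 0 },
  version: 'version-1',
  projectHistoryId: 'project-id',
})
// blobData:
// {
//   comments: [{ id: 'comment-1', ranges: [{ pos: 4, length: 5 }] }],
//   trackedChanges: [
//     {
//       range: { pos: 4, length: 5 },
//       tracking: {
//         type: 'insert',
//         userId: 'user-1',
//         ts: '2024-01-01T00:00:00.000Z',
//       },
//     },
//   ],
// }
```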

View file

@@ -16,6 +16,7 @@ import * as Versions from './Versions.js'
import * as Errors from './Errors.js'
import * as LocalFileWriter from './LocalFileWriter.js'
import * as HashManager from './HashManager.js'
+import * as HistoryBlobTranslator from './HistoryBlobTranslator.js'

const HTTP_REQUEST_TIMEOUT = Settings.apis.history_v1.requestTimeout
@@ -230,23 +231,61 @@
)
}

-export function createBlobForUpdate(projectId, historyId, update, callback) {
-callback = _.once(callback)
-if (update.doc != null && update.docLines != null) {
+function createBlobFromString(historyId, data, fileId, callback) {
const stringStream = new StringStream()
-stringStream.push(update.docLines)
+stringStream.push(data)
stringStream.push(null)
LocalFileWriter.bufferOnDisk(
stringStream,
'',
-`project-${projectId}-doc-${update.doc}`,
+fileId,
(fsPath, cb) => {
_createBlob(historyId, fsPath, cb)
},
callback
)
+}
export function createBlobForUpdate(projectId, historyId, update, callback) {
callback = _.once(callback)
if (update.doc != null && update.docLines != null) {
let ranges
try {
ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update)
} catch (error) {
return callback(error)
}
createBlobFromString(
historyId,
update.docLines,
`project-${projectId}-doc-${update.doc}`,
(err, fileHash) => {
if (err) {
return callback(err)
}
if (ranges) {
createBlobFromString(
historyId,
JSON.stringify(ranges),
`project-${projectId}-doc-${update.doc}-ranges`,
(err, rangesHash) => {
if (err) {
return callback(err)
}
logger.debug(
{ fileHash, rangesHash },
'created blobs for both ranges and content'
)
return callback(null, { file: fileHash, ranges: rangesHash })
}
)
} else {
logger.debug({ fileHash }, 'created blob for content')
return callback(null, { file: fileHash })
}
}
)
} else if (update.file != null && update.url != null) {
// Rewrite the filestore url to point to the location in the local
// settings for this service (this avoids problems with cross-
@@ -274,7 +313,13 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
(fsPath, cb) => {
_createBlob(historyId, fsPath, cb)
},
-callback
+(err, fileHash) => {
+if (err) {
+return callback(err)
+}
+logger.debug({ fileHash }, 'created blob for file')
+callback(null, { file: fileHash })
+}
)
})
.catch(err => {
@@ -291,7 +336,13 @@ export function createBlobForUpdate(projectId, historyId, update, callback) {
(fsPath, cb) => {
_createBlob(historyId, fsPath, cb)
},
-callback
+(err, fileHash) => {
+if (err) {
+return callback(err)
+}
+logger.debug({ fileHash }, 'created empty blob for file')
+callback(null, { file: fileHash })
+}
)
emptyStream.push(null) // send an EOF signal
} else {
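Callers of createBlobForUpdate now receive an object of hashes rather than a single hash. A consumer sketch, assuming the caller (like BlobManager above) simply forwards the result:

```js
HistoryStoreManager.createBlobForUpdate(
  projectId,
  historyId,
  update,
  (err, blobHashes) => {
    if (err) {
      return callback(err)
    }
    // blobHashes.file is always set; blobHashes.ranges is only set for
    // add-doc updates that carried tracked changes or comments.
    const { file, ranges } = blobHashes
    callback(null, { update, blobHashes: { file, ranges } })
  }
)
```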

View file

@@ -185,6 +185,7 @@ async function getRangesSnapshot(projectId, version, pathname) {
p: position,
c: commentRangeContent,
t: comment.id,
+resolved: comment.resolved,
},
})
}

View file

@@ -55,14 +55,16 @@ function _convertToChange(projectId, updateWithBlob) {
]
projectVersion = update.version
} else if (isAddUpdate(update)) {
-operations = [
-{
+const op = {
pathname: _convertPathname(update.pathname),
file: {
-hash: updateWithBlob.blobHash,
+hash: updateWithBlob.blobHashes.file,
},
-},
-]
+}
+if (_isAddDocUpdate(update)) {
+op.file.rangesHash = updateWithBlob.blobHashes.ranges
+}
+operations = [op]
projectVersion = update.version
} else if (isTextUpdate(update)) {
const docLength = update.meta.history_doc_length ?? update.meta.doc_length
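The resulting add-doc operation sent to the v2 history store then embeds both hashes; the hash values below are the ones used in the acceptance test, the pathname is illustrative:

```js
// add-doc operation as emitted by _convertToChange for a doc with ranges
const operation = {
  pathname: 'main.tex',
  file: {
    hash: '49e886093b3eacbc12b99a1eb5aeaa44a6b9d90e', // content blob
    // rangesHash is only attached for add-doc updates (see _isAddDocUpdate)
    rangesHash: 'fa9a429ff518bc9e5b2507a96ff0646b566eca65', // ranges blob
  },
}
```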

View file

@@ -1,3 +1,5 @@
+import { HistoryRanges } from '../../../document-updater/app/js/types'
+
export type Update =
| TextUpdate
| AddDocUpdate
@@ -51,7 +53,8 @@ type ProjectUpdateBase = {
export type AddDocUpdate = ProjectUpdateBase & {
pathname: string
-docLines: string[]
+docLines: string
+ranges?: HistoryRanges
}

export type AddFileUpdate = ProjectUpdateBase & {
@@ -134,7 +137,10 @@ export type CommentOp = {
export type UpdateWithBlob = {
update: Update
-blobHash: string
+blobHashes: {
+file: string
+ranges?: string
+}
}

export type RawOrigin = {
@@ -173,6 +179,7 @@ export type CommentSnapshot = {
p: number
t: string
c: string
+resolved: boolean
}
}
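So an UpdateWithBlob for an add-doc update now pairs the update with both hashes (hash values illustrative):

```js
const updateWithBlob = {
  update, // the AddDocUpdate it was created from
  blobHashes: {
    file: '49e886093b3eacbc12b99a1eb5aeaa44a6b9d90e',
    ranges: 'fa9a429ff518bc9e5b2507a96ff0646b566eca65', // absent for docs without ranges
  },
}
```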

View file

@@ -30,10 +30,11 @@ function slTextUpdate(historyId, doc, userId, v, ts, op) {
}
}

-function slAddDocUpdate(historyId, doc, userId, ts, docLines) {
+function slAddDocUpdate(historyId, doc, userId, ts, docLines, ranges = {}) {
return {
projectHistoryId: historyId,
pathname: doc.pathname,
+ranges,
docLines,
doc: doc.id,
meta: { user_id: userId, ts: ts.getTime() },
@@ -46,9 +47,10 @@ function slAddDocUpdateWithVersion(
userId,
ts,
docLines,
-projectVersion
+projectVersion,
+ranges = {}
) {
-const result = slAddDocUpdate(historyId, doc, userId, ts, docLines)
+const result = slAddDocUpdate(historyId, doc, userId, ts, docLines, ranges)
result.version = projectVersion
return result
}
@@ -59,6 +61,7 @@ function slAddFileUpdate(historyId, file, userId, ts, projectId) {
pathname: file.pathname,
url: `http://127.0.0.1:3009/project/${projectId}/file/${file.id}`,
file: file.id,
+ranges: undefined,
meta: { user_id: userId, ts: ts.getTime() },
}
}
@@ -132,8 +135,8 @@ function olRenameUpdate(doc, userId, ts, pathname, newPathname) {
}
}

-function olAddDocUpdate(doc, userId, ts, fileHash) {
-return {
+function olAddDocUpdate(doc, userId, ts, fileHash, rangesHash = undefined) {
+const update = {
v2Authors: [userId],
timestamp: ts.toJSON(),
authors: [],
@@ -147,10 +150,21 @@
},
],
}
+if (rangesHash) {
+update.operations[0].file.rangesHash = rangesHash
+}
+return update
}

-function olAddDocUpdateWithVersion(doc, userId, ts, fileHash, version) {
-const result = olAddDocUpdate(doc, userId, ts, fileHash)
+function olAddDocUpdateWithVersion(
+doc,
+userId,
+ts,
+fileHash,
+version,
+rangesHash = undefined
+) {
+const result = olAddDocUpdate(doc, userId, ts, fileHash, rangesHash)
result.projectVersion = version
return result
}
@@ -281,6 +295,115 @@ describe('Sending Updates', function () {
)
})
it('should send ranges to the history store', function (done) {
const fileHash = '49e886093b3eacbc12b99a1eb5aeaa44a6b9d90e'
const rangesHash = 'fa9a429ff518bc9e5b2507a96ff0646b566eca65'
const historyRanges = {
trackedChanges: [
{
range: { pos: 4, length: 3 },
tracking: {
type: 'delete',
userId: 'user-id-1',
ts: '2024-01-01T00:00:00.000Z',
},
},
],
comments: [
{
ranges: [{ pos: 0, length: 3 }],
id: 'comment-id-1',
},
],
}
// We need to set up the ranges mock first, as we will call it last.
const createRangesBlob = MockHistoryStore()
.put(`/api/projects/${historyId}/blobs/${rangesHash}`, historyRanges)
.reply(201)
const createBlob = MockHistoryStore()
.put(`/api/projects/${historyId}/blobs/${fileHash}`, 'foo barbaz')
.reply(201)
const addFile = MockHistoryStore()
.post(`/api/projects/${historyId}/legacy_changes`, body => {
expect(body).to.deep.equal([
olAddDocUpdate(
this.doc,
this.userId,
this.timestamp,
fileHash,
rangesHash
),
])
return true
})
.query({ end_version: 0 })
.reply(204)
async.series(
[
cb => {
ProjectHistoryClient.pushRawUpdate(
this.projectId,
slAddDocUpdate(
historyId,
this.doc,
this.userId,
this.timestamp,
'foo barbaz',
{
changes: [
{
op: { p: 4, d: 'bar' },
metadata: {
ts: 1704067200000,
user_id: 'user-id-1',
},
},
],
comments: [
{
op: {
p: 0,
c: 'foo',
t: 'comment-id-1',
},
metadata: { resolved: false },
},
],
}
),
cb
)
},
cb => {
ProjectHistoryClient.flushProject(this.projectId, cb)
},
],
error => {
if (error) {
return done(error)
}
assert(
createBlob.isDone(),
'/api/projects/:historyId/blobs/:hash should have been called to create content blob'
)
assert(
createRangesBlob.isDone(),
'/api/projects/:historyId/blobs/:hash should have been called to create ranges blob'
)
assert(
addFile.isDone(),
`/api/projects/${historyId}/changes should have been called`
)
done()
}
)
})
it('should strip non-BMP characters in add doc updates before sending to the history store', function (done) {
const fileHash = '11509fe05a41f9cdc51ea081342b5a4fc7c8d0fc'

View file

@@ -53,7 +53,9 @@ describe('BlobManager', function () {
beforeEach(function (done) {
this.UpdateTranslator.isAddUpdate.returns(true)
this.blobHash = 'test hash'
-this.HistoryStoreManager.createBlobForUpdate.yields(null, this.blobHash)
+this.HistoryStoreManager.createBlobForUpdate.yields(null, {
+file: this.blobHash,
+})
this.BlobManager.createBlobsForUpdates(
this.project_id,
this.historyId,
@@ -79,7 +81,7 @@ describe('BlobManager', function () {
it('should call the callback with the updates', function () {
const updatesWithBlobs = this.updates.map(update => ({
update,
-blobHash: this.blobHash,
+blobHashes: { file: this.blobHash },
}))
this.callback.calledWith(null, updatesWithBlobs).should.equal(true)
})
@@ -92,7 +94,9 @@ describe('BlobManager', function () {
this.HistoryStoreManager.createBlobForUpdate
.onFirstCall()
.yields(new Error('random failure'))
-this.HistoryStoreManager.createBlobForUpdate.yields(null, this.blobHash)
+this.HistoryStoreManager.createBlobForUpdate.yields(null, {
+file: this.blobHash,
+})
this.BlobManager.createBlobsForUpdates(
this.project_id,
this.historyId,
@@ -118,7 +122,7 @@ describe('BlobManager', function () {
it('should call the callback with the updates', function () {
const updatesWithBlobs = this.updates.map(update => ({
update,
-blobHash: this.blobHash,
+blobHashes: { file: this.blobHash },
}))
this.callback.calledWith(null, updatesWithBlobs).should.equal(true)
})

View file

@@ -0,0 +1,476 @@
import { expect } from 'chai'
import { createRangeBlobDataFromUpdate } from '../../../../app/js/HistoryBlobTranslator.js'
/** @typedef {import("../../../../app/js/types").AddDocUpdate} AddDocUpdate */
/**
*
* @param {string} pathname
* @param {string} docLines
* @param {AddDocUpdate["ranges"]} ranges
* @returns {AddDocUpdate}
*/
const update = (pathname, docLines, ranges) => {
return {
pathname,
docLines,
ranges,
version: 'version-1',
projectHistoryId: 'project-id',
doc: 'doc',
meta: {
user_id: 'user-id',
ts: 0,
},
}
}
describe('HistoryBlobTranslator', function () {
describe('createBlobDataFromUpdate', function () {
beforeEach(function () {
this.text = 'the quick brown fox jumps over the lazy dog'
})
describe('for update with no ranges', function () {
beforeEach(function () {
this.result = createRangeBlobDataFromUpdate(
update('pathname', this.text, undefined)
)
})
it('should not return ranges', function () {
expect(this.result).to.be.undefined
})
})
describe('for update with empty ranges object', function () {
beforeEach(function () {
this.result = createRangeBlobDataFromUpdate(
update('pathname', this.text, {})
)
})
it('should not return ranges', function () {
expect(this.result).to.be.undefined
})
})
describe('for update with ranges object with empty lists', function () {
beforeEach(function () {
this.result = createRangeBlobDataFromUpdate(
update('pathname', this.text, { changes: [], comments: [] })
)
})
it('should not return ranges', function () {
expect(this.result).to.be.undefined
})
})
describe('for update with ranges object with only comments', function () {
it('should return unmoved ranges', function () {
const result = createRangeBlobDataFromUpdate(
update('pathname', this.text, {
comments: [
{
op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
},
],
})
)
expect(result).to.deep.equal({
comments: [
{
id: 'comment-1',
ranges: [{ pos: 4, length: 5 }],
},
],
trackedChanges: [],
})
})
it('should merge comments ranges into a single comment by id', function () {
const result = createRangeBlobDataFromUpdate(
update('pathname', this.text, {
comments: [
{
op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
},
{
op: { c: 'jumps', p: 20, t: 'comment-1', resolved: false },
},
],
})
)
expect(result).to.deep.equal({
comments: [
{
id: 'comment-1',
ranges: [
{ pos: 4, length: 5 },
{ pos: 20, length: 5 },
],
},
],
trackedChanges: [],
})
})
it('should not merge ranges into a single comment if id differs', function () {
const result = createRangeBlobDataFromUpdate(
update('pathname', this.text, {
comments: [
{
op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
},
{
op: { c: 'jumps', p: 20, t: 'comment-2', resolved: false },
},
],
})
)
expect(result).to.deep.equal({
comments: [
{
id: 'comment-1',
ranges: [{ pos: 4, length: 5 }],
},
{
id: 'comment-2',
ranges: [{ pos: 20, length: 5 }],
},
],
trackedChanges: [],
})
})
})
describe('for update with ranges object with only tracked insertions', function () {
it('should translate into history tracked insertions', function () {
const result = createRangeBlobDataFromUpdate(
update('pathname', this.text, {
changes: [
{
op: { p: 4, i: 'quick' },
metadata: {
ts: '2024-01-01T00:00:00.000Z',
user_id: 'user-1',
},
},
{
op: { p: 10, i: 'brown' },
metadata: {
ts: '2023-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
],
})
)
expect(result).to.deep.equal({
comments: [],
trackedChanges: [
{
range: { pos: 4, length: 5 },
tracking: {
type: 'insert',
userId: 'user-1',
ts: '2024-01-01T00:00:00.000Z',
},
},
{
range: { pos: 10, length: 5 },
tracking: {
type: 'insert',
userId: 'user-2',
ts: '2023-01-01T00:00:00.000Z',
},
},
],
})
})
})
describe('for update with ranges object with mixed tracked changes', function () {
describe('with tracked deletions before insertions', function () {
it('should insert tracked deletions before insertions', function () {
const text = 'the quickrapid brown fox jumps over the lazy dog'
const result = createRangeBlobDataFromUpdate(
update('pathname', text, {
changes: [
{
op: { p: 4, d: 'quick' },
metadata: {
ts: '2024-01-01T00:00:00.000Z',
user_id: 'user-1',
},
},
{
op: { p: 4, hpos: 9, i: 'rapid' },
metadata: {
ts: '2023-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
],
})
)
expect(result).to.deep.equal({
comments: [],
trackedChanges: [
{
range: { pos: 4, length: 5 },
tracking: {
type: 'delete',
userId: 'user-1',
ts: '2024-01-01T00:00:00.000Z',
},
},
{
range: { pos: 9, length: 5 },
tracking: {
type: 'insert',
userId: 'user-2',
ts: '2023-01-01T00:00:00.000Z',
},
},
],
})
})
})
describe('with tracked insertions before deletions', function () {
it('should insert tracked deletions before insertions', function () {
const text = 'the quickrapid brown fox jumps over the lazy dog'
const result = createRangeBlobDataFromUpdate(
update('pathname', text, {
changes: [
{
op: { p: 4, hpos: 9, i: 'rapid' },
metadata: {
ts: '2023-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
{
op: { p: 4, d: 'quick' },
metadata: {
ts: '2024-01-01T00:00:00.000Z',
user_id: 'user-1',
},
},
],
})
)
expect(result).to.deep.equal({
comments: [],
trackedChanges: [
{
range: { pos: 4, length: 5 },
tracking: {
type: 'delete',
userId: 'user-1',
ts: '2024-01-01T00:00:00.000Z',
},
},
{
range: { pos: 9, length: 5 },
tracking: {
type: 'insert',
userId: 'user-2',
ts: '2023-01-01T00:00:00.000Z',
},
},
],
})
})
})
it('should adjust positions', function () {
const text = 'the quick brown fox jumps over the lazy dog'
const result = createRangeBlobDataFromUpdate(
update('pathname', text, {
changes: [
{
op: { p: 4, i: 'quick' },
metadata: {
ts: '2024-01-01T00:00:00.000Z',
user_id: 'user-1',
},
},
{
op: { p: 10, d: 'brown' },
metadata: {
ts: '2023-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
{
op: { p: 30, hpos: 35, i: 'lazy' },
metadata: {
ts: '2022-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
],
})
)
expect(result).to.deep.equal({
comments: [],
trackedChanges: [
{
range: { pos: 4, length: 5 },
tracking: {
type: 'insert',
userId: 'user-1',
ts: '2024-01-01T00:00:00.000Z',
},
},
{
range: { pos: 10, length: 5 },
tracking: {
type: 'delete',
userId: 'user-2',
ts: '2023-01-01T00:00:00.000Z',
},
},
{
range: { pos: 35, length: 4 },
tracking: {
type: 'insert',
userId: 'user-2',
ts: '2022-01-01T00:00:00.000Z',
},
},
],
})
})
})
describe('for update with ranges object with mixed tracked changes and comments', function () {
it('should adjust positions', function () {
const text = 'the quick brown fox jumps over the lazy dog'
const result = createRangeBlobDataFromUpdate(
update('pathname', text, {
comments: [
{
op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
},
{
op: {
c: 'fox',
p: 11,
hpos: 16,
t: 'comment-2',
resolved: false,
},
},
],
changes: [
{
op: { p: 4, i: 'quick' },
metadata: {
ts: '2024-01-01T00:00:00.000Z',
user_id: 'user-1',
},
},
{
op: { p: 10, d: 'brown' },
metadata: {
ts: '2023-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
{
op: { p: 30, hpos: 35, i: 'lazy' },
metadata: {
ts: '2022-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
],
})
)
expect(result).to.deep.equal({
comments: [
{
ranges: [{ pos: 4, length: 5 }],
id: 'comment-1',
},
{
ranges: [{ pos: 16, length: 3 }],
id: 'comment-2',
},
],
trackedChanges: [
{
range: { pos: 4, length: 5 },
tracking: {
type: 'insert',
userId: 'user-1',
ts: '2024-01-01T00:00:00.000Z',
},
},
{
range: { pos: 10, length: 5 },
tracking: {
type: 'delete',
userId: 'user-2',
ts: '2023-01-01T00:00:00.000Z',
},
},
{
range: { pos: 35, length: 4 },
tracking: {
type: 'insert',
userId: 'user-2',
ts: '2022-01-01T00:00:00.000Z',
},
},
],
})
})
it('should adjust comment length', function () {
const text = 'the quick brown fox jumps over the lazy dog'
const result = createRangeBlobDataFromUpdate(
update('pathname', text, {
comments: [
{
op: { c: 'quick fox', p: 4, t: 'comment-1', resolved: false },
},
],
changes: [
{
op: { p: 10, d: 'brown ' },
metadata: {
ts: '2023-01-01T00:00:00.000Z',
user_id: 'user-2',
},
},
],
})
)
expect(result).to.deep.equal({
comments: [
{
ranges: [{ pos: 4, length: 9 }],
id: 'comment-1',
},
],
trackedChanges: [
{
range: { pos: 10, length: 6 },
tracking: {
type: 'delete',
userId: 'user-2',
ts: '2023-01-01T00:00:00.000Z',
},
},
],
})
})
})
})
})

View file

@@ -390,7 +390,7 @@ describe('HistoryStoreManager', function () {
this.projectId,
this.historyId,
this.update,
-(err, hash) => {
+(err, { file: hash }) => {
if (err) {
return done(err)
}

View file

@@ -780,6 +780,12 @@ Four five six\
expect(this.data.comments[2].op.p).to.eq(20)
expect(this.data.comments[2].op.c).to.eq('ov')
})
+
+it('should put resolved status in op', function () {
+expect(this.data.comments[0].op.resolved).to.be.false
+expect(this.data.comments[1].op.resolved).to.be.false
+expect(this.data.comments[2].op.resolved).to.be.false
+})
})

describe('with multiple tracked changes and comments', function () {
@@ -919,6 +925,7 @@
c: '',
p: 4,
t: 'comment-1',
+resolved: false,
},
},
{
@@ -926,6 +933,7 @@
c: 'brown',
p: 4,
t: 'comment-1',
+resolved: false,
},
},
{
@@ -933,6 +941,7 @@
c: '',
p: 29,
t: 'comment-1',
+resolved: false,
},
},
{
@@ -940,6 +949,7 @@
c: 'the',
p: 0,
t: 'comment-2',
+resolved: true,
},
},
{
@@ -947,6 +957,7 @@
c: 'the',
p: 25,
t: 'comment-2',
+resolved: true,
},
},
],

View file

@@ -34,7 +34,7 @@ describe('UpdateTranslator', function () {
ts: this.timestamp,
},
},
-blobHash: this.mockBlobHash,
+blobHashes: { file: this.mockBlobHash },
},
]
@@ -72,7 +72,7 @@ describe('UpdateTranslator', function () {
ts: this.timestamp,
},
},
-blobHash: this.mockBlobHash,
+blobHashes: { file: this.mockBlobHash },
},
]
@@ -180,7 +180,7 @@ describe('UpdateTranslator', function () {
ts: this.timestamp,
},
},
-blobHash: this.mockBlobHash,
+blobHashes: { file: this.mockBlobHash },
},
{
update: {
@@ -192,7 +192,7 @@ describe('UpdateTranslator', function () {
ts: this.timestamp,
},
},
-blobHash: this.mockBlobHash,
+blobHashes: { file: this.mockBlobHash },
},
]
@@ -291,7 +291,7 @@ describe('UpdateTranslator', function () {
},
url: 'filestore.example.com/test*test.png',
},
-blobHash: this.mockBlobHash,
+blobHashes: { file: this.mockBlobHash },
},
]
@@ -329,7 +329,7 @@ describe('UpdateTranslator', function () {
},
url: 'filestore.example.com/test.png',
},
-blobHash: this.mockBlobHash,
+blobHashes: { file: this.mockBlobHash },
},
]
@@ -367,7 +367,7 @@ describe('UpdateTranslator', function () {
},
url: 'filestore.example.com/folder/test.png',
},
-blobHash: this.mockBlobHash,
+blobHashes: { file: this.mockBlobHash },
},
]
@@ -405,7 +405,7 @@ describe('UpdateTranslator', function () {
ts: this.timestamp,
},
},
-blobHash: this.mockBlobHash,
+blobHashes: { file: this.mockBlobHash },
},
]
@@ -442,7 +442,7 @@ describe('UpdateTranslator', function () {
ts: this.timestamp,
},
},
-blobHash: this.mockBlobHash,
+blobHashes: { file: this.mockBlobHash },
},
]

View file

@@ -7,6 +7,7 @@ const logger = require('@overleaf/logger')
const metrics = require('@overleaf/metrics')
const { promisify } = require('util')
const { promisifyMultiResult } = require('@overleaf/promise-utils')
+const ProjectGetter = require('../Project/ProjectGetter')

module.exports = {
flushProjectToMongo,
@@ -296,16 +297,38 @@
return callback()
}

+ProjectGetter.getProjectWithoutLock(
+projectId,
+{ overleaf: true },
+(err, project) => {
+if (err) {
+return callback(err)
+}
+const historyRangesSupport = _.get(
+project,
+'overleaf.history.rangesSupportEnabled',
+false
+)
const {
deletes: docDeletes,
adds: docAdds,
renames: docRenames,
-} = _getUpdates('doc', changes.oldDocs, changes.newDocs)
+} = _getUpdates(
+'doc',
+changes.oldDocs,
+changes.newDocs,
+historyRangesSupport
+)
const {
deletes: fileDeletes,
adds: fileAdds,
renames: fileRenames,
-} = _getUpdates('file', changes.oldFiles, changes.newFiles)
+} = _getUpdates(
+'file',
+changes.oldFiles,
+changes.newFiles,
+historyRangesSupport
+)
const updates = [].concat(
docDeletes,
fileDeletes,
@@ -346,6 +369,8 @@
callback
)
}
+)
+}

function _makeRequest(options, projectId, metricsKey, callback) {
const timer = new metrics.Timer(metricsKey)
@@ -380,7 +405,12 @@
)
}

-function _getUpdates(entityType, oldEntities, newEntities) {
+function _getUpdates(
+entityType,
+oldEntities,
+newEntities,
+historyRangesSupport
+) {
if (!oldEntities) {
oldEntities = []
}
@@ -431,6 +461,8 @@
id,
pathname: newEntity.path,
docLines: newEntity.docLines,
+ranges: newEntity.ranges,
+historyRangesSupport,
url: newEntity.url,
hash: newEntity.file != null ? newEntity.file.hash : undefined,
})
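Each add-doc structure update sent to project-history now carries the raw ranges plus the per-project flag; the shape below is the one asserted in the unit tests further down (values illustrative):

```js
const update = {
  type: 'add-doc',
  id: 'doc-id', // stringified doc ObjectId
  pathname: '/foo',
  docLines: 'foo\nbar',
  ranges: rawRanges, // raw doc-updater ranges object, may be undefined
  historyRangesSupport: true, // project.overleaf.history.rangesSupportEnabled
  url: undefined,
  hash: undefined,
}
```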

View file

@@ -100,6 +100,27 @@ module.exports = HistoryController = {
)
},
revertFile(req, res, next) {
const { project_id: projectId } = req.params
const { version, pathname } = req.body
const userId = SessionManager.getLoggedInUserId(req.session)
RestoreManager.revertFile(
userId,
projectId,
version,
pathname,
function (err, entity) {
if (err) {
return next(err)
}
res.json({
type: entity.type,
id: entity._id,
})
}
)
},
getLabels(req, res, next) {
const projectId = req.params.Project_id
HistoryController._makeRequest(

View file

@@ -6,6 +6,8 @@ const EditorController = require('../Editor/EditorController')
const Errors = require('../Errors/Errors')
const moment = require('moment')
const { callbackifyAll } = require('@overleaf/promise-utils')
+const { fetchJson } = require('@overleaf/fetch-utils')
+const ProjectLocator = require('../Project/ProjectLocator')

const RestoreManager = {
async restoreFileFromV2(userId, projectId, version, pathname) {
@@ -39,6 +41,62 @@
)
},
async revertFile(userId, projectId, version, pathname) {
const fsPath = await RestoreManager._writeFileVersionToDisk(
projectId,
version,
pathname
)
const basename = Path.basename(pathname)
let dirname = Path.dirname(pathname)
if (dirname === '.') {
// no directory
dirname = ''
}
const parentFolderId = await RestoreManager._findOrCreateFolder(
projectId,
dirname
)
let fileExists = true
try {
// TODO: Is there a better way of doing this?
await ProjectLocator.promises.findElementByPath({
projectId,
path: pathname,
})
} catch (error) {
fileExists = false
}
if (fileExists) {
throw new Errors.InvalidError('File already exists')
}
const importInfo = await FileSystemImportManager.promises.importFile(
fsPath,
pathname
)
if (importInfo.type !== 'doc') {
// TODO: Handle binary files
throw new Errors.InvalidError('File is not editable')
}
const ranges = await RestoreManager._getRangesFromHistory(
projectId,
version,
pathname
)
return await EditorController.promises.addDocWithRanges(
projectId,
parentFolderId,
basename,
importInfo.lines,
ranges,
'revert',
userId
)
},
async _findOrCreateFolder(projectId, dirname) {
const { lastFolder } = await EditorController.promises.mkdirp(
projectId,
@@ -74,6 +132,13 @@
}/project/${projectId}/version/${version}/${encodeURIComponent(pathname)}`
return await FileWriter.promises.writeUrlToDisk(projectId, url)
},
+
+async _getRangesFromHistory(projectId, version, pathname) {
+const url = `${
+Settings.apis.project_history.url
+}/project/${projectId}/ranges/version/${version}/${encodeURIComponent(pathname)}`
+return await fetchJson(url)
+},
}

module.exports = { ...callbackifyAll(RestoreManager), promises: RestoreManager }
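A typical call into the new manager method, as the controller above makes it (pathname hypothetical); per the unit tests, the resolved value is whatever EditorController.addDocWithRanges returned:

```js
const entity = await RestoreManager.promises.revertFile(
  userId,
  projectId,
  version,
  'sections/intro.tex' // hypothetical pathname
)
// e.g. { doc: ..., folderId: ... } per the revertFile unit tests below
```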

View file

@@ -397,6 +397,7 @@ const ProjectEntityUpdateHandler = {
doc,
path: docPath,
docLines: docLines.join('\n'),
+ranges,
},
]
DocumentUpdaterHandler.updateProjectStructure(

View file

@@ -10,9 +10,11 @@ const logger = require('@overleaf/logger')
module.exports = {
addEntity: callbackify(addEntity),
importDir: callbackify(importDir),
+importFile: callbackify(importFile),
promises: {
addEntity,
importDir,
+importFile,
},
}
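importFile reads a file from disk and classifies it; revertFile relies on the { type, lines } result shape seen in RestoreManager above. A usage sketch (paths hypothetical):

```js
const importInfo = await FileSystemImportManager.promises.importFile(
  '/tmp/path/on/disk', // fsPath written by _writeFileVersionToDisk
  'main.tex' // target pathname
)
if (importInfo.type === 'doc') {
  console.log(importInfo.lines) // array of document lines
}
```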

View file

@@ -769,6 +769,11 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
AuthorizationMiddleware.ensureUserCanWriteProjectContent,
HistoryController.restoreFileFromV2
)
+webRouter.post(
+'/project/:project_id/revert_file',
+AuthorizationMiddleware.ensureUserCanWriteProjectContent,
+HistoryController.revertFile
+)
webRouter.get(
'/project/:project_id/version/:version/zip',
RateLimiterMiddleware.rateLimit(rateLimiters.downloadProjectRevision),
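The new endpoint mirrors restore_file. A request sketch (ids illustrative, CSRF handling omitted); the controller responds with the reverted entity's type and id:

```js
// Body matches HistoryController.revertFile: { version, pathname }
const response = await fetch(`/project/${projectId}/revert_file`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ version: 42, pathname: 'main.tex' }),
})
const { type, id } = await response.json()
```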

View file

@@ -44,6 +44,9 @@ describe('DocumentUpdaterHandler', function () {
'../../models/Project': {
Project: (this.Project = {}),
},
+'../Project/ProjectGetter': (this.ProjectGetter = {
+getProjectWithoutLock: sinon.stub(),
+}),
'../../Features/Project/ProjectLocator': {},
'@overleaf/metrics': {
Timer: class {
@@ -52,6 +55,9 @@
},
},
})
+this.ProjectGetter.getProjectWithoutLock
+.withArgs(this.project_id)
+.yields(null, this.project)
})

describe('flushProjectToMongo', function () {
@@ -1116,8 +1122,10 @@
id: this.docId.toString(),
pathname: '/foo',
docLines: 'a\nb',
+historyRangesSupport: false,
url: undefined,
hash: undefined,
+ranges: undefined,
},
]
@@ -1169,7 +1177,9 @@
pathname: '/bar',
url: 'filestore.example.com/file',
docLines: undefined,
+historyRangesSupport: false,
hash: '12345',
+ranges: undefined,
},
]
@@ -1280,8 +1290,10 @@
id: this.docId.toString(),
pathname: '/foo.doc',
docLines: 'hello there',
+historyRangesSupport: false,
url: undefined,
hash: undefined,
+ranges: undefined,
},
]
@@ -1337,6 +1349,128 @@
)
})
})
describe('when ranges are present', function () {
beforeEach(function () {
this.docId = new ObjectId()
this.ranges = {
changes: [
{
op: { p: 0, i: 'foo' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { p: 7, d: ' baz' },
metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' },
},
],
comments: [
{
op: { p: 4, c: 'bar', t: 'comment-1' },
metadata: { resolved: false },
},
],
}
this.changes = {
newDocs: [
{
path: '/foo',
docLines: 'foo\nbar',
doc: { _id: this.docId },
ranges: this.ranges,
},
],
newProject: { version: this.version },
}
})
it('should forward ranges', function (done) {
const updates = [
{
type: 'add-doc',
id: this.docId.toString(),
pathname: '/foo',
docLines: 'foo\nbar',
historyRangesSupport: false,
url: undefined,
hash: undefined,
ranges: this.ranges,
},
]
this.handler.updateProjectStructure(
this.project_id,
this.projectHistoryId,
this.user_id,
this.changes,
this.source,
() => {
this.request
.calledWith({
url: this.url,
method: 'POST',
json: {
updates,
userId: this.user_id,
version: this.version,
projectHistoryId: this.projectHistoryId,
source: this.source,
},
timeout: 30 * 1000,
})
.should.equal(true)
done()
}
)
})
it('should include flag when history ranges support is enabled', function (done) {
this.ProjectGetter.getProjectWithoutLock
.withArgs(this.project_id)
.yields(null, {
_id: this.project_id,
overleaf: { history: { rangesSupportEnabled: true } },
})
const updates = [
{
type: 'add-doc',
id: this.docId.toString(),
pathname: '/foo',
docLines: 'foo\nbar',
historyRangesSupport: true,
url: undefined,
hash: undefined,
ranges: this.ranges,
},
]
this.handler.updateProjectStructure(
this.project_id,
this.projectHistoryId,
this.user_id,
this.changes,
this.source,
() => {
this.request
.calledWith({
url: this.url,
method: 'POST',
json: {
updates,
userId: this.user_id,
version: this.version,
projectHistoryId: this.projectHistoryId,
source: this.source,
},
timeout: 30 * 1000,
})
.should.equal(true)
done()
}
)
})
})
})
})
})

View file

@@ -22,6 +22,7 @@ describe('RestoreManager', function () {
'../Editor/EditorController': (this.EditorController = {
promises: {},
}),
+'../Project/ProjectLocator': (this.ProjectLocator = { promises: {} }),
},
})
this.user_id = 'mock-user-id'
@@ -196,4 +197,119 @@
})
})
})
describe('revertFile', function () {
beforeEach(function () {
this.RestoreManager.promises._writeFileVersionToDisk = sinon
.stub()
.resolves((this.fsPath = '/tmp/path/on/disk'))
this.RestoreManager.promises._findOrCreateFolder = sinon
.stub()
.resolves((this.folder_id = 'mock-folder-id'))
this.FileSystemImportManager.promises.addEntity = sinon
.stub()
.resolves((this.entity = 'mock-entity'))
this.RestoreManager.promises._getRangesFromHistory = sinon
.stub()
.rejects()
})
describe('with an existing file in the current project', function () {
beforeEach(function () {
this.pathname = 'foo.tex'
this.ProjectLocator.promises.findElementByPath = sinon.stub().resolves()
})
it('should reject', function () {
expect(
this.RestoreManager.promises.revertFile(
this.user_id,
this.project_id,
this.version,
this.pathname
)
)
.to.eventually.be.rejectedWith('File already exists')
.and.be.instanceOf(Errors.InvalidError)
})
})
describe('when reverting a binary file', function () {
beforeEach(function () {
this.pathname = 'foo.png'
this.FileSystemImportManager.promises.importFile = sinon
.stub()
.resolves({ type: 'binary' })
})
it('should reject', function () {
expect(
this.RestoreManager.promises.revertFile(
this.user_id,
this.project_id,
this.version,
this.pathname
)
)
.to.eventually.be.rejectedWith('File is not editable')
.and.be.instanceOf(Errors.InvalidError)
})
})
describe("when reverting a file that doesn't current exist", function () {
beforeEach(async function () {
this.pathname = 'foo.tex'
this.ProjectLocator.promises.findElementByPath = sinon.stub().rejects()
this.tracked_changes = [
{
op: { pos: 4, i: 'bar' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' },
},
{
op: { pos: 8, d: 'qux' },
metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-2' },
},
]
this.comments = [{ op: { t: 'comment-1', p: 0, c: 'foo' } }]
this.FileSystemImportManager.promises.importFile = sinon
.stub()
.resolves({ type: 'doc', lines: ['foo', 'bar', 'baz'] })
this.RestoreManager.promises._getRangesFromHistory = sinon
.stub()
.resolves({ changes: this.tracked_changes, comment: this.comments })
this.EditorController.promises.addDocWithRanges = sinon
.stub()
.resolves(
(this.addedFile = { doc: 'mock-doc', folderId: 'mock-folder' })
)
this.data = await this.RestoreManager.promises.revertFile(
this.user_id,
this.project_id,
this.version,
this.pathname
)
})
it('should import the file', function () {
expect(
this.EditorController.promises.addDocWithRanges
).to.have.been.calledWith(
this.project_id,
this.folder_id,
'foo.tex',
['foo', 'bar', 'baz'],
{ changes: this.tracked_changes, comment: this.comments }
)
})
it('should return the created entity', function () {
expect(this.data).to.equal(this.addedFile)
})
it('should look up ranges', function () {
expect(
this.RestoreManager.promises._getRangesFromHistory
).to.have.been.calledWith(this.project_id, this.version, this.pathname)
})
})
})
})

View file

@@ -554,6 +554,7 @@ describe('ProjectEntityUpdateHandler', function () {
doc: this.newDoc,
path: this.path,
docLines: this.docLines.join('\n'),
+ranges: {},
},
]
this.DocumentUpdaterHandler.updateProjectStructure.should.have.been.calledWith(