Merge pull request #19740 from overleaf/jpa-linked-file-metadata
[misc] persist linkedFileData in full project history as file metadata

GitOrigin-RevId: f3e8ba947ea34b6796e210a076a248c57188d148

This commit is contained in: parent e85045255c, commit e26b6de51b
11 changed files with 691 additions and 209 deletions
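In outline, the change threads a file's linkedFileData from the web service through the document-updater and into project-history, which persists it on the history file as metadata. A minimal sketch of an addFile operation carrying that metadata, assuming the field names used in the hunks and tests below (the exact operation envelope is not shown in this diff):

// Sketch only: an addFile project-structure operation, now carrying
// linked-file metadata alongside the file reference.
const addFileOperation = {
  pathname: 'references.bib',
  file: {
    hash: 'abc123', // hash of the stored blob; field name assumed
    metadata: {
      importedAt: '2024-07-30T09:14:45.928Z',
      provider: 'references-provider',
    },
  },
}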
@@ -94,3 +94,9 @@ export type RawEditOperation =
   | RawAddCommentOperation
   | RawDeleteCommentOperation
   | RawSetCommentStateOperation
+
+export type LinkedFileData = {
+  importedAt: string
+  provider: string
+  [other: string]: any
+}
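For reference, two LinkedFileData values built from the fixtures used in the tests further down; any provider-specific keys beyond importedAt and provider are allowed through the [other: string]: any index signature:

import { LinkedFileData } from 'overleaf-editor-core/lib/types'

// A references upload: only the required fields.
const referencesFile: LinkedFileData = {
  importedAt: '2024-07-30T09:14:45.928Z',
  provider: 'references-provider',
}

// An output file imported from another project: provider-specific extras ride along.
const outputFile: LinkedFileData = {
  importedAt: '2024-07-30T09:14:45.928Z',
  provider: 'project_output_file',
  source_project_id: 'other-project-id',
  source_output_file_path: 'foo/bar.txt',
}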
@@ -119,6 +119,7 @@ const ProjectHistoryRedisManager = {
        ts: new Date(),
      },
      version: projectUpdate.version,
+     metadata: projectUpdate.metadata,
      projectHistoryId,
    }
    if (ranges) {
@@ -170,6 +170,53 @@ describe('ProjectHistoryRedisManager', function () {
        .should.equal(true)
    })

+    it('should queue an update with file metadata', async function () {
+      const metadata = {
+        importedAt: '2024-07-30T09:14:45.928Z',
+        provider: 'references-provider',
+      }
+      const projectId = 'project-id'
+      const fileId = 'file-id'
+      const url = `http://filestore/project/${projectId}/file/${fileId}`
+      await this.ProjectHistoryRedisManager.promises.queueAddEntity(
+        projectId,
+        this.projectHistoryId,
+        'file',
+        fileId,
+        this.user_id,
+        {
+          pathname: 'foo.png',
+          url,
+          version: 42,
+          metadata,
+        },
+        this.source
+      )
+
+      const update = {
+        pathname: 'foo.png',
+        docLines: undefined,
+        url,
+        meta: {
+          user_id: this.user_id,
+          ts: new Date(),
+          source: this.source,
+        },
+        version: 42,
+        metadata,
+        projectHistoryId: this.projectHistoryId,
+        file: fileId,
+      }
+
+      expect(
+        this.ProjectHistoryRedisManager.promises.queueOps.args[0][1]
+      ).to.equal(JSON.stringify(update))
+      this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly(
+        projectId,
+        JSON.stringify(update)
+      )
+    })
+
    it('should forward history compatible ranges if history ranges support is enabled', async function () {
      this.rawUpdate.historyRangesSupport = true
      this.docLines = 'the quick fox jumps over the lazy dog'
@@ -33,6 +33,7 @@ import { isInsert, isDelete } from './Utils.js'
 * @typedef {import('./types').TrackingDirective} TrackingDirective
 * @typedef {import('./types').TrackingType} TrackingType
 * @typedef {import('./types').Update} Update
+ * @typedef {import('./types').ProjectStructureUpdate} ProjectStructureUpdate
 */

const MAX_RESYNC_HISTORY_RECORDS = 100 // keep this many records of previous resyncs
const EXPIRE_RESYNC_HISTORY_INTERVAL_MS = 90 * 24 * 3600 * 1000 // 90 days
@@ -377,7 +378,7 @@ class SyncUpdateExpander {
  constructor(projectId, snapshotFiles, origin) {
    this.projectId = projectId
    this.files = snapshotFiles
-   this.expandedUpdates = []
+   this.expandedUpdates = /** @type ProjectStructureUpdate[] */ []
    this.origin = origin
  }
@@ -471,6 +472,7 @@ class SyncUpdateExpander {
        expectedBinaryFiles,
        persistedBinaryFiles
      )
+     this.queueSetMetadataOpsForLinkedFiles(update)
    } else if ('resyncDocContent' in update) {
      logger.debug(
        { projectId: this.projectId, update },
@@ -537,6 +539,7 @@ class SyncUpdateExpander {
    } else {
      update.file = entity.file
      update.url = entity.url
+     update.metadata = entity.metadata
    }

    this.expandedUpdates.push(update)
@@ -546,6 +549,47 @@ class SyncUpdateExpander {
    }
  }

+  queueSetMetadataOpsForLinkedFiles(update) {
+    const allEntities = update.resyncProjectStructure.docs.concat(
+      update.resyncProjectStructure.files
+    )
+    for (const file of allEntities) {
+      const pathname = UpdateTranslator._convertPathname(file.path)
+      const matchingAddFileOperation = this.expandedUpdates.some(
+        // Look for an addFile operation that already syncs the metadata.
+        u => u.pathname === pathname && u.metadata === file.metadata
+      )
+      if (matchingAddFileOperation) continue
+      const metaData = this.files[pathname].getMetadata()
+
+      let shouldUpdate = false
+      if (file.metadata) {
+        // check for in place update of linked-file
+        shouldUpdate = Object.entries(file.metadata).some(
+          ([k, v]) => metaData[k] !== v
+        )
+      } else if (metaData.provider) {
+        // overwritten by non-linked-file with same hash
+        // or overwritten by doc
+        shouldUpdate = true
+      }
+      if (!shouldUpdate) continue
+
+      this.expandedUpdates.push({
+        pathname,
+        meta: {
+          resync: true,
+          origin: this.origin,
+          ts: update.meta.ts,
+        },
+        metadata: file.metadata || {},
+      })
+      Metrics.inc('project_history_resync_operation', 1, {
+        status: 'update metadata',
+      })
+    }
+  }
+
  queueUpdateForOutOfSyncBinaryFiles(update, expectedFiles, persistedFiles) {
    // create a map to lookup persisted files by their path
    const persistedFileMap = new Map(persistedFiles.map(x => [x.path, x]))
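To make the shouldUpdate decision above concrete, a small sketch using the values from the 'updates linked file data when hash remains the same' test further down; the check is a shallow, key-by-key comparison of the incoming metadata against what the history snapshot already stores:

// Metadata currently recorded on the history file for this pathname
const metaData = {
  importedAt: '2024-07-30T09:14:45.928Z',
  provider: 'image-provider',
}
// Metadata reported by web for the same file during the resync
const fileMetadata = {
  importedAt: '2024-07-31T00:00:00.000Z',
  provider: 'image-provider',
}

// Any differing value flags an in-place update of the linked file.
const shouldUpdate = Object.entries(fileMetadata).some(
  ([k, v]) => metaData[k] !== v
) // true here, because importedAt changed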
@@ -583,6 +627,7 @@ class SyncUpdateExpander {
        },
        file: entity.file,
        url: entity.url,
+       metadata: entity.metadata,
      }
      this.expandedUpdates.push(addUpdate)
      Metrics.inc('project_history_resync_operation', 1, {
@@ -17,6 +17,7 @@ import { isInsert, isRetain, isDelete, isComment } from './Utils.js'
 * @typedef {import('./types').TrackingDirective} TrackingDirective
 * @typedef {import('./types').TrackingProps} TrackingProps
 * @typedef {import('./types').SetCommentStateUpdate} SetCommentStateUpdate
+ * @typedef {import('./types').SetFileMetadataOperation} SetFileMetadataOperation
 * @typedef {import('./types').Update} Update
 * @typedef {import('./types').UpdateWithBlob} UpdateWithBlob
 */
@@ -64,6 +65,9 @@ function _convertToChange(projectId, updateWithBlob) {
    if (_isAddDocUpdate(update)) {
      op.file.rangesHash = updateWithBlob.blobHashes.ranges
    }
+   if (_isAddFileUpdate(update)) {
+     op.file.metadata = update.metadata
+   }
    operations = [op]
    projectVersion = update.version
  } else if (isTextUpdate(update)) {
@@ -89,6 +93,13 @@ function _convertToChange(projectId, updateWithBlob) {
        resolved: update.resolved,
      },
    ]
+  } else if (isSetFileMetadataOperation(update)) {
+    operations = [
+      {
+        pathname: _convertPathname(update.pathname),
+        metadata: update.metadata,
+      },
+    ]
  } else if (isDeleteCommentUpdate(update)) {
    operations = [
      {
@@ -215,6 +226,14 @@ export function isDeleteCommentUpdate(update) {
  return 'deleteComment' in update
}

+/**
+ * @param {Update} update
+ * @returns {update is SetFileMetadataOperation}
+ */
+export function isSetFileMetadataOperation(update) {
+  return 'metadata' in update
+}
+
export function _convertPathname(pathname) {
  // Strip leading /
  pathname = pathname.replace(/^\//, '')
@@ -1,4 +1,5 @@
 import { HistoryRanges } from '../../../document-updater/app/js/types'
+import { LinkedFileData } from 'overleaf-editor-core/lib/types'

 export type Update =
   | TextUpdate
@@ -7,9 +8,16 @@ export type Update =
   | RenameUpdate
   | DeleteCommentUpdate
   | SetCommentStateUpdate
+  | SetFileMetadataOperation
   | ResyncProjectStructureUpdate
   | ResyncDocContentUpdate

+export type ProjectStructureUpdate =
+  | AddDocUpdate
+  | AddFileUpdate
+  | RenameUpdate
+  | SetFileMetadataOperation
+
 export type UpdateMeta = {
   user_id: string
   ts: number
@@ -38,6 +46,12 @@ export type SetCommentStateUpdate = {
   meta: UpdateMeta
 }

+export type SetFileMetadataOperation = {
+  pathname: string
+  meta: UpdateMeta
+  metadata: LinkedFileData | object
+}
+
 export type DeleteCommentUpdate = {
   pathname: string
   deleteComment: string
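As a sketch of the two ways the resync code uses this type: one update carries fresh LinkedFileData, the other carries an empty object to clear metadata from a file that is no longer linked (the meta values here are illustrative; resync updates also set resync/origin fields not shown):

const setMetadata: SetFileMetadataOperation = {
  pathname: '1.png',
  meta: { user_id: 'user-id', ts: Date.now() },
  metadata: {
    importedAt: '2024-07-31T00:00:00.000Z',
    provider: 'image-provider',
  },
}

// An empty metadata object removes the linked-file data from the history file.
const clearMetadata: SetFileMetadataOperation = {
  pathname: '1.png',
  meta: { user_id: 'user-id', ts: Date.now() },
  metadata: {},
}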
@@ -61,6 +75,7 @@ export type AddFileUpdate = ProjectUpdateBase & {
   pathname: string
   file: string
   url: string
+  metadata?: LinkedFileData
 }

 export type RenameUpdate = ProjectUpdateBase & {
@@ -199,6 +214,8 @@ export type File = {
   file: string
   url: string
   path: string
   _hash: string
+  metadata?: LinkedFileData
 }

 export type Entity = Doc | File
@@ -498,10 +498,12 @@ describe('SyncManager', function () {
          getContent: sinon.stub().returns(null),
          getHash: sinon.stub().returns(null),
          load: sinon.stub().resolves(this.loadedSnapshotDoc),
+         getMetadata: sinon.stub().returns({}),
        },
        '1.png': {
          isEditable: sinon.stub().returns(false),
          data: { hash: this.persistedFile._hash },
+         getMetadata: sinon.stub().returns({}),
        },
      }
      this.UpdateTranslator._convertPathname
@@ -600,7 +602,7 @@ describe('SyncManager', function () {
        expect(this.extendLock).to.have.been.called
      })

-     it('queues file additions for missing files', async function () {
+     it('queues file additions for missing regular files', async function () {
        const newFile = {
          path: '2.png',
          file: {},
@@ -625,6 +627,50 @@ describe('SyncManager', function () {
            pathname: newFile.path,
            file: newFile.file,
            url: newFile.url,
+           metadata: undefined,
            meta: {
              resync: true,
              ts: TIMESTAMP,
              origin: { kind: 'history-resync' },
            },
          },
        ])
        expect(this.extendLock).to.have.been.called
      })

+     it('queues file additions for missing linked files', async function () {
+       const newFile = {
+         path: '2.png',
+         file: {},
+         url: 'filestore/2.png',
+         metadata: {
+           importedAt: '2024-07-30T09:14:45.928Z',
+           provider: 'references-provider',
+         },
+       }
+       const updates = [
+         resyncProjectStructureUpdate(
+           [this.persistedDoc],
+           [this.persistedFile, newFile]
+         ),
+       ]
+       const expandedUpdates =
+         await this.SyncManager.promises.expandSyncUpdates(
+           this.projectId,
+           this.historyId,
+           updates,
+           this.extendLock
+         )
+
+       expect(expandedUpdates).to.deep.equal([
+         {
+           pathname: newFile.path,
+           file: newFile.file,
+           url: newFile.url,
+           metadata: {
+             importedAt: '2024-07-30T09:14:45.928Z',
+             provider: 'references-provider',
+           },
+           meta: {
+             resync: true,
+             ts: TIMESTAMP,
@@ -704,6 +750,185 @@ describe('SyncManager', function () {
            pathname: fileWichWasADoc.path,
            file: fileWichWasADoc.file,
            url: fileWichWasADoc.url,
+           metadata: undefined,
            meta: {
              resync: true,
              ts: TIMESTAMP,
              origin: { kind: 'history-resync' },
            },
          },
        ])
        expect(this.extendLock).to.have.been.called
      })

+     it('removes and re-adds linked-files if their binary state differs', async function () {
+       const fileWhichWasADoc = {
+         path: this.persistedDoc.path,
+         url: 'filestore/references.txt',
+         _hash: 'other-hash',
+         metadata: {
+           importedAt: '2024-07-30T09:14:45.928Z',
+           provider: 'references-provider',
+         },
+       }
+
+       const updates = [
+         resyncProjectStructureUpdate(
+           [],
+           [fileWhichWasADoc, this.persistedFile]
+         ),
+       ]
+       const expandedUpdates =
+         await this.SyncManager.promises.expandSyncUpdates(
+           this.projectId,
+           this.historyId,
+           updates,
+           this.extendLock
+         )
+
+       expect(expandedUpdates).to.deep.equal([
+         {
+           pathname: fileWhichWasADoc.path,
+           new_pathname: '',
+           meta: {
+             resync: true,
+             ts: TIMESTAMP,
+             origin: { kind: 'history-resync' },
+           },
+         },
+         {
+           pathname: fileWhichWasADoc.path,
+           file: fileWhichWasADoc.file,
+           url: fileWhichWasADoc.url,
+           metadata: {
+             importedAt: '2024-07-30T09:14:45.928Z',
+             provider: 'references-provider',
+           },
+           meta: {
+             resync: true,
+             ts: TIMESTAMP,
+             origin: { kind: 'history-resync' },
+           },
+         },
+       ])
+       expect(this.extendLock).to.have.been.called
+     })
+
+     it('add linked file data with same hash', async function () {
+       const nowLinkedFile = {
+         path: this.persistedFile.path,
+         url: 'filestore/1.png',
+         _hash: this.persistedFile._hash,
+         metadata: {
+           importedAt: '2024-07-30T09:14:45.928Z',
+           provider: 'image-provider',
+         },
+       }
+
+       const updates = [
+         resyncProjectStructureUpdate([this.persistedDoc], [nowLinkedFile]),
+       ]
+       const expandedUpdates =
+         await this.SyncManager.promises.expandSyncUpdates(
+           this.projectId,
+           this.historyId,
+           updates,
+           this.extendLock
+         )
+
+       expect(expandedUpdates).to.deep.equal([
+         {
+           pathname: nowLinkedFile.path,
+           metadata: {
+             importedAt: '2024-07-30T09:14:45.928Z',
+             provider: 'image-provider',
+           },
+           meta: {
+             resync: true,
+             ts: TIMESTAMP,
+             origin: { kind: 'history-resync' },
+           },
+         },
+       ])
+       expect(this.extendLock).to.have.been.called
+     })
+
+     it('updates linked file data when hash remains the same', async function () {
+       this.fileMap[this.persistedFile.path].getMetadata.returns({
+         importedAt: '2024-07-30T09:14:45.928Z',
+         provider: 'image-provider',
+       })
+       const updatedLinkedFile = {
+         path: this.persistedFile.path,
+         url: 'filestore/1.png',
+         _hash: this.persistedFile._hash,
+         metadata: {
+           importedAt: '2024-07-31T00:00:00.000Z',
+           provider: 'image-provider',
+         },
+       }
+
+       const updates = [
+         resyncProjectStructureUpdate(
+           [this.persistedDoc],
+           [updatedLinkedFile]
+         ),
+       ]
+       const expandedUpdates =
+         await this.SyncManager.promises.expandSyncUpdates(
+           this.projectId,
+           this.historyId,
+           updates,
+           this.extendLock
+         )
+
+       expect(expandedUpdates).to.deep.equal([
+         {
+           pathname: updatedLinkedFile.path,
+           metadata: {
+             importedAt: '2024-07-31T00:00:00.000Z',
+             provider: 'image-provider',
+           },
+           meta: {
+             resync: true,
+             ts: TIMESTAMP,
+             origin: { kind: 'history-resync' },
+           },
+         },
+       ])
+       expect(this.extendLock).to.have.been.called
+     })
+
+     it('remove linked file data', async function () {
+       this.fileMap[this.persistedFile.path].getMetadata.returns({
+         importedAt: '2024-07-30T09:14:45.928Z',
+         provider: 'image-provider',
+       })
+
+       const noLongerLinkedFile = {
+         path: this.persistedFile.path,
+         url: 'filestore/1.png',
+         _hash: this.persistedFile._hash,
+       }
+
+       const updates = [
+         resyncProjectStructureUpdate(
+           [this.persistedDoc],
+           [noLongerLinkedFile]
+         ),
+       ]
+       const expandedUpdates =
+         await this.SyncManager.promises.expandSyncUpdates(
+           this.projectId,
+           this.historyId,
+           updates,
+           this.extendLock
+         )
+
+       expect(expandedUpdates).to.deep.equal([
+         {
+           pathname: noLongerLinkedFile.path,
+           metadata: {},
+           meta: {
+             resync: true,
+             ts: TIMESTAMP,
@@ -801,6 +1026,7 @@ describe('SyncManager', function () {
            pathname: persistedFileWithNewContent.path,
            file: persistedFileWithNewContent.file,
            url: persistedFileWithNewContent.url,
+           metadata: undefined,
            meta: {
              resync: true,
              ts: TIMESTAMP,
@@ -8,6 +8,7 @@ const metrics = require('@overleaf/metrics')
 const { promisify } = require('util')
 const { promisifyMultiResult } = require('@overleaf/promise-utils')
 const ProjectGetter = require('../Project/ProjectGetter')
+const FileStoreHandler = require('../FileStore/FileStoreHandler')

 function flushProjectToMongo(projectId, callback) {
   _makeRequest(
@@ -230,6 +231,18 @@ function resyncProjectHistory(
  opts,
  callback
) {
+  docs = docs.map(doc => ({
+    doc: doc.doc._id,
+    path: doc.path,
+  }))
+
+  files = files.map(file => ({
+    file: file.file._id,
+    path: file.path,
+    url: FileStoreHandler._buildUrl(projectId, file.file._id),
+    _hash: file.file.hash,
+    metadata: buildFileMetadataForHistory(file.file),
+  }))
  const body = { docs, files, projectHistoryId }
  if (opts.historyRangesMigration) {
    body.historyRangesMigration = opts.historyRangesMigration
@@ -470,6 +483,7 @@ function _getUpdates(
        historyRangesSupport,
        url: newEntity.url,
        hash: newEntity.file != null ? newEntity.file.hash : undefined,
+       metadata: buildFileMetadataForHistory(newEntity.file),
      })
    } else if (newEntity.path !== oldEntity.path) {
      // entity renamed
@@ -485,6 +499,25 @@ function _getUpdates(
  return { deletes, adds, renames }
}

+function buildFileMetadataForHistory(file) {
+  if (!file?.linkedFileData) return undefined
+
+  const metadata = {
+    // Files do not have a created at timestamp in the history.
+    // For cloned projects, the importedAt timestamp needs to remain untouched.
+    // Record the timestamp in the metadata blob to keep everything self-contained.
+    importedAt: file.created,
+    ...file.linkedFileData,
+  }
+  if (metadata.provider === 'project_output_file') {
+    // The build-id and clsi-server-id are only used for downloading file.
+    // Omit them from history as they are not useful in the future.
+    delete metadata.build_id
+    delete metadata.clsiServerId
+  }
+  return metadata
+}
+
module.exports = {
  flushProjectToMongo,
  flushMultipleProjectsToMongo,
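Using the fixture from the DocumentUpdaterHandler test further down, the mapping performed by buildFileMetadataForHistory looks roughly like this (a sketch of the behaviour above, not an additional change):

const file = {
  _id: 'file-id',
  hash: '21',
  created: new Date('2024-07-30T09:14:45.928Z'),
  linkedFileData: {
    provider: 'project_output_file',
    build_id: '1234-abc',
    clsiServerId: 'server-1',
    source_project_id: 'other-project-id',
    source_output_file_path: 'foo/bar.txt',
  },
}

buildFileMetadataForHistory(file)
// => {
//      importedAt: file.created,
//      provider: 'project_output_file',
//      source_project_id: 'other-project-id',
//      source_output_file_path: 'foo/bar.txt',
//    }
// build_id and clsiServerId are dropped for project_output_file;
// a file without linkedFileData yields undefined.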
@@ -1435,17 +1435,6 @@ const ProjectEntityUpdateHandler = {
      if (error) {
        return callback(error)
      }
-     docs = _.map(docs, doc => ({
-       doc: doc.doc._id,
-       path: doc.path,
-     }))
-
-     files = _.map(files, file => ({
-       file: file.file._id,
-       path: file.path,
-       url: FileStoreHandler._buildUrl(projectId, file.file._id),
-       _hash: file.file.hash,
-     }))

      DocumentUpdaterHandler.resyncProjectHistory(
        projectId,
@@ -53,6 +53,11 @@ describe('DocumentUpdaterHandler', function () {
          done() {}
        },
      },
+     '../FileStore/FileStoreHandler': {
+       _buildUrl: sinon.stub().callsFake((projectId, fileId) => {
+         return `http://filestore/project/${projectId}/file/${fileId}`
+       }),
+     },
    },
  })
  this.ProjectGetter.getProjectWithoutLock
@@ -1126,6 +1131,7 @@ describe('DocumentUpdaterHandler', function () {
            url: undefined,
            hash: undefined,
            ranges: undefined,
+           metadata: undefined,
          },
        ]
@@ -1136,8 +1142,7 @@ describe('DocumentUpdaterHandler', function () {
          this.changes,
          this.source,
          () => {
-           this.request
-             .calledWith({
+           this.request.should.have.been.calledWith({
              url: this.url,
              method: 'POST',
              json: {
@@ -1149,7 +1154,6 @@ describe('DocumentUpdaterHandler', function () {
              },
              timeout: 30 * 1000,
            })
-             .should.equal(true)
            done()
          }
        )
@@ -1180,6 +1184,7 @@ describe('DocumentUpdaterHandler', function () {
            historyRangesSupport: false,
            hash: '12345',
            ranges: undefined,
+           metadata: undefined,
          },
        ]
@@ -1190,8 +1195,7 @@ describe('DocumentUpdaterHandler', function () {
          this.changes,
          this.source,
          () => {
-           this.request
-             .calledWith({
+           this.request.should.have.been.calledWith({
              url: this.url,
              method: 'POST',
              json: {
@@ -1203,7 +1207,6 @@ describe('DocumentUpdaterHandler', function () {
              },
              timeout: 30 * 1000,
            })
-             .should.equal(true)
            done()
          }
        )
@@ -1236,8 +1239,7 @@ describe('DocumentUpdaterHandler', function () {
          this.changes,
          this.source,
          () => {
-           this.request
-             .calledWith({
+           this.request.should.have.been.calledWith({
              url: this.url,
              method: 'POST',
              json: {
@@ -1249,7 +1251,6 @@ describe('DocumentUpdaterHandler', function () {
              },
              timeout: 30 * 1000,
            })
-             .should.equal(true)
            done()
          }
        )
@@ -1294,6 +1295,7 @@ describe('DocumentUpdaterHandler', function () {
            url: undefined,
            hash: undefined,
            ranges: undefined,
+           metadata: undefined,
          },
        ]
@@ -1395,6 +1397,7 @@ describe('DocumentUpdaterHandler', function () {
            url: undefined,
            hash: undefined,
            ranges: this.ranges,
+           metadata: undefined,
          },
        ]
@@ -1405,8 +1408,7 @@ describe('DocumentUpdaterHandler', function () {
          this.changes,
          this.source,
          () => {
-           this.request
-             .calledWith({
+           this.request.should.have.been.calledWith({
              url: this.url,
              method: 'POST',
              json: {
@@ -1418,7 +1420,6 @@ describe('DocumentUpdaterHandler', function () {
              },
              timeout: 30 * 1000,
            })
-             .should.equal(true)
            done()
          }
        )
@@ -1442,6 +1443,7 @@ describe('DocumentUpdaterHandler', function () {
            url: undefined,
            hash: undefined,
            ranges: this.ranges,
+           metadata: undefined,
          },
        ]
@@ -1452,8 +1454,7 @@ describe('DocumentUpdaterHandler', function () {
          this.changes,
          this.source,
          () => {
-           this.request
-             .calledWith({
+           this.request.should.have.been.calledWith({
              url: this.url,
              method: 'POST',
              json: {
@@ -1465,7 +1466,6 @@ describe('DocumentUpdaterHandler', function () {
              },
              timeout: 30 * 1000,
            })
-             .should.equal(true)
            done()
          }
        )
@@ -1473,4 +1473,134 @@ describe('DocumentUpdaterHandler', function () {
      })
    })
  })
+
+  describe('resyncProjectHistory', function () {
+    it('should add docs', function (done) {
+      const docId1 = new ObjectId()
+      const docId2 = new ObjectId()
+      const docs = [
+        { doc: { _id: docId1 }, path: 'main.tex' },
+        { doc: { _id: docId2 }, path: 'references.bib' },
+      ]
+      const files = []
+      this.request.yields(null, { statusCode: 200 })
+      const projectId = new ObjectId()
+      const projectHistoryId = 99
+      this.handler.resyncProjectHistory(
+        projectId,
+        projectHistoryId,
+        docs,
+        files,
+        {},
+        () => {
+          this.request.should.have.been.calledWith({
+            url: `${this.settings.apis.documentupdater.url}/project/${projectId}/history/resync`,
+            method: 'POST',
+            json: {
+              docs: [
+                { doc: docId1, path: 'main.tex' },
+                { doc: docId2, path: 'references.bib' },
+              ],
+              files: [],
+              projectHistoryId,
+            },
+            timeout: 6 * 60 * 1000,
+          })
+          done()
+        }
+      )
+    })
+
+    it('should add files', function (done) {
+      const fileId1 = new ObjectId()
+      const fileId2 = new ObjectId()
+      const fileId3 = new ObjectId()
+      const fileCreated2 = new Date()
+      const fileCreated3 = new Date()
+      const otherProjectId = new ObjectId().toString()
+      const files = [
+        { file: { _id: fileId1, hash: '42' }, path: '1.png' },
+        {
+          file: {
+            _id: fileId2,
+            hash: '1337',
+            created: fileCreated2,
+            linkedFileData: {
+              provider: 'references-provider',
+            },
+          },
+          path: '1.bib',
+        },
+        {
+          file: {
+            _id: fileId3,
+            hash: '21',
+            created: fileCreated3,
+            linkedFileData: {
+              provider: 'project_output_file',
+              build_id: '1234-abc',
+              clsiServerId: 'server-1',
+              source_project_id: otherProjectId,
+              source_output_file_path: 'foo/bar.txt',
+            },
+          },
+          path: 'bar.txt',
+        },
+      ]
+      const docs = []
+      this.request.yields(null, { statusCode: 200 })
+      const projectId = new ObjectId()
+      const projectHistoryId = 99
+      this.handler.resyncProjectHistory(
+        projectId,
+        projectHistoryId,
+        docs,
+        files,
+        {},
+        () => {
+          this.request.should.have.been.calledWith({
+            url: `${this.settings.apis.documentupdater.url}/project/${projectId}/history/resync`,
+            method: 'POST',
+            json: {
+              docs: [],
+              files: [
+                {
+                  file: fileId1,
+                  _hash: '42',
+                  path: '1.png',
+                  url: `http://filestore/project/${projectId}/file/${fileId1}`,
+                  metadata: undefined,
+                },
+                {
+                  file: fileId2,
+                  _hash: '1337',
+                  path: '1.bib',
+                  url: `http://filestore/project/${projectId}/file/${fileId2}`,
+                  metadata: {
+                    importedAt: fileCreated2,
+                    provider: 'references-provider',
+                  },
+                },
+                {
+                  file: fileId3,
+                  _hash: '21',
+                  path: 'bar.txt',
+                  url: `http://filestore/project/${projectId}/file/${fileId3}`,
+                  metadata: {
+                    importedAt: fileCreated3,
+                    provider: 'project_output_file',
+                    source_project_id: otherProjectId,
+                    source_output_file_path: 'foo/bar.txt',
+                    // build_id and clsiServerId are omitted
+                  },
+                },
+              ],
+              projectHistoryId,
+            },
+            timeout: 6 * 60 * 1000,
+          })
+          done()
+        }
+      )
+    })
+  })
 })
@@ -2012,17 +2012,15 @@ describe('ProjectEntityUpdateHandler', function () {
    })

    describe('a project with project-history enabled', function () {
-     beforeEach(function () {
-       this.ProjectGetter.getProject.yields(null, this.project)
-       const docs = [
-         { doc: { _id: docId, name: 'main.tex' }, path: 'main.tex' },
-       ]
+     const docs = [{ doc: { _id: docId, name: 'main.tex' }, path: 'main.tex' }]
      const files = [
        {
          file: { _id: fileId, name: 'universe.png', hash: '123456' },
          path: 'universe.png',
        },
      ]
+     beforeEach(function () {
+       this.ProjectGetter.getProject.yields(null, this.project)
        const folders = []
        this.ProjectEntityHandler.getAllEntitiesFromProject.returns({
          docs,
@@ -2051,20 +2049,6 @@ describe('ProjectEntityUpdateHandler', function () {
      })

      it('tells the doc updater to sync the project', function () {
-       const docs = [
-         {
-           doc: docId,
-           path: 'main.tex',
-         },
-       ]
-       const files = [
-         {
-           file: fileId,
-           path: 'universe.png',
-           url: `www.filestore.test/${projectId}/${fileId}`,
-           _hash: '123456',
-         },
-       ]
        this.DocumentUpdaterHandler.resyncProjectHistory
          .calledWith(projectId, projectHistoryId, docs, files)
          .should.equal(true)
@@ -2157,40 +2141,24 @@ describe('ProjectEntityUpdateHandler', function () {
      })

      it('tells the doc updater to resync the project', function () {
-       const docs = [
-         { doc: 'doc1', path: 'main.tex' },
-         { doc: 'doc2', path: 'a/b/c/duplicate.tex' },
-         { doc: 'doc3', path: 'a/b/c/duplicate.tex (1)' },
-         { doc: 'doc4', path: 'another dupe (22)' },
-         { doc: 'doc5', path: 'a/b/c/duplicate.tex (2)' },
-       ]
-       const urlPrefix = `www.filestore.test/${projectId}`
-       const files = [
-         {
-           file: 'file1',
-           path: 'image.jpg',
-           url: `${urlPrefix}/file1`,
-           _hash: 'hash1',
-         },
-         {
-           file: 'file2',
-           path: 'duplicate.jpg',
-           url: `${urlPrefix}/file2`,
-           _hash: 'hash2',
-         },
-         {
-           file: 'file3',
-           path: 'duplicate.jpg (1)',
-           url: `${urlPrefix}/file3`,
-           _hash: 'hash3',
-         },
-         {
-           file: 'file4',
-           path: 'another dupe (23)',
-           url: `${urlPrefix}/file4`,
-           _hash: 'hash4',
-         },
-       ]
+       const docs = this.docs.map(d => {
+         if (d.doc._id === 'doc3') {
+           return Object.assign({}, d, { path: 'a/b/c/duplicate.tex (1)' })
+         }
+         if (d.doc._id === 'doc5') {
+           return Object.assign({}, d, { path: 'a/b/c/duplicate.tex (2)' })
+         }
+         return d
+       })
+       const files = this.files.map(f => {
+         if (f.file._id === 'file3') {
+           return Object.assign({}, f, { path: 'duplicate.jpg (1)' })
+         }
+         if (f.file._id === 'file4') {
+           return Object.assign({}, f, { path: 'another dupe (23)' })
+         }
+         return f
+       })
        expect(
          this.DocumentUpdaterHandler.resyncProjectHistory
        ).to.have.been.calledWith(projectId, projectHistoryId, docs, files)
@@ -2262,25 +2230,24 @@ describe('ProjectEntityUpdateHandler', function () {
      })

      it('tells the doc updater to resync the project', function () {
-       const docs = [
-         { doc: 'doc1', path: 'a/b/c/_d_e_f_test.tex' },
-         { doc: 'doc2', path: 'a/untitled' },
-       ]
-       const urlPrefix = `www.filestore.test/${projectId}`
-       const files = [
-         {
-           file: 'file1',
-           path: 'A_.png',
-           url: `${urlPrefix}/file1`,
-           _hash: 'hash1',
-         },
-         {
-           file: 'file2',
-           path: 'A_.png (1)',
-           url: `${urlPrefix}/file2`,
-           _hash: 'hash2',
-         },
-       ]
+       const docs = this.docs.map(d => {
+         if (d.doc._id === 'doc1') {
+           return Object.assign({}, d, { path: 'a/b/c/_d_e_f_test.tex' })
+         }
+         if (d.doc._id === 'doc2') {
+           return Object.assign({}, d, { path: 'a/untitled' })
+         }
+         return d
+       })
+       const files = this.files.map(f => {
+         if (f.file._id === 'file1') {
+           return Object.assign({}, f, { path: 'A_.png' })
+         }
+         if (f.file._id === 'file2') {
+           return Object.assign({}, f, { path: 'A_.png (1)' })
+         }
+         return f
+       })
        expect(
          this.DocumentUpdaterHandler.resyncProjectHistory
        ).to.have.been.calledWith(projectId, projectHistoryId, docs, files)
@@ -2288,8 +2255,6 @@ describe('ProjectEntityUpdateHandler', function () {
    })

    describe('a project with a bad folder name', function () {
-     beforeEach(function (done) {
-       this.ProjectGetter.getProject.yields(null, this.project)
      const folders = [
        {
          folder: { _id: 'folder1', name: 'good' },
@@ -2311,6 +2276,8 @@ describe('ProjectEntityUpdateHandler', function () {
        },
      ]
      const files = []
+     beforeEach(function (done) {
+       this.ProjectGetter.getProject.yields(null, this.project)
        this.ProjectEntityHandler.getAllEntitiesFromProject.returns({
          docs,
          files,
@@ -2335,20 +2302,19 @@ describe('ProjectEntityUpdateHandler', function () {
      })

      it('tells the doc updater to resync the project', function () {
-       const docs = [
-         { doc: 'doc1', path: 'good/doc1.tex' },
-         { doc: 'doc2', path: 'bad_/doc2.tex' },
-       ]
-       const files = []
+       const fixedDocs = docs.map(d => {
+         if (d.doc._id === 'doc2') {
+           return Object.assign({}, d, { path: 'bad_/doc2.tex' })
+         }
+         return d
+       })
        expect(
          this.DocumentUpdaterHandler.resyncProjectHistory
-       ).to.have.been.calledWith(projectId, projectHistoryId, docs, files)
+       ).to.have.been.calledWith(projectId, projectHistoryId, fixedDocs, files)
      })
    })

    describe('a project with duplicate names between a folder and a doc', function () {
      beforeEach(function (done) {
        this.ProjectGetter.getProject.yields(null, this.project)
        const folders = [
          {
            folder: { _id: 'folder1', name: 'chapters' },
@@ -2366,6 +2332,8 @@ describe('ProjectEntityUpdateHandler', function () {
        },
      ]
      const files = []
+     beforeEach(function (done) {
+       this.ProjectGetter.getProject.yields(null, this.project)
        this.ProjectEntityHandler.getAllEntitiesFromProject.returns({
          docs,
          files,
@@ -2390,14 +2358,15 @@ describe('ProjectEntityUpdateHandler', function () {
      })

      it('tells the doc updater to resync the project', function () {
-       const docs = [
-         { doc: 'doc1', path: 'chapters (1)' },
-         { doc: 'doc2', path: 'chapters/chapter1.tex' },
-       ]
-       const files = []
+       const fixedDocs = docs.map(d => {
+         if (d.doc._id === 'doc1') {
+           return Object.assign({}, d, { path: 'chapters (1)' })
+         }
+         return d
+       })
        expect(
          this.DocumentUpdaterHandler.resyncProjectHistory
-       ).to.have.been.calledWith(projectId, projectHistoryId, docs, files)
+       ).to.have.been.calledWith(projectId, projectHistoryId, fixedDocs, files)
      })
    })