Merge pull request #2827 from overleaf/em-faster-clones

Make a single Mongo update when cloning projects

GitOrigin-RevId: abd4069bd8854d84c413bc8f890583e647b7c18e
This commit is contained in:
Eric Mc Sween 2020-05-13 10:15:23 -04:00 committed by Copybot
parent 0e9e57cd7c
commit ae82366122
5 changed files with 382 additions and 731 deletions

View file

@ -1,16 +1,21 @@
const { callbackify, promisify } = require('util')
const { callbackify } = require('util')
const Path = require('path')
const OError = require('@overleaf/o-error')
const { promiseMapWithLimit } = require('../../util/promises')
const { Doc } = require('../../models/Doc')
const { File } = require('../../models/File')
const DocstoreManager = require('../Docstore/DocstoreManager')
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
const FileStoreHandler = require('../FileStore/FileStoreHandler')
const ProjectCreationHandler = require('./ProjectCreationHandler')
const ProjectDeleter = require('./ProjectDeleter')
const ProjectEntityMongoUpdateHandler = require('./ProjectEntityMongoUpdateHandler')
const ProjectEntityUpdateHandler = require('./ProjectEntityUpdateHandler')
const ProjectGetter = require('./ProjectGetter')
const ProjectLocator = require('./ProjectLocator')
const ProjectOptionsHandler = require('./ProjectOptionsHandler')
const ProjectDeleter = require('./ProjectDeleter')
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
const DocstoreManager = require('../Docstore/DocstoreManager')
const ProjectGetter = require('./ProjectGetter')
const _ = require('underscore')
const async = require('async')
const logger = require('logger-sharelatex')
const SafePath = require('./SafePath')
const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher')
module.exports = {
duplicate: callbackify(duplicate),
@ -19,179 +24,6 @@ module.exports = {
}
}
// Copy every doc in `originalFolder` into `desFolder` of the new project,
// one at a time (async.series). Doc contents are looked up in the
// `docContents` map, keyed by the original doc id string. If a copied doc
// has the same name as `originalRootDoc`, it becomes the new project's
// root doc (set at most once via _.once).
function _copyDocs(
  ownerId,
  newProject,
  originalRootDoc,
  originalFolder,
  desFolder,
  docContents,
  callback
) {
  // Guard so the root doc is only set once even if several docs match by name.
  // The setRootDoc result is deliberately ignored (fire-and-forget).
  const setRootDoc = _.once(docId => {
    ProjectEntityUpdateHandler.setRootDoc(newProject._id, docId, () => {})
  })
  const docs = originalFolder.docs || []
  const jobs = docs.map(
    doc =>
      function(cb) {
        if (doc == null || doc._id == null) {
          // NOTE(review): this invokes the outer `callback`, not `cb`, so a
          // single null doc ends the whole series early — presumably meant as
          // a skip; confirm this short-circuit is intentional.
          return callback()
        }
        const content = docContents[doc._id.toString()]
        ProjectEntityUpdateHandler.addDoc(
          newProject._id,
          desFolder._id,
          doc.name,
          content.lines,
          ownerId,
          function(err, newDoc) {
            if (err != null) {
              logger.warn({ err }, 'error copying doc')
              // First failure aborts the series via the outer callback.
              return callback(err)
            }
            if (
              originalRootDoc != null &&
              newDoc.name === originalRootDoc.name
            ) {
              setRootDoc(newDoc._id)
            }
            cb()
          }
        )
      }
  )
  async.series(jobs, callback)
}
// Copy every fileRef in `originalFolder` into `desFolder` of the new
// project, up to 5 copies in parallel. On the first failure the remaining
// not-yet-started jobs are skipped, but jobs already in flight are allowed
// to finish before the error is reported — see the comment above
// parallelLimit below for why.
function _copyFiles(
  ownerId,
  newProject,
  originalProjectId,
  originalFolder,
  desFolder,
  callback
) {
  const fileRefs = originalFolder.fileRefs || []
  let firstError = null // track first error to exit gracefully from parallel copy
  const jobs = fileRefs.map(
    file =>
      function(cb) {
        if (firstError != null) {
          // setImmediate keeps the skip asynchronous so the event loop is
          // not starved by a long run of synchronous cb() calls.
          return async.setImmediate(cb)
        } // skip further copies if an error has occurred
        ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject(
          newProject._id,
          newProject,
          desFolder._id,
          originalProjectId,
          file,
          ownerId,
          function(err) {
            if (err != null) {
              if (!firstError) {
                firstError = err
              }
            } // set the error flag if this copy failed
            // Always cb() without an error so parallelLimit keeps draining
            // in-flight jobs instead of aborting immediately.
            cb()
          }
        )
      }
  )
  // If one of these jobs fails then we wait until all running jobs have
  // finished, skipping those which have not started yet. We need to wait
  // for all the copy jobs to finish to avoid them writing to the project
  // entry in the background while we are deleting it.
  async.parallelLimit(jobs, 5, function(err) {
    if (firstError != null) {
      return callback(firstError)
    }
    if (err != null) {
      return callback(err)
    } // shouldn't happen
    callback()
  })
}
// Recursively copy `originalFolder` (from project `originalProjectId`) into
// `desFolder` of project `newProjectId`: first each child folder (depth
// first), then the folder's files, then its docs — all sequentially via
// async.series. The new project is re-fetched on every level so folder
// additions from deeper recursion are visible.
function _copyFolderRecursively(
  ownerId,
  newProjectId,
  originalProjectId,
  originalRootDoc,
  originalFolder,
  desFolder,
  docContents,
  callback
) {
  ProjectGetter.getProject(
    newProjectId,
    { rootFolder: true, name: true },
    function(err, newProject) {
      if (err != null) {
        logger.warn({ projectId: newProjectId }, 'could not get project')
        return callback(err)
      }
      const folders = originalFolder.folders || []
      const jobs = folders.map(
        childFolder =>
          function(cb) {
            // Tolerate null/corrupt folder entries by skipping them.
            if (childFolder == null || childFolder._id == null) {
              return cb()
            }
            ProjectEntityUpdateHandler.addFolder(
              newProject._id,
              // desFolder is undefined at the root level; addFolder then
              // targets the project root.
              desFolder != null ? desFolder._id : undefined,
              childFolder.name,
              function(err, newFolder) {
                if (err != null) {
                  return cb(err)
                }
                // Recurse into the freshly created folder.
                _copyFolderRecursively(
                  ownerId,
                  newProjectId,
                  originalProjectId,
                  originalRootDoc,
                  childFolder,
                  newFolder,
                  docContents,
                  cb
                )
              }
            )
          }
      )
      // After the subfolders: copy this folder's files, then its docs.
      jobs.push(cb =>
        _copyFiles(
          ownerId,
          newProject,
          originalProjectId,
          originalFolder,
          desFolder,
          cb
        )
      )
      jobs.push(cb =>
        _copyDocs(
          ownerId,
          newProject,
          originalRootDoc,
          originalFolder,
          desFolder,
          docContents,
          cb
        )
      )
      async.series(jobs, callback)
    }
  )
}
const _copyFolderRecursivelyAsync = promisify(_copyFolderRecursively)
async function duplicate(owner, originalProjectId, newProjectName) {
await DocumentUpdaterHandler.promises.flushProjectToMongo(originalProjectId)
const originalProject = await ProjectGetter.promises.getProject(
@ -202,19 +34,11 @@ async function duplicate(owner, originalProjectId, newProjectName) {
rootDoc_id: true
}
)
const {
element: originalRootDoc
} = await ProjectLocator.promises.findRootDoc({
const { path: rootDocPath } = await ProjectLocator.promises.findRootDoc({
project_id: originalProjectId
})
const docContentsArray = await DocstoreManager.promises.getAllDocs(
originalProjectId
)
const docContents = {}
for (const docContent of docContentsArray) {
docContents[docContent._id] = docContent
}
const originalEntries = _getFolderEntries(originalProject.rootFolder[0])
// Now create the new project, cleaning it up on failure if necessary
const newProject = await ProjectCreationHandler.promises.createBlankProject(
@ -227,15 +51,24 @@ async function duplicate(owner, originalProjectId, newProjectName) {
newProject._id,
originalProject.compiler
)
await _copyFolderRecursivelyAsync(
owner._id,
const [docEntries, fileEntries] = await Promise.all([
_copyDocs(originalEntries.docEntries, originalProject, newProject),
_copyFiles(originalEntries.fileEntries, originalProject, newProject)
])
const projectVersion = await ProjectEntityMongoUpdateHandler.promises.createNewFolderStructure(
newProject._id,
originalProjectId,
originalRootDoc,
originalProject.rootFolder[0],
newProject.rootFolder[0],
docContents
docEntries,
fileEntries
)
if (rootDocPath) {
await _setRootDoc(newProject._id, rootDocPath.fileSystem)
}
await _notifyDocumentUpdater(newProject, owner._id, {
newFiles: fileEntries,
newDocs: docEntries,
newProject: { version: projectVersion }
})
await TpdsProjectFlusher.promises.flushProjectToTpds(newProject._id)
} catch (err) {
// Clean up broken clone on error.
// Make sure we delete the new failed project, not the original one!
@ -251,3 +84,114 @@ async function duplicate(owner, originalProjectId, newProjectName) {
}
return newProject
}
/**
 * Flatten a project's folder tree into lists of doc and file entries.
 *
 * Null or id-less docs/files/folders are skipped (projects can contain such
 * corrupt entries).
 *
 * @param {Object} folder - a folder node from the project's rootFolder tree,
 *   with optional `docs`, `fileRefs` and `folders` arrays
 * @param {string} [folderPath='/'] - filesystem path of `folder`
 * @returns {{docEntries: Array<{doc: Object, path: string}>,
 *            fileEntries: Array<{file: Object, path: string}>}}
 */
function _getFolderEntries(folder, folderPath = '/') {
  const docEntries = []
  const fileEntries = []
  const docs = folder.docs || []
  const files = folder.fileRefs || []
  const subfolders = folder.folders || []
  for (const doc of docs) {
    if (doc == null || doc._id == null) {
      continue
    }
    docEntries.push({ doc, path: Path.join(folderPath, doc.name) })
  }
  for (const file of files) {
    if (file == null || file._id == null) {
      continue
    }
    fileEntries.push({ file, path: Path.join(folderPath, file.name) })
  }
  for (const subfolder of subfolders) {
    if (subfolder == null || subfolder._id == null) {
      continue
    }
    const subfolderPath = Path.join(folderPath, subfolder.name)
    const subfolderEntries = _getFolderEntries(subfolder, subfolderPath)
    // Bulk-append the recursion results instead of element-by-element loops
    docEntries.push(...subfolderEntries.docEntries)
    fileEntries.push(...subfolderEntries.fileEntries)
  }
  return { docEntries, fileEntries }
}
/**
 * Create a copy of each source doc in the target project's docstore.
 *
 * Copies are written sequentially. Each target entry gets a freshly minted
 * Doc (new id, same name), the original path, and the doc's lines joined
 * with newlines.
 *
 * @param {Array<{doc: Object, path: string}>} sourceEntries
 * @param {Object} sourceProject - project the docs are copied from
 * @param {Object} targetProject - project the docs are copied into
 * @returns {Promise<Array<{doc: Object, path: string, docLines: string}>>}
 */
async function _copyDocs(sourceEntries, sourceProject, targetProject) {
  const linesByDocId = await _getDocLinesForProject(sourceProject._id)
  const targetEntries = []
  for (const { doc: sourceDoc, path } of sourceEntries) {
    const newDoc = new Doc({ name: sourceDoc.name })
    const docLines = linesByDocId.get(sourceDoc._id.toString())
    await DocstoreManager.promises.updateDoc(
      targetProject._id.toString(),
      newDoc._id.toString(),
      docLines,
      0,
      {}
    )
    targetEntries.push({ doc: newDoc, path, docLines: docLines.join('\n') })
  }
  return targetEntries
}
/**
 * Fetch all docs for a project from the docstore and index their lines by
 * doc id.
 *
 * @param {string|Object} projectId
 * @returns {Promise<Map<string, Array<string>>>} doc id (string) -> lines
 */
async function _getDocLinesForProject(projectId) {
  const docs = await DocstoreManager.promises.getAllDocs(projectId)
  // Normalize keys to strings: callers look up with `doc._id.toString()`,
  // so ObjectId keys would silently never match (Map uses identity for
  // object keys).
  const docLinesById = new Map(docs.map(doc => [doc._id.toString(), doc.lines]))
  return docLinesById
}
/**
 * Copy each source file's filestore object into the target project, at most
 * 5 copies in flight at once.
 *
 * Each target entry gets a freshly minted File (new id, sanitized name,
 * carrying over linkedFileData/hash when present), the original path, and
 * the filestore URL of the new copy.
 *
 * @param {Array<{file: Object, path: string}>} sourceEntries
 * @param {Object} sourceProject - project the files are copied from
 * @param {Object} targetProject - project the files are copied into
 * @returns {Promise<Array<{file: Object, path: string, url: string}>>}
 */
async function _copyFiles(sourceEntries, sourceProject, targetProject) {
  return promiseMapWithLimit(5, sourceEntries, async sourceEntry => {
    const { file: sourceFile, path } = sourceEntry
    // convert any invalid characters in the original name to '_'
    const newFile = new File({ name: SafePath.clean(sourceFile.name) })
    if (sourceFile.linkedFileData != null) {
      newFile.linkedFileData = sourceFile.linkedFileData
    }
    if (sourceFile.hash != null) {
      newFile.hash = sourceFile.hash
    }
    const url = await FileStoreHandler.promises.copyFile(
      sourceProject._id,
      sourceFile._id,
      targetProject._id,
      newFile._id
    )
    return { file: newFile, path, url }
  })
}
/**
 * Set the project's root doc to the doc found at the given filesystem path.
 *
 * The lookup is case sensitive (exactCaseMatch).
 *
 * @param {string|Object} projectId
 * @param {string} path - filesystem path of the doc, e.g. '/main.tex'
 */
async function _setRootDoc(projectId, path) {
  const lookup = await ProjectLocator.promises.findElementByPath({
    project_id: projectId,
    path,
    exactCaseMatch: true
  })
  const rootDoc = lookup.element
  await ProjectEntityUpdateHandler.promises.setRootDoc(projectId, rootDoc._id)
}
/**
 * Tell the document updater about structural changes made to a project.
 *
 * @param {Object} project - must carry `_id`; `overleaf.history.id` is read
 *   when present to identify the project-history record
 * @param {string|Object} userId - user the changes are attributed to
 * @param {Object} changes - structure-change payload (newDocs/newFiles/...)
 */
async function _notifyDocumentUpdater(project, userId, changes) {
  const { overleaf } = project
  // Guarded lookup: projects without history metadata yield a falsy id.
  const projectHistoryId = overleaf && overleaf.history && overleaf.history.id
  await DocumentUpdaterHandler.promises.updateProjectStructure(
    project._id,
    projectHistoryId,
    userId,
    changes
  )
}

View file

@ -8,7 +8,6 @@ const { Doc } = require('../../models/Doc')
const DocstoreManager = require('../Docstore/DocstoreManager')
const DocumentUpdaterHandler = require('../../Features/DocumentUpdater/DocumentUpdaterHandler')
const Errors = require('../Errors/Errors')
const { File } = require('../../models/File')
const FileStoreHandler = require('../FileStore/FileStoreHandler')
const LockManager = require('../../infrastructure/LockManager')
const { Project } = require('../../models/Project')
@ -71,152 +70,6 @@ function wrapWithLock(methodWithoutLock) {
}
const ProjectEntityUpdateHandler = {
  // Copy a file from another project into `folderId` of `project`, including
  // the underlying file-store (S3) object, then record it in Mongo and
  // notify TPDS and the document updater.
  copyFileFromExistingProjectWithProject: wrapWithLock({
    // Runs BEFORE the project lock is taken: validates input and performs
    // the slow file-store copy so the lock is not held during S3 traffic.
    beforeLock(next) {
      return function(
        projectId,
        project,
        folderId,
        originalProjectId,
        originalFileRef,
        userId,
        callback
      ) {
        logger.log(
          { projectId, folderId, originalProjectId, originalFileRef },
          'copying file in s3 with project'
        )
        folderId = ProjectEntityMongoUpdateHandler._confirmFolder(
          project,
          folderId
        )
        if (originalFileRef == null) {
          // Null source ref: log and return without an error so one bad
          // entry does not abort the caller's whole copy.
          logger.err(
            { projectId, folderId, originalProjectId, originalFileRef },
            'file trying to copy is null'
          )
          return callback()
        }
        // convert any invalid characters in original file to '_'
        const fileProperties = {
          name: SafePath.clean(originalFileRef.name)
        }
        if (originalFileRef.linkedFileData != null) {
          fileProperties.linkedFileData = originalFileRef.linkedFileData
        }
        if (originalFileRef.hash != null) {
          fileProperties.hash = originalFileRef.hash
        }
        const fileRef = new File(fileProperties)
        FileStoreHandler.copyFile(
          originalProjectId,
          originalFileRef._id,
          project._id,
          fileRef._id,
          (err, fileStoreUrl) => {
            if (err != null) {
              logger.warn(
                {
                  err,
                  projectId,
                  folderId,
                  originalProjectId,
                  originalFileRef
                },
                'error coping file in s3'
              )
              return callback(err)
            }
            // Hand the new fileRef and its URL on to the locked phase.
            next(
              projectId,
              project,
              folderId,
              originalProjectId,
              originalFileRef,
              userId,
              fileRef,
              fileStoreUrl,
              callback
            )
          }
        )
      }
    },
    // Runs WITH the project lock held: writes the new file element to the
    // project in Mongo, then notifies TPDS and the document updater.
    withLock(
      projectId,
      project,
      folderId,
      originalProjectId,
      originalFileRef,
      userId,
      fileRef,
      fileStoreUrl,
      callback
    ) {
      const projectHistoryId =
        project.overleaf &&
        project.overleaf.history &&
        project.overleaf.history.id
      ProjectEntityMongoUpdateHandler._putElement(
        project,
        folderId,
        fileRef,
        'file',
        (err, result, newProject) => {
          if (err != null) {
            logger.warn(
              { err, projectId, folderId },
              'error putting element as part of copy'
            )
            return callback(err)
          }
          TpdsUpdateSender.addFile(
            {
              project_id: projectId,
              file_id: fileRef._id,
              path: result && result.path && result.path.fileSystem,
              rev: fileRef.rev,
              project_name: project.name
            },
            err => {
              if (err != null) {
                // TPDS failure is logged but NOT fatal: the copy continues.
                logger.err(
                  {
                    err,
                    projectId,
                    folderId,
                    originalProjectId,
                    originalFileRef
                  },
                  'error sending file to tpds worker'
                )
              }
              const newFiles = [
                {
                  file: fileRef,
                  path: result && result.path && result.path.fileSystem,
                  url: fileStoreUrl
                }
              ]
              DocumentUpdaterHandler.updateProjectStructure(
                projectId,
                projectHistoryId,
                userId,
                { newFiles, newProject },
                error => {
                  if (error != null) {
                    return callback(error)
                  }
                  // Success: yield the new fileRef and the resolved folderId.
                  callback(null, fileRef, folderId)
                }
              )
            }
          )
        }
      )
    }
  }),
updateDocLines(
projectId,
docId,
@ -1796,7 +1649,6 @@ module.exports = ProjectEntityUpdateHandler
module.exports.promises = promisifyAll(ProjectEntityUpdateHandler, {
without: ['isPathValidForRootDoc'],
multiResult: {
copyFileFromExistingProjectWithProject: ['fileRef', 'folderId'],
_addDocAndSendToTpds: ['result', 'project'],
addDoc: ['doc', 'folderId'],
addDocWithRanges: ['doc', 'folderId'],

View file

@ -342,7 +342,7 @@ describe('ProjectStructureChanges', function() {
expect(_.where(updates, { pathname: '/references.bib' }).length).to.equal(
1
)
expect(version).to.equal(3)
expect(version).to.equal(1)
})
it('should version the files created', function() {
@ -355,7 +355,7 @@ describe('ProjectStructureChanges', function() {
expect(update.userId).to.equal(owner._id)
expect(update.pathname).to.equal('/universe.jpg')
expect(update.url).to.be.a('string')
expect(version).to.equal(3)
expect(version).to.equal(1)
})
})

View file

@ -1,166 +1,200 @@
const { expect } = require('chai')
const sinon = require('sinon')
const SandboxedModule = require('sandboxed-module')
const { ObjectId } = require('mongodb')
const MODULE_PATH = '../../../../app/src/Features/Project/ProjectDuplicator.js'
describe('ProjectDuplicator', function() {
beforeEach(function() {
this.doc0 = { _id: 'doc0_id', name: 'rootDocHere' }
this.doc1 = { _id: 'doc1_id', name: 'level1folderDocName' }
this.doc2 = { _id: 'doc2_id', name: 'level2folderDocName' }
this.doc0Lines = ['zero']
this.doc1Lines = ['one']
this.doc2Lines = ['two']
this.file0 = { name: 'file0', _id: 'file0' }
this.file1 = { name: 'file1', _id: 'file1' }
this.file2 = {
name: 'file2',
_id: 'file2',
linkedFileData: { provider: 'url' },
hash: '123456'
}
this.level2folder = {
name: 'level2folderName',
_id: 'level2folderId',
docs: [
(this.doc2 = { _id: 'doc2_id', name: 'level2folderDocName' }),
undefined
],
docs: [this.doc2, undefined],
folders: [],
fileRefs: [{ name: 'file2', _id: 'file2' }]
fileRefs: [this.file2]
}
this.level1folder = {
name: 'level1folder',
_id: 'level1folderId',
docs: [(this.doc1 = { _id: 'doc1_id', name: 'level1folderDocName' })],
docs: [this.doc1],
folders: [this.level2folder],
fileRefs: [{ name: 'file1', _id: 'file1' }, null] // the null is intentional to test null docs/files
fileRefs: [this.file1, null] // the null is intentional to test null docs/files
}
this.rootFolder = {
name: 'rootFolder',
_id: 'rootFolderId',
docs: [(this.doc0 = { _id: 'doc0_id', name: 'rootDocHere' })],
docs: [this.doc0],
folders: [this.level1folder, {}],
fileRefs: [{ name: 'file0', _id: 'file0' }]
fileRefs: [this.file0]
}
this.project = {
_id: (this.old_project_id = 'this_is_the_old_project_id'),
rootDoc_id: 'rootDoc_id',
_id: 'this_is_the_old_project_id',
rootDoc_id: this.doc0._id,
rootFolder: [this.rootFolder],
compiler: 'this_is_a_Compiler'
}
this.doc0Path = '/rootDocHere'
this.doc1Path = '/level1folder/level1folderDocName'
this.doc2Path = '/level1folder/level2folderName/level2folderDocName'
this.file0Path = '/file0'
this.file1Path = '/level1folder/file1'
this.file2Path = '/level1folder/level2folderName/file2'
this.docContents = [
{
_id: this.doc0._id,
lines: (this.doc0_lines = ['zero'])
},
{
_id: this.doc1._id,
lines: (this.doc1_lines = ['one'])
},
{
_id: this.doc2._id,
lines: (this.doc2_lines = ['two'])
}
{ _id: this.doc0._id, lines: this.doc0Lines },
{ _id: this.doc1._id, lines: this.doc1Lines },
{ _id: this.doc2._id, lines: this.doc2Lines }
]
this.DocstoreManager = {
promises: {
getAllDocs: sinon.stub().resolves(this.docContents)
}
}
this.rootDoc = this.doc0
this.rootDocPath = '/rootDocHere'
this.owner = { _id: 'this_is_the_owner' }
this.stubbedNewProject = {
_id: (this.new_project_id = 'new_project_id'),
this.newBlankProject = {
_id: 'new_project_id',
overleaf: { history: { id: 339123 } },
readOnly_refs: [],
collaberator_refs: [],
rootFolder: [{ _id: 'new_root_folder_id' }]
}
this.foundRootDoc = { _id: 'rootDocId', name: 'rootDocHere' }
this.newFolder = { _id: 'newFolderId' }
this.filestoreUrl = 'filestore-url'
this.newProjectVersion = 2
this.newDocId = new ObjectId()
this.newFileId = new ObjectId()
this.newDoc0 = { ...this.doc0, _id: this.newDocId }
this.newDoc1 = { ...this.doc1, _id: this.newDocId }
this.newDoc2 = { ...this.doc2, _id: this.newDocId }
this.newFile0 = { ...this.file0, _id: this.newFileId }
this.newFile1 = { ...this.file1, _id: this.newFileId }
this.newFile2 = { ...this.file2, _id: this.newFileId }
this.docEntries = [
{
path: this.doc0Path,
doc: this.newDoc0,
docLines: this.doc0Lines.join('\n')
},
{
path: this.doc1Path,
doc: this.newDoc1,
docLines: this.doc1Lines.join('\n')
},
{
path: this.doc2Path,
doc: this.newDoc2,
docLines: this.doc2Lines.join('\n')
}
]
this.fileEntries = [
{
path: this.file0Path,
file: this.newFile0,
url: this.filestoreUrl
},
{
path: this.file1Path,
file: this.newFile1,
url: this.filestoreUrl
},
{
path: this.file2Path,
file: this.newFile2,
url: this.filestoreUrl
}
]
this.Doc = sinon
.stub()
.callsFake(props => ({ _id: this.newDocId, ...props }))
this.File = sinon
.stub()
.callsFake(props => ({ _id: this.newFileId, ...props }))
this.DocstoreManager = {
promises: {
updateDoc: sinon.stub().resolves(),
getAllDocs: sinon.stub().resolves(this.docContents)
}
}
this.DocumentUpdaterHandler = {
promises: {
flushProjectToMongo: sinon.stub().resolves(),
updateProjectStructure: sinon.stub().resolves()
}
}
this.FileStoreHandler = {
promises: {
copyFile: sinon.stub().resolves(this.filestoreUrl)
}
}
this.ProjectCreationHandler = {
promises: {
createBlankProject: sinon.stub().resolves(this.stubbedNewProject)
createBlankProject: sinon.stub().resolves(this.newBlankProject)
}
}
this.ProjectDeleter = {
promises: {
deleteProject: sinon.stub().resolves()
}
}
this.ProjectEntityMongoUpdateHandler = {
promises: {
createNewFolderStructure: sinon.stub().resolves(this.newProjectVersion)
}
}
this.ProjectEntityUpdateHandler = {
promises: {
setRootDoc: sinon.stub().resolves()
}
}
this.ProjectGetter = {
promises: {
getProject: sinon
.stub()
.withArgs(this.project._id)
.resolves(this.project)
}
}
this.newFolder = { _id: 'newFolderId' }
this.ProjectLocator = {
promises: {
findRootDoc: sinon
findRootDoc: sinon.stub().resolves({
element: this.rootDoc,
path: { fileSystem: this.rootDocPath }
}),
findElementByPath: sinon
.stub()
.resolves({ element: this.foundRootDoc, path: {} })
.withArgs({
project_id: this.newBlankProject._id,
path: this.rootDocPath,
exactCaseMatch: true
})
.resolves({ element: this.doc0 })
}
}
this.ProjectOptionsHandler = {
promises: {
setCompiler: sinon.stub().resolves()
}
}
this.ProjectEntityUpdateHandler = {
addDoc: sinon.stub().callsArgWith(5, null, { name: 'somDoc' }),
copyFileFromExistingProjectWithProject: sinon.stub(),
setRootDoc: sinon.stub(),
addFolder: sinon.stub().callsArgWith(3, null, this.newFolder)
}
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject
.withArgs(
sinon.match.any,
sinon.match.any,
sinon.match.any,
sinon.match.any,
'BROKEN-FILE',
sinon.match.any,
sinon.match.any
)
.callsArgWith(6, new Error('failed'))
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject
.withArgs(
sinon.match.any,
sinon.match.any,
sinon.match.any,
sinon.match.any,
sinon.match.object,
sinon.match.any
)
.callsArg(6)
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject
.withArgs(
sinon.match.any,
sinon.match.any,
sinon.match.any,
sinon.match.any,
null,
sinon.match.any
)
.callsArg(6)
this.DocumentUpdaterHandler = {
this.TpdsProjectFlusher = {
promises: {
flushProjectToMongo: sinon.stub().resolves()
}
}
this.Project = {
promises: {
findById: sinon.stub().resolves(this.project)
}
}
this.ProjectGetter = {
getProject: sinon.stub(),
promises: {
getProject: sinon.stub()
}
}
this.ProjectGetter.getProject
.withArgs(this.old_project_id, sinon.match.any)
.callsArgWith(2, null, this.project)
this.ProjectGetter.getProject
.withArgs(this.new_project_id, sinon.match.any)
.callsArgWith(2, null, this.stubbedNewProject)
this.ProjectGetter.promises.getProject
.withArgs(this.old_project_id, sinon.match.any)
.resolves(this.project)
this.ProjectGetter.promises.getProject
.withArgs(this.new_project_id, sinon.match.any)
.resolves(this.stubbedNewProject)
this.ProjectDeleter = {
promises: {
deleteProject: sinon.stub().resolves()
flushProjectToTpds: sinon.stub().resolves()
}
}
@ -169,16 +203,21 @@ describe('ProjectDuplicator', function() {
console: console
},
requires: {
'../../models/Project': { Project: this.Project },
'../../models/Doc': { Doc: this.Doc },
'../../models/File': { File: this.File },
'../Docstore/DocstoreManager': this.DocstoreManager,
'../DocumentUpdater/DocumentUpdaterHandler': this
.DocumentUpdaterHandler,
'../FileStore/FileStoreHandler': this.FileStoreHandler,
'./ProjectCreationHandler': this.ProjectCreationHandler,
'./ProjectEntityUpdateHandler': this.ProjectEntityUpdateHandler,
'./ProjectLocator': this.ProjectLocator,
'./ProjectDeleter': this.ProjectDeleter,
'./ProjectOptionsHandler': this.ProjectOptionsHandler,
'../Docstore/DocstoreManager': this.DocstoreManager,
'./ProjectEntityMongoUpdateHandler': this
.ProjectEntityMongoUpdateHandler,
'./ProjectEntityUpdateHandler': this.ProjectEntityUpdateHandler,
'./ProjectGetter': this.ProjectGetter,
'./ProjectLocator': this.ProjectLocator,
'./ProjectOptionsHandler': this.ProjectOptionsHandler,
'../ThirdPartyDataStore/TpdsProjectFlusher': this.TpdsProjectFlusher,
'logger-sharelatex': {
log() {},
warn() {},
@ -189,186 +228,125 @@ describe('ProjectDuplicator', function() {
})
describe('when the copy succeeds', function() {
it('should look up the original project', async function() {
const newProjectName = 'someProj'
await this.ProjectDuplicator.promises.duplicate(
beforeEach(async function() {
this.newProjectName = 'New project name'
this.newProject = await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
newProjectName
)
this.ProjectGetter.promises.getProject.should.have.been.calledWith(
this.old_project_id
this.project._id,
this.newProjectName
)
})
it('should flush the original project to mongo', async function() {
const newProjectName = 'someProj'
await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
newProjectName
)
it('should flush the original project to mongo', function() {
this.DocumentUpdaterHandler.promises.flushProjectToMongo.should.have.been.calledWith(
this.old_project_id
this.project._id
)
})
it('should create a blank project', async function() {
const newProjectName = 'someProj'
const newProject = await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
newProjectName
)
newProject._id.should.equal(this.stubbedNewProject._id)
it('should copy docs to docstore', function() {
for (const docLines of [this.doc0Lines, this.doc1Lines, this.doc2Lines]) {
this.DocstoreManager.promises.updateDoc.should.have.been.calledWith(
this.newProject._id.toString(),
this.newDocId.toString(),
docLines,
0,
{}
)
}
})
it('should copy files to the filestore', function() {
for (const file of [this.file0, this.file1, this.file2]) {
this.FileStoreHandler.promises.copyFile.should.have.been.calledWith(
this.project._id,
file._id,
this.newProject._id,
this.newFileId
)
}
})
it('should create a blank project', function() {
this.ProjectCreationHandler.promises.createBlankProject.should.have.been.calledWith(
this.owner._id,
newProjectName
this.newProjectName
)
this.newProject._id.should.equal(this.newBlankProject._id)
})
it('should use the same compiler', async function() {
this.ProjectEntityUpdateHandler.addDoc.callsArgWith(
5,
null,
this.rootFolder.docs[0],
this.owner._id
)
await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
''
)
it('should use the same compiler', function() {
this.ProjectOptionsHandler.promises.setCompiler.should.have.been.calledWith(
this.stubbedNewProject._id,
this.newProject._id,
this.project.compiler
)
})
it('should use the same root doc', async function() {
this.ProjectEntityUpdateHandler.addDoc.callsArgWith(
5,
null,
this.rootFolder.docs[0],
this.owner._id
)
await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
''
)
this.ProjectEntityUpdateHandler.setRootDoc.should.have.been.calledWith(
this.stubbedNewProject._id,
it('should use the same root doc', function() {
this.ProjectEntityUpdateHandler.promises.setRootDoc.should.have.been.calledWith(
this.newProject._id,
this.rootFolder.docs[0]._id
)
})
it('should not copy the collaberators or read only refs', async function() {
const newProject = await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
''
)
newProject.collaberator_refs.length.should.equal(0)
newProject.readOnly_refs.length.should.equal(0)
it('should not copy the collaborators or read only refs', function() {
this.newProject.collaberator_refs.length.should.equal(0)
this.newProject.readOnly_refs.length.should.equal(0)
})
it('should copy all the folders', async function() {
await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
''
)
this.ProjectEntityUpdateHandler.addFolder.should.have.been.calledWith(
this.new_project_id,
this.stubbedNewProject.rootFolder[0]._id,
this.level1folder.name
)
this.ProjectEntityUpdateHandler.addFolder.should.have.been.calledWith(
this.new_project_id,
this.newFolder._id,
this.level2folder.name
)
this.ProjectEntityUpdateHandler.addFolder.callCount.should.equal(2)
})
it('should copy all the docs', async function() {
await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
''
)
this.DocstoreManager.promises.getAllDocs.should.have.been.calledWith(
this.old_project_id
)
this.ProjectEntityUpdateHandler.addDoc.should.have.been.calledWith(
this.new_project_id,
this.stubbedNewProject.rootFolder[0]._id,
this.doc0.name,
this.doc0_lines,
this.owner._id
)
this.ProjectEntityUpdateHandler.addDoc.should.have.been.calledWith(
this.new_project_id,
this.newFolder._id,
this.doc1.name,
this.doc1_lines,
this.owner._id
)
this.ProjectEntityUpdateHandler.addDoc.should.have.been.calledWith(
this.new_project_id,
this.newFolder._id,
this.doc2.name,
this.doc2_lines,
this.owner._id
it('should copy all documents and files', function() {
this.ProjectEntityMongoUpdateHandler.promises.createNewFolderStructure.should.have.been.calledWith(
this.newProject._id,
this.docEntries,
this.fileEntries
)
})
it('should copy all the files', async function() {
await this.ProjectDuplicator.promises.duplicate(
it('should notify document updater of changes', function() {
this.DocumentUpdaterHandler.promises.updateProjectStructure.should.have.been.calledWith(
this.newProject._id,
this.newProject.overleaf.history.id,
this.owner._id,
{
newDocs: this.docEntries,
newFiles: this.fileEntries,
newProject: { version: this.newProjectVersion }
}
)
})
it('should flush the project to TPDS', function() {
this.TpdsProjectFlusher.promises.flushProjectToTpds.should.have.been.calledWith(
this.newProject._id
)
})
})
describe('without a root doc', function() {
beforeEach(async function() {
this.ProjectLocator.promises.findRootDoc.resolves({
element: null,
path: null
})
this.newProject = await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
''
)
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject.should.have.been.calledWith(
this.stubbedNewProject._id,
this.stubbedNewProject,
this.stubbedNewProject.rootFolder[0]._id,
this.project._id,
this.rootFolder.fileRefs[0],
this.owner._id
)
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject.should.have.been.calledWith(
this.stubbedNewProject._id,
this.stubbedNewProject,
this.newFolder._id,
this.project._id,
this.level1folder.fileRefs[0],
this.owner._id
)
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject.should.have.been.calledWith(
this.stubbedNewProject._id,
this.stubbedNewProject,
this.newFolder._id,
this.project._id,
this.level2folder.fileRefs[0],
this.owner._id
'Copy of project'
)
})
it('should not set the root doc on the copy', function() {
this.ProjectEntityUpdateHandler.promises.setRootDoc.should.not.have.been
.called
})
})
describe('when there is an error', function() {
beforeEach(async function() {
this.rootFolder.fileRefs = [
{ name: 'file0', _id: 'file0' },
'BROKEN-FILE',
{ name: 'file1', _id: 'file1' },
{ name: 'file2', _id: 'file2' }
]
this.ProjectEntityMongoUpdateHandler.promises.createNewFolderStructure.rejects()
await expect(
this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
this.project._id,
''
)
).to.be.rejected
@ -376,13 +354,13 @@ describe('ProjectDuplicator', function() {
it('should delete the broken cloned project', function() {
this.ProjectDeleter.promises.deleteProject.should.have.been.calledWith(
this.stubbedNewProject._id
this.newBlankProject._id
)
})
it('should not delete the original project', function() {
this.ProjectDeleter.promises.deleteProject.should.not.have.been.calledWith(
this.old_project_id
this.project._id
)
})
})

View file

@ -175,129 +175,6 @@ describe('ProjectEntityUpdateHandler', function() {
})
})
describe('copyFileFromExistingProjectWithProject', function() {
beforeEach(function() {
this.oldProjectId = '123kljadas'
this.oldFileRef = { name: this.fileName, _id: 'oldFileRef' }
this.ProjectEntityMongoUpdateHandler._confirmFolder.returns(folderId)
this.ProjectEntityMongoUpdateHandler._putElement.yields(null, {
path: { fileSystem: this.fileSystemPath }
})
this.FileStoreHandler.copyFile.yields(null, this.fileUrl)
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject(
this.project._id,
this.project,
folderId,
this.oldProjectId,
this.oldFileRef,
userId,
this.callback
)
})
it('should copy the file in FileStoreHandler', function() {
this.FileStoreHandler.copyFile
.calledWith(this.oldProjectId, this.oldFileRef._id, projectId, fileId)
.should.equal(true)
})
it('should put file into folder by calling put element', function() {
this.ProjectEntityMongoUpdateHandler._putElement
.calledWithMatch(
this.project,
folderId,
{ _id: fileId, name: this.fileName },
'file'
)
.should.equal(true)
})
it('should return doc and parent folder', function() {
this.callback
.calledWithMatch(null, { _id: fileId, name: this.fileName }, folderId)
.should.equal(true)
})
it('should call third party data store if versioning is enabled', function() {
this.TpdsUpdateSender.addFile
.calledWith({
project_id: projectId,
file_id: fileId,
path: this.fileSystemPath,
rev: 0,
project_name: this.project.name
})
.should.equal(true)
})
it('should should send the change in project structure to the doc updater', function() {
const changesMatcher = sinon.match(changes => {
const { newFiles } = changes
if (newFiles.length !== 1) {
return false
}
const newFile = newFiles[0]
return (
newFile.file._id === fileId &&
newFile.path === this.fileSystemPath &&
newFile.url === this.fileUrl
)
})
this.DocumentUpdaterHandler.updateProjectStructure
.calledWithMatch(projectId, projectHistoryId, userId, changesMatcher)
.should.equal(true)
})
})
describe('copyFileFromExistingProjectWithProject, with linkedFileData and hash', function() {
beforeEach(function() {
this.oldProjectId = '123kljadas'
this.oldFileRef = {
_id: 'oldFileRef',
name: this.fileName,
linkedFileData: this.linkedFileData,
hash: '123456'
}
this.ProjectEntityMongoUpdateHandler._confirmFolder.returns(folderId)
this.ProjectEntityMongoUpdateHandler._putElement.yields(null, {
path: { fileSystem: this.fileSystemPath }
})
this.FileStoreHandler.copyFile.yields(null, this.fileUrl)
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject(
this.project._id,
this.project,
folderId,
this.oldProjectId,
this.oldFileRef,
userId,
this.callback
)
})
it('should copy the file in FileStoreHandler', function() {
this.FileStoreHandler.copyFile
.calledWith(this.oldProjectId, this.oldFileRef._id, projectId, fileId)
.should.equal(true)
})
it('should put file into folder by calling put element, with the linkedFileData and hash', function() {
this.ProjectEntityMongoUpdateHandler._putElement
.calledWithMatch(
this.project,
folderId,
{
_id: fileId,
name: this.fileName,
linkedFileData: this.linkedFileData,
hash: '123456'
},
'file'
)
.should.equal(true)
})
})
describe('updateDocLines', function() {
beforeEach(function() {
this.path = '/somewhere/something.tex'