Merge pull request #2777 from overleaf/em-promisify-project-duplicator

Promisify ProjectDuplicator.duplicate()

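This change rewrites the module around an async duplicate() implementation. The public interface keeps the existing callback signature by wrapping the async function with util.callbackify, and exposes the promise-based version under a promises namespace; internally, the recursive folder-copy helper is wrapped with util.promisify so the new async code can await it. A minimal sketch of that dual-export pattern, using an illustrative function name (doWork is not part of this change):

const { callbackify } = require('util')

// Illustrative async implementation standing in for duplicate().
async function doWork(input) {
  return `processed ${input}`
}

module.exports = {
  // Legacy callers keep the (err, result) callback signature.
  doWork: callbackify(doWork),
  // New callers await the promise-based version directly.
  promises: { doWork }
}
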
GitOrigin-RevId: e7cfd8e3a9caca4608d0b57f6978474f3289f49e
Hugh O'Brien 2020-05-01 15:00:34 +01:00 committed by Copybot
parent 7a315bfa4d
commit 6d078c4528
3 changed files with 426 additions and 528 deletions

app/src/Features/Project/ProjectDuplicator.js

@@ -1,22 +1,10 @@
/* eslint-disable
camelcase,
max-len,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ProjectDuplicator
const projectCreationHandler = require('./ProjectCreationHandler')
const { callbackify, promisify } = require('util')
const OError = require('@overleaf/o-error')
const ProjectCreationHandler = require('./ProjectCreationHandler')
const ProjectEntityUpdateHandler = require('./ProjectEntityUpdateHandler')
const projectLocator = require('./ProjectLocator')
const projectOptionsHandler = require('./ProjectOptionsHandler')
const projectDeleter = require('./ProjectDeleter')
const ProjectLocator = require('./ProjectLocator')
const ProjectOptionsHandler = require('./ProjectOptionsHandler')
const ProjectDeleter = require('./ProjectDeleter')
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
const DocstoreManager = require('../Docstore/DocstoreManager')
const ProjectGetter = require('./ProjectGetter')
@@ -24,289 +12,242 @@ const _ = require('underscore')
const async = require('async')
const logger = require('logger-sharelatex')
module.exports = ProjectDuplicator = {
_copyDocs(
owner_id,
newProject,
originalRootDoc,
originalFolder,
desFolder,
docContents,
callback
) {
const setRootDoc = _.once(doc_id =>
ProjectEntityUpdateHandler.setRootDoc(newProject._id, doc_id, () => {})
)
const docs = originalFolder.docs || []
const jobs = docs.map(
doc =>
function(cb) {
if ((doc != null ? doc._id : undefined) == null) {
return callback()
}
const content = docContents[doc._id.toString()]
return ProjectEntityUpdateHandler.addDoc(
newProject._id,
desFolder._id,
doc.name,
content.lines,
owner_id,
function(err, newDoc) {
if (err != null) {
logger.warn({ err }, 'error copying doc')
return callback(err)
}
if (
originalRootDoc != null &&
newDoc.name === originalRootDoc.name
) {
setRootDoc(newDoc._id)
}
return cb()
}
)
}
)
return async.series(jobs, callback)
},
_copyFiles(
owner_id,
newProject,
originalProject_id,
originalFolder,
desFolder,
callback
) {
const fileRefs = originalFolder.fileRefs || []
let firstError = null // track first error to exit gracefully from parallel copy
const jobs = fileRefs.map(
file =>
function(cb) {
if (firstError != null) {
return async.setImmediate(cb)
} // skip further copies if an error has occurred
return ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject(
newProject._id,
newProject,
desFolder._id,
originalProject_id,
file,
owner_id,
function(err) {
if (err != null) {
if (!firstError) {
firstError = err
}
} // set the error flag if this copy failed
return cb()
}
)
}
)
// If one of these jobs fails then we wait until all running jobs have
// finished, skipping those which have not started yet. We need to wait
// for all the copy jobs to finish to avoid them writing to the project
// entry in the background while we are deleting it.
return async.parallelLimit(jobs, 5, function(err) {
if (firstError != null) {
return callback(firstError)
}
if (err != null) {
return callback(err)
} // shouldn't happen
return callback()
})
},
_copyFolderRecursivly(
owner_id,
newProject_id,
originalProject_id,
originalRootDoc,
originalFolder,
desFolder,
docContents,
callback
) {
return ProjectGetter.getProject(
newProject_id,
{ rootFolder: true, name: true },
function(err, newProject) {
if (err != null) {
logger.warn({ project_id: newProject_id }, 'could not get project')
return callback(err)
}
const folders = originalFolder.folders || []
const jobs = folders.map(
childFolder =>
function(cb) {
if ((childFolder != null ? childFolder._id : undefined) == null) {
return cb()
}
return ProjectEntityUpdateHandler.addFolder(
newProject._id,
desFolder != null ? desFolder._id : undefined,
childFolder.name,
function(err, newFolder) {
if (err != null) {
return cb(err)
}
return ProjectDuplicator._copyFolderRecursivly(
owner_id,
newProject_id,
originalProject_id,
originalRootDoc,
childFolder,
newFolder,
docContents,
cb
)
}
)
}
)
jobs.push(cb =>
ProjectDuplicator._copyFiles(
owner_id,
newProject,
originalProject_id,
originalFolder,
desFolder,
cb
)
)
jobs.push(cb =>
ProjectDuplicator._copyDocs(
owner_id,
newProject,
originalRootDoc,
originalFolder,
desFolder,
docContents,
cb
)
)
return async.series(jobs, callback)
}
)
},
duplicate(owner, originalProject_id, newProjectName, callback) {
const jobs = {
flush(cb) {
return DocumentUpdaterHandler.flushProjectToMongo(
originalProject_id,
cb
)
},
originalProject(cb) {
return ProjectGetter.getProject(
originalProject_id,
{ compiler: true, rootFolder: true, rootDoc_id: true },
cb
)
},
originalRootDoc(cb) {
return projectLocator.findRootDoc(
{ project_id: originalProject_id },
cb
)
},
docContentsArray(cb) {
return DocstoreManager.getAllDocs(originalProject_id, cb)
}
}
// Get the contents of the original project first
return async.series(jobs, function(err, results) {
if (err != null) {
logger.warn(
{ err, originalProject_id },
'error duplicating project reading original project'
)
return callback(err)
}
let { originalProject, originalRootDoc, docContentsArray } = results
originalRootDoc = originalRootDoc != null ? originalRootDoc[0] : undefined
const docContents = {}
for (let docContent of Array.from(docContentsArray)) {
docContents[docContent._id] = docContent
}
// Now create the new project, cleaning it up on failure if necessary
return projectCreationHandler.createBlankProject(
owner._id,
newProjectName,
function(err, newProject) {
if (err != null) {
logger.warn(
{ err, originalProject_id },
'error duplicating project when creating new project'
)
return callback(err)
}
const copyJobs = {
setCompiler(cb) {
return projectOptionsHandler.setCompiler(
newProject._id,
originalProject.compiler,
cb
)
},
copyFiles(cb) {
return ProjectDuplicator._copyFolderRecursivly(
owner._id,
newProject._id,
originalProject_id,
originalRootDoc,
originalProject.rootFolder[0],
newProject.rootFolder[0],
docContents,
cb
)
}
}
// Copy the contents of the original project into the new project
return async.series(copyJobs, function(err) {
if (err != null) {
logger.warn(
{
err,
originalProject_id,
newProjectName,
newProject_id: newProject._id
},
'error cloning project, will delete broken clone'
)
// Clean up broken clone on error.
// Make sure we delete the new failed project, not the original one!
return projectDeleter.deleteProject(newProject._id, function(
delete_err
) {
if (delete_err != null) {
logger.error(
{ newProject_id: newProject._id, delete_err },
'error deleting broken clone of project'
)
}
return callback(err)
})
} else {
return callback(null, newProject)
}
})
}
)
})
module.exports = {
duplicate: callbackify(duplicate),
promises: {
duplicate
}
}
function _copyDocs(
ownerId,
newProject,
originalRootDoc,
originalFolder,
desFolder,
docContents,
callback
) {
const setRootDoc = _.once(docId => {
ProjectEntityUpdateHandler.setRootDoc(newProject._id, docId, () => {})
})
const docs = originalFolder.docs || []
const jobs = docs.map(
doc =>
function(cb) {
if (doc == null || doc._id == null) {
return callback()
}
const content = docContents[doc._id.toString()]
ProjectEntityUpdateHandler.addDoc(
newProject._id,
desFolder._id,
doc.name,
content.lines,
ownerId,
function(err, newDoc) {
if (err != null) {
logger.warn({ err }, 'error copying doc')
return callback(err)
}
if (
originalRootDoc != null &&
newDoc.name === originalRootDoc.name
) {
setRootDoc(newDoc._id)
}
cb()
}
)
}
)
async.series(jobs, callback)
}
function _copyFiles(
ownerId,
newProject,
originalProjectId,
originalFolder,
desFolder,
callback
) {
const fileRefs = originalFolder.fileRefs || []
let firstError = null // track first error to exit gracefully from parallel copy
const jobs = fileRefs.map(
file =>
function(cb) {
if (firstError != null) {
return async.setImmediate(cb)
} // skip further copies if an error has occurred
ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject(
newProject._id,
newProject,
desFolder._id,
originalProjectId,
file,
ownerId,
function(err) {
if (err != null) {
if (!firstError) {
firstError = err
}
} // set the error flag if this copy failed
cb()
}
)
}
)
// If one of these jobs fails then we wait until all running jobs have
// finished, skipping those which have not started yet. We need to wait
// for all the copy jobs to finish to avoid them writing to the project
// entry in the background while we are deleting it.
async.parallelLimit(jobs, 5, function(err) {
if (firstError != null) {
return callback(firstError)
}
if (err != null) {
return callback(err)
} // shouldn't happen
callback()
})
}
function _copyFolderRecursively(
ownerId,
newProjectId,
originalProjectId,
originalRootDoc,
originalFolder,
desFolder,
docContents,
callback
) {
ProjectGetter.getProject(
newProjectId,
{ rootFolder: true, name: true },
function(err, newProject) {
if (err != null) {
logger.warn({ projectId: newProjectId }, 'could not get project')
return callback(err)
}
const folders = originalFolder.folders || []
const jobs = folders.map(
childFolder =>
function(cb) {
if (childFolder == null || childFolder._id == null) {
return cb()
}
ProjectEntityUpdateHandler.addFolder(
newProject._id,
desFolder != null ? desFolder._id : undefined,
childFolder.name,
function(err, newFolder) {
if (err != null) {
return cb(err)
}
_copyFolderRecursively(
ownerId,
newProjectId,
originalProjectId,
originalRootDoc,
childFolder,
newFolder,
docContents,
cb
)
}
)
}
)
jobs.push(cb =>
_copyFiles(
ownerId,
newProject,
originalProjectId,
originalFolder,
desFolder,
cb
)
)
jobs.push(cb =>
_copyDocs(
ownerId,
newProject,
originalRootDoc,
originalFolder,
desFolder,
docContents,
cb
)
)
async.series(jobs, callback)
}
)
}
const _copyFolderRecursivelyAsync = promisify(_copyFolderRecursively)
async function duplicate(owner, originalProjectId, newProjectName) {
await DocumentUpdaterHandler.promises.flushProjectToMongo(originalProjectId)
const originalProject = await ProjectGetter.promises.getProject(
originalProjectId,
{
compiler: true,
rootFolder: true,
rootDoc_id: true
}
)
const {
element: originalRootDoc
} = await ProjectLocator.promises.findRootDoc({
project_id: originalProjectId
})
const docContentsArray = await DocstoreManager.promises.getAllDocs(
originalProjectId
)
const docContents = {}
for (const docContent of docContentsArray) {
docContents[docContent._id] = docContent
}
// Now create the new project, cleaning it up on failure if necessary
const newProject = await ProjectCreationHandler.promises.createBlankProject(
owner._id,
newProjectName
)
try {
await ProjectOptionsHandler.promises.setCompiler(
newProject._id,
originalProject.compiler
)
await _copyFolderRecursivelyAsync(
owner._id,
newProject._id,
originalProjectId,
originalRootDoc,
originalProject.rootFolder[0],
newProject.rootFolder[0],
docContents
)
} catch (err) {
// Clean up broken clone on error.
// Make sure we delete the new failed project, not the original one!
await ProjectDeleter.promises.deleteProject(newProject._id)
throw new OError({
message: 'error cloning project, broken clone deleted',
info: {
originalProjectId,
newProjectName,
newProjectId: newProject._id
}
}).withCause(err)
}
return newProject
}

app/src/Features/Project/ProjectLocator.js

@@ -101,7 +101,7 @@ const ProjectLocator = {
findRootDoc(opts, callback) {
const getRootDoc = project => {
if (project.rootDoc_id != null) {
this.findElement(
ProjectLocator.findElement(
{ project, element_id: project.rootDoc_id, type: 'docs' },
(error, ...args) => {
if (error != null) {
@@ -323,6 +323,7 @@ module.exports = ProjectLocator
module.exports.promises = promisifyAll(ProjectLocator, {
multiResult: {
findElement: ['element', 'path', 'folder'],
findElementByPath: ['element', 'type']
findElementByPath: ['element', 'type'],
findRootDoc: ['element', 'path', 'folder']
}
})

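The ProjectLocator change above adds findRootDoc to the multiResult map so its promisified form resolves to an object with named element, path, and folder properties rather than a single value; the new duplicate() then destructures { element: originalRootDoc } from that result. A rough sketch of what such a multi-result wrapper does, as an approximation for illustration only (the real promisifyAll helper comes from the project's promise utilities and is not shown in this diff):

// Approximate behaviour of a multi-result promisification wrapper.
// `names` maps the callback's positional results to named properties.
function promisifyMultiResult(fn, names) {
  return (...args) =>
    new Promise((resolve, reject) => {
      fn(...args, (err, ...results) => {
        if (err) {
          return reject(err)
        }
        const out = {}
        names.forEach((name, i) => {
          out[name] = results[i]
        })
        resolve(out)
      })
    })
}

// Usage mirroring the new duplicate() code:
// const findRootDoc = promisifyMultiResult(ProjectLocator.findRootDoc, ['element', 'path', 'folder'])
// const { element: rootDoc } = await findRootDoc({ project_id })
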
test/unit/src/Project/ProjectDuplicatorTests.js

@@ -1,21 +1,9 @@
/* eslint-disable
handle-callback-err,
max-len,
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const { expect } = require('chai')
const sinon = require('sinon')
const chai = require('chai').should()
const modulePath = '../../../../app/src/Features/Project/ProjectDuplicator.js'
const SandboxedModule = require('sandboxed-module')
const MODULE_PATH = '../../../../app/src/Features/Project/ProjectDuplicator.js'
describe('ProjectDuplicator', function() {
beforeEach(function() {
this.level2folder = {
@@ -64,7 +52,9 @@ describe('ProjectDuplicator', function() {
}
]
this.DocstoreManager = {
getAllDocs: sinon.stub().callsArgWith(1, null, this.docContents)
promises: {
getAllDocs: sinon.stub().resolves(this.docContents)
}
}
this.owner = { _id: 'this_is_the_owner' }
@@ -76,19 +66,27 @@ describe('ProjectDuplicator', function() {
}
this.foundRootDoc = { _id: 'rootDocId', name: 'rootDocHere' }
this.creationHandler = {
createBlankProject: sinon
.stub()
.callsArgWith(2, null, this.stubbedNewProject)
this.ProjectCreationHandler = {
promises: {
createBlankProject: sinon.stub().resolves(this.stubbedNewProject)
}
}
this.newFolder = { _id: 'newFolderId' }
this.locator = {
findRootDoc: sinon.stub().callsArgWith(1, null, this.foundRootDoc, {})
this.ProjectLocator = {
promises: {
findRootDoc: sinon
.stub()
.resolves({ element: this.foundRootDoc, path: {} })
}
}
this.projectOptionsHandler = { setCompiler: sinon.stub().callsArg(2) }
this.ProjectOptionsHandler = {
promises: {
setCompiler: sinon.stub().resolves()
}
}
this.ProjectEntityUpdateHandler = {
addDoc: sinon.stub().callsArgWith(5, null, { name: 'somDoc' }),
copyFileFromExistingProjectWithProject: sinon.stub(),
@@ -129,14 +127,23 @@ describe('ProjectDuplicator', function() {
.callsArg(6)
this.DocumentUpdaterHandler = {
flushProjectToMongo: sinon.stub().callsArg(1)
promises: {
flushProjectToMongo: sinon.stub().resolves()
}
}
this.Project = {
findById: sinon.stub().callsArgWith(1, null, this.project)
promises: {
findById: sinon.stub().resolves(this.project)
}
}
this.ProjectGetter = { getProject: sinon.stub() }
this.ProjectGetter = {
getProject: sinon.stub(),
promises: {
getProject: sinon.stub()
}
}
this.ProjectGetter.getProject
.withArgs(this.old_project_id, sinon.match.any)
@@ -144,10 +151,20 @@ describe('ProjectDuplicator', function() {
this.ProjectGetter.getProject
.withArgs(this.new_project_id, sinon.match.any)
.callsArgWith(2, null, this.stubbedNewProject)
this.ProjectGetter.promises.getProject
.withArgs(this.old_project_id, sinon.match.any)
.resolves(this.project)
this.ProjectGetter.promises.getProject
.withArgs(this.new_project_id, sinon.match.any)
.resolves(this.stubbedNewProject)
this.ProjectDeleter = { deleteProject: sinon.stub().callsArgWith(1, null) }
this.ProjectDeleter = {
promises: {
deleteProject: sinon.stub().resolves()
}
}
return (this.duplicator = SandboxedModule.require(modulePath, {
this.ProjectDuplicator = SandboxedModule.require(MODULE_PATH, {
globals: {
console: console
},
@@ -155,11 +172,11 @@ describe('ProjectDuplicator', function() {
'../../models/Project': { Project: this.Project },
'../DocumentUpdater/DocumentUpdaterHandler': this
.DocumentUpdaterHandler,
'./ProjectCreationHandler': this.creationHandler,
'./ProjectCreationHandler': this.ProjectCreationHandler,
'./ProjectEntityUpdateHandler': this.ProjectEntityUpdateHandler,
'./ProjectLocator': this.locator,
'./ProjectLocator': this.ProjectLocator,
'./ProjectDeleter': this.ProjectDeleter,
'./ProjectOptionsHandler': this.projectOptionsHandler,
'./ProjectOptionsHandler': this.ProjectOptionsHandler,
'../Docstore/DocstoreManager': this.DocstoreManager,
'./ProjectGetter': this.ProjectGetter,
'logger-sharelatex': {
@@ -168,265 +185,204 @@ describe('ProjectDuplicator', function() {
err() {}
}
}
}))
})
})
describe('when the copy succeeds', function() {
it('should look up the original project', function(done) {
it('should look up the original project', async function() {
const newProjectName = 'someProj'
return this.duplicator.duplicate(
await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
newProjectName,
(err, newProject) => {
this.ProjectGetter.getProject
.calledWith(this.old_project_id)
.should.equal(true)
return done()
}
newProjectName
)
this.ProjectGetter.promises.getProject.should.have.been.calledWith(
this.old_project_id
)
})
it('should flush the original project to mongo', function(done) {
it('should flush the original project to mongo', async function() {
const newProjectName = 'someProj'
return this.duplicator.duplicate(
await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
newProjectName,
(err, newProject) => {
this.DocumentUpdaterHandler.flushProjectToMongo
.calledWith(this.old_project_id)
.should.equal(true)
return done()
}
newProjectName
)
this.DocumentUpdaterHandler.promises.flushProjectToMongo.should.have.been.calledWith(
this.old_project_id
)
})
it('should create a blank project', function(done) {
it('should create a blank project', async function() {
const newProjectName = 'someProj'
return this.duplicator.duplicate(
const newProject = await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
newProjectName,
(err, newProject) => {
newProject._id.should.equal(this.stubbedNewProject._id)
this.creationHandler.createBlankProject
.calledWith(this.owner._id, newProjectName)
.should.equal(true)
return done()
}
newProjectName
)
newProject._id.should.equal(this.stubbedNewProject._id)
this.ProjectCreationHandler.promises.createBlankProject.should.have.been.calledWith(
this.owner._id,
newProjectName
)
})
it('should use the same compiler', function(done) {
it('should use the same compiler', async function() {
this.ProjectEntityUpdateHandler.addDoc.callsArgWith(
5,
null,
this.rootFolder.docs[0],
this.owner._id
)
return this.duplicator.duplicate(
await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
'',
(err, newProject) => {
this.projectOptionsHandler.setCompiler
.calledWith(this.stubbedNewProject._id, this.project.compiler)
.should.equal(true)
return done()
}
''
)
this.ProjectOptionsHandler.promises.setCompiler.should.have.been.calledWith(
this.stubbedNewProject._id,
this.project.compiler
)
})
it('should use the same root doc', function(done) {
it('should use the same root doc', async function() {
this.ProjectEntityUpdateHandler.addDoc.callsArgWith(
5,
null,
this.rootFolder.docs[0],
this.owner._id
)
return this.duplicator.duplicate(
await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
'',
(err, newProject) => {
this.ProjectEntityUpdateHandler.setRootDoc
.calledWith(this.stubbedNewProject._id, this.rootFolder.docs[0]._id)
.should.equal(true)
return done()
}
''
)
this.ProjectEntityUpdateHandler.setRootDoc.should.have.been.calledWith(
this.stubbedNewProject._id,
this.rootFolder.docs[0]._id
)
})
it('should not copy the collaberators or read only refs', function(done) {
return this.duplicator.duplicate(
it('should not copy the collaberators or read only refs', async function() {
const newProject = await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
'',
(err, newProject) => {
newProject.collaberator_refs.length.should.equal(0)
newProject.readOnly_refs.length.should.equal(0)
return done()
}
''
)
newProject.collaberator_refs.length.should.equal(0)
newProject.readOnly_refs.length.should.equal(0)
})
it('should copy all the folders', async function() {
await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
''
)
this.ProjectEntityUpdateHandler.addFolder.should.have.been.calledWith(
this.new_project_id,
this.stubbedNewProject.rootFolder[0]._id,
this.level1folder.name
)
this.ProjectEntityUpdateHandler.addFolder.should.have.been.calledWith(
this.new_project_id,
this.newFolder._id,
this.level2folder.name
)
this.ProjectEntityUpdateHandler.addFolder.callCount.should.equal(2)
})
it('should copy all the docs', async function() {
await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
''
)
this.DocstoreManager.promises.getAllDocs.should.have.been.calledWith(
this.old_project_id
)
this.ProjectEntityUpdateHandler.addDoc.should.have.been.calledWith(
this.new_project_id,
this.stubbedNewProject.rootFolder[0]._id,
this.doc0.name,
this.doc0_lines,
this.owner._id
)
this.ProjectEntityUpdateHandler.addDoc.should.have.been.calledWith(
this.new_project_id,
this.newFolder._id,
this.doc1.name,
this.doc1_lines,
this.owner._id
)
this.ProjectEntityUpdateHandler.addDoc.should.have.been.calledWith(
this.new_project_id,
this.newFolder._id,
this.doc2.name,
this.doc2_lines,
this.owner._id
)
})
it('should copy all the folders', function(done) {
return this.duplicator.duplicate(
it('should copy all the files', async function() {
await this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
'',
(err, newProject) => {
this.ProjectEntityUpdateHandler.addFolder
.calledWith(
this.new_project_id,
this.stubbedNewProject.rootFolder[0]._id,
this.level1folder.name
)
.should.equal(true)
this.ProjectEntityUpdateHandler.addFolder
.calledWith(
this.new_project_id,
this.newFolder._id,
this.level2folder.name
)
.should.equal(true)
this.ProjectEntityUpdateHandler.addFolder.callCount.should.equal(2)
return done()
}
''
)
})
it('should copy all the docs', function(done) {
return this.duplicator.duplicate(
this.owner,
this.old_project_id,
'',
(err, newProject) => {
this.DocstoreManager.getAllDocs
.calledWith(this.old_project_id)
.should.equal(true)
this.ProjectEntityUpdateHandler.addDoc
.calledWith(
this.new_project_id,
this.stubbedNewProject.rootFolder[0]._id,
this.doc0.name,
this.doc0_lines,
this.owner._id
)
.should.equal(true)
this.ProjectEntityUpdateHandler.addDoc
.calledWith(
this.new_project_id,
this.newFolder._id,
this.doc1.name,
this.doc1_lines,
this.owner._id
)
.should.equal(true)
this.ProjectEntityUpdateHandler.addDoc
.calledWith(
this.new_project_id,
this.newFolder._id,
this.doc2.name,
this.doc2_lines,
this.owner._id
)
.should.equal(true)
return done()
}
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject.should.have.been.calledWith(
this.stubbedNewProject._id,
this.stubbedNewProject,
this.stubbedNewProject.rootFolder[0]._id,
this.project._id,
this.rootFolder.fileRefs[0],
this.owner._id
)
})
it('should copy all the files', function(done) {
return this.duplicator.duplicate(
this.owner,
this.old_project_id,
'',
(err, newProject) => {
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject
.calledWith(
this.stubbedNewProject._id,
this.stubbedNewProject,
this.stubbedNewProject.rootFolder[0]._id,
this.project._id,
this.rootFolder.fileRefs[0],
this.owner._id
)
.should.equal(true)
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject
.calledWith(
this.stubbedNewProject._id,
this.stubbedNewProject,
this.newFolder._id,
this.project._id,
this.level1folder.fileRefs[0],
this.owner._id
)
.should.equal(true)
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject
.calledWith(
this.stubbedNewProject._id,
this.stubbedNewProject,
this.newFolder._id,
this.project._id,
this.level2folder.fileRefs[0],
this.owner._id
)
.should.equal(true)
return done()
}
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject.should.have.been.calledWith(
this.stubbedNewProject._id,
this.stubbedNewProject,
this.newFolder._id,
this.project._id,
this.level1folder.fileRefs[0],
this.owner._id
)
this.ProjectEntityUpdateHandler.copyFileFromExistingProjectWithProject.should.have.been.calledWith(
this.stubbedNewProject._id,
this.stubbedNewProject,
this.newFolder._id,
this.project._id,
this.level2folder.fileRefs[0],
this.owner._id
)
})
})
describe('when there is an error', function() {
beforeEach(function() {
return (this.rootFolder.fileRefs = [
beforeEach(async function() {
this.rootFolder.fileRefs = [
{ name: 'file0', _id: 'file0' },
'BROKEN-FILE',
{ name: 'file1', _id: 'file1' },
{ name: 'file2', _id: 'file2' }
])
]
await expect(
this.ProjectDuplicator.promises.duplicate(
this.owner,
this.old_project_id,
''
)
).to.be.rejected
})
it('should delete the broken cloned project', function(done) {
return this.duplicator.duplicate(
this.owner,
this.old_project_id,
'',
(err, newProject) => {
this.ProjectDeleter.deleteProject
.calledWith(this.stubbedNewProject._id)
.should.equal(true)
return done()
}
it('should delete the broken cloned project', function() {
this.ProjectDeleter.promises.deleteProject.should.have.been.calledWith(
this.stubbedNewProject._id
)
})
it('should not delete the original project', function(done) {
return this.duplicator.duplicate(
this.owner,
this.old_project_id,
'',
(err, newProject) => {
this.ProjectDeleter.deleteProject
.calledWith(this.old_project_id)
.should.equal(false)
return done()
}
)
})
it('should return an error', function(done) {
return this.duplicator.duplicate(
this.owner,
this.old_project_id,
'',
(err, newProject) => {
err.should.not.equal(null)
return done()
}
it('should not delete the original project', function() {
this.ProjectDeleter.promises.deleteProject.should.not.have.been.calledWith(
this.old_project_id
)
})
})
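
One note on the converted error-case tests: the beforeEach now awaits the failing call with expect(...).to.be.rejected, which relies on a promise-aware chai plugin (typically chai-as-promised) being registered in the shared test setup; that registration is not part of this diff. A minimal, self-contained example of the same assertion style, assuming chai-as-promised is available:

const chai = require('chai')
const chaiAsPromised = require('chai-as-promised')
chai.use(chaiAsPromised)
const { expect } = chai

describe('a promise-based duplicate()', function() {
  it('rejects when the copy fails', async function() {
    const duplicate = async () => {
      throw new Error('copy failed')
    }
    // Resolves only if the awaited promise rejects; otherwise the test fails.
    await expect(duplicate()).to.be.rejected
  })
})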