2019-05-29 05:21:06 -04:00
|
|
|
const _ = require('lodash')
|
|
|
|
const async = require('async')
|
|
|
|
const logger = require('logger-sharelatex')
|
2019-10-03 10:10:00 -04:00
|
|
|
const Settings = require('settings-sharelatex')
|
2019-11-18 09:03:04 -05:00
|
|
|
const Path = require('path')
|
2019-05-29 05:21:06 -04:00
|
|
|
const { Doc } = require('../../models/Doc')
|
|
|
|
const DocstoreManager = require('../Docstore/DocstoreManager')
|
|
|
|
const DocumentUpdaterHandler = require('../../Features/DocumentUpdater/DocumentUpdaterHandler')
|
|
|
|
const Errors = require('../Errors/Errors')
|
|
|
|
const { File } = require('../../models/File')
|
|
|
|
const FileStoreHandler = require('../FileStore/FileStoreHandler')
|
|
|
|
const LockManager = require('../../infrastructure/LockManager')
|
|
|
|
const { Project } = require('../../models/Project')
|
|
|
|
const ProjectEntityHandler = require('./ProjectEntityHandler')
|
|
|
|
const ProjectGetter = require('./ProjectGetter')
|
|
|
|
const ProjectLocator = require('./ProjectLocator')
|
|
|
|
const ProjectUpdateHandler = require('./ProjectUpdateHandler')
|
|
|
|
const ProjectEntityMongoUpdateHandler = require('./ProjectEntityMongoUpdateHandler')
|
|
|
|
const SafePath = require('./SafePath')
|
|
|
|
const TpdsUpdateSender = require('../ThirdPartyDataStore/TpdsUpdateSender')
|
|
|
|
|
|
|
|
// Redis lock namespace used by wrapWithLock below; one lock per project id.
const LOCK_NAMESPACE = 'sequentialProjectStructureUpdateLock'
// File extensions that may be chosen as a project's root document,
// taken from the deployment settings (e.g. tex, Rtex, ...).
const VALID_ROOT_DOC_EXTENSIONS = Settings.validRootDocExtensions
// Case-insensitive match against a file's extension (including the leading
// dot), e.g. '.tex' — built once at module load.
const VALID_ROOT_DOC_REGEXP = new RegExp(
  `^\\.(${VALID_ROOT_DOC_EXTENSIONS.join('|')})$`,
  'i'
)
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
// Wrap a project-structure method so that it runs under the project's
// sequential-update lock. Updates must be applied in mongo and sent to the
// doc-updater in the same order, so each locked method is serialized per
// project via LockManager.
//
// Accepts either a plain function (locked in its entirety) or an object
// { beforeLock, withLock } where beforeLock wraps a setup stage that runs
// outside the lock. The returned function exposes:
//   .withoutLock — the same task with no locking
//   .beforeLock / .mainTask — the two stages (object form only)
function wrapWithLock(methodWithoutLock) {
  // Split a callback-style rest-args array into [positional args, callback].
  // Mirrors the original behaviour for an empty rest (callback undefined).
  const splitRest = rest => {
    const count = Math.max(rest.length, 1)
    return [rest.slice(0, count - 1), rest[count - 1]]
  }

  // Produce a version of `task` that runs inside the per-project lock.
  const lockify = task => (projectId, ...rest) => {
    const [args, callback] = splitRest(rest)
    LockManager.runWithLock(
      LOCK_NAMESPACE,
      projectId,
      cb => task(projectId, ...args, cb),
      callback
    )
  }

  if (typeof methodWithoutLock === 'function') {
    const methodWithLock = lockify(methodWithoutLock)
    methodWithLock.withoutLock = methodWithoutLock
    return methodWithLock
  }

  // Object form: a setup stage (beforeLock) runs before the locked stage
  // (withLock) is entered.
  const wrapWithSetup = methodWithoutLock.beforeLock
  const mainTask = methodWithoutLock.withLock
  const methodWithLock = wrapWithSetup(lockify(mainTask))
  methodWithLock.withoutLock = wrapWithSetup(mainTask)
  methodWithLock.beforeLock = wrapWithSetup
  methodWithLock.mainTask = mainTask
  return methodWithLock
}
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
const ProjectEntityUpdateHandler = {
|
2019-05-29 05:21:06 -04:00
|
|
|
copyFileFromExistingProjectWithProject: wrapWithLock({
|
|
|
|
beforeLock(next) {
|
|
|
|
return function(
|
2019-11-18 09:03:04 -05:00
|
|
|
projectId,
|
2019-05-29 05:21:06 -04:00
|
|
|
project,
|
2019-11-18 09:03:04 -05:00
|
|
|
folderId,
|
|
|
|
originalProjectId,
|
|
|
|
originalFileRef,
|
2019-05-29 05:21:06 -04:00
|
|
|
userId,
|
|
|
|
callback
|
|
|
|
) {
|
|
|
|
logger.log(
|
2019-11-18 09:03:04 -05:00
|
|
|
{ projectId, folderId, originalProjectId, originalFileRef },
|
2019-05-29 05:21:06 -04:00
|
|
|
'copying file in s3 with project'
|
|
|
|
)
|
2019-11-21 07:44:39 -05:00
|
|
|
folderId = ProjectEntityMongoUpdateHandler._confirmFolder(
|
2019-05-29 05:21:06 -04:00
|
|
|
project,
|
2019-11-21 07:44:39 -05:00
|
|
|
folderId
|
|
|
|
)
|
|
|
|
if (originalFileRef == null) {
|
|
|
|
logger.err(
|
|
|
|
{ projectId, folderId, originalProjectId, originalFileRef },
|
|
|
|
'file trying to copy is null'
|
|
|
|
)
|
|
|
|
return callback()
|
|
|
|
}
|
|
|
|
// convert any invalid characters in original file to '_'
|
|
|
|
const fileProperties = {
|
|
|
|
name: SafePath.clean(originalFileRef.name)
|
|
|
|
}
|
|
|
|
if (originalFileRef.linkedFileData != null) {
|
|
|
|
fileProperties.linkedFileData = originalFileRef.linkedFileData
|
|
|
|
}
|
|
|
|
if (originalFileRef.hash != null) {
|
|
|
|
fileProperties.hash = originalFileRef.hash
|
|
|
|
}
|
|
|
|
const fileRef = new File(fileProperties)
|
|
|
|
FileStoreHandler.copyFile(
|
|
|
|
originalProjectId,
|
|
|
|
originalFileRef._id,
|
|
|
|
project._id,
|
|
|
|
fileRef._id,
|
|
|
|
(err, fileStoreUrl) => {
|
|
|
|
if (err != null) {
|
|
|
|
logger.warn(
|
|
|
|
{
|
|
|
|
err,
|
2019-11-18 09:03:04 -05:00
|
|
|
projectId,
|
|
|
|
folderId,
|
|
|
|
originalProjectId,
|
2019-11-21 07:44:39 -05:00
|
|
|
originalFileRef
|
|
|
|
},
|
|
|
|
'error coping file in s3'
|
|
|
|
)
|
|
|
|
return callback(err)
|
|
|
|
}
|
|
|
|
next(
|
|
|
|
projectId,
|
|
|
|
project,
|
|
|
|
folderId,
|
|
|
|
originalProjectId,
|
|
|
|
originalFileRef,
|
|
|
|
userId,
|
|
|
|
fileRef,
|
|
|
|
fileStoreUrl,
|
|
|
|
callback
|
2019-05-29 05:21:06 -04:00
|
|
|
)
|
|
|
|
}
|
|
|
|
)
|
|
|
|
}
|
|
|
|
},
|
|
|
|
withLock(
|
2019-11-18 09:03:04 -05:00
|
|
|
projectId,
|
2019-05-29 05:21:06 -04:00
|
|
|
project,
|
2019-11-18 09:03:04 -05:00
|
|
|
folderId,
|
|
|
|
originalProjectId,
|
|
|
|
originalFileRef,
|
2019-05-29 05:21:06 -04:00
|
|
|
userId,
|
|
|
|
fileRef,
|
|
|
|
fileStoreUrl,
|
|
|
|
callback
|
|
|
|
) {
|
2019-11-18 09:03:04 -05:00
|
|
|
const projectHistoryId =
|
|
|
|
project.overleaf &&
|
|
|
|
project.overleaf.history &&
|
|
|
|
project.overleaf.history.id
|
|
|
|
ProjectEntityMongoUpdateHandler._putElement(
|
2019-05-29 05:21:06 -04:00
|
|
|
project,
|
2019-11-18 09:03:04 -05:00
|
|
|
folderId,
|
2019-05-29 05:21:06 -04:00
|
|
|
fileRef,
|
|
|
|
'file',
|
2019-11-18 09:03:04 -05:00
|
|
|
(err, result, newProject) => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (err != null) {
|
2019-07-01 09:48:09 -04:00
|
|
|
logger.warn(
|
2019-11-18 09:03:04 -05:00
|
|
|
{ err, projectId, folderId },
|
2019-05-29 05:21:06 -04:00
|
|
|
'error putting element as part of copy'
|
|
|
|
)
|
|
|
|
return callback(err)
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
TpdsUpdateSender.addFile(
|
2019-05-29 05:21:06 -04:00
|
|
|
{
|
2019-11-18 09:03:04 -05:00
|
|
|
project_id: projectId,
|
2019-05-29 05:21:06 -04:00
|
|
|
file_id: fileRef._id,
|
2019-11-18 09:03:04 -05:00
|
|
|
path: result && result.path && result.path.fileSystem,
|
2019-05-29 05:21:06 -04:00
|
|
|
rev: fileRef.rev,
|
|
|
|
project_name: project.name
|
|
|
|
},
|
2019-11-18 09:03:04 -05:00
|
|
|
err => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (err != null) {
|
|
|
|
logger.err(
|
|
|
|
{
|
|
|
|
err,
|
2019-11-18 09:03:04 -05:00
|
|
|
projectId,
|
|
|
|
folderId,
|
|
|
|
originalProjectId,
|
|
|
|
originalFileRef
|
2019-05-29 05:21:06 -04:00
|
|
|
},
|
|
|
|
'error sending file to tpds worker'
|
|
|
|
)
|
|
|
|
}
|
|
|
|
const newFiles = [
|
|
|
|
{
|
|
|
|
file: fileRef,
|
2019-11-18 09:03:04 -05:00
|
|
|
path: result && result.path && result.path.fileSystem,
|
2019-05-29 05:21:06 -04:00
|
|
|
url: fileStoreUrl
|
|
|
|
}
|
|
|
|
]
|
2019-11-18 09:03:04 -05:00
|
|
|
DocumentUpdaterHandler.updateProjectStructure(
|
|
|
|
projectId,
|
2019-05-29 05:21:06 -04:00
|
|
|
projectHistoryId,
|
|
|
|
userId,
|
|
|
|
{ newFiles, newProject },
|
2019-11-18 09:03:04 -05:00
|
|
|
error => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
callback(null, fileRef, folderId)
|
2019-05-29 05:21:06 -04:00
|
|
|
}
|
|
|
|
)
|
|
|
|
}
|
|
|
|
)
|
|
|
|
}
|
|
|
|
)
|
|
|
|
}
|
|
|
|
}),
|
|
|
|
|
|
|
|
// Persist a doc's lines/version/ranges to the docstore on behalf of the
// doc-updater. Also works for docs that have been deleted from the project
// (so the doc-updater can flush them), in which case TPDS is not notified.
updateDocLines(
  projectId,
  docId,
  lines,
  version,
  ranges,
  lastUpdatedAt,
  lastUpdatedBy,
  callback
) {
  ProjectGetter.getProjectWithoutDocLines(projectId, (err, project) => {
    if (err != null) {
      return callback(err)
    }
    if (project == null) {
      return callback(new Errors.NotFoundError('project not found'))
    }
    logger.log({ projectId, docId }, 'updating doc lines')
    ProjectLocator.findElement(
      { project, element_id: docId, type: 'docs' },
      (err, doc, path) => {
        let isDeletedDoc = false
        if (err != null) {
          if (err instanceof Errors.NotFoundError) {
            // We need to be able to update the doclines of deleted docs. This is
            // so the doc-updater can flush a doc's content to the doc-store after
            // the doc is deleted.
            isDeletedDoc = true
            doc = _.find(
              project.deletedDocs,
              doc => doc._id.toString() === docId.toString()
            )
          } else {
            return callback(err)
          }
        }

        if (doc == null) {
          // Do not allow an update to a doc which has never exist on this project
          logger.warn(
            { docId, projectId },
            'doc not found while updating doc lines'
          )
          return callback(new Errors.NotFoundError('doc not found'))
        }

        logger.log(
          { projectId, docId },
          'telling docstore manager to update doc'
        )
        DocstoreManager.updateDoc(
          projectId,
          docId,
          lines,
          version,
          ranges,
          (err, modified, rev) => {
            if (err != null) {
              logger.warn(
                { err, docId, projectId },
                'error sending doc to docstore'
              )
              return callback(err)
            }
            logger.log(
              { projectId, docId, modified },
              'finished updating doc lines'
            )
            // path will only be present if the doc is not deleted
            if (modified && !isDeletedDoc) {
              // Don't need to block for marking as updated
              ProjectUpdateHandler.markAsUpdated(
                projectId,
                lastUpdatedAt,
                lastUpdatedBy
              )
              TpdsUpdateSender.addDoc(
                {
                  project_id: projectId,
                  path: path.fileSystem,
                  doc_id: docId,
                  project_name: project.name,
                  rev
                },
                callback
              )
            } else {
              callback()
            }
          }
        )
      }
    )
  })
},
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
// Set the project's root document to the given doc, after checking that the
// doc's file extension is valid for a root doc (see isPathValidForRootDoc).
setRootDoc(projectId, newRootDocID, callback) {
  logger.log({ projectId, rootDocId: newRootDocID }, 'setting root doc')
  if (projectId == null || newRootDocID == null) {
    return callback(
      new Errors.InvalidError('missing arguments (project or doc)')
    )
  }
  ProjectEntityHandler.getDocPathByProjectIdAndDocId(
    projectId,
    newRootDocID,
    (err, docPath) => {
      if (err != null) {
        return callback(err)
      }
      if (ProjectEntityUpdateHandler.isPathValidForRootDoc(docPath)) {
        Project.update(
          { _id: projectId },
          { rootDoc_id: newRootDocID },
          {},
          callback
        )
      } else {
        callback(
          new Errors.UnsupportedFileTypeError(
            'invalid file extension for root doc'
          )
        )
      }
    }
  )
},
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
// Clear the project's root document by unsetting rootDoc_id in mongo.
unsetRootDoc(projectId, callback) {
  logger.log({ projectId }, 'removing root doc')
  Project.update(
    { _id: projectId },
    { $unset: { rootDoc_id: true } },
    {},
    callback
  )
},
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
_addDocAndSendToTpds(projectId, folderId, doc, callback) {
|
|
|
|
ProjectEntityMongoUpdateHandler.addDoc(
|
|
|
|
projectId,
|
|
|
|
folderId,
|
2019-05-29 05:21:06 -04:00
|
|
|
doc,
|
2019-11-18 09:03:04 -05:00
|
|
|
(err, result, project) => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (err != null) {
|
2019-07-01 09:48:09 -04:00
|
|
|
logger.warn(
|
2019-05-29 05:21:06 -04:00
|
|
|
{
|
|
|
|
err,
|
2019-11-18 09:03:04 -05:00
|
|
|
projectId,
|
|
|
|
folderId,
|
2019-05-29 05:21:06 -04:00
|
|
|
doc_name: doc != null ? doc.name : undefined,
|
|
|
|
doc_id: doc != null ? doc._id : undefined
|
|
|
|
},
|
|
|
|
'error adding file with project'
|
|
|
|
)
|
|
|
|
return callback(err)
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
TpdsUpdateSender.addDoc(
|
2019-05-29 05:21:06 -04:00
|
|
|
{
|
2019-11-18 09:03:04 -05:00
|
|
|
project_id: projectId,
|
2019-05-29 05:21:06 -04:00
|
|
|
doc_id: doc != null ? doc._id : undefined,
|
2019-11-18 09:03:04 -05:00
|
|
|
path: result && result.path && result.path.fileSystem,
|
2019-05-29 05:21:06 -04:00
|
|
|
project_name: project.name,
|
|
|
|
rev: 0
|
|
|
|
},
|
2019-11-18 09:03:04 -05:00
|
|
|
err => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (err != null) {
|
|
|
|
return callback(err)
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
callback(null, result, project)
|
2019-05-29 05:21:06 -04:00
|
|
|
}
|
|
|
|
)
|
|
|
|
}
|
|
|
|
)
|
|
|
|
},
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
// Convenience wrapper: add a doc with no tracked-changes ranges.
// Delegates to addDocWithRanges with an empty ranges object.
addDoc(projectId, folderId, docName, docLines, userId, callback) {
  ProjectEntityUpdateHandler.addDocWithRanges(
    projectId,
    folderId,
    docName,
    docLines,
    {},
    userId,
    callback
  )
},
|
|
|
|
|
|
|
|
// Add a new doc (with ranges) to a folder.
// Setup stage (outside the lock): validate the name and create the doc in
// the docstore. Locked stage: add it to the project tree, notify TPDS and
// the doc-updater. Calls back with (err, doc, folderId).
addDocWithRanges: wrapWithLock({
  beforeLock(next) {
    return function(
      projectId,
      folderId,
      docName,
      docLines,
      ranges,
      userId,
      callback
    ) {
      if (!SafePath.isCleanFilename(docName)) {
        return callback(new Errors.InvalidNameError('invalid element name'))
      }
      // Put doc in docstore first, so that if it errors, we don't have a doc_id in the project
      // which hasn't been created in docstore.
      const doc = new Doc({ name: docName })
      DocstoreManager.updateDoc(
        projectId.toString(),
        doc._id.toString(),
        docLines,
        0,
        ranges,
        (err, modified, rev) => {
          if (err != null) {
            return callback(err)
          }
          next(
            projectId,
            folderId,
            doc,
            docName,
            docLines,
            ranges,
            userId,
            callback
          )
        }
      )
    }
  },
  withLock(
    projectId,
    folderId,
    doc,
    docName,
    docLines,
    ranges,
    userId,
    callback
  ) {
    ProjectEntityUpdateHandler._addDocAndSendToTpds(
      projectId,
      folderId,
      doc,
      (err, result, project) => {
        if (err != null) {
          return callback(err)
        }
        const docPath = result && result.path && result.path.fileSystem
        const projectHistoryId =
          project.overleaf &&
          project.overleaf.history &&
          project.overleaf.history.id
        const newDocs = [
          {
            doc,
            path: docPath,
            docLines: docLines.join('\n')
          }
        ]
        DocumentUpdaterHandler.updateProjectStructure(
          projectId,
          projectHistoryId,
          userId,
          { newDocs, newProject: project },
          error => {
            if (error != null) {
              return callback(error)
            }
            callback(null, doc, folderId)
          }
        )
      }
    )
  }
}),
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
// Upload a file from the local filesystem to the filestore (S3).
// Validates the filename only; does not touch the project tree.
// Calls back with (err, fileStoreUrl, fileRef).
_uploadFile(projectId, folderId, fileName, fsPath, linkedFileData, callback) {
  if (!SafePath.isCleanFilename(fileName)) {
    return callback(new Errors.InvalidNameError('invalid element name'))
  }
  const fileArgs = {
    name: fileName,
    linkedFileData
  }
  FileStoreHandler.uploadFileFromDisk(
    projectId,
    fileArgs,
    fsPath,
    (err, fileStoreUrl, fileRef) => {
      if (err != null) {
        logger.warn(
          { err, projectId, folderId, file_name: fileName, fileRef },
          'error uploading image to s3'
        )
        return callback(err)
      }
      callback(null, fileStoreUrl, fileRef)
    }
  )
},
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
// Insert an already-uploaded file into the project tree in mongo, then
// notify TPDS. Calls back with (err, result, project) where
// `result.path.fileSystem` is the file's path in the project tree.
_addFileAndSendToTpds(projectId, folderId, fileRef, callback) {
  ProjectEntityMongoUpdateHandler.addFile(
    projectId,
    folderId,
    fileRef,
    (err, result, project) => {
      if (err != null) {
        logger.warn(
          { err, projectId, folderId, file_name: fileRef.name, fileRef },
          'error adding file with project'
        )
        return callback(err)
      }
      TpdsUpdateSender.addFile(
        {
          project_id: projectId,
          file_id: fileRef._id,
          path: result && result.path && result.path.fileSystem,
          project_name: project.name,
          rev: fileRef.rev
        },
        err => {
          if (err != null) {
            return callback(err)
          }
          callback(null, result, project)
        }
      )
    }
  )
},
|
|
|
|
|
|
|
|
// Add a new binary file to a folder.
// Setup stage (outside the lock): validate the name and upload the bytes to
// the filestore. Locked stage: add the file to the project tree, notify
// TPDS and the doc-updater. Calls back with (err, fileRef, folderId).
addFile: wrapWithLock({
  beforeLock(next) {
    return function(
      projectId,
      folderId,
      fileName,
      fsPath,
      linkedFileData,
      userId,
      callback
    ) {
      if (!SafePath.isCleanFilename(fileName)) {
        return callback(new Errors.InvalidNameError('invalid element name'))
      }
      ProjectEntityUpdateHandler._uploadFile(
        projectId,
        folderId,
        fileName,
        fsPath,
        linkedFileData,
        (error, fileStoreUrl, fileRef) => {
          if (error != null) {
            return callback(error)
          }
          next(
            projectId,
            folderId,
            fileName,
            fsPath,
            linkedFileData,
            userId,
            fileRef,
            fileStoreUrl,
            callback
          )
        }
      )
    }
  },
  withLock(
    projectId,
    folderId,
    fileName,
    fsPath,
    linkedFileData,
    userId,
    fileRef,
    fileStoreUrl,
    callback
  ) {
    ProjectEntityUpdateHandler._addFileAndSendToTpds(
      projectId,
      folderId,
      fileRef,
      (err, result, project) => {
        if (err != null) {
          return callback(err)
        }
        const projectHistoryId =
          project.overleaf &&
          project.overleaf.history &&
          project.overleaf.history.id
        const newFiles = [
          {
            file: fileRef,
            path: result && result.path && result.path.fileSystem,
            url: fileStoreUrl
          }
        ]
        DocumentUpdaterHandler.updateProjectStructure(
          projectId,
          projectHistoryId,
          userId,
          { newFiles, newProject: project },
          error => {
            if (error != null) {
              return callback(error)
            }
            callback(null, fileRef, folderId)
          }
        )
      }
    )
  }
}),
|
|
|
|
|
|
|
|
// Replace an existing file's content with a newly uploaded file.
// Setup stage (outside the lock): upload the new bytes to the filestore
// under a placeholder name. Locked stage: swap the file ref in mongo, then
// notify TPDS and the doc-updater.
replaceFile: wrapWithLock({
  beforeLock(next) {
    return function(
      projectId,
      fileId,
      fsPath,
      linkedFileData,
      userId,
      callback
    ) {
      // create a new file
      const fileArgs = {
        name: 'dummy-upload-filename',
        linkedFileData
      }
      FileStoreHandler.uploadFileFromDisk(
        projectId,
        fileArgs,
        fsPath,
        (err, fileStoreUrl, fileRef) => {
          if (err != null) {
            return callback(err)
          }
          next(
            projectId,
            fileId,
            fsPath,
            linkedFileData,
            userId,
            fileRef,
            fileStoreUrl,
            callback
          )
        }
      )
    }
  },
  withLock(
    projectId,
    fileId,
    fsPath,
    linkedFileData,
    userId,
    newFileRef,
    fileStoreUrl,
    callback
  ) {
    ProjectEntityMongoUpdateHandler.replaceFileWithNew(
      projectId,
      fileId,
      newFileRef,
      (err, oldFileRef, project, path, newProject) => {
        if (err != null) {
          return callback(err)
        }
        const oldFiles = [
          {
            file: oldFileRef,
            path: path.fileSystem
          }
        ]
        const newFiles = [
          {
            file: newFileRef,
            path: path.fileSystem,
            url: fileStoreUrl
          }
        ]
        const projectHistoryId =
          project.overleaf &&
          project.overleaf.history &&
          project.overleaf.history.id
        // Increment the rev for an in-place update (with the same path) so the third-party-datastore
        // knows this is a new file.
        // Ideally we would get this from ProjectEntityMongoUpdateHandler.replaceFileWithNew
        // but it returns the original oldFileRef (after incrementing the rev value in mongo),
        // so we add 1 to the rev from that. This isn't atomic and relies on the lock
        // but it is acceptable for now.
        TpdsUpdateSender.addFile(
          {
            project_id: project._id,
            file_id: newFileRef._id,
            path: path.fileSystem,
            rev: oldFileRef.rev + 1,
            project_name: project.name
          },
          err => {
            if (err != null) {
              return callback(err)
            }
            DocumentUpdaterHandler.updateProjectStructure(
              projectId,
              projectHistoryId,
              userId,
              { oldFiles, newFiles, newProject },
              callback
            )
          }
        )
      }
    )
  }
}),
|
|
|
|
|
|
|
|
// Create a doc in the given folder, or overwrite an existing doc with the
// same name. Calls back with (err, doc, isNewDoc).
upsertDoc: wrapWithLock(function(
  projectId,
  folderId,
  docName,
  docLines,
  source,
  userId,
  callback
) {
  if (!SafePath.isCleanFilename(docName)) {
    return callback(new Errors.InvalidNameError('invalid element name'))
  }
  ProjectLocator.findElement(
    { project_id: projectId, element_id: folderId, type: 'folder' },
    (error, folder) => {
      if (error != null) {
        return callback(error)
      }
      if (folder == null) {
        return callback(new Error("Couldn't find folder"))
      }
      // Look for an existing doc with the same name in the target folder.
      let existingDoc = null
      for (let doc of folder.docs) {
        if (doc.name === docName) {
          existingDoc = doc
          break
        }
      }
      if (existingDoc != null) {
        // Overwrite: push the new content through the doc-updater, then
        // flush it to mongo so it is persisted immediately.
        DocumentUpdaterHandler.setDocument(
          projectId,
          existingDoc._id,
          userId,
          docLines,
          source,
          err => {
            if (err != null) {
              return callback(err)
            }
            logger.log(
              { projectId, docId: existingDoc._id },
              'notifying users that the document has been updated'
            )
            DocumentUpdaterHandler.flushDocToMongo(
              projectId,
              existingDoc._id,
              err => {
                if (err != null) {
                  return callback(err)
                }
                callback(null, existingDoc, existingDoc == null)
              }
            )
          }
        )
      } else {
        // No doc with this name yet: create it. We already hold the project
        // lock, so call the unlocked variant of addDocWithRanges.
        ProjectEntityUpdateHandler.addDocWithRanges.withoutLock(
          projectId,
          folderId,
          docName,
          docLines,
          {},
          userId,
          (err, doc) => {
            if (err != null) {
              return callback(err)
            }
            callback(null, doc, existingDoc == null)
          }
        )
      }
    }
  )
}),
|
|
|
|
|
|
|
|
// Create a file in the given folder, or replace an existing file with the
// same name. Setup stage (outside the lock): validate the name and upload
// the bytes. Locked stage: either replace the existing file or add a new
// one. Calls back with (err, newFileRef, isNewFile, existingFile).
upsertFile: wrapWithLock({
  beforeLock(next) {
    return function(
      projectId,
      folderId,
      fileName,
      fsPath,
      linkedFileData,
      userId,
      callback
    ) {
      if (!SafePath.isCleanFilename(fileName)) {
        return callback(new Errors.InvalidNameError('invalid element name'))
      }
      // create a new file
      const fileArgs = {
        name: fileName,
        linkedFileData
      }
      FileStoreHandler.uploadFileFromDisk(
        projectId,
        fileArgs,
        fsPath,
        (err, fileStoreUrl, fileRef) => {
          if (err != null) {
            return callback(err)
          }
          next(
            projectId,
            folderId,
            fileName,
            fsPath,
            linkedFileData,
            userId,
            fileRef,
            fileStoreUrl,
            callback
          )
        }
      )
    }
  },
  withLock(
    projectId,
    folderId,
    fileName,
    fsPath,
    linkedFileData,
    userId,
    newFileRef,
    fileStoreUrl,
    callback
  ) {
    ProjectLocator.findElement(
      { project_id: projectId, element_id: folderId, type: 'folder' },
      (error, folder) => {
        if (error != null) {
          return callback(error)
        }
        if (folder == null) {
          return callback(new Error("Couldn't find folder"))
        }
        // Look for an existing file with the same name in the target folder.
        let existingFile = null
        for (let fileRef of folder.fileRefs) {
          if (fileRef.name === fileName) {
            existingFile = fileRef
            break
          }
        }
        if (existingFile != null) {
          // this calls directly into the replaceFile main task (without the beforeLock part)
          return ProjectEntityUpdateHandler.replaceFile.mainTask(
            projectId,
            existingFile._id,
            fsPath,
            linkedFileData,
            userId,
            newFileRef,
            fileStoreUrl,
            err => {
              if (err != null) {
                return callback(err)
              }
              callback(null, newFileRef, existingFile == null, existingFile)
            }
          )
        } else {
          // this calls directly into the addFile main task (without the beforeLock part)
          ProjectEntityUpdateHandler.addFile.mainTask(
            projectId,
            folderId,
            fileName,
            fsPath,
            linkedFileData,
            userId,
            newFileRef,
            fileStoreUrl,
            err => {
              if (err != null) {
                return callback(err)
              }
              callback(null, newFileRef, existingFile == null, existingFile)
            }
          )
        }
      }
    )
  }
}),
|
|
|
|
|
|
|
|
// Upsert a doc addressed by its full path within the project: create any
// missing parent folders (mkdirp), then upsert the doc in the leaf folder.
// Calls back with (err, doc, isNewDoc, newFolders, folder).
upsertDocWithPath: wrapWithLock(function(
  projectId,
  elementPath,
  docLines,
  source,
  userId,
  callback
) {
  if (!SafePath.isCleanPath(elementPath)) {
    return callback(new Errors.InvalidNameError('invalid element name'))
  }
  const docName = Path.basename(elementPath)
  const folderPath = Path.dirname(elementPath)
  // We already hold the project lock, so use the unlocked variants below.
  ProjectEntityUpdateHandler.mkdirp.withoutLock(
    projectId,
    folderPath,
    (err, newFolders, folder) => {
      if (err != null) {
        return callback(err)
      }
      ProjectEntityUpdateHandler.upsertDoc.withoutLock(
        projectId,
        folder._id,
        docName,
        docLines,
        source,
        userId,
        (err, doc, isNewDoc) => {
          if (err != null) {
            return callback(err)
          }
          callback(null, doc, isNewDoc, newFolders, folder)
        }
      )
    }
  )
}),
|
|
|
|
|
|
|
|
  // Create (or overwrite) the file at `elementPath`, creating any missing
  // parent folders. The upload to the filestore happens in `beforeLock` so
  // that slow disk/network I/O is done before the project lock is taken;
  // the mongo/project-structure changes happen in `withLock`.
  upsertFileWithPath: wrapWithLock({
    beforeLock(next) {
      return function(
        projectId,
        elementPath,
        fsPath,
        linkedFileData,
        userId,
        callback
      ) {
        if (!SafePath.isCleanPath(elementPath)) {
          return callback(new Errors.InvalidNameError('invalid element name'))
        }
        const fileName = Path.basename(elementPath)
        const folderPath = Path.dirname(elementPath)
        // create a new file
        const fileArgs = {
          name: fileName,
          linkedFileData
        }
        // Upload the file content before acquiring the lock; the resulting
        // fileRef/fileStoreUrl are threaded through to the withLock phase.
        FileStoreHandler.uploadFileFromDisk(
          projectId,
          fileArgs,
          fsPath,
          (err, fileStoreUrl, fileRef) => {
            if (err != null) {
              return callback(err)
            }
            // NOTE: the argument order here must match the `withLock`
            // signature below exactly — arguments are passed positionally.
            next(
              projectId,
              folderPath,
              fileName,
              fsPath,
              linkedFileData,
              userId,
              fileRef,
              fileStoreUrl,
              callback
            )
          }
        )
      }
    },
    withLock(
      projectId,
      folderPath,
      fileName,
      fsPath,
      linkedFileData,
      userId,
      fileRef,
      fileStoreUrl,
      callback
    ) {
      // Ensure the parent folder chain exists, then insert/replace the file.
      ProjectEntityUpdateHandler.mkdirp.withoutLock(
        projectId,
        folderPath,
        (err, newFolders, folder) => {
          if (err != null) {
            return callback(err)
          }
          // this calls directly into the upsertFile main task (without the beforeLock part)
          ProjectEntityUpdateHandler.upsertFile.mainTask(
            projectId,
            folder._id,
            fileName,
            fsPath,
            linkedFileData,
            userId,
            fileRef,
            fileStoreUrl,
            (err, newFile, isNewFile, existingFile) => {
              if (err != null) {
                return callback(err)
              }
              // existingFile is the replaced file ref (or null when the file
              // was newly added); newFolders are any folders mkdirp created.
              callback(
                null,
                newFile,
                isNewFile,
                existingFile,
                newFolders,
                folder
              )
            }
          )
        }
      )
    }
  }),
|
|
|
|
|
|
|
|
deleteEntity: wrapWithLock(function(
|
2019-11-18 09:03:04 -05:00
|
|
|
projectId,
|
|
|
|
entityId,
|
2019-05-29 05:21:06 -04:00
|
|
|
entityType,
|
|
|
|
userId,
|
|
|
|
callback
|
|
|
|
) {
|
2019-11-18 09:03:04 -05:00
|
|
|
logger.log({ entityId, entityType, projectId }, 'deleting project entity')
|
2019-05-29 05:21:06 -04:00
|
|
|
if (entityType == null) {
|
2019-11-18 09:03:04 -05:00
|
|
|
logger.warn({ err: 'No entityType set', projectId, entityId })
|
2019-07-01 09:54:23 -04:00
|
|
|
return callback(new Error('No entityType set'))
|
2019-05-29 05:21:06 -04:00
|
|
|
}
|
|
|
|
entityType = entityType.toLowerCase()
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityMongoUpdateHandler.deleteEntity(
|
|
|
|
projectId,
|
|
|
|
entityId,
|
2019-05-29 05:21:06 -04:00
|
|
|
entityType,
|
2019-11-18 09:03:04 -05:00
|
|
|
(error, entity, path, projectBeforeDeletion, newProject) => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityUpdateHandler._cleanUpEntity(
|
2019-05-29 05:21:06 -04:00
|
|
|
projectBeforeDeletion,
|
|
|
|
newProject,
|
|
|
|
entity,
|
|
|
|
entityType,
|
|
|
|
path.fileSystem,
|
|
|
|
userId,
|
2019-11-18 09:03:04 -05:00
|
|
|
error => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
TpdsUpdateSender.deleteEntity(
|
2019-05-29 05:21:06 -04:00
|
|
|
{
|
2019-11-18 09:03:04 -05:00
|
|
|
project_id: projectId,
|
2019-05-29 05:21:06 -04:00
|
|
|
path: path.fileSystem,
|
|
|
|
project_name: projectBeforeDeletion.name
|
|
|
|
},
|
2019-11-18 09:03:04 -05:00
|
|
|
error => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
callback(null, entityId)
|
2019-05-29 05:21:06 -04:00
|
|
|
}
|
|
|
|
)
|
|
|
|
}
|
|
|
|
)
|
|
|
|
}
|
|
|
|
)
|
|
|
|
}),
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
deleteEntityWithPath: wrapWithLock((projectId, path, userId, callback) =>
|
|
|
|
ProjectLocator.findElementByPath(
|
|
|
|
{ project_id: projectId, path },
|
|
|
|
(err, element, type) => {
|
|
|
|
if (err != null) {
|
|
|
|
return callback(err)
|
|
|
|
}
|
|
|
|
if (element == null) {
|
|
|
|
return callback(new Errors.NotFoundError('project not found'))
|
|
|
|
}
|
|
|
|
ProjectEntityUpdateHandler.deleteEntity.withoutLock(
|
|
|
|
projectId,
|
|
|
|
element._id,
|
|
|
|
type,
|
|
|
|
userId,
|
|
|
|
callback
|
|
|
|
)
|
2019-05-29 05:21:06 -04:00
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
)
|
2019-05-29 05:21:06 -04:00
|
|
|
),
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
mkdirp: wrapWithLock(function(projectId, path, callback) {
|
|
|
|
for (let folder of path.split('/')) {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (folder.length > 0 && !SafePath.isCleanFilename(folder)) {
|
|
|
|
return callback(new Errors.InvalidNameError('invalid element name'))
|
|
|
|
}
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityMongoUpdateHandler.mkdirp(
|
|
|
|
projectId,
|
2019-05-29 05:21:06 -04:00
|
|
|
path,
|
|
|
|
{ exactCaseMatch: false },
|
|
|
|
callback
|
|
|
|
)
|
|
|
|
}),
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
mkdirpWithExactCase: wrapWithLock(function(projectId, path, callback) {
|
|
|
|
for (let folder of path.split('/')) {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (folder.length > 0 && !SafePath.isCleanFilename(folder)) {
|
|
|
|
return callback(new Errors.InvalidNameError('invalid element name'))
|
|
|
|
}
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityMongoUpdateHandler.mkdirp(
|
|
|
|
projectId,
|
2019-05-29 05:21:06 -04:00
|
|
|
path,
|
|
|
|
{ exactCaseMatch: true },
|
|
|
|
callback
|
|
|
|
)
|
|
|
|
}),
|
|
|
|
|
|
|
|
addFolder: wrapWithLock(function(
|
2019-11-18 09:03:04 -05:00
|
|
|
projectId,
|
|
|
|
parentFolderId,
|
2019-05-29 05:21:06 -04:00
|
|
|
folderName,
|
|
|
|
callback
|
|
|
|
) {
|
|
|
|
if (!SafePath.isCleanFilename(folderName)) {
|
|
|
|
return callback(new Errors.InvalidNameError('invalid element name'))
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityMongoUpdateHandler.addFolder(
|
|
|
|
projectId,
|
|
|
|
parentFolderId,
|
2019-05-29 05:21:06 -04:00
|
|
|
folderName,
|
|
|
|
callback
|
|
|
|
)
|
|
|
|
}),
|
|
|
|
|
|
|
|
  // Move a doc, file or folder into another folder. Updates mongo, notifies
  // the third-party datastore, and sends the structure change to the
  // docupdater.
  moveEntity: wrapWithLock(function(
    projectId,
    entityId,
    destFolderId,
    entityType,
    userId,
    callback
  ) {
    logger.log(
      { entityType, entityId, projectId, destFolderId },
      'moving entity'
    )
    if (entityType == null) {
      logger.warn({ err: 'No entityType set', projectId, entityId })
      return callback(new Error('No entityType set'))
    }
    entityType = entityType.toLowerCase()
    ProjectEntityMongoUpdateHandler.moveEntity(
      projectId,
      entityId,
      destFolderId,
      entityType,
      (err, project, startPath, endPath, rev, changes) => {
        if (err != null) {
          return callback(err)
        }
        const projectHistoryId =
          project.overleaf &&
          project.overleaf.history &&
          project.overleaf.history.id
        // NOTE(review): TpdsUpdateSender.moveEntity is invoked without a
        // callback, so any TPDS error is neither awaited nor propagated —
        // confirm this fire-and-forget behavior is intended.
        TpdsUpdateSender.moveEntity({
          project_id: projectId,
          project_name: project.name,
          startPath,
          endPath,
          rev
        })
        DocumentUpdaterHandler.updateProjectStructure(
          projectId,
          projectHistoryId,
          userId,
          changes,
          callback
        )
      }
    )
  }),
|
|
|
|
|
|
|
|
  // Rename a doc, file or folder in place. Updates mongo, notifies the
  // third-party datastore (as a move from the old path to the new one), and
  // sends the structure change to the docupdater.
  renameEntity: wrapWithLock(function(
    projectId,
    entityId,
    entityType,
    newName,
    userId,
    callback
  ) {
    if (!SafePath.isCleanFilename(newName)) {
      return callback(new Errors.InvalidNameError('invalid element name'))
    }
    logger.log({ entityId, projectId }, `renaming ${entityType}`)
    if (entityType == null) {
      logger.warn({ err: 'No entityType set', projectId, entityId })
      return callback(new Error('No entityType set'))
    }
    entityType = entityType.toLowerCase()

    ProjectEntityMongoUpdateHandler.renameEntity(
      projectId,
      entityId,
      entityType,
      newName,
      (err, project, startPath, endPath, rev, changes) => {
        if (err != null) {
          return callback(err)
        }
        const projectHistoryId =
          project.overleaf &&
          project.overleaf.history &&
          project.overleaf.history.id
        // NOTE(review): TpdsUpdateSender.moveEntity is invoked without a
        // callback, so any TPDS error is neither awaited nor propagated —
        // confirm this fire-and-forget behavior is intended.
        TpdsUpdateSender.moveEntity({
          project_id: projectId,
          project_name: project.name,
          startPath,
          endPath,
          rev
        })
        DocumentUpdaterHandler.updateProjectStructure(
          projectId,
          projectHistoryId,
          userId,
          changes,
          callback
        )
      }
    )
  }),
|
|
|
|
|
|
|
|
// This doesn't directly update project structure but we need to take the lock
|
|
|
|
// to prevent anything else being queued before the resync update
|
2019-11-18 09:03:04 -05:00
|
|
|
resyncProjectHistory: wrapWithLock((projectId, callback) =>
|
2019-05-29 05:21:06 -04:00
|
|
|
ProjectGetter.getProject(
|
2019-11-18 09:03:04 -05:00
|
|
|
projectId,
|
2019-05-29 05:21:06 -04:00
|
|
|
{ rootFolder: true, overleaf: true },
|
2019-11-18 09:03:04 -05:00
|
|
|
(error, project) => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
const projectHistoryId =
|
|
|
|
project &&
|
|
|
|
project.overleaf &&
|
|
|
|
project.overleaf.history &&
|
|
|
|
project.overleaf.history.id
|
2019-05-29 05:21:06 -04:00
|
|
|
if (projectHistoryId == null) {
|
|
|
|
error = new Errors.ProjectHistoryDisabledError(
|
2019-11-18 09:03:04 -05:00
|
|
|
`project history not enabled for ${projectId}`
|
2019-05-29 05:21:06 -04:00
|
|
|
)
|
|
|
|
return callback(error)
|
|
|
|
}
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityHandler.getAllEntitiesFromProject(
|
|
|
|
project,
|
|
|
|
(error, docs, files) => {
|
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
2019-05-29 05:21:06 -04:00
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
docs = _.map(docs, doc => ({
|
|
|
|
doc: doc.doc._id,
|
|
|
|
path: doc.path
|
|
|
|
}))
|
2019-05-29 05:21:06 -04:00
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
files = _.map(files, file => ({
|
|
|
|
file: file.file._id,
|
|
|
|
path: file.path,
|
|
|
|
url: FileStoreHandler._buildUrl(projectId, file.file._id)
|
|
|
|
}))
|
2019-05-29 05:21:06 -04:00
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
DocumentUpdaterHandler.resyncProjectHistory(
|
|
|
|
projectId,
|
|
|
|
projectHistoryId,
|
|
|
|
docs,
|
|
|
|
files,
|
|
|
|
callback
|
|
|
|
)
|
|
|
|
}
|
|
|
|
)
|
2019-05-29 05:21:06 -04:00
|
|
|
}
|
|
|
|
)
|
|
|
|
),
|
|
|
|
|
2019-10-03 10:10:00 -04:00
|
|
|
isPathValidForRootDoc(docPath) {
|
2019-11-18 09:03:04 -05:00
|
|
|
let docExtension = Path.extname(docPath)
|
|
|
|
return VALID_ROOT_DOC_REGEXP.test(docExtension)
|
2019-10-03 10:10:00 -04:00
|
|
|
},
|
|
|
|
|
2019-05-29 05:21:06 -04:00
|
|
|
_cleanUpEntity(
|
|
|
|
project,
|
|
|
|
newProject,
|
|
|
|
entity,
|
|
|
|
entityType,
|
|
|
|
path,
|
|
|
|
userId,
|
|
|
|
callback
|
|
|
|
) {
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityUpdateHandler._updateProjectStructureWithDeletedEntity(
|
2019-05-29 05:21:06 -04:00
|
|
|
project,
|
|
|
|
newProject,
|
|
|
|
entity,
|
|
|
|
entityType,
|
|
|
|
path,
|
|
|
|
userId,
|
2019-11-18 09:03:04 -05:00
|
|
|
error => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
|
|
|
if (entityType.indexOf('file') !== -1) {
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityUpdateHandler._cleanUpFile(
|
|
|
|
project,
|
|
|
|
entity,
|
|
|
|
path,
|
|
|
|
userId,
|
|
|
|
callback
|
|
|
|
)
|
2019-05-29 05:21:06 -04:00
|
|
|
} else if (entityType.indexOf('doc') !== -1) {
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityUpdateHandler._cleanUpDoc(
|
|
|
|
project,
|
|
|
|
entity,
|
|
|
|
path,
|
|
|
|
userId,
|
|
|
|
callback
|
|
|
|
)
|
2019-05-29 05:21:06 -04:00
|
|
|
} else if (entityType.indexOf('folder') !== -1) {
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityUpdateHandler._cleanUpFolder(
|
|
|
|
project,
|
|
|
|
entity,
|
|
|
|
path,
|
|
|
|
userId,
|
|
|
|
callback
|
|
|
|
)
|
2019-05-29 05:21:06 -04:00
|
|
|
} else {
|
2019-11-18 09:03:04 -05:00
|
|
|
callback()
|
2019-05-29 05:21:06 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
)
|
|
|
|
},
|
|
|
|
|
|
|
|
// Note: the _cleanUpEntity code and _updateProjectStructureWithDeletedEntity
|
|
|
|
// methods both need to recursively iterate over the entities in folder.
|
|
|
|
// These are currently using separate implementations of the recursion. In
|
|
|
|
// future, these could be simplified using a common project entity iterator.
|
|
|
|
_updateProjectStructureWithDeletedEntity(
|
|
|
|
project,
|
|
|
|
newProject,
|
|
|
|
entity,
|
|
|
|
entityType,
|
|
|
|
entityPath,
|
|
|
|
userId,
|
|
|
|
callback
|
|
|
|
) {
|
|
|
|
// compute the changes to the project structure
|
|
|
|
let changes
|
|
|
|
if (entityType.indexOf('file') !== -1) {
|
|
|
|
changes = { oldFiles: [{ file: entity, path: entityPath }] }
|
|
|
|
} else if (entityType.indexOf('doc') !== -1) {
|
|
|
|
changes = { oldDocs: [{ doc: entity, path: entityPath }] }
|
|
|
|
} else if (entityType.indexOf('folder') !== -1) {
|
|
|
|
changes = { oldDocs: [], oldFiles: [] }
|
2019-11-18 09:03:04 -05:00
|
|
|
const _recurseFolder = (folder, folderPath) => {
|
|
|
|
for (let doc of folder.docs) {
|
|
|
|
changes.oldDocs.push({ doc, path: Path.join(folderPath, doc.name) })
|
2019-05-29 05:21:06 -04:00
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
for (let file of folder.fileRefs) {
|
2019-05-29 05:21:06 -04:00
|
|
|
changes.oldFiles.push({
|
|
|
|
file,
|
2019-11-18 09:03:04 -05:00
|
|
|
path: Path.join(folderPath, file.name)
|
2019-05-29 05:21:06 -04:00
|
|
|
})
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
for (const childFolder of folder.folders) {
|
|
|
|
_recurseFolder(childFolder, Path.join(folderPath, childFolder.name))
|
|
|
|
}
|
2019-05-29 05:21:06 -04:00
|
|
|
}
|
|
|
|
_recurseFolder(entity, entityPath)
|
|
|
|
}
|
|
|
|
// now send the project structure changes to the docupdater
|
|
|
|
changes.newProject = newProject
|
2019-11-18 09:03:04 -05:00
|
|
|
const projectId = project._id.toString()
|
|
|
|
const projectHistoryId =
|
|
|
|
project.overleaf &&
|
|
|
|
project.overleaf.history &&
|
|
|
|
project.overleaf.history.id
|
|
|
|
DocumentUpdaterHandler.updateProjectStructure(
|
|
|
|
projectId,
|
2019-05-29 05:21:06 -04:00
|
|
|
projectHistoryId,
|
|
|
|
userId,
|
|
|
|
changes,
|
|
|
|
callback
|
|
|
|
)
|
|
|
|
},
|
|
|
|
|
|
|
|
_cleanUpDoc(project, doc, path, userId, callback) {
|
2019-11-18 09:03:04 -05:00
|
|
|
const projectId = project._id.toString()
|
|
|
|
const docId = doc._id.toString()
|
2019-05-29 05:21:06 -04:00
|
|
|
const unsetRootDocIfRequired = callback => {
|
|
|
|
if (
|
|
|
|
project.rootDoc_id != null &&
|
2019-11-18 09:03:04 -05:00
|
|
|
project.rootDoc_id.toString() === docId
|
2019-05-29 05:21:06 -04:00
|
|
|
) {
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityUpdateHandler.unsetRootDoc(projectId, callback)
|
2019-05-29 05:21:06 -04:00
|
|
|
} else {
|
2019-11-18 09:03:04 -05:00
|
|
|
callback()
|
2019-05-29 05:21:06 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
unsetRootDocIfRequired(error => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityMongoUpdateHandler._insertDeletedDocReference(
|
2019-05-29 05:21:06 -04:00
|
|
|
project._id,
|
|
|
|
doc,
|
2019-11-18 09:03:04 -05:00
|
|
|
error => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
DocumentUpdaterHandler.deleteDoc(projectId, docId, error => {
|
2019-05-29 05:21:06 -04:00
|
|
|
if (error != null) {
|
|
|
|
return callback(error)
|
|
|
|
}
|
2019-11-18 09:03:04 -05:00
|
|
|
DocstoreManager.deleteDoc(projectId, docId, callback)
|
2019-05-29 05:21:06 -04:00
|
|
|
})
|
|
|
|
}
|
|
|
|
)
|
|
|
|
})
|
|
|
|
},
|
|
|
|
|
|
|
|
_cleanUpFile(project, file, path, userId, callback) {
|
2019-11-18 09:03:04 -05:00
|
|
|
ProjectEntityMongoUpdateHandler._insertDeletedFileReference(
|
2019-05-29 05:21:06 -04:00
|
|
|
project._id,
|
|
|
|
file,
|
|
|
|
callback
|
|
|
|
)
|
|
|
|
},
|
|
|
|
|
|
|
|
_cleanUpFolder(project, folder, folderPath, userId, callback) {
|
|
|
|
const jobs = []
|
2019-11-18 09:03:04 -05:00
|
|
|
folder.docs.forEach(doc => {
|
|
|
|
const docPath = Path.join(folderPath, doc.name)
|
|
|
|
jobs.push(callback =>
|
|
|
|
ProjectEntityUpdateHandler._cleanUpDoc(
|
|
|
|
project,
|
|
|
|
doc,
|
|
|
|
docPath,
|
|
|
|
userId,
|
|
|
|
callback
|
2019-05-29 05:21:06 -04:00
|
|
|
)
|
2019-11-18 09:03:04 -05:00
|
|
|
)
|
|
|
|
})
|
2019-05-29 05:21:06 -04:00
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
folder.fileRefs.forEach(file => {
|
|
|
|
const filePath = Path.join(folderPath, file.name)
|
|
|
|
jobs.push(callback =>
|
|
|
|
ProjectEntityUpdateHandler._cleanUpFile(
|
|
|
|
project,
|
|
|
|
file,
|
|
|
|
filePath,
|
|
|
|
userId,
|
|
|
|
callback
|
2019-05-29 05:21:06 -04:00
|
|
|
)
|
2019-11-18 09:03:04 -05:00
|
|
|
)
|
|
|
|
})
|
2019-05-29 05:21:06 -04:00
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
folder.folders.forEach(childFolder => {
|
|
|
|
folderPath = Path.join(folderPath, childFolder.name)
|
|
|
|
jobs.push(callback =>
|
|
|
|
ProjectEntityUpdateHandler._cleanUpFolder(
|
|
|
|
project,
|
|
|
|
childFolder,
|
|
|
|
folderPath,
|
|
|
|
userId,
|
|
|
|
callback
|
2019-05-29 05:21:06 -04:00
|
|
|
)
|
2019-11-18 09:03:04 -05:00
|
|
|
)
|
|
|
|
})
|
2019-05-29 05:21:06 -04:00
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
async.series(jobs, callback)
|
2019-05-29 05:21:06 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-18 09:03:04 -05:00
|
|
|
// Expose the handler object as this module's public API.
module.exports = ProjectEntityUpdateHandler