const _ = require('lodash')
const OError = require('@overleaf/o-error')
const async = require('async')
const logger = require('logger-sharelatex')
const Settings = require('settings-sharelatex')
const Path = require('path')
const fs = require('fs')
const { Doc } = require('../../models/Doc')
const DocstoreManager = require('../Docstore/DocstoreManager')
const DocumentUpdaterHandler = require('../../Features/DocumentUpdater/DocumentUpdaterHandler')
const Errors = require('../Errors/Errors')
const FileStoreHandler = require('../FileStore/FileStoreHandler')
const LockManager = require('../../infrastructure/LockManager')
const { Project } = require('../../models/Project')
const ProjectEntityHandler = require('./ProjectEntityHandler')
const ProjectGetter = require('./ProjectGetter')
const ProjectLocator = require('./ProjectLocator')
const ProjectUpdateHandler = require('./ProjectUpdateHandler')
const ProjectEntityMongoUpdateHandler = require('./ProjectEntityMongoUpdateHandler')
const SafePath = require('./SafePath')
const TpdsUpdateSender = require('../ThirdPartyDataStore/TpdsUpdateSender')
const FileWriter = require('../../infrastructure/FileWriter')
const EditorRealTimeController = require('../Editor/EditorRealTimeController')
const { promisifyAll } = require('../../util/promises')

const LOCK_NAMESPACE = 'sequentialProjectStructureUpdateLock'
const VALID_ROOT_DOC_EXTENSIONS = Settings.validRootDocExtensions
const VALID_ROOT_DOC_REGEXP = new RegExp(
  `^\\.(${VALID_ROOT_DOC_EXTENSIONS.join('|')})$`,
  'i'
)

function wrapWithLock(methodWithoutLock) {
  // This lock is used to make sure that the project structure updates are made
  // sequentially. In particular the updates must be made in mongo and sent to
  // the doc-updater in the same order.
  if (typeof methodWithoutLock === 'function') {
    const methodWithLock = (projectId, ...rest) => {
      const adjustedLength = Math.max(rest.length, 1)
      const args = rest.slice(0, adjustedLength - 1)
      const callback = rest[adjustedLength - 1]
      LockManager.runWithLock(
        LOCK_NAMESPACE,
        projectId,
        cb => methodWithoutLock(projectId, ...args, cb),
        callback
      )
    }
    methodWithLock.withoutLock = methodWithoutLock
    return methodWithLock
  } else {
    // handle case with separate setup and locked stages
    const wrapWithSetup = methodWithoutLock.beforeLock // a function to set things up before the lock
    const mainTask = methodWithoutLock.withLock // function to execute inside the lock
    const methodWithLock = wrapWithSetup((projectId, ...rest) => {
      const adjustedLength = Math.max(rest.length, 1)
      const args = rest.slice(0, adjustedLength - 1)
      const callback = rest[adjustedLength - 1]
      LockManager.runWithLock(
        LOCK_NAMESPACE,
        projectId,
        cb => mainTask(projectId, ...args, cb),
        callback
      )
    })
    methodWithLock.withoutLock = wrapWithSetup(mainTask)
    methodWithLock.beforeLock = methodWithoutLock.beforeLock
    methodWithLock.mainTask = methodWithoutLock.withLock
    return methodWithLock
  }
}
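
// Usage sketch for wrapWithLock (illustrative comment only, not executed here).
// Single-stage form: the whole method runs inside the per-project lock:
//   someMethod: wrapWithLock(function (projectId, arg, callback) { /* ... */ })
// Two-stage form: `beforeLock(next)` returns a setup function that runs outside
// the lock (e.g. slow file uploads) and then hands over to `withLock`, which
// performs the mongo/doc-updater changes inside the lock:
//   someMethod: wrapWithLock({ beforeLock(next) { /* ... */ }, withLock() { /* ... */ } })
// Both forms expose `.withoutLock` (and the two-stage form also `.beforeLock`
// and `.mainTask`) so that methods already holding the lock can compose them.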

function getDocContext(projectId, docId, callback) {
  ProjectGetter.getProject(
    projectId,
    { name: true, rootFolder: true },
    (err, project) => {
      if (err) {
        return callback(
          OError.tag(err, 'error fetching project', {
            projectId
          })
        )
      }
      if (!project) {
        return callback(new Errors.NotFoundError('project not found'))
      }
      ProjectLocator.findElement(
        { project, element_id: docId, type: 'docs' },
        (err, doc, path) => {
          if (err && err instanceof Errors.NotFoundError) {
            // (Soft-)Deleted docs are removed from the file-tree (rootFolder).
            // docstore can tell whether it exists and is (soft)-deleted.
            DocstoreManager.isDocDeleted(
              projectId,
              docId,
              (err, isDeletedDoc) => {
                if (err && err instanceof Errors.NotFoundError) {
                  logger.warn(
                    { projectId, docId },
                    'doc not found while updating doc lines'
                  )
                  callback(err)
                } else if (err) {
                  callback(
                    OError.tag(
                      err,
                      'error checking deletion status with docstore',
                      { projectId, docId }
                    )
                  )
                } else {
                  if (!isDeletedDoc) {
                    // NOTE: This can happen while we delete a doc:
                    //  1. web will update the projects entry
                    //  2. web triggers flushes to tpds/doc-updater
                    //  3. web triggers (soft)-delete in docstore
                    // Specifically when an update comes in after 1
                    // and before 3 completes.
                    logger.info(
                      { projectId, docId },
                      'updating doc that is in process of getting soft-deleted'
                    )
                  }
                  callback(null, {
                    projectName: project.name,
                    isDeletedDoc: true,
                    path: null
                  })
                }
              }
            )
          } else if (err) {
            callback(
              OError.tag(err, 'error finding doc in rootFolder', {
                docId,
                projectId
              })
            )
          } else {
            callback(null, {
              projectName: project.name,
              isDeletedDoc: false,
              path: path.fileSystem
            })
          }
        }
      )
    }
  )
}
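
// getDocContext yields `{ projectName, isDeletedDoc, path }`: `path` is the
// file-tree path (path.fileSystem) while the doc is still in the rootFolder and
// `null` when it is (being) soft-deleted; updateDocLines below uses this to
// decide whether to notify the third-party datastore.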

const ProjectEntityUpdateHandler = {
  updateDocLines(
    projectId,
    docId,
    lines,
    version,
    ranges,
    lastUpdatedAt,
    lastUpdatedBy,
    callback
  ) {
    getDocContext(projectId, docId, (err, ctx) => {
      if (err && err instanceof Errors.NotFoundError) {
        // Do not allow an update to a doc which has never existed on this project
        logger.warn(
          { docId, projectId },
          'project or doc not found while updating doc lines'
        )
        return callback(err)
      }
      if (err) {
        return callback(err)
      }
      const { projectName, isDeletedDoc, path } = ctx
      logger.log({ projectId, docId }, 'telling docstore manager to update doc')
      DocstoreManager.updateDoc(
        projectId,
        docId,
        lines,
        version,
        ranges,
        (err, modified, rev) => {
          if (err != null) {
            OError.tag(err, 'error sending doc to docstore', {
              docId,
              projectId
            })
            return callback(err)
          }
          logger.log(
            { projectId, docId, modified },
            'finished updating doc lines'
          )
          // path will only be present if the doc is not deleted
          if (!modified || isDeletedDoc) {
            return callback()
          }
          // Don't need to block for marking as updated
          ProjectUpdateHandler.markAsUpdated(
            projectId,
            lastUpdatedAt,
            lastUpdatedBy
          )
          TpdsUpdateSender.addDoc(
            {
              project_id: projectId,
              path,
              doc_id: docId,
              project_name: projectName,
              rev
            },
            callback
          )
        }
      )
    })
  },

  setRootDoc(projectId, newRootDocID, callback) {
    logger.log({ projectId, rootDocId: newRootDocID }, 'setting root doc')
    if (projectId == null || newRootDocID == null) {
      return callback(
        new Errors.InvalidError('missing arguments (project or doc)')
      )
    }
    ProjectEntityHandler.getDocPathByProjectIdAndDocId(
      projectId,
      newRootDocID,
      (err, docPath) => {
        if (err != null) {
          return callback(err)
        }
        if (ProjectEntityUpdateHandler.isPathValidForRootDoc(docPath)) {
          Project.updateOne(
            { _id: projectId },
            { rootDoc_id: newRootDocID },
            {},
            callback
          )
        } else {
          callback(
            new Errors.UnsupportedFileTypeError(
              'invalid file extension for root doc'
            )
          )
        }
      }
    )
  },

  unsetRootDoc(projectId, callback) {
    logger.log({ projectId }, 'removing root doc')
    Project.updateOne(
      { _id: projectId },
      { $unset: { rootDoc_id: true } },
      {},
      callback
    )
  },

  _addDocAndSendToTpds(projectId, folderId, doc, callback) {
    ProjectEntityMongoUpdateHandler.addDoc(
      projectId,
      folderId,
      doc,
      (err, result, project) => {
        if (err != null) {
          OError.tag(err, 'error adding file with project', {
            projectId,
            folderId,
            doc_name: doc != null ? doc.name : undefined,
            doc_id: doc != null ? doc._id : undefined
          })
          return callback(err)
        }
        TpdsUpdateSender.addDoc(
          {
            project_id: projectId,
            doc_id: doc != null ? doc._id : undefined,
            path: result && result.path && result.path.fileSystem,
            project_name: project.name,
            rev: 0
          },
          err => {
            if (err != null) {
              return callback(err)
            }
            callback(null, result, project)
          }
        )
      }
    )
  },

  addDoc(projectId, folderId, docName, docLines, userId, callback) {
    ProjectEntityUpdateHandler.addDocWithRanges(
      projectId,
      folderId,
      docName,
      docLines,
      {},
      userId,
      callback
    )
  },

  addDocWithRanges: wrapWithLock({
    beforeLock(next) {
      return function (
        projectId,
        folderId,
        docName,
        docLines,
        ranges,
        userId,
        callback
      ) {
        if (!SafePath.isCleanFilename(docName)) {
          return callback(new Errors.InvalidNameError('invalid element name'))
        }
        // Put doc in docstore first, so that if it errors, we don't have a doc_id in the project
        // which hasn't been created in docstore.
        const doc = new Doc({ name: docName })
        DocstoreManager.updateDoc(
          projectId.toString(),
          doc._id.toString(),
          docLines,
          0,
          ranges,
          (err, modified, rev) => {
            if (err != null) {
              return callback(err)
            }
            next(
              projectId,
              folderId,
              doc,
              docName,
              docLines,
              ranges,
              userId,
              callback
            )
          }
        )
      }
    },
    withLock(
      projectId,
      folderId,
      doc,
      docName,
      docLines,
      ranges,
      userId,
      callback
    ) {
      ProjectEntityUpdateHandler._addDocAndSendToTpds(
        projectId,
        folderId,
        doc,
        (err, result, project) => {
          if (err != null) {
            return callback(err)
          }
          const docPath = result && result.path && result.path.fileSystem
          const projectHistoryId =
            project.overleaf &&
            project.overleaf.history &&
            project.overleaf.history.id
          const newDocs = [
            {
              doc,
              path: docPath,
              docLines: docLines.join('\n')
            }
          ]
          DocumentUpdaterHandler.updateProjectStructure(
            projectId,
            projectHistoryId,
            userId,
            { newDocs, newProject: project },
            error => {
              if (error != null) {
                return callback(error)
              }
              callback(null, doc, folderId)
            }
          )
        }
      )
    }
  }),

  _uploadFile(projectId, folderId, fileName, fsPath, linkedFileData, callback) {
    if (!SafePath.isCleanFilename(fileName)) {
      return callback(new Errors.InvalidNameError('invalid element name'))
    }
    const fileArgs = {
      name: fileName,
      linkedFileData
    }
    FileStoreHandler.uploadFileFromDisk(
      projectId,
      fileArgs,
      fsPath,
      (err, fileStoreUrl, fileRef) => {
        if (err != null) {
          OError.tag(err, 'error uploading image to s3', {
            projectId,
            folderId,
            file_name: fileName,
            fileRef
          })
          return callback(err)
        }
        callback(null, fileStoreUrl, fileRef)
      }
    )
  },

  _addFileAndSendToTpds(projectId, folderId, fileRef, callback) {
    ProjectEntityMongoUpdateHandler.addFile(
      projectId,
      folderId,
      fileRef,
      (err, result, project) => {
        if (err != null) {
          OError.tag(err, 'error adding file with project', {
            projectId,
            folderId,
            file_name: fileRef.name,
            fileRef
          })
          return callback(err)
        }
        TpdsUpdateSender.addFile(
          {
            project_id: projectId,
            file_id: fileRef._id,
            path: result && result.path && result.path.fileSystem,
            project_name: project.name,
            rev: fileRef.rev
          },
          err => {
            if (err != null) {
              return callback(err)
            }
            callback(null, result, project)
          }
        )
      }
    )
  },

  addFile: wrapWithLock({
    beforeLock(next) {
      return function (
        projectId,
        folderId,
        fileName,
        fsPath,
        linkedFileData,
        userId,
        callback
      ) {
        if (!SafePath.isCleanFilename(fileName)) {
          return callback(new Errors.InvalidNameError('invalid element name'))
        }
        ProjectEntityUpdateHandler._uploadFile(
          projectId,
          folderId,
          fileName,
          fsPath,
          linkedFileData,
          (error, fileStoreUrl, fileRef) => {
            if (error != null) {
              return callback(error)
            }
            next(
              projectId,
              folderId,
              fileName,
              fsPath,
              linkedFileData,
              userId,
              fileRef,
              fileStoreUrl,
              callback
            )
          }
        )
      }
    },
    withLock(
      projectId,
      folderId,
      fileName,
      fsPath,
      linkedFileData,
      userId,
      fileRef,
      fileStoreUrl,
      callback
    ) {
      ProjectEntityUpdateHandler._addFileAndSendToTpds(
        projectId,
        folderId,
        fileRef,
        (err, result, project) => {
          if (err != null) {
            return callback(err)
          }
          const projectHistoryId =
            project.overleaf &&
            project.overleaf.history &&
            project.overleaf.history.id
          const newFiles = [
            {
              file: fileRef,
              path: result && result.path && result.path.fileSystem,
              url: fileStoreUrl
            }
          ]
          DocumentUpdaterHandler.updateProjectStructure(
            projectId,
            projectHistoryId,
            userId,
            { newFiles, newProject: project },
            error => {
              if (error != null) {
                return callback(error)
              }
              ProjectUpdateHandler.markAsUpdated(projectId, new Date(), userId)
              callback(null, fileRef, folderId)
            }
          )
        }
      )
    }
  }),

  replaceFile: wrapWithLock({
    beforeLock(next) {
      return function (
        projectId,
        fileId,
        fsPath,
        linkedFileData,
        userId,
        callback
      ) {
        // create a new file
        const fileArgs = {
          name: 'dummy-upload-filename',
          linkedFileData
        }
        FileStoreHandler.uploadFileFromDisk(
          projectId,
          fileArgs,
          fsPath,
          (err, fileStoreUrl, fileRef) => {
            if (err != null) {
              return callback(err)
            }
            next(
              projectId,
              fileId,
              fsPath,
              linkedFileData,
              userId,
              fileRef,
              fileStoreUrl,
              callback
            )
          }
        )
      }
    },
    withLock(
      projectId,
      fileId,
      fsPath,
      linkedFileData,
      userId,
      newFileRef,
      fileStoreUrl,
      callback
    ) {
      ProjectEntityMongoUpdateHandler.replaceFileWithNew(
        projectId,
        fileId,
        newFileRef,
        (err, oldFileRef, project, path, newProject) => {
          if (err != null) {
            return callback(err)
          }
          const oldFiles = [
            {
              file: oldFileRef,
              path: path.fileSystem
            }
          ]
          const newFiles = [
            {
              file: newFileRef,
              path: path.fileSystem,
              url: fileStoreUrl
            }
          ]
          const projectHistoryId =
            project.overleaf &&
            project.overleaf.history &&
            project.overleaf.history.id
          // Increment the rev for an in-place update (with the same path) so the third-party-datastore
          // knows this is a new file.
          // Ideally we would get this from ProjectEntityMongoUpdateHandler.replaceFileWithNew
          // but it returns the original oldFileRef (after incrementing the rev value in mongo),
          // so we add 1 to the rev from that. This isn't atomic and relies on the lock
          // but it is acceptable for now.
          TpdsUpdateSender.addFile(
            {
              project_id: project._id,
              file_id: newFileRef._id,
              path: path.fileSystem,
              rev: oldFileRef.rev + 1,
              project_name: project.name
            },
            err => {
              if (err != null) {
                return callback(err)
              }
              ProjectUpdateHandler.markAsUpdated(projectId, new Date(), userId)

              DocumentUpdaterHandler.updateProjectStructure(
                projectId,
                projectHistoryId,
                userId,
                { oldFiles, newFiles, newProject },
                callback
              )
            }
          )
        }
      )
    }
  }),

  upsertDoc: wrapWithLock(function (
    projectId,
    folderId,
    docName,
    docLines,
    source,
    userId,
    callback
  ) {
    if (!SafePath.isCleanFilename(docName)) {
      return callback(new Errors.InvalidNameError('invalid element name'))
    }
    ProjectLocator.findElement(
      { project_id: projectId, element_id: folderId, type: 'folder' },
      (error, folder, folderPath) => {
        if (error != null) {
          return callback(error)
        }
        if (folder == null) {
          return callback(new Error("Couldn't find folder"))
        }
        const existingDoc = folder.docs.find(({ name }) => name === docName)
        const existingFile = folder.fileRefs.find(
          ({ name }) => name === docName
        )
        if (existingFile) {
          const doc = new Doc({ name: docName })
          const filePath = `${folderPath.fileSystem}/${existingFile.name}`
          DocstoreManager.updateDoc(
            projectId.toString(),
            doc._id.toString(),
            docLines,
            0,
            {},
            (err, modified, rev) => {
              if (err != null) {
                return callback(err)
              }
              ProjectEntityMongoUpdateHandler.replaceFileWithDoc(
                projectId,
                existingFile._id,
                doc,
                (err, project) => {
                  if (err) {
                    return callback(err)
                  }
                  TpdsUpdateSender.addDoc(
                    {
                      project_id: projectId,
                      doc_id: doc._id,
                      path: filePath,
                      project_name: project.name,
                      rev: existingFile.rev + 1
                    },
                    err => {
                      if (err) {
                        return callback(err)
                      }
                      const projectHistoryId =
                        project.overleaf &&
                        project.overleaf.history &&
                        project.overleaf.history.id
                      const newDocs = [
                        {
                          doc,
                          path: filePath,
                          docLines: docLines.join('\n')
                        }
                      ]
                      const oldFiles = [
                        {
                          file: existingFile,
                          path: filePath
                        }
                      ]
                      DocumentUpdaterHandler.updateProjectStructure(
                        projectId,
                        projectHistoryId,
                        userId,
                        { oldFiles, newDocs, newProject: project },
                        error => {
                          if (error != null) {
                            return callback(error)
                          }
                          EditorRealTimeController.emitToRoom(
                            projectId,
                            'removeEntity',
                            existingFile._id,
                            'convertFileToDoc'
                          )
                          callback(null, doc, true)
                        }
                      )
                    }
                  )
                }
              )
            }
          )
        } else if (existingDoc) {
          DocumentUpdaterHandler.setDocument(
            projectId,
            existingDoc._id,
            userId,
            docLines,
            source,
            err => {
              if (err != null) {
                return callback(err)
              }
              logger.log(
                { projectId, docId: existingDoc._id },
                'notifying users that the document has been updated'
              )
              DocumentUpdaterHandler.flushDocToMongo(
                projectId,
                existingDoc._id,
                err => {
                  if (err != null) {
                    return callback(err)
                  }
                  callback(null, existingDoc, existingDoc == null)
                }
              )
            }
          )
        } else {
          ProjectEntityUpdateHandler.addDocWithRanges.withoutLock(
            projectId,
            folderId,
            docName,
            docLines,
            {},
            userId,
            (err, doc) => {
              if (err != null) {
                return callback(err)
              }
              callback(null, doc, existingDoc == null)
            }
          )
        }
      }
    )
  }),

  upsertFile: wrapWithLock({
    beforeLock(next) {
      return function (
        projectId,
        folderId,
        fileName,
        fsPath,
        linkedFileData,
        userId,
        callback
      ) {
        if (!SafePath.isCleanFilename(fileName)) {
          return callback(new Errors.InvalidNameError('invalid element name'))
        }
        // create a new file
        const fileArgs = {
          name: fileName,
          linkedFileData
        }
        FileStoreHandler.uploadFileFromDisk(
          projectId,
          fileArgs,
          fsPath,
          (err, fileStoreUrl, fileRef) => {
            if (err != null) {
              return callback(err)
            }
            next(
              projectId,
              folderId,
              fileName,
              fsPath,
              linkedFileData,
              userId,
              fileRef,
              fileStoreUrl,
              callback
            )
          }
        )
      }
    },
    withLock(
      projectId,
      folderId,
      fileName,
      fsPath,
      linkedFileData,
      userId,
      newFileRef,
      fileStoreUrl,
      callback
    ) {
      ProjectLocator.findElement(
        { project_id: projectId, element_id: folderId, type: 'folder' },
        (error, folder) => {
          if (error != null) {
            return callback(error)
          }
          if (folder == null) {
            return callback(new Error("Couldn't find folder"))
          }
          const existingFile = folder.fileRefs.find(
            ({ name }) => name === fileName
          )
          const existingDoc = folder.docs.find(({ name }) => name === fileName)

          if (existingDoc) {
            ProjectLocator.findElement(
              {
                project_id: projectId,
                element_id: existingDoc._id,
                type: 'doc'
              },
              (err, doc, path) => {
                if (err) {
                  return callback(new Error("couldn't find existing file"))
                }
                ProjectEntityMongoUpdateHandler.replaceDocWithFile(
                  projectId,
                  existingDoc._id,
                  newFileRef,
                  (err, project) => {
                    if (err) {
                      return callback(err)
                    }
                    const projectHistoryId =
                      project.overleaf &&
                      project.overleaf.history &&
                      project.overleaf.history.id
                    TpdsUpdateSender.addFile(
                      {
                        project_id: project._id,
                        file_id: newFileRef._id,
                        path: path.fileSystem,
                        rev: newFileRef.rev,
                        project_name: project.name
                      },
                      err => {
                        if (err) {
                          return callback(err)
                        }
                        DocumentUpdaterHandler.updateProjectStructure(
                          projectId,
                          projectHistoryId,
                          userId,
                          {
                            oldDocs: [
                              { doc: existingDoc, path: path.fileSystem }
                            ],
                            newFiles: [
                              {
                                file: newFileRef,
                                path: path.fileSystem,
                                url: fileStoreUrl
                              }
                            ],
                            newProject: project
                          },
                          err => {
                            if (err) {
                              return callback(err)
                            }
                            EditorRealTimeController.emitToRoom(
                              projectId,
                              'removeEntity',
                              existingDoc._id,
                              'convertDocToFile'
                            )
                            callback(null, newFileRef, true, existingFile)
                          }
                        )
                      }
                    )
                  }
                )
              }
            )
          } else if (existingFile) {
            // this calls directly into the replaceFile main task (without the beforeLock part)
            return ProjectEntityUpdateHandler.replaceFile.mainTask(
              projectId,
              existingFile._id,
              fsPath,
              linkedFileData,
              userId,
              newFileRef,
              fileStoreUrl,
              err => {
                if (err != null) {
                  return callback(err)
                }
                callback(null, newFileRef, existingFile == null, existingFile)
              }
            )
          } else {
            // this calls directly into the addFile main task (without the beforeLock part)
            ProjectEntityUpdateHandler.addFile.mainTask(
              projectId,
              folderId,
              fileName,
              fsPath,
              linkedFileData,
              userId,
              newFileRef,
              fileStoreUrl,
              err => {
                if (err != null) {
                  return callback(err)
                }
                callback(null, newFileRef, existingFile == null, existingFile)
              }
            )
          }
        }
      )
    }
  }),

  upsertDocWithPath: wrapWithLock(function (
    projectId,
    elementPath,
    docLines,
    source,
    userId,
    callback
  ) {
    if (!SafePath.isCleanPath(elementPath)) {
      return callback(new Errors.InvalidNameError('invalid element name'))
    }
    const docName = Path.basename(elementPath)
    const folderPath = Path.dirname(elementPath)
    ProjectEntityUpdateHandler.mkdirp.withoutLock(
      projectId,
      folderPath,
      (err, newFolders, folder) => {
        if (err != null) {
          return callback(err)
        }
        ProjectEntityUpdateHandler.upsertDoc.withoutLock(
          projectId,
          folder._id,
          docName,
          docLines,
          source,
          userId,
          (err, doc, isNewDoc) => {
            if (err != null) {
              return callback(err)
            }
            callback(null, doc, isNewDoc, newFolders, folder)
          }
        )
      }
    )
  }),

  upsertFileWithPath: wrapWithLock({
    beforeLock(next) {
      return function (
        projectId,
        elementPath,
        fsPath,
        linkedFileData,
        userId,
        callback
      ) {
        if (!SafePath.isCleanPath(elementPath)) {
          return callback(new Errors.InvalidNameError('invalid element name'))
        }
        const fileName = Path.basename(elementPath)
        const folderPath = Path.dirname(elementPath)
        // create a new file
        const fileArgs = {
          name: fileName,
          linkedFileData
        }
        FileStoreHandler.uploadFileFromDisk(
          projectId,
          fileArgs,
          fsPath,
          (err, fileStoreUrl, fileRef) => {
            if (err != null) {
              return callback(err)
            }
            next(
              projectId,
              folderPath,
              fileName,
              fsPath,
              linkedFileData,
              userId,
              fileRef,
              fileStoreUrl,
              callback
            )
          }
        )
      }
    },
    withLock(
      projectId,
      folderPath,
      fileName,
      fsPath,
      linkedFileData,
      userId,
      fileRef,
      fileStoreUrl,
      callback
    ) {
      ProjectEntityUpdateHandler.mkdirp.withoutLock(
        projectId,
        folderPath,
        (err, newFolders, folder) => {
          if (err != null) {
            return callback(err)
          }
          // this calls directly into the upsertFile main task (without the beforeLock part)
          ProjectEntityUpdateHandler.upsertFile.mainTask(
            projectId,
            folder._id,
            fileName,
            fsPath,
            linkedFileData,
            userId,
            fileRef,
            fileStoreUrl,
            (err, newFile, isNewFile, existingFile) => {
              if (err != null) {
                return callback(err)
              }
              callback(
                null,
                newFile,
                isNewFile,
                existingFile,
                newFolders,
                folder
              )
            }
          )
        }
      )
    }
  }),

  deleteEntity: wrapWithLock(function (
    projectId,
    entityId,
    entityType,
    userId,
    callback
  ) {
    logger.log({ entityId, entityType, projectId }, 'deleting project entity')
    if (entityType == null) {
      logger.warn({ err: 'No entityType set', projectId, entityId })
      return callback(new Error('No entityType set'))
    }
    entityType = entityType.toLowerCase()
    ProjectEntityMongoUpdateHandler.deleteEntity(
      projectId,
      entityId,
      entityType,
      (error, entity, path, projectBeforeDeletion, newProject) => {
        if (error != null) {
          return callback(error)
        }
        ProjectEntityUpdateHandler._cleanUpEntity(
          projectBeforeDeletion,
          newProject,
          entity,
          entityType,
          path.fileSystem,
          userId,
          error => {
            if (error != null) {
              return callback(error)
            }
            TpdsUpdateSender.deleteEntity(
              {
                project_id: projectId,
                path: path.fileSystem,
                project_name: projectBeforeDeletion.name
              },
              error => {
                if (error != null) {
                  return callback(error)
                }
                callback(null, entityId)
              }
            )
          }
        )
      }
    )
  }),

  deleteEntityWithPath: wrapWithLock((projectId, path, userId, callback) =>
    ProjectLocator.findElementByPath(
      { project_id: projectId, path },
      (err, element, type) => {
        if (err != null) {
          return callback(err)
        }
        if (element == null) {
          return callback(new Errors.NotFoundError('project not found'))
        }
        ProjectEntityUpdateHandler.deleteEntity.withoutLock(
          projectId,
          element._id,
          type,
          userId,
          callback
        )
      }
    )
  ),

  mkdirp: wrapWithLock(function (projectId, path, callback) {
    for (let folder of path.split('/')) {
      if (folder.length > 0 && !SafePath.isCleanFilename(folder)) {
        return callback(new Errors.InvalidNameError('invalid element name'))
      }
    }
    ProjectEntityMongoUpdateHandler.mkdirp(
      projectId,
      path,
      { exactCaseMatch: false },
      callback
    )
  }),

  mkdirpWithExactCase: wrapWithLock(function (projectId, path, callback) {
    for (let folder of path.split('/')) {
      if (folder.length > 0 && !SafePath.isCleanFilename(folder)) {
        return callback(new Errors.InvalidNameError('invalid element name'))
      }
    }
    ProjectEntityMongoUpdateHandler.mkdirp(
      projectId,
      path,
      { exactCaseMatch: true },
      callback
    )
  }),
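
  // mkdirp and mkdirpWithExactCase differ only in the `exactCaseMatch` option
  // forwarded to ProjectEntityMongoUpdateHandler.mkdirp, which controls how
  // existing folder names along the path are matched when deciding whether to
  // reuse a folder or create a new one.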

  addFolder: wrapWithLock(function (
    projectId,
    parentFolderId,
    folderName,
    callback
  ) {
    if (!SafePath.isCleanFilename(folderName)) {
      return callback(new Errors.InvalidNameError('invalid element name'))
    }
    ProjectEntityMongoUpdateHandler.addFolder(
      projectId,
      parentFolderId,
      folderName,
      callback
    )
  }),

  moveEntity: wrapWithLock(function (
    projectId,
    entityId,
    destFolderId,
    entityType,
    userId,
    callback
  ) {
    logger.log(
      { entityType, entityId, projectId, destFolderId },
      'moving entity'
    )
    if (entityType == null) {
      logger.warn({ err: 'No entityType set', projectId, entityId })
      return callback(new Error('No entityType set'))
    }
    entityType = entityType.toLowerCase()
    ProjectEntityMongoUpdateHandler.moveEntity(
      projectId,
      entityId,
      destFolderId,
      entityType,
      (err, project, startPath, endPath, rev, changes) => {
        if (err != null) {
          return callback(err)
        }
        const projectHistoryId =
          project.overleaf &&
          project.overleaf.history &&
          project.overleaf.history.id
        // do not wait
        TpdsUpdateSender.promises
          .moveEntity({
            project_id: projectId,
            project_name: project.name,
            startPath,
            endPath,
            rev
          })
          .catch(err => {
            logger.error({ err }, 'error sending tpds update')
          })
        DocumentUpdaterHandler.updateProjectStructure(
          projectId,
          projectHistoryId,
          userId,
          changes,
          callback
        )
      }
    )
  }),

  renameEntity: wrapWithLock(function (
    projectId,
    entityId,
    entityType,
    newName,
    userId,
    callback
  ) {
    if (!SafePath.isCleanFilename(newName)) {
      return callback(new Errors.InvalidNameError('invalid element name'))
    }
    logger.log({ entityId, projectId }, `renaming ${entityType}`)
    if (entityType == null) {
      logger.warn({ err: 'No entityType set', projectId, entityId })
      return callback(new Error('No entityType set'))
    }
    entityType = entityType.toLowerCase()

    ProjectEntityMongoUpdateHandler.renameEntity(
      projectId,
      entityId,
      entityType,
      newName,
      (err, project, startPath, endPath, rev, changes) => {
        if (err != null) {
          return callback(err)
        }
        const projectHistoryId =
          project.overleaf &&
          project.overleaf.history &&
          project.overleaf.history.id
        // do not wait
        TpdsUpdateSender.promises
          .moveEntity({
            project_id: projectId,
            project_name: project.name,
            startPath,
            endPath,
            rev
          })
          .catch(err => {
            logger.error({ err }, 'error sending tpds update')
          })
        DocumentUpdaterHandler.updateProjectStructure(
          projectId,
          projectHistoryId,
          userId,
          changes,
          callback
        )
      }
    )
  }),

  // This doesn't directly update project structure but we need to take the lock
  // to prevent anything else being queued before the resync update
  resyncProjectHistory: wrapWithLock((projectId, callback) =>
    ProjectGetter.getProject(
      projectId,
      { rootFolder: true, overleaf: true },
      (error, project) => {
        if (error != null) {
          return callback(error)
        }

        const projectHistoryId =
          project &&
          project.overleaf &&
          project.overleaf.history &&
          project.overleaf.history.id
        if (projectHistoryId == null) {
          error = new Errors.ProjectHistoryDisabledError(
            `project history not enabled for ${projectId}`
          )
          return callback(error)
        }

        ProjectEntityHandler.getAllEntitiesFromProject(
          project,
          (error, docs, files) => {
            if (error != null) {
              return callback(error)
            }

            docs = _.map(docs, doc => ({
              doc: doc.doc._id,
              path: doc.path
            }))

            files = _.map(files, file => ({
              file: file.file._id,
              path: file.path,
              url: FileStoreHandler._buildUrl(projectId, file.file._id)
            }))

            DocumentUpdaterHandler.resyncProjectHistory(
              projectId,
              projectHistoryId,
              docs,
              files,
              callback
            )
          }
        )
      }
    )
  ),

  isPathValidForRootDoc(docPath) {
    let docExtension = Path.extname(docPath)
    return VALID_ROOT_DOC_REGEXP.test(docExtension)
  },

  _cleanUpEntity(
    project,
    newProject,
    entity,
    entityType,
    path,
    userId,
    callback
  ) {
    ProjectEntityUpdateHandler._updateProjectStructureWithDeletedEntity(
      project,
      newProject,
      entity,
      entityType,
      path,
      userId,
      error => {
        if (error != null) {
          return callback(error)
        }
        if (entityType.indexOf('file') !== -1) {
          ProjectEntityUpdateHandler._cleanUpFile(
            project,
            entity,
            path,
            userId,
            callback
          )
        } else if (entityType.indexOf('doc') !== -1) {
          ProjectEntityUpdateHandler._cleanUpDoc(
            project,
            entity,
            path,
            userId,
            callback
          )
        } else if (entityType.indexOf('folder') !== -1) {
          ProjectEntityUpdateHandler._cleanUpFolder(
            project,
            entity,
            path,
            userId,
            callback
          )
        } else {
          callback()
        }
      }
    )
  },

  // Note: the _cleanUpEntity code and _updateProjectStructureWithDeletedEntity
  // methods both need to recursively iterate over the entities in folder.
  // These are currently using separate implementations of the recursion. In
  // future, these could be simplified using a common project entity iterator.
  _updateProjectStructureWithDeletedEntity(
    project,
    newProject,
    entity,
    entityType,
    entityPath,
    userId,
    callback
  ) {
    // compute the changes to the project structure
    let changes
    if (entityType.indexOf('file') !== -1) {
      changes = { oldFiles: [{ file: entity, path: entityPath }] }
    } else if (entityType.indexOf('doc') !== -1) {
      changes = { oldDocs: [{ doc: entity, path: entityPath }] }
    } else if (entityType.indexOf('folder') !== -1) {
      changes = { oldDocs: [], oldFiles: [] }
      const _recurseFolder = (folder, folderPath) => {
        for (let doc of folder.docs) {
          changes.oldDocs.push({ doc, path: Path.join(folderPath, doc.name) })
        }
        for (let file of folder.fileRefs) {
          changes.oldFiles.push({
            file,
            path: Path.join(folderPath, file.name)
          })
        }
        for (const childFolder of folder.folders) {
          _recurseFolder(childFolder, Path.join(folderPath, childFolder.name))
        }
      }
      _recurseFolder(entity, entityPath)
    }
    // now send the project structure changes to the docupdater
    changes.newProject = newProject
    const projectId = project._id.toString()
    const projectHistoryId =
      project.overleaf &&
      project.overleaf.history &&
      project.overleaf.history.id
    DocumentUpdaterHandler.updateProjectStructure(
      projectId,
      projectHistoryId,
      userId,
      changes,
      callback
    )
  },

  _cleanUpDoc(project, doc, path, userId, callback) {
    const projectId = project._id.toString()
    const docId = doc._id.toString()
    const unsetRootDocIfRequired = callback => {
      if (
        project.rootDoc_id != null &&
        project.rootDoc_id.toString() === docId
      ) {
        ProjectEntityUpdateHandler.unsetRootDoc(projectId, callback)
      } else {
        callback()
      }
    }

    unsetRootDocIfRequired(error => {
      if (error != null) {
        return callback(error)
      }
      const { name } = doc
      const deletedAt = new Date()
      DocstoreManager.deleteDoc(projectId, docId, name, deletedAt, error => {
        if (error) {
          return callback(error)
        }
        DocumentUpdaterHandler.deleteDoc(projectId, docId, callback)
      })
    })
  },

  _cleanUpFile(project, file, path, userId, callback) {
    ProjectEntityMongoUpdateHandler._insertDeletedFileReference(
      project._id,
      file,
      callback
    )
  },

  _cleanUpFolder(project, folder, folderPath, userId, callback) {
    const jobs = []
    folder.docs.forEach(doc => {
      const docPath = Path.join(folderPath, doc.name)
      jobs.push(callback =>
        ProjectEntityUpdateHandler._cleanUpDoc(
          project,
          doc,
          docPath,
          userId,
          callback
        )
      )
    })

    folder.fileRefs.forEach(file => {
      const filePath = Path.join(folderPath, file.name)
      jobs.push(callback =>
        ProjectEntityUpdateHandler._cleanUpFile(
          project,
          file,
          filePath,
          userId,
          callback
        )
      )
    })

    folder.folders.forEach(childFolder => {
      const childFolderPath = Path.join(folderPath, childFolder.name)
      jobs.push(callback =>
        ProjectEntityUpdateHandler._cleanUpFolder(
          project,
          childFolder,
          childFolderPath,
          userId,
          callback
        )
      )
    })

    async.series(jobs, callback)
  },

  convertDocToFile: wrapWithLock({
    beforeLock(next) {
      return function (projectId, docId, userId, callback) {
        DocumentUpdaterHandler.flushDocToMongo(projectId, docId, err => {
          if (err) {
            return callback(err)
          }
          ProjectLocator.findElement(
            { project_id: projectId, element_id: docId, type: 'doc' },
            (err, doc, path) => {
              if (err) {
                return callback(err)
              }
              const docPath = path.fileSystem
              DocstoreManager.getDoc(
                projectId,
                docId,
                (err, docLines, rev, version, ranges) => {
                  if (err) {
                    return callback(err)
                  }
                  if (!_.isEmpty(ranges)) {
                    return callback(new Errors.DocHasRangesError({}))
                  }
                  DocumentUpdaterHandler.deleteDoc(projectId, docId, err => {
                    if (err) {
                      return callback(err)
                    }
                    FileWriter.writeLinesToDisk(
                      projectId,
                      docLines,
                      (err, fsPath) => {
                        if (err) {
                          return callback(err)
                        }
                        FileStoreHandler.uploadFileFromDisk(
                          projectId,
                          { name: doc.name, rev: rev + 1 },
                          fsPath,
                          (err, fileStoreUrl, fileRef) => {
                            if (err) {
                              return callback(err)
                            }
                            fs.unlink(fsPath, err => {
                              if (err) {
                                logger.warn(
                                  { err, path: fsPath },
                                  'failed to clean up temporary file'
                                )
                              }
                              next(
                                projectId,
                                doc,
                                docPath,
                                fileRef,
                                fileStoreUrl,
                                userId,
                                callback
                              )
                            })
                          }
                        )
                      }
                    )
                  })
                }
              )
            }
          )
        })
      }
    },
    withLock(projectId, doc, path, fileRef, fileStoreUrl, userId, callback) {
      ProjectEntityMongoUpdateHandler.replaceDocWithFile(
        projectId,
        doc._id,
        fileRef,
        (err, project) => {
          if (err) {
            return callback(err)
          }
          const projectHistoryId =
            project.overleaf &&
            project.overleaf.history &&
            project.overleaf.history.id
          DocumentUpdaterHandler.updateProjectStructure(
            projectId,
            projectHistoryId,
            userId,
            {
              oldDocs: [{ doc, path }],
              newFiles: [{ file: fileRef, path, url: fileStoreUrl }],
              newProject: project
            },
            err => {
              if (err) {
                return callback(err)
              }
              ProjectLocator.findElement(
                {
                  project_id: projectId,
                  element_id: fileRef._id,
                  type: 'file'
                },
                (err, element, path, folder) => {
                  if (err) {
                    return callback(err)
                  }
                  EditorRealTimeController.emitToRoom(
                    projectId,
                    'removeEntity',
                    doc._id,
                    'convertDocToFile'
                  )
                  EditorRealTimeController.emitToRoom(
                    projectId,
                    'reciveNewFile',
                    folder._id,
                    fileRef,
                    'convertDocToFile',
                    null,
                    userId
                  )
                  callback(null, fileRef)
                }
              )
            }
          )
        }
      )
    }
  })
}

module.exports = ProjectEntityUpdateHandler
module.exports.promises = promisifyAll(ProjectEntityUpdateHandler, {
  without: ['isPathValidForRootDoc'],
  multiResult: {
    _addDocAndSendToTpds: ['result', 'project'],
    addDoc: ['doc', 'folderId'],
    addDocWithRanges: ['doc', 'folderId'],
    _uploadFile: ['fileStoreUrl', 'fileRef'],
    _addFileAndSendToTpds: ['result', 'project'],
    addFile: ['fileRef', 'folderId'],
    upsertDoc: ['doc', 'isNew'],
    upsertFile: ['fileRef', 'isNew', 'oldFileRef'],
    upsertDocWithPath: ['doc', 'isNew', 'newFolders', 'folder'],
    upsertFileWithPath: ['fileRef', 'isNew', 'oldFile', 'newFolders', 'folder'],
    mkdirp: ['newFolders', 'folder'],
    mkdirpWithExactCase: ['newFolders', 'folder'],
    addFolder: ['folder', 'parentFolderId']
  }
})
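
// Example use of the promisified API (a sketch; assumes valid `projectId`,
// `folderId` and `userId` values). Methods listed in `multiResult` above
// resolve to an object keyed by the listed names:
//
//   const { doc, folderId: parentFolderId } =
//     await ProjectEntityUpdateHandler.promises.addDoc(
//       projectId,
//       folderId,
//       'main.tex',
//       ['\\documentclass{article}'],
//       userId
//     )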