Move functions to be promisified

Moving these first will make later commits have a cleaner diff. There
are no substantive code changes in the moved functions.

GitOrigin-RevId: 06e0c5bb02f300942c8fbe80e81484c4d717dbc9
This commit is contained in:
andrew rumble 2024-07-22 11:30:23 +01:00 committed by Copybot
parent 10ef61a02b
commit c4d7da87a6

View file

@ -154,10 +154,7 @@ function getDocContext(projectId, docId, callback) {
)
}
const ProjectEntityUpdateHandler = {
LOCK_NAMESPACE,
updateDocLines(
function updateDocLines(
projectId,
docId,
lines,
@ -166,7 +163,7 @@ const ProjectEntityUpdateHandler = {
lastUpdatedAt,
lastUpdatedBy,
callback
) {
) {
getDocContext(projectId, docId, (err, ctx) => {
if (err && err instanceof Errors.NotFoundError) {
// Do not allow an update to a doc which has never exist on this project
@ -180,10 +177,7 @@ const ProjectEntityUpdateHandler = {
return callback(err)
}
const { projectName, isDeletedDoc, path, folder } = ctx
logger.debug(
{ projectId, docId },
'telling docstore manager to update doc'
)
logger.debug({ projectId, docId }, 'telling docstore manager to update doc')
DocstoreManager.updateDoc(
projectId,
docId,
@ -231,9 +225,9 @@ const ProjectEntityUpdateHandler = {
}
)
})
},
}
setRootDoc(projectId, newRootDocID, callback) {
function setRootDoc(projectId, newRootDocID, callback) {
logger.debug({ projectId, rootDocId: newRootDocID }, 'setting root doc')
if (projectId == null || newRootDocID == null) {
return callback(
@ -265,9 +259,9 @@ const ProjectEntityUpdateHandler = {
}
}
)
},
}
unsetRootDoc(projectId, callback) {
function unsetRootDoc(projectId, callback) {
logger.debug({ projectId }, 'removing root doc')
// Ignore spurious floating promises warning until we promisify
// eslint-disable-next-line @typescript-eslint/no-floating-promises
@ -277,44 +271,17 @@ const ProjectEntityUpdateHandler = {
{},
callback
)
},
}
_addDocAndSendToTpds(projectId, folderId, doc, callback) {
ProjectEntityMongoUpdateHandler.addDoc(
function addDoc(
projectId,
folderId,
doc,
(err, result, project) => {
if (err != null) {
OError.tag(err, 'error adding file with project', {
projectId,
folderId,
doc_name: doc != null ? doc.name : undefined,
doc_id: doc != null ? doc._id : undefined,
})
return callback(err)
}
TpdsUpdateSender.addDoc(
{
projectId,
docId: doc != null ? doc._id : undefined,
path: result?.path?.fileSystem,
projectName: project.name,
rev: 0,
folderId,
},
err => {
if (err != null) {
return callback(err)
}
callback(null, result, project)
}
)
}
)
},
addDoc(projectId, folderId, docName, docLines, userId, source, callback) {
docName,
docLines,
userId,
source,
callback
) {
ProjectEntityUpdateHandler.addDocWithRanges(
projectId,
folderId,
@ -325,9 +292,9 @@ const ProjectEntityUpdateHandler = {
source,
callback
)
},
}
addDocWithRanges: wrapWithLock({
const addDocWithRanges = wrapWithLock({
beforeLock(next) {
return function (
projectId,
@ -419,71 +386,9 @@ const ProjectEntityUpdateHandler = {
}
)
},
}),
})
_uploadFile(projectId, folderId, fileName, fsPath, linkedFileData, callback) {
if (!SafePath.isCleanFilename(fileName)) {
return callback(new Errors.InvalidNameError('invalid element name'))
}
const fileArgs = {
name: fileName,
linkedFileData,
}
FileStoreHandler.uploadFileFromDisk(
projectId,
fileArgs,
fsPath,
(err, fileStoreUrl, fileRef) => {
if (err != null) {
OError.tag(err, 'error uploading image to s3', {
projectId,
folderId,
file_name: fileName,
fileRef,
})
return callback(err)
}
callback(null, fileStoreUrl, fileRef)
}
)
},
_addFileAndSendToTpds(projectId, folderId, fileRef, callback) {
ProjectEntityMongoUpdateHandler.addFile(
projectId,
folderId,
fileRef,
(err, result, project) => {
if (err != null) {
OError.tag(err, 'error adding file with project', {
projectId,
folderId,
file_name: fileRef.name,
fileRef,
})
return callback(err)
}
TpdsUpdateSender.addFile(
{
projectId,
fileId: fileRef._id,
path: result?.path?.fileSystem,
projectName: project.name,
rev: fileRef.rev,
folderId,
},
err => {
if (err != null) {
return callback(err)
}
callback(null, result, project)
}
)
}
)
},
addFile: wrapWithLock({
const addFile = wrapWithLock({
beforeLock(next) {
return function (
projectId,
@ -576,93 +481,10 @@ const ProjectEntityUpdateHandler = {
}
)
},
}),
})
_replaceFile(
projectId,
fileId,
fsPath,
linkedFileData,
userId,
newFileRef,
fileStoreUrl,
folderId,
source,
callback
) {
ProjectEntityMongoUpdateHandler.replaceFileWithNew(
projectId,
fileId,
newFileRef,
(err, oldFileRef, project, path, newProject, newFileRef) => {
if (err != null) {
return callback(err)
}
const oldFiles = [
{
file: oldFileRef,
path: path.fileSystem,
},
]
const newFiles = [
{
file: newFileRef,
path: path.fileSystem,
url: fileStoreUrl,
},
]
const projectHistoryId =
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.id
TpdsUpdateSender.addFile(
{
projectId: project._id,
fileId: newFileRef._id,
path: path.fileSystem,
rev: newFileRef.rev,
projectName: project.name,
folderId,
},
err => {
if (err != null) {
return callback(err)
}
ProjectUpdateHandler.promises
.markAsUpdated(projectId, new Date(), userId)
.catch(error => {
logger.error({ error }, 'failed to mark project as updated')
})
DocumentUpdaterHandler.updateProjectStructure(
projectId,
projectHistoryId,
userId,
{ oldFiles, newFiles, newProject },
source,
err => {
if (err) {
return callback(err)
}
callback(null, newFileRef)
}
)
}
)
}
)
},
upsertDoc: wrapWithLock(
function (
projectId,
folderId,
docName,
docLines,
source,
userId,
callback
) {
const upsertDoc = wrapWithLock(
function (projectId, folderId, docName, docLines, source, userId, callback) {
if (!SafePath.isCleanFilename(docName)) {
return callback(new Errors.InvalidNameError('invalid element name'))
}
@ -778,11 +600,7 @@ const ProjectEntityUpdateHandler = {
// flushes it as part of setDoc.
//
// combine rev from response with existing doc metadata
callback(
null,
{ ...existingDoc, ...result },
existingDoc == null
)
callback(null, { ...existingDoc, ...result }, existingDoc == null)
}
)
} else {
@ -805,9 +623,9 @@ const ProjectEntityUpdateHandler = {
}
)
}
),
)
upsertFile: wrapWithLock({
const upsertFile = wrapWithLock({
beforeLock(next) {
return function (
projectId,
@ -995,9 +813,9 @@ const ProjectEntityUpdateHandler = {
}
)
},
}),
})
upsertDocWithPath: wrapWithLock(
const upsertDocWithPath = wrapWithLock(
function (projectId, elementPath, docLines, source, userId, callback) {
if (!SafePath.isCleanPath(elementPath)) {
return callback(new Errors.InvalidNameError('invalid element name'))
@ -1028,9 +846,9 @@ const ProjectEntityUpdateHandler = {
}
)
}
),
)
upsertFileWithPath: wrapWithLock({
const upsertFileWithPath = wrapWithLock({
beforeLock(next) {
return function (
projectId,
@ -1109,27 +927,17 @@ const ProjectEntityUpdateHandler = {
if (err != null) {
return callback(err)
}
callback(
null,
newFile,
isNewFile,
existingFile,
newFolders,
folder
)
callback(null, newFile, isNewFile, existingFile, newFolders, folder)
}
)
}
)
},
}),
})
deleteEntity: wrapWithLock(
const deleteEntity = wrapWithLock(
function (projectId, entityId, entityType, userId, source, callback) {
logger.debug(
{ entityId, entityType, projectId },
'deleting project entity'
)
logger.debug({ entityId, entityType, projectId }, 'deleting project entity')
if (entityType == null) {
logger.warn({ err: 'No entityType set', projectId, entityId })
return callback(new Error('No entityType set'))
@ -1179,9 +987,9 @@ const ProjectEntityUpdateHandler = {
}
)
}
),
)
deleteEntityWithPath: wrapWithLock(
const deleteEntityWithPath = wrapWithLock(
(projectId, path, userId, source, callback) =>
ProjectLocator.findElementByPath(
{ project_id: projectId, path, exactCaseMatch: true },
@ -1202,9 +1010,9 @@ const ProjectEntityUpdateHandler = {
)
}
)
),
)
mkdirp: wrapWithLock(function (projectId, path, callback) {
const mkdirp = wrapWithLock(function (projectId, path, callback) {
for (const folder of path.split('/')) {
if (folder.length > 0 && !SafePath.isCleanFilename(folder)) {
return callback(new Errors.InvalidNameError('invalid element name'))
@ -1216,9 +1024,9 @@ const ProjectEntityUpdateHandler = {
{ exactCaseMatch: false },
callback
)
}),
})
mkdirpWithExactCase: wrapWithLock(function (projectId, path, callback) {
const mkdirpWithExactCase = wrapWithLock(function (projectId, path, callback) {
for (const folder of path.split('/')) {
if (folder.length > 0 && !SafePath.isCleanFilename(folder)) {
return callback(new Errors.InvalidNameError('invalid element name'))
@ -1230,9 +1038,9 @@ const ProjectEntityUpdateHandler = {
{ exactCaseMatch: true },
callback
)
}),
})
addFolder: wrapWithLock(
const addFolder = wrapWithLock(
function (projectId, parentFolderId, folderName, callback) {
if (!SafePath.isCleanFilename(folderName)) {
return callback(new Errors.InvalidNameError('invalid element name'))
@ -1244,9 +1052,9 @@ const ProjectEntityUpdateHandler = {
callback
)
}
),
)
moveEntity: wrapWithLock(
const moveEntity = wrapWithLock(
function (
projectId,
entityId,
@ -1311,9 +1119,9 @@ const ProjectEntityUpdateHandler = {
)
})
}
),
)
renameEntity: wrapWithLock(
const renameEntity = wrapWithLock(
function (
projectId,
entityId,
@ -1392,11 +1200,11 @@ const ProjectEntityUpdateHandler = {
)
})
}
),
)
// This doesn't directly update project structure but we need to take the lock
// to prevent anything else being queued before the resync update
resyncProjectHistory: wrapWithLock(
// This doesn't directly update project structure, but we need to take the lock
// to prevent anything else being queued before the resync update
const resyncProjectHistory = wrapWithLock(
(projectId, opts, callback) =>
ProjectGetter.getProject(
projectId,
@ -1462,7 +1270,364 @@ const ProjectEntityUpdateHandler = {
}
),
LockManager.withTimeout(6 * 60) // use an extended lock for the resync operations
),
)
const convertDocToFile = wrapWithLock({
  // beforeLock: runs before the project lock is taken. Flushes the doc to
  // Mongo, loads its content, refuses docs that carry ranges (tracked
  // changes / comments), writes the lines to a temp file and uploads it to
  // the file store, then hands the uploaded fileRef on to `withLock` via
  // `next(...)`.
  beforeLock(next) {
    return function (projectId, docId, userId, source, callback) {
      DocumentUpdaterHandler.flushDocToMongo(projectId, docId, err => {
        if (err) {
          return callback(err)
        }
        ProjectLocator.findElement(
          { project_id: projectId, element_id: docId, type: 'doc' },
          (err, doc, path) => {
            // Bug fix: check the error before dereferencing `path` — on a
            // failed lookup `path` is not guaranteed to be set, and reading
            // `path.fileSystem` first would throw a TypeError instead of
            // reporting the lookup error.
            if (err) {
              return callback(err)
            }
            const docPath = path.fileSystem
            DocstoreManager.getDoc(
              projectId,
              docId,
              (err, docLines, rev, version, ranges) => {
                if (err) {
                  return callback(err)
                }
                // Docs with ranges (comments / tracked changes) cannot be
                // converted without losing that data.
                if (!_.isEmpty(ranges)) {
                  return callback(new Errors.DocHasRangesError({}))
                }
                DocumentUpdaterHandler.deleteDoc(projectId, docId, err => {
                  if (err) {
                    return callback(err)
                  }
                  FileWriter.writeLinesToDisk(
                    projectId,
                    docLines,
                    (err, fsPath) => {
                      if (err) {
                        return callback(err)
                      }
                      FileStoreHandler.uploadFileFromDisk(
                        projectId,
                        { name: doc.name, rev: rev + 1 },
                        fsPath,
                        (err, fileStoreUrl, fileRef) => {
                          if (err) {
                            return callback(err)
                          }
                          // Best-effort temp-file cleanup; a failure here is
                          // logged but does not abort the conversion.
                          fs.unlink(fsPath, err => {
                            if (err) {
                              logger.warn(
                                { err, path: fsPath },
                                'failed to clean up temporary file'
                              )
                            }
                            next(
                              projectId,
                              doc,
                              docPath,
                              fileRef,
                              fileStoreUrl,
                              userId,
                              source,
                              callback
                            )
                          })
                        }
                      )
                    }
                  )
                })
              }
            )
          }
        )
      })
    }
  },
  // withLock: with the project lock held, swap the doc for the uploaded file
  // in Mongo, propagate the structure change to the document updater /
  // project history, and tell connected editors to remove the doc and show
  // the new file. Calls back with (err, fileRef).
  withLock(
    projectId,
    doc,
    path,
    fileRef,
    fileStoreUrl,
    userId,
    source,
    callback
  ) {
    ProjectEntityMongoUpdateHandler.replaceDocWithFile(
      projectId,
      doc._id,
      fileRef,
      (err, project) => {
        if (err) {
          return callback(err)
        }
        const projectHistoryId =
          project.overleaf &&
          project.overleaf.history &&
          project.overleaf.history.id
        DocumentUpdaterHandler.updateProjectStructure(
          projectId,
          projectHistoryId,
          userId,
          {
            oldDocs: [{ doc, path }],
            newFiles: [{ file: fileRef, path, url: fileStoreUrl }],
            newProject: project,
          },
          source,
          err => {
            if (err) {
              return callback(err)
            }
            ProjectLocator.findElement(
              {
                project_id: projectId,
                element_id: fileRef._id,
                type: 'file',
              },
              (err, element, path, folder) => {
                if (err) {
                  return callback(err)
                }
                EditorRealTimeController.emitToRoom(
                  projectId,
                  'removeEntity',
                  doc._id,
                  'convertDocToFile'
                )
                // 'reciveNewFile' (sic) is the event name emitted elsewhere
                // in this codebase; clients presumably listen for this exact
                // spelling — do not "correct" it in isolation.
                EditorRealTimeController.emitToRoom(
                  projectId,
                  'reciveNewFile',
                  folder._id,
                  fileRef,
                  'convertDocToFile',
                  null,
                  userId
                )
                callback(null, fileRef)
              }
            )
          }
        )
      }
    )
  },
})
const ProjectEntityUpdateHandler = {
LOCK_NAMESPACE,
addDoc,
addDocWithRanges,
addFile,
addFolder,
convertDocToFile,
deleteEntity,
deleteEntityWithPath,
mkdirp,
mkdirpWithExactCase,
moveEntity,
renameEntity,
resyncProjectHistory,
setRootDoc,
unsetRootDoc,
updateDocLines,
upsertDoc,
upsertDocWithPath,
upsertFile,
upsertFileWithPath,
_addDocAndSendToTpds(projectId, folderId, doc, callback) {
ProjectEntityMongoUpdateHandler.addDoc(
projectId,
folderId,
doc,
(err, result, project) => {
if (err != null) {
OError.tag(err, 'error adding file with project', {
projectId,
folderId,
doc_name: doc != null ? doc.name : undefined,
doc_id: doc != null ? doc._id : undefined,
})
return callback(err)
}
TpdsUpdateSender.addDoc(
{
projectId,
docId: doc != null ? doc._id : undefined,
path: result?.path?.fileSystem,
projectName: project.name,
rev: 0,
folderId,
},
err => {
if (err != null) {
return callback(err)
}
callback(null, result, project)
}
)
}
)
},
_uploadFile(projectId, folderId, fileName, fsPath, linkedFileData, callback) {
if (!SafePath.isCleanFilename(fileName)) {
return callback(new Errors.InvalidNameError('invalid element name'))
}
const fileArgs = {
name: fileName,
linkedFileData,
}
FileStoreHandler.uploadFileFromDisk(
projectId,
fileArgs,
fsPath,
(err, fileStoreUrl, fileRef) => {
if (err != null) {
OError.tag(err, 'error uploading image to s3', {
projectId,
folderId,
file_name: fileName,
fileRef,
})
return callback(err)
}
callback(null, fileStoreUrl, fileRef)
}
)
},
_addFileAndSendToTpds(projectId, folderId, fileRef, callback) {
ProjectEntityMongoUpdateHandler.addFile(
projectId,
folderId,
fileRef,
(err, result, project) => {
if (err != null) {
OError.tag(err, 'error adding file with project', {
projectId,
folderId,
file_name: fileRef.name,
fileRef,
})
return callback(err)
}
TpdsUpdateSender.addFile(
{
projectId,
fileId: fileRef._id,
path: result?.path?.fileSystem,
projectName: project.name,
rev: fileRef.rev,
folderId,
},
err => {
if (err != null) {
return callback(err)
}
callback(null, result, project)
}
)
}
)
},
_replaceFile(
projectId,
fileId,
fsPath,
linkedFileData,
userId,
newFileRef,
fileStoreUrl,
folderId,
source,
callback
) {
ProjectEntityMongoUpdateHandler.replaceFileWithNew(
projectId,
fileId,
newFileRef,
(err, oldFileRef, project, path, newProject, newFileRef) => {
if (err != null) {
return callback(err)
}
const oldFiles = [
{
file: oldFileRef,
path: path.fileSystem,
},
]
const newFiles = [
{
file: newFileRef,
path: path.fileSystem,
url: fileStoreUrl,
},
]
const projectHistoryId =
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.id
TpdsUpdateSender.addFile(
{
projectId: project._id,
fileId: newFileRef._id,
path: path.fileSystem,
rev: newFileRef.rev,
projectName: project.name,
folderId,
},
err => {
if (err != null) {
return callback(err)
}
ProjectUpdateHandler.promises
.markAsUpdated(projectId, new Date(), userId)
.catch(error => {
logger.error({ error }, 'failed to mark project as updated')
})
DocumentUpdaterHandler.updateProjectStructure(
projectId,
projectHistoryId,
userId,
{ oldFiles, newFiles, newProject },
source,
err => {
if (err) {
return callback(err)
}
callback(null, newFileRef)
}
)
}
)
}
)
},
_checkFiletree(projectId, projectHistoryId, entities, callback) {
const adjustPathsAfterFolderRename = (oldPath, newPath) => {
@ -1759,150 +1924,6 @@ const ProjectEntityUpdateHandler = {
callback
)
},
// Convert an editable doc into a binary file entity, guarded by the
// project lock (via wrapWithLock).
convertDocToFile: wrapWithLock({
// Pre-lock phase: flush and fetch the doc, bail out if it has ranges
// (tracked changes / comments), write it to a temp file, upload it to the
// file store, then pass the uploaded fileRef on to withLock via next().
beforeLock(next) {
return function (projectId, docId, userId, source, callback) {
DocumentUpdaterHandler.flushDocToMongo(projectId, docId, err => {
if (err) {
return callback(err)
}
ProjectLocator.findElement(
{ project_id: projectId, element_id: docId, type: 'doc' },
(err, doc, path) => {
// NOTE(review): `path.fileSystem` is read before the err check below;
// if findElement failed, `path` may be undefined and this line would
// throw instead of reporting the lookup error — verify and reorder.
const docPath = path.fileSystem
if (err) {
return callback(err)
}
DocstoreManager.getDoc(
projectId,
docId,
(err, docLines, rev, version, ranges) => {
if (err) {
return callback(err)
}
// Docs with ranges cannot be converted without losing that data.
if (!_.isEmpty(ranges)) {
return callback(new Errors.DocHasRangesError({}))
}
DocumentUpdaterHandler.deleteDoc(projectId, docId, err => {
if (err) {
return callback(err)
}
FileWriter.writeLinesToDisk(
projectId,
docLines,
(err, fsPath) => {
if (err) {
return callback(err)
}
FileStoreHandler.uploadFileFromDisk(
projectId,
{ name: doc.name, rev: rev + 1 },
fsPath,
(err, fileStoreUrl, fileRef) => {
if (err) {
return callback(err)
}
// Best-effort cleanup of the temp file; failure is only logged.
fs.unlink(fsPath, err => {
if (err) {
logger.warn(
{ err, path: fsPath },
'failed to clean up temporary file'
)
}
next(
projectId,
doc,
docPath,
fileRef,
fileStoreUrl,
userId,
source,
callback
)
})
}
)
}
)
})
}
)
}
)
})
}
},
// Locked phase: replace the doc with the file in Mongo, propagate the
// structure change to the document updater / project history, and notify
// connected clients. Calls back with (err, fileRef).
withLock(
projectId,
doc,
path,
fileRef,
fileStoreUrl,
userId,
source,
callback
) {
ProjectEntityMongoUpdateHandler.replaceDocWithFile(
projectId,
doc._id,
fileRef,
(err, project) => {
if (err) {
return callback(err)
}
const projectHistoryId =
project.overleaf &&
project.overleaf.history &&
project.overleaf.history.id
DocumentUpdaterHandler.updateProjectStructure(
projectId,
projectHistoryId,
userId,
{
oldDocs: [{ doc, path }],
newFiles: [{ file: fileRef, path, url: fileStoreUrl }],
newProject: project,
},
source,
err => {
if (err) {
return callback(err)
}
ProjectLocator.findElement(
{
project_id: projectId,
element_id: fileRef._id,
type: 'file',
},
(err, element, path, folder) => {
if (err) {
return callback(err)
}
EditorRealTimeController.emitToRoom(
projectId,
'removeEntity',
doc._id,
'convertDocToFile'
)
// 'reciveNewFile' (sic): established event name — presumably clients
// listen for this exact spelling; confirm before renaming.
EditorRealTimeController.emitToRoom(
projectId,
'reciveNewFile',
folder._id,
fileRef,
'convertDocToFile',
null,
userId
)
callback(null, fileRef)
}
)
}
)
}
)
},
}),
}
/**