Decaf cleanup: move functions to top level

This commit is contained in:
Eric Mc Sween 2020-05-11 11:12:15 -04:00
parent 64a881461f
commit e4ac63dd19

View file

@ -1,4 +1,3 @@
let HttpController
const DocumentManager = require('./DocumentManager') const DocumentManager = require('./DocumentManager')
const HistoryManager = require('./HistoryManager') const HistoryManager = require('./HistoryManager')
const ProjectManager = require('./ProjectManager') const ProjectManager = require('./ProjectManager')
@ -11,395 +10,401 @@ const async = require('async')
const TWO_MEGABYTES = 2 * 1024 * 1024 const TWO_MEGABYTES = 2 * 1024 * 1024
module.exports = HttpController = { module.exports = {
getDoc(req, res, next) { getDoc,
let fromVersion getProjectDocsAndFlushIfOld,
const docId = req.params.doc_id clearProjectState,
const projectId = req.params.project_id setDoc,
logger.log({ projectId, docId }, 'getting doc via http') flushDocIfLoaded,
const timer = new Metrics.Timer('http.getDoc') deleteDoc,
flushProject,
deleteProject,
deleteMultipleProjects,
acceptChanges,
deleteComment,
updateProject,
resyncProjectHistory,
flushAllProjects,
flushQueuedProjects
}
if (req.query.fromVersion != null) { function getDoc(req, res, next) {
fromVersion = parseInt(req.query.fromVersion, 10) let fromVersion
} else { const docId = req.params.doc_id
fromVersion = -1 const projectId = req.params.project_id
} logger.log({ projectId, docId }, 'getting doc via http')
const timer = new Metrics.Timer('http.getDoc')
DocumentManager.getDocAndRecentOpsWithLock( if (req.query.fromVersion != null) {
projectId, fromVersion = parseInt(req.query.fromVersion, 10)
docId, } else {
fromVersion, fromVersion = -1
function (error, lines, version, ops, ranges, pathname) { }
timer.done()
if (error) {
return next(error)
}
logger.log({ projectId, docId }, 'got doc via http')
if (lines == null || version == null) {
return next(new Errors.NotFoundError('document not found'))
}
res.json({
id: docId,
lines,
version,
ops,
ranges,
pathname
})
}
)
},
_getTotalSizeOfLines(lines) { DocumentManager.getDocAndRecentOpsWithLock(
let size = 0 projectId,
for (const line of lines) { docId,
size += line.length + 1 fromVersion,
} function (error, lines, version, ops, ranges, pathname) {
return size
},
getProjectDocsAndFlushIfOld(req, res, next) {
const projectId = req.params.project_id
const projectStateHash = req.query.state
// exclude is string of existing docs "id:version,id:version,..."
const excludeItems =
req.query.exclude != null ? req.query.exclude.split(',') : []
logger.log({ projectId, exclude: excludeItems }, 'getting docs via http')
const timer = new Metrics.Timer('http.getAllDocs')
const excludeVersions = {}
for (const item of excludeItems) {
const [id, version] = item.split(':')
excludeVersions[id] = version
}
logger.log(
{ projectId, projectStateHash, excludeVersions },
'excluding versions'
)
ProjectManager.getProjectDocsAndFlushIfOld(
projectId,
projectStateHash,
excludeVersions,
function (error, result) {
timer.done()
if (error instanceof Errors.ProjectStateChangedError) {
res.sendStatus(409) // conflict
} else if (error) {
next(error)
} else {
logger.log(
{
projectId,
result: result.map((doc) => `${doc._id}:${doc.v}`)
},
'got docs via http'
)
res.send(result)
}
}
)
},
clearProjectState(req, res, next) {
const projectId = req.params.project_id
const timer = new Metrics.Timer('http.clearProjectState')
logger.log({ projectId }, 'clearing project state via http')
ProjectManager.clearProjectState(projectId, function (error) {
timer.done() timer.done()
if (error) { if (error) {
return next(error)
}
logger.log({ projectId, docId }, 'got doc via http')
if (lines == null || version == null) {
return next(new Errors.NotFoundError('document not found'))
}
res.json({
id: docId,
lines,
version,
ops,
ranges,
pathname
})
}
)
}
/**
 * Compute the total size of a document given its lines, counting one
 * extra character per line for the line separator.
 * @param {string[]} lines - the document's lines
 * @returns {number} total character count including one newline per line
 */
function _getTotalSizeOfLines(lines) {
  return lines.reduce((total, line) => total + line.length + 1, 0)
}
function getProjectDocsAndFlushIfOld(req, res, next) {
const projectId = req.params.project_id
const projectStateHash = req.query.state
// exclude is string of existing docs "id:version,id:version,..."
const excludeItems =
req.query.exclude != null ? req.query.exclude.split(',') : []
logger.log({ projectId, exclude: excludeItems }, 'getting docs via http')
const timer = new Metrics.Timer('http.getAllDocs')
const excludeVersions = {}
for (const item of excludeItems) {
const [id, version] = item.split(':')
excludeVersions[id] = version
}
logger.log(
{ projectId, projectStateHash, excludeVersions },
'excluding versions'
)
ProjectManager.getProjectDocsAndFlushIfOld(
projectId,
projectStateHash,
excludeVersions,
function (error, result) {
timer.done()
if (error instanceof Errors.ProjectStateChangedError) {
res.sendStatus(409) // conflict
} else if (error) {
next(error) next(error)
} else { } else {
res.sendStatus(200) logger.log(
{
projectId,
result: result.map((doc) => `${doc._id}:${doc.v}`)
},
'got docs via http'
)
res.send(result)
} }
})
},
setDoc(req, res, next) {
const docId = req.params.doc_id
const projectId = req.params.project_id
const { lines, source, user_id: userId, undoing } = req.body
const lineSize = HttpController._getTotalSizeOfLines(lines)
if (lineSize > TWO_MEGABYTES) {
logger.log(
{ projectId, docId, source, lineSize, userId },
'document too large, returning 406 response'
)
return res.sendStatus(406)
} }
logger.log( )
{ projectId, docId, lines, source, userId, undoing }, }
'setting doc via http'
)
const timer = new Metrics.Timer('http.setDoc')
DocumentManager.setDocWithLock(
projectId,
docId,
lines,
source,
userId,
undoing,
function (error) {
timer.done()
if (error) {
return next(error)
}
logger.log({ projectId, docId }, 'set doc via http')
res.sendStatus(204)
}
)
}, // No Content
flushDocIfLoaded(req, res, next) { function clearProjectState(req, res, next) {
const docId = req.params.doc_id const projectId = req.params.project_id
const projectId = req.params.project_id const timer = new Metrics.Timer('http.clearProjectState')
logger.log({ projectId, docId }, 'flushing doc via http') logger.log({ projectId }, 'clearing project state via http')
const timer = new Metrics.Timer('http.flushDoc') ProjectManager.clearProjectState(projectId, function (error) {
DocumentManager.flushDocIfLoadedWithLock(projectId, docId, function ( timer.done()
if (error) {
next(error)
} else {
res.sendStatus(200)
}
})
}
/**
 * HTTP handler: replace a document's content with the lines in the request
 * body. Rejects payloads over the two-megabyte limit with a 406 before
 * taking any lock; otherwise sets the doc under lock and responds 204.
 */
function setDoc(req, res, next) {
  const { doc_id: docId, project_id: projectId } = req.params
  const { lines, source, user_id: userId, undoing } = req.body
  // Size check first: avoid locking/processing oversized documents.
  const lineSize = _getTotalSizeOfLines(lines)
  if (lineSize > TWO_MEGABYTES) {
    logger.log(
      { projectId, docId, source, lineSize, userId },
      'document too large, returning 406 response'
    )
    return res.sendStatus(406)
  }
  logger.log(
    { projectId, docId, lines, source, userId, undoing },
    'setting doc via http'
  )
  const timer = new Metrics.Timer('http.setDoc')
  DocumentManager.setDocWithLock(
    projectId,
    docId,
    lines,
    source,
    userId,
    undoing,
    (error) => {
      timer.done()
      if (error) {
        return next(error)
      }
      logger.log({ projectId, docId }, 'set doc via http')
      res.sendStatus(204) // No Content
    }
  )
}
/**
 * HTTP handler: flush a single document to persistent storage if it is
 * currently loaded, under the doc lock. Responds 204 on success.
 */
function flushDocIfLoaded(req, res, next) {
  const { doc_id: docId, project_id: projectId } = req.params
  const timer = new Metrics.Timer('http.flushDoc')
  logger.log({ projectId, docId }, 'flushing doc via http')
  DocumentManager.flushDocIfLoadedWithLock(projectId, docId, (error) => {
    timer.done()
    if (error) {
      return next(error)
    }
    logger.log({ projectId, docId }, 'flushed doc via http')
    res.sendStatus(204) // No Content
  })
}
/**
 * HTTP handler: flush and delete a document under lock. The
 * `ignore_flush_errors=true` query flag lets the delete proceed even if
 * the flush fails. Always kicks off an async project-history flush,
 * then responds 204 on success.
 */
function deleteDoc(req, res, next) {
  const { doc_id: docId, project_id: projectId } = req.params
  const ignoreFlushErrors = req.query.ignore_flush_errors === 'true'
  const timer = new Metrics.Timer('http.deleteDoc')
  logger.log({ projectId, docId }, 'deleting doc via http')
  DocumentManager.flushAndDeleteDocWithLock(
    projectId,
    docId,
    { ignoreFlushErrors },
    (error) => {
      timer.done()
      // There is no harm in flushing project history if the previous call
      // failed and sometimes it is required
      HistoryManager.flushProjectChangesAsync(projectId)
      if (error) {
        return next(error)
      }
      logger.log({ projectId, docId }, 'deleted doc via http')
      res.sendStatus(204) // No Content
    }
  )
}
/**
 * HTTP handler: flush all of a project's loaded docs to persistent
 * storage, under the project locks. Responds 204 on success.
 */
function flushProject(req, res, next) {
  const projectId = req.params.project_id
  const timer = new Metrics.Timer('http.flushProject')
  logger.log({ projectId }, 'flushing project via http')
  ProjectManager.flushProjectWithLocks(projectId, (error) => {
    timer.done()
    if (error) {
      return next(error)
    }
    logger.log({ projectId }, 'flushed project via http')
    res.sendStatus(204) // No Content
  })
}
function deleteProject(req, res, next) {
const projectId = req.params.project_id
logger.log({ projectId }, 'deleting project via http')
const options = {}
if (req.query.background) {
options.background = true
} // allow non-urgent flushes to be queued
if (req.query.shutdown) {
options.skip_history_flush = true
} // don't flush history when realtime shuts down
if (req.query.background) {
ProjectManager.queueFlushAndDeleteProject(projectId, function (error) {
if (error) {
return next(error)
}
logger.log({ projectId }, 'queue delete of project via http')
res.sendStatus(204)
}) // No Content
} else {
const timer = new Metrics.Timer('http.deleteProject')
ProjectManager.flushAndDeleteProjectWithLocks(projectId, options, function (
error error
) { ) {
timer.done() timer.done()
if (error) { if (error) {
return next(error) return next(error)
} }
logger.log({ projectId, docId }, 'flushed doc via http') logger.log({ projectId }, 'deleted project via http')
res.sendStatus(204) res.sendStatus(204) // No Content
}) })
}, // No Content }
}
deleteDoc(req, res, next) { function deleteMultipleProjects(req, res, next) {
const docId = req.params.doc_id const projectIds = req.body.project_ids || []
const projectId = req.params.project_id logger.log({ projectIds }, 'deleting multiple projects via http')
const ignoreFlushErrors = req.query.ignore_flush_errors === 'true' async.eachSeries(
const timer = new Metrics.Timer('http.deleteDoc') projectIds,
logger.log({ projectId, docId }, 'deleting doc via http') function (projectId, cb) {
DocumentManager.flushAndDeleteDocWithLock( logger.log({ projectId }, 'queue delete of project via http')
projectId, ProjectManager.queueFlushAndDeleteProject(projectId, cb)
docId, },
{ ignoreFlushErrors }, function (error) {
function (error) { if (error) {
timer.done() return next(error)
// There is no harm in flushing project history if the previous call
// failed and sometimes it is required
HistoryManager.flushProjectChangesAsync(projectId)
if (error) {
return next(error)
}
logger.log({ projectId, docId }, 'deleted doc via http')
res.sendStatus(204)
} }
) res.sendStatus(204) // No Content
}, // No Content }
)
}
flushProject(req, res, next) { function acceptChanges(req, res, next) {
const projectId = req.params.project_id const { project_id: projectId, doc_id: docId } = req.params
logger.log({ projectId }, 'flushing project via http') let changeIds = req.body.change_ids
const timer = new Metrics.Timer('http.flushProject') if (changeIds == null) {
ProjectManager.flushProjectWithLocks(projectId, function (error) { changeIds = [req.params.change_id]
}
logger.log(
{ projectId, docId },
`accepting ${changeIds.length} changes via http`
)
const timer = new Metrics.Timer('http.acceptChanges')
DocumentManager.acceptChangesWithLock(projectId, docId, changeIds, function (
error
) {
timer.done()
if (error) {
return next(error)
}
logger.log(
{ projectId, docId },
`accepted ${changeIds.length} changes via http`
)
res.sendStatus(204) // No Content
})
}
/**
 * HTTP handler: delete a comment (tracked-changes thread) from a
 * document, under the doc lock. Responds 204 on success.
 */
function deleteComment(req, res, next) {
  const { project_id: projectId, doc_id: docId, comment_id: commentId } =
    req.params
  const timer = new Metrics.Timer('http.deleteComment')
  logger.log({ projectId, docId, commentId }, 'deleting comment via http')
  DocumentManager.deleteCommentWithLock(
    projectId,
    docId,
    commentId,
    (error) => {
      timer.done()
      if (error) {
        return next(error)
      }
      logger.log({ projectId, docId, commentId }, 'deleted comment via http')
      res.sendStatus(204) // No Content
    }
  )
}
function updateProject(req, res, next) {
const timer = new Metrics.Timer('http.updateProject')
const projectId = req.params.project_id
const {
projectHistoryId,
userId,
docUpdates,
fileUpdates,
version
} = req.body
logger.log(
{ projectId, docUpdates, fileUpdates, version },
'updating project via http'
)
ProjectManager.updateProjectWithLocks(
projectId,
projectHistoryId,
userId,
docUpdates,
fileUpdates,
version,
function (error) {
timer.done() timer.done()
if (error) { if (error) {
return next(error) return next(error)
} }
logger.log({ projectId }, 'flushed project via http') logger.log({ projectId }, 'updated project via http')
res.sendStatus(204) res.sendStatus(204) // No Content
})
}, // No Content
deleteProject(req, res, next) {
const projectId = req.params.project_id
logger.log({ projectId }, 'deleting project via http')
const options = {}
if (req.query.background) {
options.background = true
} // allow non-urgent flushes to be queued
if (req.query.shutdown) {
options.skip_history_flush = true
} // don't flush history when realtime shuts down
if (req.query.background) {
ProjectManager.queueFlushAndDeleteProject(projectId, function (error) {
if (error) {
return next(error)
}
logger.log({ projectId }, 'queue delete of project via http')
res.sendStatus(204)
}) // No Content
} else {
const timer = new Metrics.Timer('http.deleteProject')
ProjectManager.flushAndDeleteProjectWithLocks(
projectId,
options,
function (error) {
timer.done()
if (error) {
return next(error)
}
logger.log({ projectId }, 'deleted project via http')
res.sendStatus(204)
}
)
} }
}, // No Content )
}
deleteMultipleProjects(req, res, next) {
const projectIds = req.body.project_ids || [] function resyncProjectHistory(req, res, next) {
logger.log({ projectIds }, 'deleting multiple projects via http') const projectId = req.params.project_id
async.eachSeries( const { projectHistoryId, docs, files } = req.body
projectIds,
function (projectId, cb) { logger.log(
logger.log({ projectId }, 'queue delete of project via http') { projectId, docs, files },
ProjectManager.queueFlushAndDeleteProject(projectId, cb) 'queuing project history resync via http'
}, )
function (error) { HistoryManager.resyncProjectHistory(
if (error) { projectId,
return next(error) projectHistoryId,
} docs,
res.sendStatus(204) files,
} function (error) {
) if (error) {
}, // No Content return next(error)
}
acceptChanges(req, res, next) { logger.log({ projectId }, 'queued project history resync via http')
const { project_id: projectId, doc_id: docId } = req.params res.sendStatus(204)
let changeIds = req.body.change_ids }
if (changeIds == null) { )
changeIds = [req.params.change_id] }
}
logger.log( function flushAllProjects(req, res, next) {
{ projectId, docId }, res.setTimeout(5 * 60 * 1000)
`accepting ${changeIds.length} changes via http` const options = {
) limit: req.query.limit || 1000,
const timer = new Metrics.Timer('http.acceptChanges') concurrency: req.query.concurrency || 5,
DocumentManager.acceptChangesWithLock( dryRun: req.query.dryRun || false
projectId, }
docId, ProjectFlusher.flushAllProjects(options, function (err, projectIds) {
changeIds, if (err) {
function (error) { logger.err({ err }, 'error bulk flushing projects')
timer.done() res.sendStatus(500)
if (error) { } else {
return next(error) res.send(projectIds)
} }
logger.log( })
{ projectId, docId }, }
`accepted ${changeIds.length} changes via http`
) function flushQueuedProjects(req, res, next) {
res.sendStatus(204) // No Content res.setTimeout(10 * 60 * 1000)
} const options = {
) limit: req.query.limit || 1000,
}, timeout: 5 * 60 * 1000,
min_delete_age: req.query.min_delete_age || 5 * 60 * 1000
deleteComment(req, res, next) { }
const { DeleteQueueManager.flushAndDeleteOldProjects(options, function (
project_id: projectId, err,
doc_id: docId, flushed
comment_id: commentId ) {
} = req.params if (err) {
logger.log({ projectId, docId, commentId }, 'deleting comment via http') logger.err({ err }, 'error flushing old projects')
const timer = new Metrics.Timer('http.deleteComment') res.sendStatus(500)
DocumentManager.deleteCommentWithLock( } else {
projectId, logger.log({ flushed }, 'flush of queued projects completed')
docId, res.send({ flushed })
commentId, }
function (error) { })
timer.done()
if (error) {
return next(error)
}
logger.log({ projectId, docId, commentId }, 'deleted comment via http')
res.sendStatus(204)
}
)
}, // No Content
updateProject(req, res, next) {
const timer = new Metrics.Timer('http.updateProject')
const projectId = req.params.project_id
const {
projectHistoryId,
userId,
docUpdates,
fileUpdates,
version
} = req.body
logger.log(
{ projectId, docUpdates, fileUpdates, version },
'updating project via http'
)
ProjectManager.updateProjectWithLocks(
projectId,
projectHistoryId,
userId,
docUpdates,
fileUpdates,
version,
function (error) {
timer.done()
if (error) {
return next(error)
}
logger.log({ projectId }, 'updated project via http')
res.sendStatus(204)
}
)
}, // No Content
resyncProjectHistory(req, res, next) {
const projectId = req.params.project_id
const { projectHistoryId, docs, files } = req.body
logger.log(
{ projectId, docs, files },
'queuing project history resync via http'
)
HistoryManager.resyncProjectHistory(
projectId,
projectHistoryId,
docs,
files,
function (error) {
if (error) {
return next(error)
}
logger.log({ projectId }, 'queued project history resync via http')
res.sendStatus(204)
}
)
},
flushAllProjects(req, res, next) {
res.setTimeout(5 * 60 * 1000)
const options = {
limit: req.query.limit || 1000,
concurrency: req.query.concurrency || 5,
dryRun: req.query.dryRun || false
}
ProjectFlusher.flushAllProjects(options, function (err, projectIds) {
if (err) {
logger.err({ err }, 'error bulk flushing projects')
res.sendStatus(500)
} else {
res.send(projectIds)
}
})
},
flushQueuedProjects(req, res, next) {
res.setTimeout(10 * 60 * 1000)
const options = {
limit: req.query.limit || 1000,
timeout: 5 * 60 * 1000,
min_delete_age: req.query.min_delete_age || 5 * 60 * 1000
}
DeleteQueueManager.flushAndDeleteOldProjects(options, function (
err,
flushed
) {
if (err) {
logger.err({ err }, 'error flushing old projects')
res.sendStatus(500)
} else {
logger.log({ flushed }, 'flush of queued projects completed')
res.send({ flushed })
}
})
}
} }