DocumentManager = require "./DocumentManager"
HistoryManager = require "./HistoryManager"
ProjectManager = require "./ProjectManager"
Errors = require "./Errors"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
ProjectFlusher = require("./ProjectFlusher")
DeleteQueueManager = require("./DeleteQueueManager")
async = require "async"

TWO_MEGABYTES = 2 * 1024 * 1024

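# HttpController: Express-style HTTP handlers (req, res, next) for doc and
# project operations. Most handlers log the request, time it with Metrics,
# delegate to DocumentManager/ProjectManager/HistoryManager, and report
# failures via next(error) or an explicit status code.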
module.exports = HttpController =
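	# Fetch a doc's lines, version, recent ops, ranges and pathname under the
	# document lock. ?fromVersion=N is forwarded so ops can be returned from that
	# version onwards; otherwise -1 is passed as a sentinel. A missing doc is
	# reported as a NotFoundError.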
	getDoc: (req, res, next = (error) ->) ->
		doc_id = req.params.doc_id
		project_id = req.params.project_id
		logger.log project_id: project_id, doc_id: doc_id, "getting doc via http"
		timer = new Metrics.Timer("http.getDoc")

		if req.query?.fromVersion?
			fromVersion = parseInt(req.query.fromVersion, 10)
		else
			fromVersion = -1

		DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops, ranges, pathname) ->
			timer.done()
			return next(error) if error?
			logger.log project_id: project_id, doc_id: doc_id, "got doc via http"
			if !lines? or !version?
				return next(new Errors.NotFoundError("document not found"))
			res.json
				id: doc_id
				lines: lines
				version: version
				ops: ops
				ranges: ranges
				pathname: pathname

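	# Character count of a doc: each line's length plus one per line for the
	# joining newline. Used by setDoc to enforce the TWO_MEGABYTES limit.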
	_getTotalSizeOfLines: (lines) ->
		size = 0
		for line in lines
			size += (line.length + 1)
		return size

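	# Fetch all docs for a project, forwarding the client's ?state hash and the
	# parsed ?exclude=id:version,... list to ProjectManager. A
	# ProjectStateChangedError is reported as 409 Conflict.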
	getProjectDocsAndFlushIfOld: (req, res, next = (error) ->) ->
		project_id = req.params.project_id
		projectStateHash = req.query?.state
		# exclude is string of existing docs "id:version,id:version,..."
		excludeItems = req.query?.exclude?.split(',') or []
		logger.log project_id: project_id, exclude: excludeItems, "getting docs via http"
		timer = new Metrics.Timer("http.getAllDocs")
		excludeVersions = {}
		for item in excludeItems
			[id, version] = item?.split(':')
			excludeVersions[id] = version
		logger.log {project_id: project_id, projectStateHash: projectStateHash, excludeVersions: excludeVersions}, "excluding versions"
		ProjectManager.getProjectDocsAndFlushIfOld project_id, projectStateHash, excludeVersions, (error, result) ->
			timer.done()
			if error instanceof Errors.ProjectStateChangedError
				res.send 409 # conflict
			else if error?
				return next(error)
			else
				logger.log project_id: project_id, result: ("#{doc._id}:#{doc.v}" for doc in result), "got docs via http"
				res.send result

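	# Clear the project's cached state via ProjectManager.clearProjectState;
	# responds 200 on success.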
	clearProjectState: (req, res, next = (error) ->) ->
		project_id = req.params.project_id
		timer = new Metrics.Timer("http.clearProjectState")
		logger.log project_id: project_id, "clearing project state via http"
		ProjectManager.clearProjectState project_id, (error) ->
			timer.done()
			if error?
				return next(error)
			else
				res.send 200

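	# Replace a doc's lines under the document lock. Requests whose total line
	# size exceeds TWO_MEGABYTES are rejected with 406 before any work is done.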
	setDoc: (req, res, next = (error) ->) ->
		doc_id = req.params.doc_id
		project_id = req.params.project_id
		{lines, source, user_id, undoing} = req.body
		lineSize = HttpController._getTotalSizeOfLines(lines)
		if lineSize > TWO_MEGABYTES
			logger.log {project_id, doc_id, source, lineSize, user_id}, "document too large, returning 406 response"
			return res.send 406
		logger.log {project_id, doc_id, lines, source, user_id, undoing}, "setting doc via http"
		timer = new Metrics.Timer("http.setDoc")
		DocumentManager.setDocWithLock project_id, doc_id, lines, source, user_id, undoing, (error) ->
			timer.done()
			return next(error) if error?
			logger.log project_id: project_id, doc_id: doc_id, "set doc via http"
			res.send 204 # No Content

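	# Flush a doc via DocumentManager.flushDocIfLoadedWithLock; as the name
	# suggests, docs that are not currently loaded are expected to be skipped.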
	flushDocIfLoaded: (req, res, next = (error) ->) ->
		doc_id = req.params.doc_id
		project_id = req.params.project_id
		logger.log project_id: project_id, doc_id: doc_id, "flushing doc via http"
		timer = new Metrics.Timer("http.flushDoc")
		DocumentManager.flushDocIfLoadedWithLock project_id, doc_id, (error) ->
			timer.done()
			return next(error) if error?
			logger.log project_id: project_id, doc_id: doc_id, "flushed doc via http"
			res.send 204 # No Content

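	# Delete a doc. By default (flush defaults to true) the doc is flushed before
	# deletion and an async project history flush is triggered; pass flush: false
	# in the body to delete without flushing.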
	deleteDoc: (req, res, next = (error) ->) ->
		doc_id = req.params.doc_id
		project_id = req.params.project_id
		flush = req.body.flush ? true
		logger.log project_id: project_id, doc_id: doc_id, flush: flush, "deleting doc via http"
		timer = new Metrics.Timer("http.deleteDoc")
		if flush
			DocumentManager.flushAndDeleteDocWithLock project_id, doc_id, (error) ->
				timer.done()
				# There is no harm in flushing project history if the previous call
				# failed and sometimes it is required
				HistoryManager.flushProjectChangesAsync project_id

				return next(error) if error?
				logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http"
				res.send 204 # No Content
		else
			DocumentManager.deleteDocWithLock project_id, doc_id, (error) ->
				timer.done()
				return next(error) if error?
				logger.log project_id: project_id, doc_id: doc_id, "deleted doc via http"
				res.send 204 # No Content

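	# Flush a whole project via ProjectManager.flushProjectWithLocks.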
	flushProject: (req, res, next = (error) ->) ->
		project_id = req.params.project_id
		logger.log project_id: project_id, "flushing project via http"
		timer = new Metrics.Timer("http.flushProject")
		ProjectManager.flushProjectWithLocks project_id, (error) ->
			timer.done()
			return next(error) if error?
			logger.log project_id: project_id, "flushed project via http"
			res.send 204 # No Content

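	# Flush and delete a project. ?background queues the delete via
	# ProjectManager.queueFlushAndDeleteProject instead of running it inline;
	# ?shutdown sets skip_history_flush so history is not flushed when realtime
	# shuts down.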
	deleteProject: (req, res, next = (error) ->) ->
		project_id = req.params.project_id
		logger.log project_id: project_id, "deleting project via http"
		options = {}
		options.background = true if req.query?.background # allow non-urgent flushes to be queued
		options.skip_history_flush = true if req.query?.shutdown # don't flush history when realtime shuts down
		if req.query?.background
			ProjectManager.queueFlushAndDeleteProject project_id, (error) ->
				return next(error) if error?
				logger.log project_id: project_id, "queue delete of project via http"
				res.send 204 # No Content
		else
			timer = new Metrics.Timer("http.deleteProject")
			ProjectManager.flushAndDeleteProjectWithLocks project_id, options, (error) ->
				timer.done()
				return next(error) if error?
				logger.log project_id: project_id, "deleted project via http"
				res.send 204 # No Content

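	# Queue a flush-and-delete for each project id in body.project_ids, one at a
	# time, and respond 204 once all have been queued.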
	deleteMultipleProjects: (req, res, next = (error) ->) ->
		project_ids = req.body?.project_ids || []
		logger.log project_ids: project_ids, "deleting multiple projects via http"
		async.eachSeries project_ids, (project_id, cb) ->
			logger.log project_id: project_id, "queue delete of project via http"
			ProjectManager.queueFlushAndDeleteProject project_id, cb
		, (error) ->
			return next(error) if error?
			res.send 204 # No Content

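	# Accept one or more changes on a doc. Change ids come from body.change_ids,
	# falling back to the single :change_id route parameter.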
	acceptChanges: (req, res, next = (error) ->) ->
		{project_id, doc_id} = req.params
		change_ids = req.body?.change_ids
		if !change_ids?
			change_ids = [ req.params.change_id ]
		logger.log {project_id, doc_id}, "accepting #{ change_ids.length } changes via http"
		timer = new Metrics.Timer("http.acceptChanges")
		DocumentManager.acceptChangesWithLock project_id, doc_id, change_ids, (error) ->
			timer.done()
			return next(error) if error?
			logger.log {project_id, doc_id}, "accepted #{ change_ids.length } changes via http"
			res.send 204 # No Content

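	# Delete a comment from a doc via DocumentManager.deleteCommentWithLock.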
	deleteComment: (req, res, next = (error) ->) ->
		{project_id, doc_id, comment_id} = req.params
		logger.log {project_id, doc_id, comment_id}, "deleting comment via http"
		timer = new Metrics.Timer("http.deleteComment")
		DocumentManager.deleteCommentWithLock project_id, doc_id, comment_id, (error) ->
			timer.done()
			return next(error) if error?
			logger.log {project_id, doc_id, comment_id}, "deleted comment via http"
			res.send 204 # No Content

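	# Apply a batch of docUpdates and fileUpdates to a project at the given
	# version via ProjectManager.updateProjectWithLocks.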
	updateProject: (req, res, next = (error) ->) ->
		timer = new Metrics.Timer("http.updateProject")
		project_id = req.params.project_id
		{projectHistoryId, userId, docUpdates, fileUpdates, version} = req.body
		logger.log {project_id, docUpdates, fileUpdates, version}, "updating project via http"

		ProjectManager.updateProjectWithLocks project_id, projectHistoryId, userId, docUpdates, fileUpdates, version, (error) ->
			timer.done()
			return next(error) if error?
			logger.log project_id: project_id, "updated project via http"
			res.send 204 # No Content

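	# Queue a project history resync for the supplied docs and files via
	# HistoryManager.resyncProjectHistory.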
	resyncProjectHistory: (req, res, next = (error) ->) ->
		project_id = req.params.project_id
		{projectHistoryId, docs, files} = req.body

		logger.log {project_id, docs, files}, "queuing project history resync via http"
		HistoryManager.resyncProjectHistory project_id, projectHistoryId, docs, files, (error) ->
			return next(error) if error?
			logger.log {project_id}, "queued project history resync via http"
			res.send 204

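	# Bulk-flush projects via ProjectFlusher, honouring ?limit, ?concurrency and
	# ?dryRun. Responds with the affected project ids, or 500 on error. The
	# response timeout is raised to 5 minutes.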
	flushAllProjects: (req, res, next = (error) ->) ->
		res.setTimeout(5 * 60 * 1000)
		options =
			limit: req.query.limit || 1000
			concurrency: req.query.concurrency || 5
			dryRun: req.query.dryRun || false
		ProjectFlusher.flushAllProjects options, (err, project_ids) ->
			if err?
				logger.err err: err, "error bulk flushing projects"
				res.send 500
			else
				res.send project_ids

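	# Drain the queue of project deletes via
	# DeleteQueueManager.flushAndDeleteOldProjects, honouring ?limit and
	# ?min_delete_age (defaulting to 5 minutes). Responds with {flushed: ...},
	# or 500 on error. The response timeout is raised to 10 minutes.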
	flushQueuedProjects: (req, res, next = (error) ->) ->
		res.setTimeout(10 * 60 * 1000)
		options =
			limit: req.query.limit || 1000
			timeout: 5 * 60 * 1000
			min_delete_age: req.query.min_delete_age || 5 * 60 * 1000
		DeleteQueueManager.flushAndDeleteOldProjects options, (err, flushed) ->
			if err?
				logger.err err: err, "error flushing old projects"
				res.send 500
			else
				logger.log {flushed: flushed}, "flush of queued projects completed"
				res.send {flushed: flushed}