mirror of
https://github.com/overleaf/overleaf.git
synced 2025-04-14 08:54:28 +00:00
Provide an endpoint for the current project docs in Redis
This commit is contained in:
parent
c3465cf3d4
commit
0109585092
4 changed files with 51 additions and 1 deletions
services/document-updater
|
@ -38,6 +38,7 @@ app.param 'doc_id', (req, res, next, doc_id) ->
|
|||
next new Error("invalid doc id")
|
||||
|
||||
app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc
|
||||
app.get '/project/:project_id', HttpController.getProjectDocs
|
||||
app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc
|
||||
app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded
|
||||
app.delete '/project/:project_id/doc/:doc_id', HttpController.flushAndDeleteDoc
|
||||
|
|
|
@ -37,6 +37,23 @@ module.exports = HttpController =
|
|||
size += (line.length + 1)
|
||||
return size
|
||||
|
||||
getProjectDocs: (req, res, next = (error) ->) ->
|
||||
project_id = req.params.project_id
|
||||
# filter is string of existing docs "id:version,id:version,..."
|
||||
filterItems = req.query?.filter?.split(',') or []
|
||||
logger.log project_id: project_id, filter: filterItems, "getting docs via http"
|
||||
timer = new Metrics.Timer("http.getAllDocs")
|
||||
excludeVersions = {}
|
||||
for item in filterItems
|
||||
[id,version] = item?.split(':')
|
||||
excludeVersions[id] = version
|
||||
logger.log {project_id: project_id, excludeVersions: excludeVersions}, "excluding versions"
|
||||
ProjectManager.getProjectDocs project_id, excludeVersions, (error, result) ->
|
||||
timer.done()
|
||||
return next(error) if error?
|
||||
logger.log project_id: project_id, result: result, "got docs via http"
|
||||
res.send result
|
||||
|
||||
setDoc: (req, res, next = (error) ->) ->
|
||||
doc_id = req.params.doc_id
|
||||
project_id = req.params.project_id
|
||||
|
|
|
@ -56,3 +56,30 @@ module.exports = ProjectManager =
|
|||
callback new Error("Errors deleting docs. See log for details")
|
||||
else
|
||||
callback(null)
|
||||
|
||||
getProjectDocs: (project_id, excludeVersions = {}, _callback = (error) ->) ->
|
||||
timer = new Metrics.Timer("projectManager.getProjectDocs")
|
||||
callback = (args...) ->
|
||||
timer.done()
|
||||
_callback(args...)
|
||||
|
||||
RedisManager.getDocIdsInProject project_id, (error, doc_ids) ->
|
||||
return callback(error) if error?
|
||||
jobs = []
|
||||
docs = []
|
||||
for doc_id in doc_ids or []
|
||||
do (doc_id) ->
|
||||
jobs.push (cb) ->
|
||||
# check the doc version first
|
||||
RedisManager.getDocVersion doc_id, (error, version) ->
|
||||
return cb(error) if error?
|
||||
# skip getting the doc if we already have that version
|
||||
return cb() if version is excludeVersions[doc_id]
|
||||
# otherwise get the doc lines from redis
|
||||
RedisManager.getDocLines doc_id, (error, lines) ->
|
||||
return cb(error) if error?
|
||||
docs.push {_id: doc_id, lines: lines, rev: version}
|
||||
cb()
|
||||
async.series jobs, (error) ->
|
||||
return callback(error) if error?
|
||||
callback(null, docs)
|
||||
|
|
|
@ -142,6 +142,11 @@ module.exports = RedisManager =
|
|||
version = parseInt(version, 10)
|
||||
callback null, version
|
||||
|
||||
getDocLines: (doc_id, callback = (error, version) ->) ->
|
||||
rclient.get keys.docLines(doc_id: doc_id), (error, docLines) ->
|
||||
return callback(error) if error?
|
||||
callback null, docLines
|
||||
|
||||
getPreviousDocOps: (doc_id, start, end, callback = (error, jsonOps) ->) ->
|
||||
timer = new metrics.Timer("redis.get-prev-docops")
|
||||
rclient.llen keys.docOps(doc_id: doc_id), (error, length) ->
|
||||
|
@ -239,7 +244,7 @@ module.exports = RedisManager =
|
|||
|
||||
getDocIdsInProject: (project_id, callback = (error, doc_ids) ->) ->
|
||||
rclient.smembers keys.docsInProject(project_id: project_id), callback
|
||||
|
||||
|
||||
_serializeRanges: (ranges, callback = (error, serializedRanges) ->) ->
|
||||
jsonRanges = JSON.stringify(ranges)
|
||||
if jsonRanges? and jsonRanges.length > MAX_RANGES_SIZE
|
||||
|
|
Loading…
Add table
Reference in a new issue