overleaf/services/track-changes/app/coffee/MongoManager.coffee

{db, ObjectId} = require "./mongojs"
PackManager = require "./PackManager"
async = require "async"
_ = require "underscore"
metrics = require 'metrics-sharelatex'
logger = require 'logger-sharelatex'

module.exports = MongoManager =
	getLastCompressedUpdate: (doc_id, callback = (error, update) ->) ->
		db.docHistory
			.find(doc_id: ObjectId(doc_id.toString()), {pack: {$slice:-1}}) # only return the last entry in a pack
			.sort( v: -1 )
			.limit(1)
			.toArray (error, compressedUpdates) ->
				return callback(error) if error?
				callback null, compressedUpdates[0] or null
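
	# Shape sketch (an assumption based on how the fields are used below, not a
	# schema from this file): a docHistory entry is either a single compressed
	# update, e.g. { doc_id, project_id, op, meta, v }, or a pack,
	# e.g. { doc_id, project_id, pack: [update, ...], v, meta, finalised, broken },
	# where each element of `pack` carries its own version `v`. With the
	# {$slice: -1} projection above, update.pack[0] is the pack's last op.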

	peekLastCompressedUpdate: (doc_id, callback = (error, update, version) ->) ->
		# under normal use we pass back the last update as
		# callback(null, update, version).
		#
		# when we have an existing last update but want to force a new one
		# to start, we pass it back as callback(null, null, version), just
		# giving the version so we can check consistency.
		MongoManager.getLastCompressedUpdate doc_id, (error, update) ->
			return callback(error) if error?
			if update?
				if update.broken # marked as broken so we will force a new op
					return callback null, null
				else if update.pack?
					if update.finalised # no more ops can be appended
						return callback null, null, update.pack[0]?.v
					else
						return callback null, update, update.pack[0]?.v
				else
					return callback null, update, update.v
			else
				PackManager.getLastPackFromIndex doc_id, (error, pack) ->
					return callback(error) if error?
					return callback(null, null, pack.v_end) if pack?.inS3? and pack?.v_end?
					callback null, null
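
	# Usage sketch (hypothetical caller, relying only on the contract above):
	#
	#   MongoManager.peekLastCompressedUpdate doc_id, (error, lastUpdate, version) ->
	#     return callback(error) if error?
	#     if lastUpdate?
	#       ... # more ops can be appended to this update/pack
	#     else
	#       ... # start a new op; `version`, when present, is only for a consistency check

	# fill in the project_id on history entries written before it was recorded
	# (found via the doc_id + project_id index in ensureIndices)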
	backportProjectId: (project_id, doc_id, callback = (error) ->) ->
		db.docHistory.update {
			doc_id: ObjectId(doc_id.toString())
			project_id: { $exists: false }
		}, {
			$set: { project_id: ObjectId(project_id.toString()) }
		}, {
			multi: true
		}, callback

	getProjectMetaData: (project_id, callback = (error, metadata) ->) ->
		db.projectHistoryMetaData.find {
			project_id: ObjectId(project_id.toString())
		}, (error, results) ->
			return callback(error) if error?
			callback null, results[0]

	setProjectMetaData: (project_id, metadata, callback = (error) ->) ->
		db.projectHistoryMetaData.update {
			project_id: ObjectId(project_id)
		}, {
			$set: metadata
		}, {
			upsert: true
		}, callback
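
	# Usage sketch (hypothetical; `preserveHistory` is an assumed example flag,
	# not defined in this file):
	#
	#   MongoManager.setProjectMetaData project_id, {preserveHistory: true}, (error) ->
	#     return callback(error) if error?
	#     MongoManager.upgradeHistory project_id, callback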

	upgradeHistory: (project_id, callback = (error) ->) ->
		# preserve the project's existing history
		db.docHistory.update {
			project_id: ObjectId(project_id)
			temporary: true
			expiresAt: {$exists: true}
		}, {
			$set: {temporary: false}
			$unset: {expiresAt: ""}
		}, {
			multi: true
		}, callback
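
	# (unsetting expiresAt takes these entries out of the reach of the TTL
	# index declared below, so they are no longer deleted automatically)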

	ensureIndices: () ->
		# For finding all updates that go into a diff for a doc
		db.docHistory.ensureIndex { doc_id: 1, v: 1 }, { background: true }
		# For finding all updates that affect a project
		db.docHistory.ensureIndex { project_id: 1, "meta.end_ts": 1 }, { background: true }
		# For finding updates that don't yet have a project_id and need it inserting
		db.docHistory.ensureIndex { doc_id: 1, project_id: 1 }, { background: true }
		# For finding project meta-data
		db.projectHistoryMetaData.ensureIndex { project_id: 1 }, { background: true }
		# TTL index for auto deleting week old temporary ops
		db.docHistory.ensureIndex { expiresAt: 1 }, { expireAfterSeconds: 0, background: true }
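		# (expireAfterSeconds: 0 means each entry expires at the time stored in
		# its own expiresAt field, rather than a fixed interval after creation)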
		# For finding packs to be checked for archiving
		db.docHistory.ensureIndex { last_checked: 1 }, { background: true }
		# For finding archived packs
		db.docHistoryIndex.ensureIndex { project_id: 1 }, { background: true }

[
	'getLastCompressedUpdate',
	'getProjectMetaData',
	'setProjectMetaData'
].map (method) ->
	metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger)
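
# (timeAsyncMethod wraps each listed method in place so call durations are
# recorded under the 'mongo.MongoManager' metric prefix, with logging via
# logger; see metrics-sharelatex for the exact behavior)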