2014-02-24 12:43:27 -05:00
|
|
|
{db, ObjectId} = require "./mongojs"
|
2015-02-17 06:14:13 -05:00
|
|
|
PackManager = require "./PackManager"
|
2014-02-24 12:43:27 -05:00
|
|
|
async = require "async"
|
2015-08-24 11:19:19 -04:00
|
|
|
logger = require "logger-sharelatex"
|
2014-02-24 12:43:27 -05:00
|
|
|
|
|
|
|
module.exports = MongoManager =
  # Fetch the single most recent compressed update for a doc (the entry with
  # the highest version number `v`).  Calls back with `null` when the doc has
  # no history at all.  If the most recent entry is a pack, calls back with an
  # error instead, because a pack cannot be treated as a single update.
  getLastCompressedUpdate: (doc_id, callback = (error, update) ->) ->
    db.docHistory
      .find(doc_id: ObjectId(doc_id.toString()))
      .sort( v: -1 ) # highest version first
      .limit(1)
      .toArray (error, compressedUpdates) ->
        return callback(error) if error?
        if compressedUpdates[0]?.pack?
          # cannot pop from a pack, throw error
          error = new Error("last compressed update is a pack")
          return callback error, null
        return callback null, compressedUpdates[0] or null
|
deleteCompressedUpdate: (id, callback = (error) ->) ->
|
|
|
|
db.docHistory.remove({ _id: ObjectId(id.toString()) }, callback)
|
|
|
|
|
2015-09-23 08:22:38 -04:00
|
|
|
  # Remove and return the most recent compressed update for a doc.
  #
  # under normal use we pass back the last update as
  # callback(null,update).
  #
  # when we have an existing last update but want to force a new one
  # to start, we pass it back as callback(null,null,version), just
  # giving the version so we can check consistency.
  popLastCompressedUpdate: (doc_id, callback = (error, update, version) ->) ->
    MongoManager.getLastCompressedUpdate doc_id, (error, update) ->
      return callback(error) if error?
      if update?
        if update.inS3?
          # we want to force a new update, but ensure that it is
          # consistent with the version of the existing one in S3
          return callback null, null, update.v
        else
          # normal case: delete the stored update and hand it to the caller
          MongoManager.deleteCompressedUpdate update._id, (error) ->
            return callback(error) if error?
            callback null, update
      else
        # no history yet for this doc
        callback null, null
2015-05-22 09:12:29 -04:00
|
|
|
insertCompressedUpdates: (project_id, doc_id, updates, temporary, callback = (error) ->) ->
|
2014-02-24 12:43:27 -05:00
|
|
|
jobs = []
|
|
|
|
for update in updates
|
|
|
|
do (update) ->
|
2015-05-22 09:12:29 -04:00
|
|
|
jobs.push (callback) -> MongoManager.insertCompressedUpdate project_id, doc_id, update, temporary, callback
|
2015-05-22 09:13:34 -04:00
|
|
|
async.series jobs, (err, results) ->
|
|
|
|
if not temporary
|
|
|
|
# keep track of updates to be packed
|
2015-05-26 06:00:55 -04:00
|
|
|
db.docHistoryStats.update {doc_id:ObjectId(doc_id)}, {
|
|
|
|
$inc:{update_count:updates.length},
|
|
|
|
$currentDate:{last_update:true}
|
|
|
|
}, {upsert:true}, () ->
|
2015-05-22 09:13:34 -04:00
|
|
|
callback(err,results)
|
|
|
|
else
|
|
|
|
callback(err,results)
|
|
|
|
|
2014-02-24 12:43:27 -05:00
|
|
|
|
2014-05-16 10:59:12 -04:00
|
|
|
insertCompressedUpdate: (project_id, doc_id, update, temporary, callback = (error) ->) ->
|
|
|
|
update = {
|
2014-02-24 12:43:27 -05:00
|
|
|
doc_id: ObjectId(doc_id.toString())
|
2014-03-19 12:40:55 -04:00
|
|
|
project_id: ObjectId(project_id.toString())
|
2014-02-24 12:43:27 -05:00
|
|
|
op: update.op
|
|
|
|
meta: update.meta
|
|
|
|
v: update.v
|
2014-05-16 10:59:12 -04:00
|
|
|
}
|
2015-08-09 18:52:32 -04:00
|
|
|
|
2014-05-16 10:59:12 -04:00
|
|
|
if temporary
|
2014-05-16 11:41:14 -04:00
|
|
|
seconds = 1000
|
|
|
|
minutes = 60 * seconds
|
|
|
|
hours = 60 * minutes
|
|
|
|
days = 24 * hours
|
|
|
|
update.expiresAt = new Date(Date.now() + 7 * days)
|
2015-02-05 11:36:41 -05:00
|
|
|
# may need to roll over a pack here if we are inserting packs
|
2014-05-16 10:59:12 -04:00
|
|
|
db.docHistory.insert update, callback
|
2014-03-04 08:02:48 -05:00
|
|
|
|
2014-03-19 13:44:16 -04:00
|
|
|
getDocUpdates:(doc_id, options = {}, callback = (error, updates) ->) ->
|
2014-03-04 09:05:17 -05:00
|
|
|
query =
|
|
|
|
doc_id: ObjectId(doc_id.toString())
|
2014-03-05 10:06:46 -05:00
|
|
|
if options.from?
|
2014-03-06 05:45:51 -05:00
|
|
|
query["v"] ||= {}
|
|
|
|
query["v"]["$gte"] = options.from
|
2014-03-05 10:06:46 -05:00
|
|
|
if options.to?
|
2014-03-06 05:45:51 -05:00
|
|
|
query["v"] ||= {}
|
|
|
|
query["v"]["$lte"] = options.to
|
2014-03-05 10:59:40 -05:00
|
|
|
|
2015-02-17 08:41:31 -05:00
|
|
|
PackManager.findDocResults(db.docHistory, query, options.limit, callback)
|
2014-03-04 08:02:48 -05:00
|
|
|
|
2014-03-19 13:44:16 -04:00
|
|
|
getProjectUpdates: (project_id, options = {}, callback = (error, updates) ->) ->
|
|
|
|
query =
|
|
|
|
project_id: ObjectId(project_id.toString())
|
|
|
|
|
|
|
|
if options.before?
|
|
|
|
query["meta.end_ts"] = { $lt: options.before }
|
|
|
|
|
2015-02-17 08:41:31 -05:00
|
|
|
PackManager.findProjectResults(db.docHistory, query, options.limit, callback)
|
2014-03-19 13:44:16 -04:00
|
|
|
|
2014-03-21 10:40:51 -04:00
|
|
|
backportProjectId: (project_id, doc_id, callback = (error) ->) ->
|
|
|
|
db.docHistory.update {
|
|
|
|
doc_id: ObjectId(doc_id.toString())
|
|
|
|
project_id: { $exists: false }
|
|
|
|
}, {
|
|
|
|
$set: { project_id: ObjectId(project_id.toString()) }
|
|
|
|
}, {
|
|
|
|
multi: true
|
|
|
|
}, callback
|
|
|
|
|
2014-03-28 12:01:34 -04:00
|
|
|
getProjectMetaData: (project_id, callback = (error, metadata) ->) ->
|
|
|
|
db.projectHistoryMetaData.find {
|
|
|
|
project_id: ObjectId(project_id.toString())
|
|
|
|
}, (error, results) ->
|
|
|
|
return callback(error) if error?
|
|
|
|
callback null, results[0]
|
|
|
|
|
|
|
|
setProjectMetaData: (project_id, metadata, callback = (error) ->) ->
|
|
|
|
db.projectHistoryMetaData.update {
|
|
|
|
project_id: ObjectId(project_id)
|
|
|
|
}, {
|
|
|
|
$set: metadata
|
|
|
|
}, {
|
|
|
|
upsert: true
|
|
|
|
}, callback
|
|
|
|
|
|
|
|
  # Create (idempotently) every index this module's queries rely on.
  # All builds run in the background so startup is not blocked.
  ensureIndices: () ->
    # For finding all updates that go into a diff for a doc
    db.docHistory.ensureIndex { doc_id: 1, v: 1 }, { background: true }
    # For finding all updates that affect a project
    db.docHistory.ensureIndex { project_id: 1, "meta.end_ts": 1 }, { background: true }
    # For finding all packs that affect a project (use a sparse index so only packs are included)
    db.docHistory.ensureIndex { project_id: 1, "pack.0.meta.end_ts": 1, "meta.end_ts": 1}, { background: true, sparse: true }
    # For finding updates that don't yet have a project_id and need it inserting
    db.docHistory.ensureIndex { doc_id: 1, project_id: 1 }, { background: true }
    # For finding project meta-data
    db.projectHistoryMetaData.ensureIndex { project_id: 1 }, { background: true }
    # TTL index for auto deleting week old temporary ops
    db.docHistory.ensureIndex { expiresAt: 1 }, { expireAfterSeconds: 0, background: true }
    # For finding documents which need packing
    db.docHistoryStats.ensureIndex { doc_id: 1 }, { background: true }
    # NOTE(review): this index is on a field named `updates`, but
    # insertCompressedUpdates writes the counter as `update_count` — one of
    # the two looks wrong; confirm against the packing query before changing.
    db.docHistoryStats.ensureIndex { updates: -1, doc_id: 1 }, { background: true }
2015-08-06 10:11:43 -04:00
|
|
|
|
2015-08-06 16:09:36 -04:00
|
|
|
getDocChangesCount: (doc_id, callback)->
|
2015-08-24 11:19:19 -04:00
|
|
|
db.docHistory.count { doc_id : ObjectId(doc_id.toString()), inS3 : { $exists : false }}, {}, callback
|
2015-08-09 16:50:15 -04:00
|
|
|
|
2015-08-09 18:52:32 -04:00
|
|
|
getArchivedDocChanges: (doc_id, callback)->
|
2015-09-23 08:29:32 -04:00
|
|
|
db.docHistory.count { doc_id: ObjectId(doc_id.toString()) , inS3: { $exists: true }}, {}, callback
|
2015-08-09 18:52:32 -04:00
|
|
|
|
2015-09-23 08:28:07 -04:00
|
|
|
markDocHistoryAsArchiveInProgress: (doc_id, update, callback) ->
|
|
|
|
db.docHistory.update { _id: update._id }, { $set : { inS3 : false } }, callback
|
|
|
|
|
|
|
|
clearDocHistoryAsArchiveInProgress: (doc_id, update, callback) ->
|
2015-09-24 04:09:49 -04:00
|
|
|
db.docHistory.update { _id: update._id }, { $unset : { inS3 : true } }, callback
|
2015-09-23 08:28:07 -04:00
|
|
|
|
2015-08-14 14:07:16 -04:00
|
|
|
markDocHistoryAsArchived: (doc_id, update, callback)->
|
2015-08-24 11:19:19 -04:00
|
|
|
db.docHistory.update { _id: update._id }, { $set : { inS3 : true } }, (error)->
|
2015-08-14 14:07:16 -04:00
|
|
|
return callback(error) if error?
|
2015-08-24 11:19:19 -04:00
|
|
|
db.docHistory.remove { doc_id : ObjectId(doc_id.toString()), inS3 : { $exists : false }, v: { $lt : update.v }, expiresAt: {$exists : false} }, (error)->
|
2015-08-09 16:50:15 -04:00
|
|
|
return callback(error) if error?
|
2015-08-14 14:07:16 -04:00
|
|
|
callback(error)
|
2015-08-09 16:50:15 -04:00
|
|
|
|
|
|
|
markDocHistoryAsUnarchived: (doc_id, callback)->
|
2015-09-23 08:28:07 -04:00
|
|
|
# note this removes any inS3 field, regardless of its value (true/false/null)
|
2015-08-24 11:19:19 -04:00
|
|
|
db.docHistory.update { doc_id: ObjectId(doc_id.toString()) }, { $unset : { inS3 : true } }, { multi: true }, (error)->
|
2015-08-09 16:50:15 -04:00
|
|
|
callback(error)
|