overleaf/services/track-changes/app/coffee/MongoManager.coffee


{db, ObjectId} = require "./mongojs"
async = require "async"
_ = require "underscore"
module.exports = MongoManager =
# only used in this module
getLastCompressedUpdate: (doc_id, callback = (error, update) ->) ->
db.docHistory
.find(doc_id: ObjectId(doc_id.toString()))
.sort( v: -1 )
.limit(1)
.toArray (error, compressedUpdates) ->
return callback(error) if error?
return callback null, null if compressedUpdates[0]?.pack? # cannot pop from a pack
return callback null, compressedUpdates[0] or null
# only used in this module
deleteCompressedUpdate: (id, callback = (error) ->) ->
db.docHistory.remove({ _id: ObjectId(id.toString()) }, callback)
# used in UpdatesManager
popLastCompressedUpdate: (doc_id, callback = (error, update) ->) ->
MongoManager.getLastCompressedUpdate doc_id, (error, update) ->
return callback(error) if error?
if update?
MongoManager.deleteCompressedUpdate update._id, (error) ->
return callback(error) if error?
callback null, update
else
callback null, null
# used in UpdatesManager
	insertCompressedUpdates: (project_id, doc_id, updates, temporary, callback = (error) ->) ->
		jobs = []
		for update in updates
			do (update) ->
				jobs.push (callback) -> MongoManager.insertCompressedUpdate project_id, doc_id, update, temporary, callback
		async.series jobs, callback
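	# Example usage (hypothetical values): insert two compressed updates and
	# mark them as temporary so they expire after a week:
	#   MongoManager.insertCompressedUpdates project_id, doc_id,
	#     [{op: op1, meta: meta1, v: 41}, {op: op2, meta: meta2, v: 42}], true, callback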
insertCompressedUpdate: (project_id, doc_id, update, temporary, callback = (error) ->) ->
update = {
doc_id: ObjectId(doc_id.toString())
project_id: ObjectId(project_id.toString())
op: update.op
meta: update.meta
v: update.v
}
if temporary
seconds = 1000
minutes = 60 * seconds
hours = 60 * minutes
days = 24 * hours
update.expiresAt = new Date(Date.now() + 7 * days)
# may need to roll over a pack here if we are inserting packs
db.docHistory.insert update, callback
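	# Note: temporary updates rely on the TTL index on expiresAt declared in
	# ensureIndices below; mongod's background TTL monitor deletes them, so
	# removal can lag the expiry time by up to a minute.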
# The following function implements a method like a mongo find, but
# which expands any documents containing a 'pack' field into
# multiple values
#
# e.g. a single update looks like
#
# {
# "doc_id" : 549dae9e0a2a615c0c7f0c98,
# "project_id" : 549dae9c0a2a615c0c7f0c8c,
# "op" : [ {"p" : 6981, "d" : "?" } ],
# "meta" : { "user_id" : 52933..., "start_ts" : 1422310693931, "end_ts" : 1422310693931 },
# "v" : 17082
# }
#
# and a pack looks like this
#
# {
# "doc_id" : 549dae9e0a2a615c0c7f0c98,
# "project_id" : 549dae9c0a2a615c0c7f0c8c,
# "pack" : [ D1, D2, D3, ....],
# "meta" : { "user_id" : 52933..., "start_ts" : 1422310693931, "end_ts" : 1422310693931 },
# "v" : 17082
# }
#
# where D1, D2, D3, .... are single updates stripped of their
# doc_id and project_id fields (which are the same for all the
# updates in the pack). The meta and v fields of the pack itself
# are those of the first entry in the pack D1 (this makes it
# possible to treat packs and single updates in the same way).
_findDocResults: (query, limit, callback) ->
# query - the mongo query selector, includes both the doc_id/project_id and
# the range on v
# limit - the mongo limit, we need to apply it after unpacking any
# packs
sort = {}
		sort['v'] = -1
cursor = db.docHistory
.find( query )
.sort( sort )
# if we have packs, we will trim the results more later after expanding them
if limit?
cursor.limit(limit)
# take the part of the query which selects the range over the parameter
rangeQuery = query['v']
# helper function to check if an item from a pack is inside the
# desired range
filterFn = (item) ->
return false if rangeQuery?['$gte']? && item['v'] < rangeQuery['$gte']
return false if rangeQuery?['$lte']? && item['v'] > rangeQuery['$lte']
return false if rangeQuery?['$lt']? && item['v'] >= rangeQuery['$lt']
return false if rangeQuery?['$gt']? && item['v'] <= rangeQuery['$gt']
return true
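		# e.g. with a hypothetical rangeQuery of {'$gte': 10, '$lte': 20},
		# filterFn({v: 15}) returns true while filterFn({v: 25}) returns false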
# create a query which can be used to select the entries BEFORE
# the range because we sometimes need to find extra ones (when the
# boundary falls in the middle of a pack)
extraQuery = _.clone(query)
# The pack uses its first entry for its metadata and v, so the
# only queries where we might not get all the packs are those for
# $gt and $gte (i.e. we need to find packs which start before our
# range but end in it)
		if rangeQuery?['$gte']?
			extraQuery['v'] = {'$lt' : rangeQuery['$gte']}
		else if rangeQuery?['$gt']?
			extraQuery['v'] = {'$lte' : rangeQuery['$gt']}
		else
			delete extraQuery['v']
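		# e.g. for a hypothetical query with v: {'$gte': 10, '$lte': 20}, extraQuery
		# becomes v: {'$lt': 10}, and with the descending sort and limit(1) below it
		# selects the newest entry that starts before the range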
needMore = false # keep track of whether we need to load more data
updates = [] # used to accumulate the set of results
		cursor.toArray (err, result) ->
			return callback(err, result) if err?
			unpackedSet = MongoManager._unpackResults(result)
			updates = MongoManager._filterAndLimit(updates, unpackedSet, filterFn, limit)
			# check if we need to retrieve more data, because there is a
			# pack that crosses into our range
			last = if unpackedSet.length then unpackedSet[unpackedSet.length-1] else null
			if limit? && updates.length == limit
				needMore = false
			else if extraQuery['v']? && last? && filterFn(last)
				needMore = true
			else if extraQuery['v']? && updates.length == 0
				needMore = true
			if needMore
				# we do need an extra result set
				extra = db.docHistory
					.find(extraQuery)
					.sort(sort)
					.limit(1)
				extra.toArray (err, result2) ->
					return callback(err, updates) if err?
					extraSet = MongoManager._unpackResults(result2)
					updates = MongoManager._filterAndLimit(updates, extraSet, filterFn, limit)
					callback err, updates
			else
				callback err, updates
_findProjectResults: (query, limit, callback) ->
# query - the mongo query selector, includes both the doc_id/project_id and
# the range on v or meta.end_ts
# limit - the mongo limit, we need to apply it after unpacking any
# packs
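		# Overall strategy: run the main (limited) query first, then a "tail"
		# query for all changes at the cutoff time, then an "overlap" query for
		# packs straddling the time window, de-duplicating the combined results.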
sort = {}
		sort['meta.end_ts'] = -1
projection = {"op":false, "pack.op": false}
cursor = db.docHistory
			.find( query, projection ) # no need to return the ops, we only need version info
.sort( sort )
# if we have packs, we will trim the results more later after expanding them
if limit?
cursor.limit(limit)
# take the part of the query which selects the range over the parameter
before = query['meta.end_ts']?['$lt'] # may be null
updates = [] # used to accumulate the set of results
cursor.toArray (err, result) ->
if err?
return callback err, result
if result.length == 0 && not before? # no results and no time range specified
return callback err, result
unpackedSet = MongoManager._unpackResults(result)
if limit?
unpackedSet = unpackedSet.slice(0, limit)
# find the end time of the last result, we will take all the
# results up to this, and then all the changes at that time
# (without imposing a limit) and any overlapping packs
cutoff = if unpackedSet.length then unpackedSet[unpackedSet.length-1].meta.end_ts else null
			filterFn = (item) ->
				ts = item?.meta?.end_ts
				return false if before? && ts >= before
				return false if cutoff? && ts < cutoff
				return true
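			# e.g. with hypothetical bounds before = 2000 and cutoff = 1000, an item
			# with meta.end_ts 1500 passes the filter; 2500 and 500 are both rejected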
updates = MongoManager._filterAndLimit(updates, unpackedSet, filterFn, limit)
# get all elements on the lower bound (cutoff)
tailQuery = _.clone(query)
tailQuery['meta.end_ts'] = cutoff
tail = db.docHistory
.find(tailQuery, projection)
.sort(sort)
# now find any packs that overlap with the time window
overlapQuery = _.clone(query)
			if before?
				overlapQuery['meta.end_ts'] = {"$gte": before}
				overlapQuery['pack.0.meta.end_ts'] = {"$lte": before }
			else if cutoff?
				overlapQuery['meta.end_ts'] = {"$gte": cutoff}
				overlapQuery['pack.0.meta.end_ts'] = {"$gte": 0 }
			else
				overlapQuery['meta.end_ts'] = {"$gte": 0 }
				overlapQuery['pack.0.meta.end_ts'] = {"$gte": 0 }
overlap = db.docHistory
.find(overlapQuery, projection)
.sort(sort)
# we don't specify a limit here, as there could be any number of overlaps
# NB. need to catch items in original query and followup query for duplicates
			applyAndUpdate = (result) ->
				extraSet = MongoManager._unpackResults(result)
				# note: final argument is null, no limit applied because we
				# need all the updates at the final time to avoid breaking
				# the changeset into parts
				updates = MongoManager._filterAndLimit(updates, extraSet, filterFn, null)
				# remove duplicates
				seen = {}
				updates = updates.filter (item) ->
					key = item.doc_id + ' ' + item.v
					if seen[key]
						return false
					else
						seen[key] = true
						return true
tail.toArray (err, result2) ->
if err?
return callback err, updates
else
applyAndUpdate result2
overlap.toArray (err, result3) ->
if err?
return callback err, updates
else
applyAndUpdate result3
callback err, updates
_unpackResults: (updates) ->
# iterate over the updates, if there's a pack, expand it into ops and
# insert it into the array at that point
result = []
updates.forEach (item) ->
if item.pack?
all = MongoManager._explodePackToOps item
result = result.concat all
else
result.push item
return result
_explodePackToOps: (packObj) ->
# convert a pack into an array of ops
doc_id = packObj.doc_id
project_id = packObj.project_id
result = packObj.pack.map (item) ->
item.doc_id = doc_id
item.project_id = project_id
item
return result.reverse()
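	# e.g. a hypothetical pack {doc_id: d, project_id: p, pack: [{v: 1}, {v: 2}]}
	# explodes to [{v: 2, ...}, {v: 1, ...}] with doc_id and project_id copied onto
	# each entry, reversed to match the descending sort used by the callers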
_filterAndLimit: (results, extra, filterFn, limit) ->
# update results with extra docs, after filtering and limiting
filtered = extra.filter(filterFn)
newResults = results.concat filtered
		newResults = newResults.slice(0, limit) if limit?
return newResults
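	# e.g. hypothetically, with results = [u1], extra = [u2, u3] where filterFn
	# accepts only u2, and limit = 2, the returned array is [u1, u2]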
getDocUpdates:(doc_id, options = {}, callback = (error, updates) ->) ->
query =
doc_id: ObjectId(doc_id.toString())
if options.from?
query["v"] ||= {}
query["v"]["$gte"] = options.from
if options.to?
query["v"] ||= {}
query["v"]["$lte"] = options.to
MongoManager._findDocResults(query, options.limit, callback)
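	# e.g. a hypothetical call getDocUpdates doc_id, {from: 10, to: 20, limit: 50}, cb
	# issues the selector {doc_id: ObjectId(...), v: {$gte: 10, $lte: 20}}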
getProjectUpdates: (project_id, options = {}, callback = (error, updates) ->) ->
query =
project_id: ObjectId(project_id.toString())
if options.before?
query["meta.end_ts"] = { $lt: options.before }
MongoManager._findProjectResults(query, options.limit, callback)
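	# e.g. a hypothetical call getProjectUpdates project_id, {before: 1422310693931, limit: 50}, cb
	# issues the selector {project_id: ObjectId(...), "meta.end_ts": {$lt: 1422310693931}}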
backportProjectId: (project_id, doc_id, callback = (error) ->) ->
db.docHistory.update {
doc_id: ObjectId(doc_id.toString())
project_id: { $exists: false }
}, {
$set: { project_id: ObjectId(project_id.toString()) }
}, {
multi: true
}, callback
getProjectMetaData: (project_id, callback = (error, metadata) ->) ->
db.projectHistoryMetaData.find {
project_id: ObjectId(project_id.toString())
}, (error, results) ->
return callback(error) if error?
callback null, results[0]
setProjectMetaData: (project_id, metadata, callback = (error) ->) ->
db.projectHistoryMetaData.update {
		project_id: ObjectId(project_id.toString())
}, {
$set: metadata
}, {
upsert: true
}, callback
ensureIndices: () ->
		# For finding all updates that go into a diff for a doc (getLastCompressedUpdate, getDocUpdates v >= from && v <= to)
db.docHistory.ensureIndex { doc_id: 1, v: 1 }, { background: true }
		# For finding all updates that affect a project (getProjectUpdates meta.end_ts < before)
db.docHistory.ensureIndex { project_id: 1, "meta.end_ts": 1 }, { background: true }
# For finding updates that don't yet have a project_id and need it inserting
db.docHistory.ensureIndex { doc_id: 1, project_id: 1 }, { background: true }
# For finding project meta-data
db.projectHistoryMetaData.ensureIndex { project_id: 1 }, { background: true }
# TTL index for auto deleting week old temporary ops
db.docHistory.ensureIndex { expiresAt: 1 }, { expireAfterSeconds: 0, background: true }
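		# Note: expireAfterSeconds: 0 tells mongod to delete each document once the
		# clock passes its expiresAt value, rather than a fixed interval after insertion.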