async = require "async"
_ = require "underscore"
{db, ObjectId, BSON} = require "./mongojs"
logger = require "logger-sharelatex"
LockManager = require "./LockManager"
MongoAWS = require "./MongoAWS"
Metrics = require "metrics-sharelatex"
ProjectIterator = require "./ProjectIterator"
Settings = require "settings-sharelatex"
keys = Settings.redis.lock.key_schema

# Sharejs operations are stored in a 'pack' object
#
# e.g. a single sharejs update looks like
#
# {
#   "doc_id" : 549dae9e0a2a615c0c7f0c98,
#   "project_id" : 549dae9c0a2a615c0c7f0c8c,
#   "op" : [ {"p" : 6981, "d" : "?" } ],
#   "meta" : { "user_id" : 52933..., "start_ts" : 1422310693931, "end_ts" : 1422310693931 },
#   "v" : 17082
# }
#
# and a pack looks like this
#
# {
#   "doc_id" : 549dae9e0a2a615c0c7f0c98,
#   "project_id" : 549dae9c0a2a615c0c7f0c8c,
#   "pack" : [ U1, U2, U3, ...., UN],
#   "meta" : { "user_id" : 52933..., "start_ts" : 1422310693931, "end_ts" : 1422310693931 },
#   "v" : 17082,
#   "v_end" : ...
# }
#
# where U1, U2, U3, ...., UN are single updates stripped of their
# doc_id and project_id fields (which are the same for all the
# updates in the pack).
#
# The pack itself has v and meta fields, which makes it possible to
# treat packs and single updates in a similar way.
#
# The v field of the pack itself is from the first entry U1 and the
# v_end field is from the last entry UN. Likewise the meta.start_ts
# field of the pack is from U1 and the meta.end_ts field is from UN.
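#
# Packs are stored in the docHistory collection, and each document's packs
# are summarised in the docHistoryIndex collection (see "Manage
# docHistoryIndex collection" below). Temporary packs carry an expiresAt
# date so they can be expired, while finalised permanent packs are
# archived to S3 and then given a short TTL (see "Archiving packs to S3").
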
DAYS = 24 * 3600 * 1000 # one day in milliseconds

module.exports = PackManager =

  MAX_SIZE: 1024*1024 # make these configurable parameters
  MAX_COUNT: 1024
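
  # A pack is capped at MAX_SIZE bytes of BSON and MAX_COUNT updates;
  # insertCompressedUpdates below fills the current pack up to these limits
  # and starts new packs for whatever remains.
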
  insertCompressedUpdates: (project_id, doc_id, lastUpdate, newUpdates, temporary, callback = (error) ->) ->
    return callback() if newUpdates.length == 0

    # never append permanent ops to a pack that will expire
    lastUpdate = null if lastUpdate?.expiresAt? and not temporary

    updatesToFlush = []
    updatesRemaining = newUpdates.slice()

    n = lastUpdate?.n || 0
    sz = lastUpdate?.sz || 0

    while updatesRemaining.length and n < PackManager.MAX_COUNT and sz < PackManager.MAX_SIZE
      nextUpdate = updatesRemaining[0]
      nextUpdateSize = BSON.calculateObjectSize(nextUpdate)
      if nextUpdateSize + sz > PackManager.MAX_SIZE and n > 0
        break
      n++
      sz += nextUpdateSize
      updatesToFlush.push updatesRemaining.shift()

    PackManager.flushCompressedUpdates project_id, doc_id, lastUpdate, updatesToFlush, temporary, (error) ->
      return callback(error) if error?
      PackManager.insertCompressedUpdates project_id, doc_id, null, updatesRemaining, temporary, callback
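
  # For example, with three 512KB updates and an empty pack, the loop above
  # flushes the first two (reaching the 1MB MAX_SIZE) and recurses with the
  # third, which starts a new pack of its own.
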
  flushCompressedUpdates: (project_id, doc_id, lastUpdate, newUpdates, temporary, callback = (error) ->) ->
    return callback() if newUpdates.length == 0

    canAppend = false
    # check if it is safe to append to an existing pack
    if lastUpdate?
      if not temporary and not lastUpdate.expiresAt?
        # permanent pack appends to permanent pack
        canAppend = true
      age = Date.now() - lastUpdate.meta?.start_ts
      if temporary and lastUpdate.expiresAt? and age < 1 * DAYS
        # temporary pack appends to temporary pack if same day
        canAppend = true

    if canAppend
      PackManager.appendUpdatesToExistingPack project_id, doc_id, lastUpdate, newUpdates, temporary, callback
    else
      PackManager.insertUpdatesIntoNewPack project_id, doc_id, newUpdates, temporary, callback

  insertUpdatesIntoNewPack: (project_id, doc_id, newUpdates, temporary, callback = (error) ->) ->
    first = newUpdates[0]
    last = newUpdates[newUpdates.length - 1]
    n = newUpdates.length
    sz = BSON.calculateObjectSize(newUpdates)
    newPack =
      project_id: ObjectId(project_id.toString())
      doc_id: ObjectId(doc_id.toString())
      pack: newUpdates
      n: n
      sz: sz
      meta:
        start_ts: first.meta.start_ts
        end_ts: last.meta.end_ts
      v: first.v
      v_end: last.v
      temporary: temporary
    if temporary
      newPack.expiresAt = new Date(Date.now() + 7 * DAYS)
      newPack.last_checked = new Date(Date.now() + 30 * DAYS) # never check temporary packs
    logger.log {project_id, doc_id, newUpdates}, "inserting updates into new pack"
    db.docHistory.save newPack, (err, result) ->
      return callback(err) if err?
      Metrics.inc("insert-pack-" + if temporary then "temporary" else "permanent")
      if temporary
        return callback()
      else
        PackManager.updateIndex project_id, doc_id, callback

  appendUpdatesToExistingPack: (project_id, doc_id, lastUpdate, newUpdates, temporary, callback = (error) ->) ->
    first = newUpdates[0]
    last = newUpdates[newUpdates.length - 1]
    n = newUpdates.length
    sz = BSON.calculateObjectSize(newUpdates)
    query =
      _id: lastUpdate._id
      project_id: ObjectId(project_id.toString())
      doc_id: ObjectId(doc_id.toString())
      pack: {$exists: true}
    update =
      $push:
        "pack": {$each: newUpdates}
      $inc:
        "n": n
        "sz": sz
      $set:
        "meta.end_ts": last.meta.end_ts
        "v_end": last.v
    if lastUpdate.expiresAt and temporary
      update.$set.expiresAt = new Date(Date.now() + 7 * DAYS)
    logger.log {project_id, doc_id, lastUpdate, newUpdates}, "appending updates to existing pack"
    Metrics.inc("append-pack-" + if temporary then "temporary" else "permanent")
    db.docHistory.findAndModify {query, update, new:true, fields:{meta:1,v_end:1}}, callback
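
  # For a batch of two updates [U1, U2] the resulting mongo update is roughly:
  #
  #   { $push: {pack: {$each: [U1, U2]}},
  #     $inc:  {n: 2, sz: <bson size of the batch>},
  #     $set:  {"meta.end_ts": U2.meta.end_ts, "v_end": U2.v} }
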
  # Retrieve all changes for a document

  getOpsByVersionRange: (project_id, doc_id, fromVersion, toVersion, callback = (error, updates) ->) ->
    PackManager.loadPacksByVersionRange project_id, doc_id, fromVersion, toVersion, (error) ->
      return callback(error) if error?
      query = {doc_id:ObjectId(doc_id.toString())}
      query.v = {$lte:toVersion} if toVersion?
      query.v_end = {$gte:fromVersion} if fromVersion?
      db.docHistory.find(query).sort {v:-1}, (err, result) ->
        return callback(err) if err?
        updates = []
        opInRange = (op, from, to) ->
          return false if from? and op.v < from
          return false if to? and op.v > to
          return true
        for docHistory in result
          for op in docHistory.pack.reverse() when opInRange(op, fromVersion, toVersion)
            op.project_id = docHistory.project_id
            op.doc_id = docHistory.doc_id
            updates.push op
        callback(null, updates)
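
  # Note: the packs are fetched in descending version order and each pack is
  # reversed in place, so getOpsByVersionRange returns updates newest first.
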
  loadPacksByVersionRange: (project_id, doc_id, fromVersion, toVersion, callback) ->
    PackManager.getIndex doc_id, (err, indexResult) ->
      return callback(err) if err?
      indexPacks = indexResult?.packs or []
      packInRange = (pack, from, to) ->
        return false if from? and pack.v_end < from
        return false if to? and pack.v > to
        return true
      neededIds = (pack._id for pack in indexPacks when packInRange(pack, fromVersion, toVersion))
      if neededIds.length
        PackManager.fetchPacksIfNeeded project_id, doc_id, neededIds, callback
      else
        callback()

  fetchPacksIfNeeded: (project_id, doc_id, pack_ids, callback) ->
    db.docHistory.find {_id: {$in: (ObjectId(id) for id in pack_ids)}}, {_id:1}, (err, loadedPacks) ->
      return callback(err) if err?
      allPackIds = (id.toString() for id in pack_ids)
      loadedPackIds = (pack._id.toString() for pack in loadedPacks)
      packIdsToFetch = _.difference allPackIds, loadedPackIds
      logger.log {project_id, doc_id, loadedPackIds, allPackIds, packIdsToFetch}, "analysed packs"
      return callback() if packIdsToFetch.length is 0
      async.eachLimit packIdsToFetch, 4, (pack_id, cb) ->
        MongoAWS.unArchivePack project_id, doc_id, pack_id, cb
      , (err) ->
        return callback(err) if err?
        logger.log {project_id, doc_id}, "done unarchiving"
        callback()

  # Retrieve all changes across a project

  makeProjectIterator: (project_id, before, callback) ->
    # get all the docHistory entries
    db.docHistory.find({project_id: ObjectId(project_id)},{pack:false}).sort {"meta.end_ts":-1}, (err, packs) ->
      return callback(err) if err?
      allPacks = []
      seenIds = {}
      for pack in packs
        allPacks.push pack
        seenIds[pack._id] = true
      db.docHistoryIndex.find {project_id: ObjectId(project_id)}, (err, indexes) ->
        return callback(err) if err?
        for index in indexes
          for pack in index.packs when not seenIds[pack._id]
            pack.project_id = index.project_id
            pack.doc_id = index._id
            pack.fromIndex = true
            allPacks.push pack
            seenIds[pack._id] = true
        callback(null, new ProjectIterator(allPacks, before, PackManager.getPackById))
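
  # Packs still present in mongo are merged with index entries for packs that
  # now live only in S3 (marked with fromIndex), deduplicated by _id, so the
  # iterator sees each pack exactly once.
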
  getPackById: (project_id, doc_id, pack_id, callback) ->
    db.docHistory.findOne {_id: pack_id}, (err, pack) ->
      return callback(err) if err?
      if not pack?
        MongoAWS.unArchivePack project_id, doc_id, pack_id, callback
      else if pack.expiresAt? and pack.temporary is false
        # we only need to touch the TTL when listing the changes in the project
        # because diffs on individual documents are always done after that;
        # only do this for cached packs, not temporary ones, to avoid older packs
        # being kept longer than newer ones (which messes up the last update version)
        PackManager.increaseTTL pack, callback
      else
        callback(null, pack)

  increaseTTL: (pack, callback) ->
    if pack.expiresAt < new Date(Date.now() + 6 * DAYS)
      # update cache expiry since we are using this pack
      db.docHistory.findAndModify {
        query: {_id: pack._id}
        update: {$set: {expiresAt: new Date(Date.now() + 7 * DAYS)}}
      }, (err) ->
        return callback(err, pack)
    else
      callback(null, pack)
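
  # The expiry is only extended when less than six of the seven days remain,
  # so a pack that is read repeatedly does not generate a write on every
  # access.
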
  # Manage docHistoryIndex collection

  getIndex: (doc_id, callback) ->
    db.docHistoryIndex.findOne {_id:ObjectId(doc_id.toString())}, callback

  getPackFromIndex: (doc_id, pack_id, callback) ->
    db.docHistoryIndex.findOne {_id:ObjectId(doc_id.toString()), "packs._id": pack_id}, {"packs.$":1}, callback

  getLastPackFromIndex: (doc_id, callback) ->
    db.docHistoryIndex.findOne {_id: ObjectId(doc_id.toString())}, {packs:{$slice:-1}}, (err, indexPack) ->
      return callback(err) if err?
      return callback() if not indexPack?
      callback(null, indexPack.packs?[0])

  getIndexWithKeys: (doc_id, callback) ->
    PackManager.getIndex doc_id, (err, index) ->
      return callback(err) if err?
      return callback() if not index?
      for pack in index?.packs or []
        index[pack._id] = pack
      callback(null, index)

  initialiseIndex: (project_id, doc_id, callback) ->
    PackManager.findCompletedPacks project_id, doc_id, (err, packs) ->
      return callback(err) if err?
      return callback() if not packs?
      PackManager.insertPacksIntoIndexWithLock project_id, doc_id, packs, callback

  updateIndex: (project_id, doc_id, callback) ->
    # find all packs prior to current pack
    PackManager.findUnindexedPacks project_id, doc_id, (err, newPacks) ->
      return callback(err) if err?
      return callback() if not newPacks? or newPacks.length is 0
      PackManager.insertPacksIntoIndexWithLock project_id, doc_id, newPacks, (err) ->
        return callback(err) if err?
        logger.log {project_id, doc_id, newPacks}, "added new packs to index"
        callback()

  findCompletedPacks: (project_id, doc_id, callback) ->
    query = { doc_id: ObjectId(doc_id.toString()), expiresAt: {$exists:false} }
    db.docHistory.find(query, {pack:false}).sort {v:1}, (err, packs) ->
      return callback(err) if err?
      return callback() if not packs?.length
      last = packs.pop() # discard the last pack, if it's still in progress
      packs.push(last) if last.finalised # it's finalised so we push it back to archive it
      callback(null, packs)

  findPacks: (project_id, doc_id, callback) ->
    query = { doc_id: ObjectId(doc_id.toString()), expiresAt: {$exists:false} }
    db.docHistory.find(query, {pack:false}).sort {v:1}, (err, packs) ->
      return callback(err) if err?
      return callback() if not packs?.length
      callback(null, packs)

  findUnindexedPacks: (project_id, doc_id, callback) ->
    PackManager.getIndexWithKeys doc_id, (err, indexResult) ->
      return callback(err) if err?
      PackManager.findCompletedPacks project_id, doc_id, (err, historyPacks) ->
        return callback(err) if err?
        return callback() if not historyPacks?
        # select only the new packs not already in the index
        newPacks = (pack for pack in historyPacks when not indexResult?[pack._id]?)
        newPacks = (_.omit(pack, 'doc_id', 'project_id', 'n', 'sz', 'last_checked', 'finalised') for pack in newPacks)
        if newPacks.length
          logger.log {project_id, doc_id, n: newPacks.length}, "found new packs"
        callback(null, newPacks)

  insertPacksIntoIndexWithLock: (project_id, doc_id, newPacks, callback) ->
    LockManager.runWithLock(
      keys.historyIndexLock({doc_id}),
      (releaseLock) ->
        PackManager._insertPacksIntoIndex project_id, doc_id, newPacks, releaseLock
      callback
    )

  _insertPacksIntoIndex: (project_id, doc_id, newPacks, callback) ->
    db.docHistoryIndex.findAndModify {
      query: {_id:ObjectId(doc_id.toString())}
      update:
        $setOnInsert: project_id: ObjectId(project_id.toString())
        $push:
          packs: {$each: newPacks, $sort: {v: 1}}
      upsert: true
    }, callback
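
  # An index document then looks roughly like this (fields as used above):
  #
  # {
  #   "_id" : <doc_id>,
  #   "project_id" : <project_id>,
  #   "packs" : [ { "_id" : ..., "v" : ..., "v_end" : ..., "inS3" : true }, ... ]
  # }
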
  # Archiving packs to S3

  archivePack: (project_id, doc_id, pack_id, callback) ->
    clearFlagOnError = (err, cb) ->
      if err? # clear the inS3 flag on error
        PackManager.clearPackAsArchiveInProgress project_id, doc_id, pack_id, (err2) ->
          return cb(err2) if err2?
          return cb(err)
      else
        cb()
    async.series [
      (cb) ->
        PackManager.checkArchiveNotInProgress project_id, doc_id, pack_id, cb
      (cb) ->
        PackManager.markPackAsArchiveInProgress project_id, doc_id, pack_id, cb
      (cb) ->
        MongoAWS.archivePack project_id, doc_id, pack_id, (err) ->
          clearFlagOnError(err, cb)
      (cb) ->
        PackManager.checkArchivedPack project_id, doc_id, pack_id, (err) ->
          clearFlagOnError(err, cb)
      (cb) ->
        PackManager.markPackAsArchived project_id, doc_id, pack_id, cb
      (cb) ->
        PackManager.setTTLOnArchivedPack project_id, doc_id, pack_id, cb
    ], callback

  checkArchivedPack: (project_id, doc_id, pack_id, callback) ->
    db.docHistory.findOne {_id: pack_id}, (err, pack) ->
      return callback(err) if err?
      return callback new Error("pack not found") if not pack?
      MongoAWS.readArchivedPack project_id, doc_id, pack_id, (err, result) ->
        return callback(err) if err?
        delete result.last_checked
        delete pack.last_checked
        # need to compare ids as ObjectIds with .equals()
        for key in ['_id', 'project_id', 'doc_id']
          result[key] = pack[key] if result[key].equals(pack[key])
        for op, i in result.pack
          op._id = pack.pack[i]._id if op._id? and op._id.equals(pack.pack[i]._id)
        if _.isEqual pack, result
          callback()
        else
          logger.err {pack, result, jsondiff: JSON.stringify(pack) is JSON.stringify(result)}, "difference when comparing packs"
          callback new Error("pack retrieved from s3 does not match pack in mongo")

  # Extra methods to test archive/unarchive for a doc_id

  pushOldPacks: (project_id, doc_id, callback) ->
    PackManager.findPacks project_id, doc_id, (err, packs) ->
      return callback(err) if err?
      return callback() if not packs?.length
      PackManager.processOldPack project_id, doc_id, packs[0]._id, callback

  pullOldPacks: (project_id, doc_id, callback) ->
    PackManager.loadPacksByVersionRange project_id, doc_id, null, null, callback

  # Processing old packs via worker

  processOldPack: (project_id, doc_id, pack_id, callback) ->
    markAsChecked = (err) ->
      PackManager.markPackAsChecked project_id, doc_id, pack_id, (err2) ->
        return callback(err2) if err2?
        callback(err)
    logger.log {project_id, doc_id}, "processing old packs"
    db.docHistory.findOne {_id:pack_id}, (err, pack) ->
      return markAsChecked(err) if err?
      return markAsChecked() if not pack?
      return callback() if pack.expiresAt? # expiring pack, return directly without marking as checked
      PackManager.finaliseIfNeeded project_id, doc_id, pack._id, pack, (err) ->
        return markAsChecked(err) if err?
        PackManager.updateIndexIfNeeded project_id, doc_id, (err) ->
          return markAsChecked(err) if err?
          PackManager.findUnarchivedPacks project_id, doc_id, (err, unarchivedPacks) ->
            return markAsChecked(err) if err?
            if not unarchivedPacks?.length
              logger.log {project_id, doc_id}, "no packs need archiving"
              return markAsChecked()
            async.eachSeries unarchivedPacks, (pack, cb) ->
              PackManager.archivePack project_id, doc_id, pack._id, cb
            , (err) ->
              return markAsChecked(err) if err?
              logger.log {project_id, doc_id}, "done processing"
              markAsChecked()

  finaliseIfNeeded: (project_id, doc_id, pack_id, pack, callback) ->
    sz = pack.sz / (1024 * 1024) # size as a fraction of the 1MB max
    n = pack.n / 1024 # count as a fraction of the 1024-op max
    age = (Date.now() - pack.meta.end_ts) / DAYS
    if age < 30 # always keep if less than 1 month old
      logger.log {project_id, doc_id, pack_id, age}, "less than 30 days old"
      return callback()
    # compute an archiving threshold which decreases for each month of age
    archive_threshold = 30 / age
    if sz > archive_threshold or n > archive_threshold or age > 90
      logger.log {project_id, doc_id, pack_id, age, archive_threshold, sz, n}, "meets archive threshold"
      PackManager.markPackAsFinalisedWithLock project_id, doc_id, pack_id, callback
    else
      logger.log {project_id, doc_id, pack_id, age, archive_threshold, sz, n}, "does not meet archive threshold"
      callback()
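
  # For example, a 60-day-old pack has archive_threshold = 30/60 = 0.5, so it
  # is finalised once it exceeds half the maximum size (512KB) or half the
  # maximum count (512 ops); past 90 days it is finalised regardless.
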
  markPackAsFinalisedWithLock: (project_id, doc_id, pack_id, callback) ->
    LockManager.runWithLock(
      keys.historyLock({doc_id}),
      (releaseLock) ->
        PackManager._markPackAsFinalised project_id, doc_id, pack_id, releaseLock
      callback
    )

  _markPackAsFinalised: (project_id, doc_id, pack_id, callback) ->
    logger.log {project_id, doc_id, pack_id}, "marking pack as finalised"
    db.docHistory.findAndModify {
      query: {_id: pack_id}
      update: {$set: {finalised: true}}
    }, callback

  updateIndexIfNeeded: (project_id, doc_id, callback) ->
    logger.log {project_id, doc_id}, "updating docHistoryIndex"
    PackManager.getIndexWithKeys doc_id, (err, index) ->
      return callback(err) if err?
      if not index?
        PackManager.initialiseIndex project_id, doc_id, callback
      else
        PackManager.updateIndex project_id, doc_id, callback

  markPackAsChecked: (project_id, doc_id, pack_id, callback) ->
    logger.log {project_id, doc_id, pack_id}, "marking pack as checked"
    db.docHistory.findAndModify {
      query: {_id: pack_id}
      update: {$currentDate: {"last_checked":true}}
    }, callback

  findUnarchivedPacks: (project_id, doc_id, callback) ->
    PackManager.getIndex doc_id, (err, indexResult) ->
      return callback(err) if err?
      indexPacks = indexResult?.packs or []
      unArchivedPacks = (pack for pack in indexPacks when not pack.inS3?)
      if unArchivedPacks.length
        logger.log {project_id, doc_id, n: unArchivedPacks.length}, "found unarchived packs"
      callback(null, unArchivedPacks)

  # Archive locking flags

  checkArchiveNotInProgress: (project_id, doc_id, pack_id, callback) ->
    logger.log {project_id, doc_id, pack_id}, "checking if archive in progress"
    PackManager.getPackFromIndex doc_id, pack_id, (err, result) ->
      return callback(err) if err?
      return callback new Error("pack not found in index") if not result?
      pack = result.packs?[0] # getPackFromIndex returns the index doc with only the matching pack
      if pack?.inS3
        return callback new Error("pack archiving already done")
      else if pack?.inS3?
        return callback new Error("pack archiving already in progress")
      else
        return callback()
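
  # The inS3 flag on an index entry is tri-state: absent means the pack has
  # not been archived, false means archiving is in progress, and true means
  # the pack is safely in S3.
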
  markPackAsArchiveInProgress: (project_id, doc_id, pack_id, callback) ->
    logger.log {project_id, doc_id}, "marking pack as archive in progress"
    db.docHistoryIndex.findAndModify {
      query: {_id:ObjectId(doc_id.toString()), packs: {$elemMatch: {"_id": pack_id, inS3: {$exists:false}}}}
      fields: { "packs.$": 1 }
      update: {$set: {"packs.$.inS3":false}}
    }, (err, result) ->
      return callback(err) if err?
      return callback new Error("archive is already in progress") if not result?
      logger.log {project_id, doc_id, pack_id}, "marked as archive in progress"
      callback()

  clearPackAsArchiveInProgress: (project_id, doc_id, pack_id, callback) ->
    logger.log {project_id, doc_id, pack_id}, "clearing archive in progress flag"
    db.docHistoryIndex.findAndModify {
      query: {_id:ObjectId(doc_id.toString()), "packs" : {$elemMatch: {"_id": pack_id, inS3: false}}}
      fields: { "packs.$": 1 }
      update: {$unset: {"packs.$.inS3":true}}
    }, callback

  markPackAsArchived: (project_id, doc_id, pack_id, callback) ->
    logger.log {project_id, doc_id, pack_id}, "marking pack as archived"
    db.docHistoryIndex.findAndModify {
      query: {_id:ObjectId(doc_id.toString()), "packs" : {$elemMatch: {"_id": pack_id, inS3: false}}}
      fields: { "packs.$": 1 }
      update: {$set: {"packs.$.inS3":true}}
    }, (err, result) ->
      return callback(err) if err?
      return callback new Error("archive is not marked as in progress") if not result?
      logger.log {project_id, doc_id, pack_id}, "marked as archived"
      callback()

  setTTLOnArchivedPack: (project_id, doc_id, pack_id, callback) ->
    db.docHistory.findAndModify {
      query: {_id: pack_id}
      update: {$set: {expiresAt: new Date(Date.now() + 1*DAYS)}}
    }, (err) ->
      return callback(err) if err?
      logger.log {project_id, doc_id, pack_id}, "set expiry on pack"
      callback()

  # _getOneDayInFutureWithRandomDelay: ->
  #   thirtyMins = 1000 * 60 * 30
  #   randomThirtyMinMax = Math.ceil(Math.random() * thirtyMins)
  #   return new Date(Date.now() + randomThirtyMinMax + 1*DAYS)