Settings = require "settings-sharelatex"
async = require "async"
_ = require "underscore"
{db, ObjectId, BSON} = require "./mongojs"
fs = require "fs"
Metrics = require "metrics-sharelatex"
Metrics.initialize("track-changes")
logger = require "logger-sharelatex"
logger.initialize("track-changes-packworker")
if Settings.sentry?.dsn?
	logger.initializeErrorReporting(Settings.sentry.dsn)

DAYS = 24 * 3600 * 1000 # one day in milliseconds

LockManager = require "./LockManager"
PackManager = require "./PackManager"

# this worker script is forked by the main process to look for
# document histories which can be archived
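
# Command-line arguments, as read below (the compiled entry point name is a
# guess; adjust to your build):
#   node app/js/PackWorker.js 1000 2000 1800000  # query up to 1000 docs, 2s apart, 30min timeout
#   node app/js/PackWorker.js doc_list.txt       # process "project_id doc_id" pairs from a file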
source = process.argv[2]
DOCUMENT_PACK_DELAY = Number(process.argv[3]) || 1000
TIMEOUT = Number(process.argv[4]) || 30*60*1000
COUNT = 0 # number processed
TOTAL = 0 # total number to process

if !source.match(/^[0-9]+$/)
	file = fs.readFileSync source
	result = for line in file.toString().split('\n')
		[project_id, doc_id] = line.split(' ')
		{doc_id, project_id}
	pending = _.filter result, (row) -> row?.doc_id?.match(/^[a-f0-9]{24}$/)
else
	LIMIT = Number(process.argv[2]) || 1000
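
# Graceful shutdown: after TIMEOUT we stop picking up new documents, and if
# the current pack still has not finished five minutes later we force an exit.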
shutDownRequested = false
shutDownTimer = setTimeout () ->
	logger.log "pack timed out, requesting shutdown"
	# start the shutdown on the next pack
	shutDownRequested = true
	# do a hard shutdown after a further 5 minutes
	hardTimeout = setTimeout () ->
		logger.error "HARD TIMEOUT in pack archive worker"
		process.exit()
	, 5*60*1000
	hardTimeout.unref()
, TIMEOUT

logger.log "checking for updates, limit=#{LIMIT}, delay=#{DOCUMENT_PACK_DELAY}, timeout=#{TIMEOUT}"
|
2015-06-04 11:24:35 -04:00
|
|
|
|
2015-12-22 10:36:15 -05:00
|
|
|
# work around for https://github.com/mafintosh/mongojs/issues/224
|
|
|
|
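# (mongojs does not close the underlying topology, which keeps the event
# loop alive and stops the worker from exiting)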
db.close = (callback) ->
	this._getServer (err, server) ->
		return callback(err) if err?
		server = if server.destroy? then server else server.topology
		server.destroy(true, true)
		callback()
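
# Cancel the shutdown timer and close the db and redis connections so the
# process can exit naturally; a short unref'd timer forces an exit if the
# connections fail to close.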
finish = () ->
	if shutDownTimer?
		logger.log 'cancelling timeout'
		clearTimeout shutDownTimer
	logger.log 'closing db'
	db.close () ->
		logger.log 'closing LockManager Redis Connection'
		LockManager.close () ->
			logger.log {processedCount: COUNT, allCount: TOTAL}, 'ready to exit from pack archive worker'
			hardTimeout = setTimeout () ->
				logger.error 'hard exit from pack archive worker'
				process.exit(1)
			, 5*1000
			hardTimeout.unref()

process.on 'exit', (code) ->
	logger.log {code}, 'pack archive worker exited'

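# Process the pending documents one at a time. Entries read from a file have
# no _id and go through PackManager.pushOldPacks; entries found by the query
# below carry a pack _id and go through PackManager.processOldPack.
# DOCUMENT_PACK_DELAY spaces the documents out to limit the load on mongo
# and the archive store.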
processUpdates = (pending) ->
	async.eachSeries pending, (result, callback) ->
		{_id, project_id, doc_id} = result
		COUNT++
		logger.log {project_id, doc_id}, "processing #{COUNT}/#{TOTAL}"
		if not project_id? or not doc_id?
			logger.log {project_id, doc_id}, "skipping pack, missing project/doc id"
			return callback()
		handler = (err, result) ->
			if err? and err.code is "InternalError" and err.retryable
				# ignore transient S3 errors which are marked as retryable
				logger.warn {err, result}, "ignoring S3 error in pack archive worker"
				err = null
			if err?
				logger.error {err, result}, "error in pack archive worker"
				return callback(err)
			if shutDownRequested
				logger.warn "shutting down pack archive worker"
				return callback(new Error("shutdown"))
			# pause before the next document to spread the load
			setTimeout () ->
				callback(err, result)
			, DOCUMENT_PACK_DELAY
		if not _id?
			PackManager.pushOldPacks project_id, doc_id, handler
		else
			PackManager.processOldPack project_id, doc_id, _id, handler
	, (err, results) ->
		if err? and err.message != "shutdown"
			logger.error {err}, 'error in pack archive worker processUpdates'
		finish()

# find the packs which can be archived

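# The first four bytes of a mongo ObjectId encode the creation time in
# seconds since the epoch, so an id built from a date can serve as a bound
# in range queries, e.g.:
#   ObjectIdFromDate(new Date("2017-01-01")) # => ObjectId("586846800000000000000000")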
ObjectIdFromDate = (date) ->
	id = Math.floor(date.getTime() / 1000).toString(16) + "0000000000000000"
	return ObjectId(id)

# New approach, two passes:
# 1. find packs to be marked as finalised:true, i.e. those which have a newer pack present
# 2. then only consider finalised:true packs for archiving

if pending?
	logger.log "got #{pending.length} entries from #{source}"
	processUpdates pending
else
	oneWeekAgo = new Date(Date.now() - 7 * DAYS)
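	# Look for packs created at least a week ago which have not already been
	# scheduled to expire and have not been checked in the past week,
	# starting with the least recently checked.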
	db.docHistory.find({
		expiresAt: {$exists: false}
		project_id: {$exists: true}
		v_end: {$exists: true}
		_id: {$lt: ObjectIdFromDate(oneWeekAgo)}
		last_checked: {$lt: oneWeekAgo}
	}, {_id:1, doc_id:1, project_id:1}).sort({
		last_checked:1
	}).limit LIMIT, (err, results) ->
		if err?
			logger.error {err}, 'error checking for updates'
			finish()
			return
		pending = _.uniq results, false, (result) -> result.doc_id.toString()
		TOTAL = pending.length
		logger.log "found #{TOTAL} documents to archive"
		processUpdates pending