overleaf/services/track-changes/app/coffee/DocArchiveManager.coffee

MongoManager = require "./MongoManager"
MongoAWS = require "./MongoAWS"
LockManager = require "./LockManager"
DocstoreHandler = require "./DocstoreHandler"
logger = require "logger-sharelatex"
_ = require "underscore"
async = require "async"
settings = require("settings-sharelatex")
# increase lock timeouts because archiving can be slow
LockManager.LOCK_TEST_INTERVAL = 500 # 500ms between each test of the lock
LockManager.MAX_LOCK_WAIT_TIME = 30000 # 30s maximum time to spend trying to get the lock
LockManager.LOCK_TTL = 30 # seconds
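
# DocArchiveManager moves a doc's compressed track-changes history between
# MongoDB and S3: archiving exports the history to S3 and marks it as
# archived, unarchiving imports it back.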
module.exports = DocArchiveManager =
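
  # Archive the change history of every doc in a project, one doc at a time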
  archiveAllDocsChanges: (project_id, callback = (error, docs) ->) ->
    DocstoreHandler.getAllDocs project_id, (error, docs) ->
      if error?
        return callback(error)
      else if !docs?
        return callback new Error("No docs for project #{project_id}")
      jobs = _.map docs, (doc) ->
        (cb) -> DocArchiveManager.archiveDocChangesWithLock project_id, doc._id, cb
      async.series jobs, callback
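
  # Archive a single doc's history while holding its per-doc HistoryLock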
  archiveDocChangesWithLock: (project_id, doc_id, callback = (error) ->) ->
    job = (releaseLock) ->
      DocArchiveManager.archiveDocChanges project_id, doc_id, releaseLock
    LockManager.runWithLock("HistoryLock:#{doc_id}", job, callback)
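
  # Export a doc's compressed history to S3 and mark it as archived in Mongo;
  # does nothing if the doc has no history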
  archiveDocChanges: (project_id, doc_id, callback) ->
    MongoManager.getDocChangesCount doc_id, (error, count) ->
      return callback(error) if error?
      if count == 0
        logger.log {project_id, doc_id}, "document history is empty, not archiving"
        return callback()
      else
        MongoManager.getLastCompressedUpdate doc_id, (error, update) ->
          return callback(error) if error?
          MongoAWS.archiveDocHistory project_id, doc_id, (error) ->
            return callback(error) if error?
            logger.log doc_id: doc_id, project_id: project_id, "exported document to S3"
            MongoManager.markDocHistoryAsArchived doc_id, update, (error) ->
              return callback(error) if error?
              callback()
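
  # Restore the change history of every doc in a project, up to 4 docs in parallel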
  unArchiveAllDocsChanges: (project_id, callback = (error, docs) ->) ->
    DocstoreHandler.getAllDocs project_id, (error, docs) ->
      if error?
        return callback(error)
      else if !docs?
        return callback new Error("No docs for project #{project_id}")
      jobs = _.map docs, (doc) ->
        (cb) -> DocArchiveManager.unArchiveDocChangesWithLock project_id, doc._id, cb
      async.parallelLimit jobs, 4, callback
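
  # Un-archive a single doc's history while holding its per-doc HistoryLock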
  unArchiveDocChangesWithLock: (project_id, doc_id, callback = (error) ->) ->
    job = (releaseLock) ->
      DocArchiveManager.unArchiveDocChanges project_id, doc_id, releaseLock
    LockManager.runWithLock("HistoryLock:#{doc_id}", job, callback)
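
  # Import a doc's archived history back from S3 and mark it as unarchived;
  # does nothing if there are no archived changes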
  unArchiveDocChanges: (project_id, doc_id, callback) ->
    MongoManager.getArchivedDocChanges doc_id, (error, count) ->
      return callback(error) if error?
      if count == 0
        return callback()
      else
        MongoAWS.unArchiveDocHistory project_id, doc_id, (error) ->
          return callback(error) if error?
          logger.log doc_id: doc_id, project_id: project_id, "imported document from S3"
          MongoManager.markDocHistoryAsUnarchived doc_id, (error) ->
            return callback(error) if error?
            callback()