Add unarchiving of doc track-changes history from S3

This commit is contained in:
Henrique Dias 2015-08-06 17:09:36 -03:00
parent 438c4f4d0c
commit bca48ac117
6 changed files with 101 additions and 16 deletions

View file

@ -29,6 +29,7 @@ app.post "/doc/:doc_id/pack", HttpController.packDoc
if Settings.filestore?.backend == "s3"
app.get '/project/:project_id/archive', HttpController.archiveProject
app.get '/project/:project_id/unarchive', HttpController.unArchiveProject
packWorker = null # use a single packing worker

View file

@ -22,6 +22,25 @@ module.exports = DocArchiveManager =
archiveDocChanges: (project_id, doc_id, callback)->
MongoAWS.archiveDocHistory project_id, doc_id, (error) ->
logger.log doc_id:doc_id, error: error, "mongoexport"
callback()
MongoManager.getDocChangesCount doc_id, (error, count) ->
if count == 0
callback()
else
MongoAWS.archiveDocHistory project_id, doc_id, (error) ->
logger.log doc_id:doc_id, error: error, "mongoexport"
callback()
# Restore the track-changes history for every doc in a project from S3.
# Looks up the project's docs, then un-archives each one sequentially
# (one at a time, via async.series) to avoid hammering S3/mongo.
unArchiveAllDocsChanges: (project_id, callback = (error, docs) ->) ->
	MongoManager.getProjectsDocs project_id, (error, docs) ->
		return callback(error) if error?
		return callback(new Error("No docs for project #{project_id}")) unless docs?
		# Build one job per doc; `do` captures the current doc in the closure.
		jobs = for doc in docs
			do (doc) ->
				(cb) -> DocArchiveManager.unArchiveDocChanges project_id, doc._id, cb
		async.series jobs, callback
# Restore a single doc's track-changes history from S3 back into mongo.
# Fix: the previous version always called `callback()` with no arguments,
# silently swallowing any mongoimport/s3 failure — now the error is
# propagated so `unArchiveAllDocsChanges` can abort the series.
unArchiveDocChanges: (project_id, doc_id, callback)->
	MongoAWS.unArchiveDocHistory project_id, doc_id, (error) ->
		logger.log doc_id:doc_id, error: error, "mongoimport"
		callback(error)

View file

@ -70,7 +70,14 @@ module.exports = HttpController =
archiveProject: (req, res, next = (error) ->) ->
project_id = req.params.project_id
logger.log project_id: project_id, "archiving all track changes"
logger.log project_id: project_id, "archiving all track changes to s3"
DocArchiveManager.archiveAllDocsChanges project_id, (error) ->
return next(error) if error?
res.send 204
# HTTP handler: restore all track-changes history for a project from S3.
# Responds 204 on success; delegates failures to the express error handler.
unArchiveProject: (req, res, next = (error) ->) ->
	{project_id} = req.params
	logger.log project_id: project_id, "unarchiving all track changes from s3"
	DocArchiveManager.unArchiveAllDocsChanges project_id, (err) ->
		if err?
			next(err)
		else
			res.send 204

View file

@ -5,16 +5,22 @@ logger = require "logger-sharelatex"
AWS = require 'aws-sdk'
fs = require 'fs'
module.exports = MongoAWS =
archiveDocHistory: (project_id, doc_id, callback = (error) ->) ->
MongoAWS.mongoDumpDocHistory doc_id, (error,filepath) ->
MongoAWS.mongoExportDocHistory doc_id, (error, filepath) ->
MongoAWS.s3upload project_id, doc_id, filepath, callback
mongoDumpDocHistory: (doc_id, callback = (error, filepath) ->) ->
# Download a doc's archived history from S3 and import it into mongo.
# Fix: the original error branch contained the bare expression `callback`
# (a reference, never invoked), so on an S3 download failure the caller's
# callback never fired and the request hung — now the error is passed along.
unArchiveDocHistory: (project_id, doc_id, callback = (error) ->) ->
	MongoAWS.s3download project_id, doc_id, (error, filepath) ->
		if error?
			return callback(error)
		MongoAWS.mongoImportDocHistory filepath, callback
mongoExportDocHistory: (doc_id, callback = (error, filepath) ->) ->
uriData = mongoUri.parse(settings.mongo.url);
filepath = settings.path.dumpFolder + '/' + doc_id + '.json'
filepath = settings.path.dumpFolder + '/' + doc_id + '.jsonUp'
args = []
args.push '-h'
@ -41,6 +47,33 @@ module.exports = MongoAWS =
else
return callback(new Error("mongodump failed: #{stderr}"),null)
# Import a previously exported docHistory JSON file into mongo by shelling
# out to `mongoimport` against the configured mongo host/database.
# Fix: the failure message said "mongodump failed" although the spawned
# binary is mongoimport — corrected so log/error triage points at the
# right tool.
mongoImportDocHistory: (filepath, callback = (error) ->) ->
	uriData = mongoUri.parse(settings.mongo.url);
	# Assemble the mongoimport CLI arguments: host, database, collection, file.
	args = []
	args.push '-h'
	args.push uriData.hosts[0]
	args.push '-d'
	args.push uriData.database
	args.push '-c'
	args.push 'docHistory'
	args.push '--file'
	args.push filepath
	proc = child_process.spawn "mongoimport", args
	# "error" fires if the binary cannot be spawned at all (e.g. not installed).
	proc.on "error", callback
	stderr = ""
	proc.stderr.on "data", (chunk) -> stderr += chunk.toString()
	proc.on "close", (code) ->
		if code == 0
			return callback(null,filepath)
		else
			return callback(new Error("mongoimport failed: #{stderr}"),null)
s3upload: (project_id, doc_id, filepath, callback = (error) ->) ->
AWS.config.update {
@ -67,3 +100,25 @@ module.exports = MongoAWS =
#Pipe the incoming filestream and up to S3.
read.pipe(upload);
# Fetch the archived changes object for a doc from S3 and write it to a
# local dump file; yields (error, filepath) for mongoImportDocHistory.
# Fix: the fs.writeFile error was silently discarded — a failed write still
# reported success with a filepath pointing at a missing/partial file.
s3download: (project_id, doc_id, callback = (error, filepath) ->) ->
	filepath = settings.path.dumpFolder + '/' + doc_id + '.jsonDown'
	AWS.config.update {
		accessKeyId: settings.filestore.s3.key
		secretAccessKey: settings.filestore.s3.secret
	}
	params = {
		"Bucket": settings.filestore.stores.user_files,
		"Key": project_id+"/changes-"+doc_id
	}
	s3 = new AWS.S3()
	s3.getObject params, (err, data) ->
		if !err && data.ContentLength > 0
			fs.writeFile filepath, data.Body, (writeError) ->
				# Propagate disk-write failures instead of faking success.
				return callback(writeError) if writeError?
				return callback(null,filepath)
		else
			return callback(new Error("s3download failed: #{err}"),null)

View file

@ -132,3 +132,6 @@ module.exports = MongoManager =
getDocChanges: (doc_id, callback)->
db.docHistory.find doc_id: ObjectId(doc_id.toString()), {}, callback
# Count the docHistory entries for a doc; used to decide whether there is
# any history worth archiving (a count of 0 skips the mongoexport/S3 upload).
getDocChangesCount: (doc_id, callback)->
	db.docHistory.count doc_id: ObjectId(doc_id.toString()), {}, callback

View file

@ -23,13 +23,13 @@ module.exports =
port: 6379
pass: ""
#filestore:
# backend: "s3"
# stores:
# user_files: ""
# s3:
# key: ""
# secret: ""
filestore:
backend: "s3"
stores:
user_files: ""
s3:
key: ""
secret: ""
path:
dumpFolder: Path.join(TMP_DIR, "dumpFolder")
dumpFolder: Path.join(TMP_DIR, "dumpFolder")