using mongoexport for s3 archive

Henrique Dias 2015-08-06 15:46:44 -03:00
parent 028fe2fa03
commit 438c4f4d0c
4 changed files with 83 additions and 28 deletions
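
In outline, this commit replaces the hand-rolled signed PUT in DocArchiveManager with a new MongoAWS module: a doc's history is exported from the docHistory collection with mongoexport, and the resulting JSON file is streamed to S3 with s3-upload-stream. A minimal usage sketch of the new path (the require path and ids are placeholders, not part of the commit; settings.filestore and settings.path.dumpFolder must be configured as in the settings change further down):

# Sketch only: archive the history of one doc to S3.
DocArchiveManager = require "./app/coffee/DocArchiveManager"   # hypothetical path

project_id = "55c3b3f7a1b2c3d4e5f60001"   # made-up ids for illustration
doc_id     = "55c3b3f7a1b2c3d4e5f60002"

DocArchiveManager.archiveDocChanges project_id, doc_id, (error) ->
	return console.error("archiving failed", error) if error?
	console.log "docHistory for doc #{doc_id} exported and uploaded to S3"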

DocArchiveManager.coffee

@@ -1,4 +1,5 @@
MongoManager = require "./MongoManager"
MongoAWS = require "./MongoAWS"
logger = require "logger-sharelatex"
_ = require "underscore"
async = require "async"
@@ -21,29 +22,6 @@ module.exports = DocArchiveManager =
	archiveDocChanges: (project_id, doc_id, callback)->
		MongoManager.getDocChanges doc_id, (error, docChanges) ->
			logger.log project_id: project_id, doc_id: doc_id, "sending doc changes to s3"
			options = DocArchiveManager.buildS3Options(docChanges, project_id+"/changes-"+doc_id)
			request.put options, (err, res)->
				md5lines = crypto.createHash("md5").update(JSON.stringify(docChanges)).digest("hex")
				md5response = res.headers.etag.toString().replace(/\"/g, '')
				if err? || res.statusCode != 200
					logger.err err:err, res:res, "something went wrong archiving doc changes in aws"
					return callback new Error("Error in S3 request")
				if md5lines != md5response
					logger.err responseMD5:md5response, linesMD5:md5lines, "error in response md5 from s3"
					return callback new Error("Error in S3 md5 response")
				#MongoManager.markDocAsArchived doc._id, doc.rev, (error) ->
				#	return callback(error) if error?
				callback()

	buildS3Options: (content, key)->
		return {
			aws:
				key: settings.filestore.s3.key
				secret: settings.filestore.s3.secret
				bucket: settings.filestore.stores.user_files
			timeout: thirtySeconds
			json: content
			uri:"https://#{settings.filestore.stores.user_files}.s3.amazonaws.com/#{key}"
		}

			MongoAWS.archiveDocHistory project_id, doc_id, (error) ->
				logger.log doc_id:doc_id, error: error, "mongoexport"
				callback()

MongoAWS.coffee

@@ -0,0 +1,69 @@
settings = require "settings-sharelatex"
child_process = require "child_process"
mongoUri = require "mongo-uri"
logger = require "logger-sharelatex"
AWS = require 'aws-sdk'
fs = require 'fs'

module.exports = MongoAWS =

	archiveDocHistory: (project_id, doc_id, callback = (error) ->) ->
		MongoAWS.mongoDumpDocHistory doc_id, (error, filepath) ->
			return callback(error) if error?
			MongoAWS.s3upload project_id, doc_id, filepath, callback

	mongoDumpDocHistory: (doc_id, callback = (error, filepath) ->) ->
		uriData = mongoUri.parse(settings.mongo.url)
		filepath = settings.path.dumpFolder + '/' + doc_id + '.json'

		# Export the docHistory entries for this doc to a JSON file on disk
		args = []
		args.push '-h'
		args.push uriData.hosts[0]
		args.push '-d'
		args.push uriData.database
		args.push '-c'
		args.push 'docHistory'
		args.push '-q'
		args.push "{doc_id: ObjectId('#{doc_id}') }"
		args.push '-o'
		args.push filepath

		proc = child_process.spawn "mongoexport", args
		proc.on "error", callback

		stderr = ""
		proc.stderr.on "data", (chunk) -> stderr += chunk.toString()

		proc.on "close", (code) ->
			if code == 0
				return callback(null, filepath)
			else
				return callback(new Error("mongoexport failed: #{stderr}"), null)

	s3upload: (project_id, doc_id, filepath, callback = (error) ->) ->
		AWS.config.update {
			accessKeyId: settings.filestore.s3.key
			secretAccessKey: settings.filestore.s3.secret
		}

		s3Stream = require('s3-upload-stream')(new AWS.S3())

		upload = s3Stream.upload {
			"Bucket": settings.filestore.stores.user_files,
			"Key": project_id + "/changes-" + doc_id
		}

		read = fs.createReadStream filepath

		# Propagate upload errors to the caller
		upload.on 'error', callback

		# Signal success once the multipart upload has completed
		upload.on 'uploaded', (details) ->
			return callback(null)

		# Pipe the exported file up to S3
		read.pipe(upload)
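
For reference, the export step above amounts to running mongoexport -h <mongo host> -d <database> -c docHistory -q "{doc_id: ObjectId('<doc_id>')}" -o <dumpFolder>/<doc_id>.json, then streaming that file to the user_files bucket under the key "<project_id>/changes-<doc_id>". A small sketch for exercising the export step on its own (not part of the commit; the doc id is made up, mongoexport must be on the PATH, and settings.path.dumpFolder must already exist):

# Sketch only: run the mongoexport half of the archive flow and inspect the output.
MongoAWS = require "./app/coffee/MongoAWS"   # hypothetical path
fs = require "fs"

doc_id = "55c3b3f7a1b2c3d4e5f60002"   # made-up id for illustration

MongoAWS.mongoDumpDocHistory doc_id, (error, filepath) ->
	throw error if error?
	# mongoexport writes one JSON document per line by default
	lines = fs.readFileSync(filepath, "utf8").trim().split("\n")
	console.log "#{lines.length} docHistory entries exported to #{filepath}"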

config/settings.defaults.coffee

@@ -1,3 +1,6 @@
Path = require('path')
TMP_DIR = Path.resolve(Path.join(__dirname, "../../", "tmp"))

module.exports =
	mongo:
		url: 'mongodb://127.0.0.1/sharelatex'
@@ -23,7 +26,10 @@ module.exports =
	#filestore:
	#	backend: "s3"
	#	stores:
	#		user_files: "sharelatex-dev"
	#		user_files: ""
	#	s3:
	#		key: ""
	#		secret: ""

	path:
		dumpFolder: Path.join(TMP_DIR, "dumpFolder")
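
For the upload to work, the commented filestore block above has to be uncommented and filled in; MongoAWS reads settings.filestore.s3.key, settings.filestore.s3.secret and settings.filestore.stores.user_files. A hedged example with placeholder values (not from the commit):

# Example values only; substitute a real bucket and credentials.
module.exports =
	mongo:
		url: 'mongodb://127.0.0.1/sharelatex'

	filestore:
		backend: "s3"
		stores:
			user_files: "my-sharelatex-user-files"   # hypothetical bucket name
		s3:
			key: "AKIAEXAMPLE"                        # placeholder access key
			secret: "example-secret"                  # placeholder secret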

package.json

@@ -18,7 +18,9 @@
    "request": "~2.33.0",
    "redis-sharelatex": "~0.0.4",
    "redis": "~0.10.1",
    "underscore": "~1.7.0"
    "underscore": "~1.7.0",
    "mongo-uri": "^0.1.2",
    "s3-upload-stream": "^1.0.7"
  },
  "devDependencies": {
    "chai": "~1.9.0",