# overleaf/services/docstore/app/coffee/DocArchiveManager.coffee
MongoManager = require "./MongoManager"
Errors = require "./Errors"
logger = require "logger-sharelatex"
_ = require "underscore"
async = require "async"
settings = require("settings-sharelatex")
request = require("request")
crypto = require("crypto")
# Timeout (ms) applied to every S3 HTTP request built by buildS3Options below.
thirtySeconds = 30 * 1000
# Archives project docs to S3 and restores them into Mongo.
# All public methods follow the node callback convention: callback(err, ...).
module.exports = DocArchive =

	# Archive every not-yet-archived doc (including deleted ones) of a project
	# to S3, at most 5 uploads in flight at a time.
	archiveAllDocs: (project_id, callback = (err, docs) ->) ->
		MongoManager.getProjectsDocs project_id, {include_deleted: true}, (err, docs) ->
			if err?
				return callback(err)
			else if !docs?
				return callback new Errors.NotFoundError("No docs for project #{project_id}")
			# Skip docs that are already in S3
			docs = _.filter docs, (doc)-> doc.inS3 != true
			jobs = _.map docs, (doc) ->
				(cb)->
					DocArchive.archiveDoc project_id, doc, cb
			async.parallelLimit jobs, 5, callback

	# Upload a single doc's lines to S3, verify the upload via the ETag MD5,
	# then mark the doc as archived in Mongo.
	archiveDoc: (project_id, doc, callback)->
		logger.log project_id: project_id, doc_id: doc._id, "sending doc to s3"
		try
			options = DocArchive.buildS3Options(doc.lines, project_id+"/"+doc._id)
		catch e
			# buildS3Options throws when S3 settings are missing
			return callback e
		request.put options, (err, res)->
			if err? || res.statusCode != 200
				logger.err err:err, res:res, project_id:project_id, doc_id: doc._id, statusCode: res?.statusCode, "something went wrong archiving doc in aws"
				return callback new Error("Error in S3 request")
			md5lines = crypto.createHash("md5").update(JSON.stringify(doc.lines), "utf8").digest("hex")
			# S3 returns the MD5 of the stored object as a quoted ETag header.
			# Guard against a missing header so we fail via the callback below
			# instead of throwing inside this response handler.
			md5response = res.headers.etag?.toString().replace(/\"/g, '') ? ""
			if md5lines != md5response
				logger.err responseMD5:md5response, linesMD5:md5lines, project_id:project_id, doc_id: doc._id, "err in response md5 from s3"
				return callback new Error("Error in S3 md5 response")
			MongoManager.markDocAsArchived doc._id, doc.rev, (err) ->
				return callback(err) if err?
				callback()

	# Restore every archived doc of a project from S3 back into Mongo,
	# at most 5 downloads in flight at a time.
	unArchiveAllDocs: (project_id, callback = (err) ->) ->
		MongoManager.getArchivedProjectDocs project_id, (err, docs) ->
			if err?
				logger.err err:err, project_id:project_id, "error unarchiving all docs"
				return callback(err)
			else if !docs?
				return callback new Errors.NotFoundError("No docs for project #{project_id}")
			jobs = _.map docs, (doc) ->
				(cb)->
					if !doc.inS3?
						# Nothing to restore for docs that were never archived
						return cb()
					else
						DocArchive.unarchiveDoc project_id, doc._id, cb
			async.parallelLimit jobs, 5, callback

	# Fetch a doc's lines from S3, upsert them into the doc collection,
	# then delete the S3 object (204 expected on success).
	unarchiveDoc: (project_id, doc_id, callback)->
		logger.log project_id: project_id, doc_id: doc_id, "getting doc from s3"
		try
			# json: true makes request parse the response body for us
			options = DocArchive.buildS3Options(true, project_id+"/"+doc_id)
		catch e
			return callback e
		request.get options, (err, res, lines)->
			if err? || res.statusCode != 200
				logger.err err:err, res:res, project_id:project_id, doc_id:doc_id, "something went wrong unarchiving doc from aws"
				return callback new Errors.NotFoundError("Error in S3 request")
			MongoManager.upsertIntoDocCollection project_id, doc_id.toString(), {lines}, (err) ->
				return callback(err) if err?
				logger.log project_id: project_id, doc_id: doc_id, "deleting doc from s3"
				request.del options, (err, res, body)->
					if err? || res.statusCode != 204
						logger.err err:err, res:res, project_id:project_id, doc_id:doc_id, "something went wrong deleting doc from aws"
						return callback new Errors.NotFoundError("Error in S3 request")
					callback()

	# Build the options object for the `request` module's S3 support.
	# `content` is JSON-encoded as the request body (or `true` to parse the
	# response body); `key` is the S3 object key.
	# Throws when settings.docstore.s3 is not configured.
	buildS3Options: (content, key)->
		if !settings.docstore.s3?
			throw new Error("S3 settings are not configured")
		return {
			aws:
				key: settings.docstore.s3.key
				secret: settings.docstore.s3.secret
				bucket: settings.docstore.s3.bucket
			timeout: thirtySeconds
			json: content
			uri:"https://#{settings.docstore.s3.bucket}.s3.amazonaws.com/#{key}"
		}