refactoring

s3 filter in router
Henrique Dias 2015-06-02 15:55:22 -03:00
parent 86f16caeef
commit ddd26798f1
5 changed files with 89 additions and 68 deletions

@@ -20,7 +20,9 @@ app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc
 app.get '/project/:project_id/doc/:doc_id/raw', HttpController.getRawDoc
 app.post '/project/:project_id/doc/:doc_id', bodyParser.json(limit: "2mb"), HttpController.updateDoc
 app.del '/project/:project_id/doc/:doc_id', HttpController.deleteDoc
-app.get '/project/:project_id/archive', HttpController.archiveAllDocs
+if Settings.filestore.backend == "s3"
+	app.get '/project/:project_id/archive', HttpController.archiveAllDocs
 app.get '/status', (req, res)->
 	res.send('docstore is alive')
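
The archive route is now registered only when the filestore backend is S3. For reference, a minimal sketch of the settings shape this commit reads — the key names are taken from the lookups in DocArchive below, but the surrounding settings-sharelatex file layout is an assumption:

# hypothetical settings-sharelatex excerpt; only these keys are read by this commit
module.exports =
	filestore:
		backend: "s3"                # checked by the router guard above
		s3:
			key: "AWS_ACCESS_KEY_ID"   # placeholder credential
			secret: "AWS_SECRET_KEY"   # placeholder credential
		stores:
			user_files: "some-bucket"  # bucket used in DocArchive's S3 URI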

@@ -0,0 +1,71 @@
+MongoManager = require "./MongoManager"
+Errors = require "./Errors"
+logger = require "logger-sharelatex"
+_ = require "underscore"
+async = require "async"
+settings = require("settings-sharelatex")
+request = require("request")
+crypto = require("crypto")
+
+thirtySeconds = 30 * 1000
+
+module.exports = DocArchive =
+
+	# archive every doc in the project to S3, one at a time
+	archiveAllDocs: (project_id, callback = (error, docs) ->) ->
+		MongoManager.getProjectsDocs project_id, (error, docs) ->
+			if error?
+				return callback(error)
+			else if !docs?
+				return callback new Errors.NotFoundError("No docs for project #{project_id}")
+			jobs = _.map docs, (doc) ->
+				(cb)-> DocArchive.archiveDoc project_id, doc, cb
+			async.series jobs, callback
+
+	# PUT the doc's lines to S3, then flag the Mongo doc as archived
+	archiveDoc: (project_id, doc, callback)->
+		logger.log project_id: project_id, doc_id: doc._id, "sending doc to s3"
+		options = buildS3Options(doc.lines, project_id+"/"+doc._id)
+		request.put options, (err, res)->
+			if err? || res.statusCode != 200
+				logger.err err:err, res:res, "something went wrong archiving doc in aws"
+				return callback(err)
+			MongoManager.markDocAsArchived doc._id, doc.rev, (error) ->
+				return callback(error) if error?
+				callback()
+
+	# restore every doc that is flagged inS3 back into Mongo
+	unArchiveAllDocs: (project_id, callback = (error) ->) ->
+		MongoManager.getArchivedProjectDocs project_id, (error, docs) ->
+			if error?
+				return callback(error)
+			else if !docs?
+				return callback new Errors.NotFoundError("No docs for project #{project_id}")
+			jobs = _.map docs, (doc) ->
+				(cb)->
+					if !doc.inS3?
+						return cb()
+					else
+						DocArchive.unarchiveDoc project_id, doc._id, cb
+			async.series jobs, callback
+
+	# GET the doc's lines from S3 and upsert them into the docs collection
+	unarchiveDoc: (project_id, doc_id, callback)->
+		logger.log project_id: project_id, doc_id: doc_id, "getting doc from s3"
+		options = buildS3Options(true, project_id+"/"+doc_id)
+		request.get options, (err, res, lines)->
+			if err? || res.statusCode != 200
+				logger.err err:err, res:res, "something went wrong unarchiving doc from aws"
+				return callback(err)
+			MongoManager.upsertIntoDocCollection project_id, doc_id.toString(), lines, (error) ->
+				return callback(error) if error?
+				callback()
+
+# request options: AWS credentials, the bucket, and the doc's S3 URI;
+# json is the payload for PUTs and `true` (parse the body) for GETs
+buildS3Options = (content, key)->
+	return {
+		aws:
+			key: settings.filestore.s3.key
+			secret: settings.filestore.s3.secret
+			bucket: settings.filestore.stores.user_files
+		timeout: thirtySeconds
+		json: content
+		#headers:
+		#	'content-md5': crypto.createHash("md5").update(content).digest("hex")
+		uri:"https://#{settings.filestore.stores.user_files}.s3.amazonaws.com/#{key}"
+	}
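
A minimal usage sketch of the new module's exported surface (the driver script is hypothetical; the function names and signatures are exactly those defined above):

# hypothetical driver, not part of this commit
DocArchive = require "./DocArchive"

project_id = "000000000000000000000000" # placeholder ObjectId string

# push every doc to S3 (sets inS3, unsets lines), then pull them all back
DocArchive.archiveAllDocs project_id, (error) ->
	throw error if error?
	DocArchive.unArchiveAllDocs project_id, (error) ->
		throw error if error?
		console.log "round-tripped docs for project #{project_id} through S3"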

@@ -7,6 +7,7 @@ settings = require("settings-sharelatex")
 request = require("request")
 crypto = require("crypto")
 thirtySeconds = 30 * 1000
+DocArchive = require "./DocArchive"

 module.exports = DocManager =
@@ -17,7 +18,7 @@ module.exports = DocManager =
 			else if !doc?
 				return callback new Errors.NotFoundError("No such doc: #{doc_id} in project #{project_id}")
 			else if doc?.inS3
-				DocManager.unarchiveDoc project_id, doc_id, (err)->
+				DocArchive.unarchiveDoc project_id, doc_id, (err)->
					if err?
						return callback(err)
					MongoManager.findDoc doc_id, callback
@@ -25,7 +26,7 @@ module.exports = DocManager =
 				callback err, doc

 	getAllDocs: (project_id, callback = (error, docs) ->) ->
-		DocManager.unArchiveAllDocs project_id, (error) ->
+		DocArchive.unArchiveAllDocs project_id, (error) ->
 			MongoManager.getProjectsDocs project_id, (error, docs) ->
 				if error?
 					return callback(error)
@@ -68,65 +69,3 @@ module.exports = DocManager =
 		MongoManager.markDocAsDeleted doc_id, (error) ->
 			return callback(error) if error?
 			callback()
-
-	#DOC ARCHIVER
-	archiveAllDocs: (project_id, callback = (error, docs) ->) ->
-		MongoManager.getProjectsDocs project_id, (error, docs) ->
-			if err?
-				return callback(error)
-			else if !docs?
-				return callback new Errors.NotFoundError("No docs for project #{project_id}")
-			jobs = _.map docs, (doc) ->
-				(cb)-> DocManager.archiveDoc project_id, doc, cb
-			async.series jobs, callback
-
-	archiveDoc: (project_id, doc, callback)->
-		logger.log project_id: project_id, doc_id: doc._id, "sending doc to s3"
-		options = buildS3Options(doc.lines, project_id+"/"+doc._id)
-		request.put options, (err, res)->
-			if err? || res.statusCode != 200
-				logger.err err:err, res:res, "something went wrong archiving doc in aws"
-				callback(err)
-			MongoManager.markDocAsArchived doc._id, doc.rev, (error) ->
-				return callback(error) if error?
-				callback()
-
-	unArchiveAllDocs: (project_id, callback = (error) ->) ->
-		MongoManager.getProjectsDocs project_id, (error, docs) ->
-			if err?
-				return callback(error)
-			else if !docs?
-				return callback new Errors.NotFoundError("No docs for project #{project_id}")
-			jobs = _.map docs, (doc) ->
-				(cb)->
-					if !doc.inS3?
-						return cb()
-					else
-						DocManager.unarchiveDoc project_id, doc._id, cb
-			async.series jobs, callback
-
-	unarchiveDoc: (project_id, doc_id, callback)->
-		logger.log project_id: project_id, doc_id: doc_id, "getting doc from s3"
-		options = buildS3Options(true, project_id+"/"+doc_id)
-		request.get options, (err, res, lines)->
-			if err? || res.statusCode != 200
-				logger.err err:err, res:res, "something went wrong unarchiving doc from aws"
-				callback(err)
-			MongoManager.upsertIntoDocCollection project_id, doc_id.toString(), lines, (error) ->
-				return callback(error) if error?
-				callback()
-
-buildS3Options = (content, key)->
-	return {
-		aws:
-			key: settings.filestore.s3.key
-			secret: settings.filestore.s3.secret
-			bucket: settings.filestore.stores.user_files
-		timeout: thirtySeconds
-		json: content
-		#headers:
-		#	'content-md5': crypto.createHash("md5").update(content).digest("hex")
-		uri:"https://#{settings.filestore.stores.user_files}.s3.amazonaws.com/#{key}"
-	}

@@ -1,5 +1,6 @@
 DocManager = require "./DocManager"
 logger = require "logger-sharelatex"
+DocArchive = require "./DocArchive"

 module.exports = HttpController =

 	getDoc: (req, res, next = (error) ->) ->
@@ -82,6 +83,6 @@ module.exports = HttpController =
 	archiveAllDocs: (req, res, next = (error) ->) ->
 		project_id = req.params.project_id
 		logger.log project_id: project_id, "archiving all docs"
-		DocManager.archiveAllDocs project_id, (error) ->
+		DocArchive.archiveAllDocs project_id, (error) ->
 			return next(error) if error?
 			res.send 204
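
A quick way to exercise this endpoint once docstore is running — the host and port here are assumptions, since the service's listen address is not part of this diff:

# hypothetical smoke test using the request lib
request = require "request"

project_id = "000000000000000000000000" # placeholder ObjectId string
request.get "http://localhost:3016/project/#{project_id}/archive", (err, res) ->
	return console.error err if err?
	console.log "archive returned #{res.statusCode}" # expect 204 on success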

@@ -9,6 +9,12 @@ module.exports = MongoManager =
 	getProjectsDocs: (project_id, callback)->
 		db.docs.find project_id: ObjectId(project_id.toString()), {}, callback

+	getArchivedProjectDocs: (project_id, callback)->
+		query =
+			project_id: ObjectId(project_id.toString())
+			inS3: true
+		db.docs.find query, {}, callback
+
 	upsertIntoDocCollection: (project_id, doc_id, lines, callback)->
 		update =
 			$set:{}
@@ -34,6 +40,8 @@ module.exports = MongoManager =
 			$unset: {}
 		update.$set["inS3"] = true
 		update.$unset["lines"] = true
-		db.docs.update _id: doc_id, update, (err)->
+		# to ensure the lines have not changed during the archive process, match on the rev as well as the _id
+		query =
+			_id: doc_id
+			rev: rev
+		db.docs.update query, update, (err)->
 			callback(err)
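
The rev in the query gives markDocAsArchived compare-and-set semantics. A sketch of the effective operation, assuming the enclosing signature is markDocAsArchived: (doc_id, rev, callback)-> as implied by the call in DocArchive:

# illustration of the resulting Mongo update, not new code in the commit
query =
	_id: doc_id # the doc that was just uploaded to S3
	rev: rev    # the revision that was uploaded
update =
	$set: inS3: true    # flag the doc as archived
	$unset: lines: true # drop the archived lines from Mongo
db.docs.update query, update, callback
# if a concurrent edit bumped rev, the query matches nothing and the lines survive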