more acceptance tests

This commit is contained in:
Henrique Dias 2015-06-02 19:24:45 -03:00
parent 0cd2120430
commit a26320013d
4 changed files with 82 additions and 27 deletions

View file

@@ -23,7 +23,7 @@ module.exports = DocArchive =
archiveDoc: (project_id, doc, callback)->
logger.log project_id: project_id, doc_id: doc._id, "sending doc to s3"
options = buildS3Options(doc.lines, project_id+"/"+doc._id)
options = DocArchive.buildS3Options(doc.lines, project_id+"/"+doc._id)
request.put options, (err, res)->
if err? || res.statusCode != 200
logger.err err:err, res:res, "something went wrong archiving doc in aws"
@@ -48,7 +48,7 @@
unarchiveDoc: (project_id, doc_id, callback)->
logger.log project_id: project_id, doc_id: doc_id, "getting doc from s3"
options = buildS3Options(true, project_id+"/"+doc_id)
options = DocArchive.buildS3Options(true, project_id+"/"+doc_id)
request.get options, (err, res, lines)->
if err? || res.statusCode != 200
logger.err err:err, res:res, "something went wrong unarchiving doc from aws"
@@ -57,15 +57,15 @@ module.exports = DocArchive =
return callback(error) if error?
callback()
buildS3Options = (content, key)->
return {
aws:
key: settings.filestore.s3.key
secret: settings.filestore.s3.secret
bucket: settings.filestore.stores.user_files
timeout: thirtySeconds
json: content
#headers:
# 'content-md5': crypto.createHash("md5").update(content).digest("hex")
uri:"https://#{settings.filestore.stores.user_files}.s3.amazonaws.com/#{key}"
}
buildS3Options: (content, key)->
return {
aws:
key: settings.filestore.s3.key
secret: settings.filestore.s3.secret
bucket: settings.filestore.stores.user_files
timeout: thirtySeconds
json: content
#headers:
# 'content-md5': crypto.createHash("md5").update(content).digest("hex")
uri:"https://#{settings.filestore.stores.user_files}.s3.amazonaws.com/#{key}"
}

View file

@@ -1,7 +1,7 @@
sinon = require "sinon"
chai = require("chai")
chai.should()
{ObjectId} = require "mongojs"
should = chai.should()
{db, ObjectId} = require "../../../app/js/mongojs"
async = require "async"
Settings = require("settings-sharelatex")
@@ -11,6 +11,7 @@ if Settings.filestore?.backend == "s3"
describe "Archiving all docs", ->
beforeEach (done) ->
@callback = sinon.stub()
@project_id = ObjectId()
@docs = [{
_id: ObjectId()
@@ -33,17 +34,49 @@
DocstoreClient.updateDoc @project_id, doc._id, doc.lines, callback
async.series jobs, done
it "should archive all the docs", (done) ->
DocstoreClient.archiveAllDoc @project_id, (error, res) =>
res.statusCode.should.equal 204
done()
afterEach (done) ->
db.docs.remove({project_id: @project_id}, done)
it "should unarchive all the docs", (done) ->
DocstoreClient.archiveAllDoc @project_id, (error, res) =>
DocstoreClient.getAllDocs @project_id, (error, res, docs) =>
throw error if error?
docs.length.should.equal @docs.length
for doc, i in docs
doc.lines.should.deep.equal @docs[i].lines
describe "Archiving all docs", ->
beforeEach (done) ->
DocstoreClient.archiveAllDoc @project_id, (error, @res) =>
done()
it "should archive all the docs", (done) ->
@res.statusCode.should.equal 204
done()
it "should set inS3 and unset lines in each doc", (done) ->
jobs = for archiveDoc in @docs
do (archiveDoc) =>
(callback) =>
db.docs.findOne _id: archiveDoc._id, (error, doc) =>
should.not.exist doc.lines
doc.inS3.should.equal true
callback()
async.series jobs, done
it "should be able get the same docs back", (done) ->
jobs = for archiveDoc in @docs
do (archiveDoc) =>
(callback) =>
DocstoreClient.getS3Doc @project_id, archiveDoc._id, (error, res, doc) =>
doc.toString().should.equal archiveDoc.lines.toString()
callback()
async.series jobs, done
describe "Unarchiving all docs", ->
it "should unarchive all the docs", (done) ->
DocstoreClient.archiveAllDoc @project_id, (error, res) =>
DocstoreClient.getAllDocs @project_id, (error, res, docs) =>
throw error if error?
docs.length.should.equal @docs.length
for doc, i in docs
doc.lines.should.deep.equal @docs[i].lines
done()

View file

@@ -1,6 +1,7 @@
request = require("request").defaults(jar: false)
{db, ObjectId} = require("../../../../app/js/mongojs")
settings = require("settings-sharelatex")
DocArchiveManager = require("../../../../app/js/DocArchiveManager.js")
module.exports = DocstoreClient =
@@ -45,3 +46,6 @@ module.exports = DocstoreClient =
url: "http://localhost:#{settings.internal.docstore.port}/project/#{project_id}/archive"
}, callback
getS3Doc: (project_id, doc_id, callback = (error, res, body) ->) ->
options = DocArchiveManager.buildS3Options(true, project_id+"/"+doc_id)
request.get options, callback

View file

@@ -11,6 +11,7 @@ describe "HttpController", ->
beforeEach ->
@HttpController = SandboxedModule.require modulePath, requires:
"./DocManager": @DocManager = {}
"./DocArchiveManager": @DocArchiveManager = {}
"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
@res = { send: sinon.stub(), json: sinon.stub(), setHeader:sinon.stub() }
@req = { query:{}}
@@ -247,3 +248,20 @@ describe "HttpController", ->
@res.send
.calledWith(204)
.should.equal true
describe "archiveAllDocs", ->
beforeEach ->
@req.params =
project_id: @project_id
@DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1)
@HttpController.archiveAllDocs @req, @res, @next
it "should archive the project", ->
@DocArchiveManager.archiveAllDocs
.calledWith(@project_id)
.should.equal true
it "should return a 204 (No Content)", ->
@res.send
.calledWith(204)
.should.equal true