Commit 4d58c14573 (parent 1419d20b1f)

    updated MongoAWS tests

2 changed files with 32 additions and 77 deletions
File 1 of 2 (a package.json dependencies list; the file path is not shown in this view):

@@ -39,6 +39,7 @@
     "bunyan": "~0.22.1",
     "grunt-bunyan": "~0.5.0",
     "grunt-forever": "~0.4.2",
-    "timekeeper": "0.0.4"
+    "timekeeper": "0.0.4",
+    "memorystream": "0.3.1"
   }
 }

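The new memorystream dependency is what lets the updated tests replace the s3-streams stubs with plain in-memory streams, and the test file below pairs it with zlib to fake a gzipped pack coming back from S3. A minimal sketch of that round trip, for context only (not part of the commit; the variable names are illustrative):

	zlib = require "zlib"
	MemoryStream = require "memorystream"

	# Gzip a small JSON payload, then serve it back through an in-memory
	# readable stream, the same way the test fakes an S3 download.
	zlib.gzip '{"pack":"123"}', (err, zbuf) ->
		throw err if err?
		fakeDownload = MemoryStream.createReadStream(zbuf, {readable: true})
		fakeDownload
			.pipe(zlib.createGunzip())
			.on "data", (chunk) ->
				console.log chunk.toString()   # -> {"pack":"123"}
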
File 2 of 2 (the MongoAWS unit test, CoffeeScript; the file path is not shown in this view):

@@ -4,6 +4,8 @@ sinon = require("sinon")
 modulePath = "../../../../app/js/MongoAWS.js"
 SandboxedModule = require('sandboxed-module')
 {ObjectId} = require("mongojs")
+MemoryStream = require('memorystream')
+zlib = require "zlib"
 
 describe "MongoAWS", ->
 	beforeEach ->
@@ -20,95 +22,47 @@ describe "MongoAWS", ->
 			"logger-sharelatex": @logger = {log: sinon.stub(), error: sinon.stub(), err:->}
 			"aws-sdk": @awssdk = {}
 			"fs": @fs = {}
-			"s3-streams": @s3streams = {}
+			"s3-streams": @S3S = {}
 			"./mongojs" : { db: @db = {}, ObjectId: ObjectId }
 			"JSONStream": @JSONStream = {}
 			"readline-stream": @readline = sinon.stub()
 
 		@project_id = ObjectId().toString()
 		@doc_id = ObjectId().toString()
+		@pack_id = ObjectId()
 		@update = { v:123 }
 		@callback = sinon.stub()
 
-	# describe "archiveDocHistory", ->
-
-	# 	beforeEach ->
-	# 		@awssdk.config = { update: sinon.stub() }
-	# 		@awssdk.S3 = sinon.stub()
-	# 		@s3streams.WriteStream = sinon.stub()
-	# 		@db.docHistory = {}
-	# 		@db.docHistory.on = sinon.stub()
-	# 		@db.docHistory.find = sinon.stub().returns @db.docHistory
-	# 		@db.docHistory.on.returns
-	# 			pipe:->
-	# 				pipe:->
-	# 					on: (type, cb)->
-	# 						on: (type, cb)->
-	# 							cb()
-	# 		@JSONStream.stringify = sinon.stub()
-
-	# 		@MongoAWS.archiveDocHistory @project_id, @doc_id, @update, @callback
-
-	# 	it "should call the callback", ->
-	# 		@callback.called.should.equal true
-
-	# describe "unArchiveDocHistory", ->
-
-	# 	beforeEach ->
-	# 		@awssdk.config = { update: sinon.stub() }
-	# 		@awssdk.S3 = sinon.stub()
-	# 		@s3streams.ReadStream = sinon.stub()
-
-	# 		@s3streams.ReadStream.returns
-	# 			#describe on 'open' behavior
-	# 			on: (type, cb)->
-	# 				#describe on 'error' behavior
-	# 				on: (type, cb)->
-	# 					pipe:->
-	# 						#describe on 'data' behavior
-	# 						on: (type, cb)->
-	# 							cb([])
-	# 						#describe on 'end' behavior
-	# 						on: (type, cb)->
-	# 							cb()
-	# 				#describe on 'error' behavior
-	# 				on: sinon.stub()
-
-	# 		@MongoAWS.handleBulk = sinon.stub()
-	# 		@MongoAWS.unArchiveDocHistory @project_id, @doc_id, @callback
-
-	# 	it "should call handleBulk", ->
-	# 		@MongoAWS.handleBulk.called.should.equal true
-
-	# describe "handleBulk", ->
-	# 	beforeEach ->
-	# 		@bulkOps = [{
-	# 			_id: ObjectId()
-	# 			doc_id: ObjectId()
-	# 			project_id: ObjectId()
-	# 		}, {
-	# 			_id: ObjectId()
-	# 			doc_id: ObjectId()
-	# 			project_id: ObjectId()
-	# 		}, {
-	# 			_id: ObjectId()
-	# 			doc_id: ObjectId()
-	# 			project_id: ObjectId()
-	# 		}]
-	# 		@bulk =
-	# 			find: sinon.stub().returns
-	# 				upsert: sinon.stub().returns
-	# 					updateOne: sinon.stub()
-	# 			execute: sinon.stub().callsArgWith(0, null, {})
-	# 		@db.docHistory = {}
-	# 		@db.docHistory.initializeUnorderedBulkOp = sinon.stub().returns @bulk
-	# 		@MongoAWS.handleBulk @bulkOps, @bulkOps.length, @callback
-
-	# 	it "should call updateOne for each operation", ->
-	# 		@bulk.find.calledWith({_id:@bulkOps[0]._id}).should.equal true
-	# 		@bulk.find.calledWith({_id:@bulkOps[1]._id}).should.equal true
-	# 		@bulk.find.calledWith({_id:@bulkOps[2]._id}).should.equal true
-
-	# 	it "should call the callback", ->
-	# 		@callback.calledWith(null).should.equal true
+	describe "archivePack", ->
+
+		beforeEach (done) ->
+			@awssdk.config = { update: sinon.stub() }
+			@awssdk.S3 = sinon.stub()
+			@S3S.WriteStream = MemoryStream.createWriteStream
+			@db.docHistory = {}
+			@db.docHistory.findOne = sinon.stub().callsArgWith(1, null, {"pack":"hello"})
+
+			@MongoAWS.archivePack @project_id, @doc_id, @pack_id, (err, result) =>
+				@callback()
+				done()
+
+		it "should call the callback", ->
+			@callback.called.should.equal true
+
+	describe "unArchivePack", ->
+
+		beforeEach (done) ->
+			zlib.gzip '{"pack":"123"}', (err, zbuf) =>
+				@awssdk.config = { update: sinon.stub() }
+				@awssdk.S3 = sinon.stub()
+				@S3S.ReadStream = () ->
+					MemoryStream.createReadStream(zbuf, {readable:true})
+				@db.docHistory = {}
+				@db.docHistory.insert = sinon.stub().callsArgWith(1, null, "pack")
+
+				@MongoAWS.unArchivePack @project_id, @doc_id, @pack_id, (err, result) =>
+					@callback()
+					done()
+
+		it "should call db.docHistory.insert", ->
+			@db.docHistory.insert.called.should.equal true

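The faked mongo calls in these tests rely on sinon's callsArgWith, which makes a stub invoke one of its own arguments as a callback. A minimal sketch of that pattern, for context only (not part of the commit; the db object here is hypothetical):

	sinon = require "sinon"

	# callsArgWith(1, null, {...}) makes the stub call its second argument
	# (the callback) with (null, {pack: "hello"}) as soon as it is invoked.
	db = docHistory: findOne: sinon.stub().callsArgWith(1, null, {pack: "hello"})

	db.docHistory.findOne {_id: "some-pack-id"}, (err, doc) ->
		console.log err, doc.pack   # -> null hello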