assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../app/js/DocArchiveManager.js"
SandboxedModule = require('sandboxed-module')
ObjectId = require("mongojs").ObjectId
Errors = require "../../../app/js/Errors"
crypto = require("crypto")

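# Unit tests for DocArchiveManager, which archives docs to s3, unarchives them
# back into mongo, and destroys them; all external dependencies are stubbed below.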
describe "DocArchiveManager", ->

	beforeEach ->

		@settings =
			docstore:
				s3:
					secret: "secret"
					key: "this_key"
					bucket:"doc-archive-unit-test"

		@request =
			put: {}
			get: {}
			del: {}

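		# Fixture docs: @archivedDocs are already in s3, @unarchivedDocs only exist
		# in mongo, and @mongoDocs mix the two cases.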
		@archivedDocs = [{
			_id: ObjectId()
			inS3:true
			rev: 2
		}, {
			_id: ObjectId()
			inS3:true
			rev: 4
		}, {
			_id: ObjectId()
			inS3:true
			rev: 6
		}]

		@mongoDocs = [{
			_id: ObjectId()
			lines: ["one", "two", "three"]
			rev: 2
		}, {
			_id: ObjectId()
			lines: ["aaa", "bbb", "ccc"]
			rev: 4
		}, {
			_id: ObjectId()
			inS3: true
			rev: 6
		}, {
			_id: ObjectId()
			inS3: true
			rev: 6
		}, {
			_id: ObjectId()
			lines: ["111", "222", "333"]
			rev: 6
		}]

		@unarchivedDocs = [{
			_id: ObjectId()
			lines: ["wombat", "potato", "banana"]
			rev: 2
		}, {
			_id: ObjectId()
			lines: ["llama", "turnip", "apple"]
			rev: 4
		}, {
			_id: ObjectId()
			lines: ["elephant", "swede", "nectarine"]
			rev: 6
		}]

		@mixedDocs = @archivedDocs.concat(@unarchivedDocs)

		@MongoManager =
			markDocAsArchived: sinon.stub().callsArgWith(2, null)
			upsertIntoDocCollection: sinon.stub().callsArgWith(3, null)
			getProjectsDocs: sinon.stub().callsArgWith(3, null, @mongoDocs)
			getArchivedProjectDocs: sinon.stub().callsArgWith(2, null, @mongoDocs)

		@requires =
			"settings-sharelatex": @settings
			"./MongoManager": @MongoManager
			"request": @request
			"./RangeManager": @RangeManager = {}
			"logger-sharelatex":
				log:->
				err:->
		@globals =
			JSON: JSON

		@error = "my error"
		@project_id = ObjectId().toString()
		@stubbedError = new Errors.NotFoundError("Error in S3 request")
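		# Load the module under test with the stubbed dependencies injected via sandboxed-module.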
		@DocArchiveManager = SandboxedModule.require modulePath, requires: @requires, globals: @globals

	describe "archiveDoc", ->

		it "should use correct options", (done)->
			@request.put = sinon.stub().callsArgWith(1, null, {statusCode:200,headers:{etag:""}})
			@DocArchiveManager.archiveDoc @project_id, @mongoDocs[0], (err)=>
				opts = @request.put.args[0][0]
				assert.deepEqual(opts.aws, {key:@settings.docstore.s3.key, secret:@settings.docstore.s3.secret, bucket:@settings.docstore.s3.bucket})
				opts.body.should.equal JSON.stringify(
					lines: @mongoDocs[0].lines
					ranges: @mongoDocs[0].ranges
					schema_v: 1
				)
				opts.timeout.should.equal (30*1000)
				opts.uri.should.equal "https://#{@settings.docstore.s3.bucket}.s3.amazonaws.com/#{@project_id}/#{@mongoDocs[0]._id}"
				done()

		it "should return no md5 error", (done)->
			data = JSON.stringify(
				lines: @mongoDocs[0].lines
				ranges: @mongoDocs[0].ranges
				schema_v: 1
			)
			@md5 = crypto.createHash("md5").update(data).digest("hex")
			@request.put = sinon.stub().callsArgWith(1, null, {statusCode:200,headers:{etag:@md5}})
			@DocArchiveManager.archiveDoc @project_id, @mongoDocs[0], (err)=>
				should.not.exist err
				done()

		it "should return the error", (done)->
			@request.put = sinon.stub().callsArgWith(1, @stubbedError, {statusCode:400,headers:{etag:""}})
			@DocArchiveManager.archiveDoc @project_id, @mongoDocs[0], (err)=>
				should.exist err
				done()

describe "unarchiveDoc", ->
|
|
|
|
|
|
|
|
it "should use correct options", (done)->
|
2015-08-13 08:00:09 -04:00
|
|
|
@request.get = sinon.stub().callsArgWith(1, null, statusCode:200, @mongoDocs[0].lines)
|
2015-06-03 19:05:15 -04:00
|
|
|
@request.del = sinon.stub().callsArgWith(1, null, statusCode:204, {})
|
2015-08-13 08:00:09 -04:00
|
|
|
@DocArchiveManager.unarchiveDoc @project_id, @mongoDocs[0]._id, (err)=>
|
2015-06-02 17:13:16 -04:00
|
|
|
opts = @request.get.args[0][0]
|
2015-08-13 08:00:09 -04:00
|
|
|
assert.deepEqual(opts.aws, {key:@settings.docstore.s3.key, secret:@settings.docstore.s3.secret, bucket:@settings.docstore.s3.bucket})
|
2015-06-02 17:13:16 -04:00
|
|
|
opts.json.should.equal true
|
|
|
|
opts.timeout.should.equal (30*1000)
|
2015-08-13 08:00:09 -04:00
|
|
|
opts.uri.should.equal "https://#{@settings.docstore.s3.bucket}.s3.amazonaws.com/#{@project_id}/#{@mongoDocs[0]._id}"
|
2015-06-02 17:13:16 -04:00
|
|
|
done()
|
|
|
|
|
|
|
|
it "should return the error", (done)->
|
2015-06-03 11:00:08 -04:00
|
|
|
@request.get = sinon.stub().callsArgWith(1, @stubbedError, {}, {})
|
2015-08-13 08:00:09 -04:00
|
|
|
@DocArchiveManager.unarchiveDoc @project_id, @mongoDocs[0], (err)=>
|
2015-06-03 11:00:08 -04:00
|
|
|
should.exist err
|
2015-06-02 17:13:16 -04:00
|
|
|
done()
|
|
|
|
|
2017-03-09 12:34:33 -05:00
|
|
|
it "should error if the doc lines are a string not an array", (done)->
|
|
|
|
@request.get = sinon.stub().callsArgWith(1, null, statusCode:200, "this is a string")
|
|
|
|
@request.del = sinon.stub()
|
|
|
|
@DocArchiveManager.unarchiveDoc @project_id, @mongoDocs[0], (err)=>
|
|
|
|
should.exist err
|
|
|
|
@request.del.called.should.equal false
|
|
|
|
done()
|
|
|
|
|
2015-06-02 17:13:16 -04:00
|
|
|
describe "archiveAllDocs", ->
|
|
|
|
|
2015-08-13 08:00:09 -04:00
|
|
|
it "should archive all project docs which are not in s3", (done)->
|
2016-12-05 12:27:31 -05:00
|
|
|
@MongoManager.getProjectsDocs = sinon.stub().callsArgWith(3, null, @mongoDocs)
|
2015-06-02 17:13:16 -04:00
|
|
|
@DocArchiveManager.archiveDoc = sinon.stub().callsArgWith(2, null)
|
|
|
|
|
|
|
|
@DocArchiveManager.archiveAllDocs @project_id, (err)=>
|
2015-08-13 08:00:09 -04:00
|
|
|
@DocArchiveManager.archiveDoc.calledWith(@project_id, @mongoDocs[0]).should.equal true
|
|
|
|
@DocArchiveManager.archiveDoc.calledWith(@project_id, @mongoDocs[1]).should.equal true
|
|
|
|
@DocArchiveManager.archiveDoc.calledWith(@project_id, @mongoDocs[4]).should.equal true
|
|
|
|
|
|
|
|
@DocArchiveManager.archiveDoc.calledWith(@project_id, @mongoDocs[2]).should.equal false
|
|
|
|
@DocArchiveManager.archiveDoc.calledWith(@project_id, @mongoDocs[3]).should.equal false
|
|
|
|
|
2015-06-02 17:13:16 -04:00
|
|
|
should.not.exist err
|
|
|
|
done()
|
|
|
|
|
|
|
|
it "should return error if have no docs", (done)->
|
2016-12-05 12:27:31 -05:00
|
|
|
@MongoManager.getProjectsDocs = sinon.stub().callsArgWith(3, null, null)
|
2015-06-02 17:13:16 -04:00
|
|
|
|
|
|
|
@DocArchiveManager.archiveAllDocs @project_id, (err)=>
|
|
|
|
should.exist err
|
|
|
|
done()
|
|
|
|
|
|
|
|
it "should return the error", (done)->
|
2016-12-05 12:27:31 -05:00
|
|
|
@MongoManager.getProjectsDocs = sinon.stub().callsArgWith(3, @error, null)
|
2015-06-02 17:13:16 -04:00
|
|
|
|
|
|
|
@DocArchiveManager.archiveAllDocs @project_id, (err)=>
|
|
|
|
err.should.equal @error
|
|
|
|
done()
|
|
|
|
|
2015-09-07 09:06:20 -04:00
|
|
|
describe "when most have been already put in s3", ->
|
|
|
|
|
|
|
|
beforeEach ->
|
|
|
|
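				# Simulate a project with just under 10,000 docs that are all already in s3.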
				numberOfDocs = 10 * 1000
				@mongoDocs = []
				while --numberOfDocs != 0
					@mongoDocs.push({inS3:true, _id: ObjectId()})

				@MongoManager.getProjectsDocs = sinon.stub().callsArgWith(3, null, @mongoDocs)
				@DocArchiveManager.archiveDoc = sinon.stub().callsArgWith(2, null)

			it "should not throw an error", (done)->
				@DocArchiveManager.archiveAllDocs @project_id, (err)=>
					should.not.exist err
					done()

describe "unArchiveAllDocs", ->
|
|
|
|
|
|
|
|
it "should unarchive all inS3 docs", (done)->
|
2015-08-13 08:00:09 -04:00
|
|
|
@MongoManager.getArchivedProjectDocs = sinon.stub().callsArgWith(1, null, @archivedDocs)
|
2015-06-02 17:13:16 -04:00
|
|
|
@DocArchiveManager.unarchiveDoc = sinon.stub().callsArgWith(2, null)
|
|
|
|
@DocArchiveManager.unArchiveAllDocs @project_id, (err)=>
|
2015-08-13 08:00:09 -04:00
|
|
|
for doc in @archivedDocs
|
2015-06-02 17:13:16 -04:00
|
|
|
@DocArchiveManager.unarchiveDoc.calledWith(@project_id, doc._id).should.equal true
|
|
|
|
should.not.exist err
|
|
|
|
done()
|
|
|
|
|
|
|
|
it "should return error if have no docs", (done)->
|
|
|
|
@MongoManager.getArchivedProjectDocs = sinon.stub().callsArgWith(1, null, null)
|
|
|
|
@DocArchiveManager.unArchiveAllDocs @project_id, (err)=>
|
|
|
|
should.exist err
|
|
|
|
done()
|
|
|
|
|
|
|
|
it "should return the error", (done)->
|
|
|
|
@MongoManager.getArchivedProjectDocs = sinon.stub().callsArgWith(1, @error, null)
|
|
|
|
@DocArchiveManager.unArchiveAllDocs @project_id, (err)=>
|
|
|
|
err.should.equal @error
|
|
|
|
done()
|
2019-07-02 07:45:54 -04:00
|
|
|
|
|
|
|
describe "destroyAllDocs", ->
|
|
|
|
beforeEach ->
|
|
|
|
@request.del = sinon.stub().callsArgWith(1, null, statusCode:204, {})
|
|
|
|
@MongoManager.getProjectsDocs = sinon.stub().callsArgWith(3, null, @mixedDocs)
|
|
|
|
@MongoManager.findDoc = sinon.stub().callsArgWith(3, null, null)
|
|
|
|
@MongoManager.destroyDoc = sinon.stub().yields()
|
|
|
|
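			# findDoc returns the matching fixture doc for known ids; the default stub above returns null for anything else.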
			for doc in @mixedDocs
				@MongoManager.findDoc.withArgs(@project_id, doc._id).callsArgWith(3, null, doc)

it "should destroy all the docs", (done)->
|
|
|
|
@DocArchiveManager.destroyDoc = sinon.stub().callsArgWith(2, null)
|
|
|
|
@DocArchiveManager.destroyAllDocs @project_id, (err)=>
|
|
|
|
for doc in @mixedDocs
|
|
|
|
@DocArchiveManager.destroyDoc.calledWith(@project_id, doc._id).should.equal true
|
|
|
|
should.not.exist err
|
|
|
|
done()
|
|
|
|
|
|
|
|
it "should only the s3 docs from s3", (done)->
|
|
|
|
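			# The s3 request options destroyAllDocs is expected to use for a given doc.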
			docOpts = (doc) =>
				JSON.parse(JSON.stringify({
					aws: {key:@settings.docstore.s3.key, secret:@settings.docstore.s3.secret, bucket:@settings.docstore.s3.bucket},
					json: true,
					timeout: 30 * 1000
					uri:"https://#{@settings.docstore.s3.bucket}.s3.amazonaws.com/#{@project_id}/#{doc._id}"
				}))

			@DocArchiveManager.destroyAllDocs @project_id, (err)=>
				expect(err).not.to.exist

				for doc in @archivedDocs
					sinon.assert.calledWith(@request.del, docOpts(doc))
				for doc in @unarchivedDocs
					expect(@request.del.calledWith(docOpts(doc))).to.equal false # no notCalledWith

				done()

		it "should remove the docs from mongo", (done)->
			@DocArchiveManager.destroyAllDocs @project_id, (err)=>
				expect(err).not.to.exist

				for doc in @mixedDocs
					sinon.assert.calledWith(@MongoManager.destroyDoc, doc._id)

				done()

describe "_s3DocToMongoDoc", ->
|
|
|
|
describe "with the old schema", ->
|
|
|
|
it "should return the docs lines", (done) ->
|
|
|
|
@DocArchiveManager._s3DocToMongoDoc ["doc", "lines"], (error, doc) ->
|
|
|
|
expect(doc).to.deep.equal {
|
|
|
|
lines: ["doc", "lines"]
|
|
|
|
}
|
|
|
|
done()
|
|
|
|
|
|
|
|
describe "with the new schema", ->
|
|
|
|
it "should return the doc lines and ranges", (done) ->
|
|
|
|
@RangeManager.jsonRangesToMongo = sinon.stub().returns {"mongo": "ranges"}
|
|
|
|
@DocArchiveManager._s3DocToMongoDoc {
|
|
|
|
lines: ["doc", "lines"]
|
|
|
|
ranges: {"json": "ranges"}
|
|
|
|
schema_v: 1
|
|
|
|
}, (error, doc) ->
|
|
|
|
expect(doc).to.deep.equal {
|
|
|
|
lines: ["doc", "lines"]
|
|
|
|
ranges: {"mongo": "ranges"}
|
|
|
|
}
|
|
|
|
done()
|
|
|
|
|
|
|
|
it "should return just the doc lines when there are no ranges", (done) ->
|
|
|
|
@DocArchiveManager._s3DocToMongoDoc {
|
|
|
|
lines: ["doc", "lines"]
|
|
|
|
schema_v: 1
|
|
|
|
}, (error, doc) ->
|
|
|
|
expect(doc).to.deep.equal {
|
|
|
|
lines: ["doc", "lines"]
|
|
|
|
}
|
|
|
|
done()
|
|
|
|
|
|
|
|
describe "with an unrecognised schema", ->
|
|
|
|
it "should return an error", (done) ->
|
|
|
|
@DocArchiveManager._s3DocToMongoDoc {
|
|
|
|
schema_v: 2
|
|
|
|
}, (error, doc) ->
|
|
|
|
expect(error).to.exist
|
|
|
|
done()
|
2017-04-03 11:04:54 -04:00
|
|
|
|
|
|
|
describe "_mongoDocToS3Doc", ->
|
|
|
|
describe "with a valid doc", ->
|
|
|
|
it "should return the json version", (done) ->
|
|
|
|
@DocArchiveManager._mongoDocToS3Doc doc = {
|
|
|
|
lines: ["doc", "lines"]
|
|
|
|
ranges: { "mock": "ranges" }
|
|
|
|
}, (err, s3_doc) ->
|
|
|
|
expect(s3_doc).to.equal JSON.stringify({
|
|
|
|
lines: ["doc", "lines"]
|
|
|
|
ranges: { "mock": "ranges" }
|
|
|
|
schema_v: 1
|
|
|
|
})
|
|
|
|
done()
|
|
|
|
|
|
|
|
describe "with null bytes in the result", ->
|
|
|
|
beforeEach ->
|
|
|
|
@_stringify = JSON.stringify
|
|
|
|
JSON.stringify = sinon.stub().returns '{"bad": "\u0000"}'
|
|
|
|
|
|
|
|
afterEach ->
|
|
|
|
JSON.stringify = @_stringify
|
|
|
|
|
|
|
|
it "should return an error", (done) ->
|
|
|
|
@DocArchiveManager._mongoDocToS3Doc {
|
|
|
|
lines: ["doc", "lines"]
|
|
|
|
ranges: { "mock": "ranges" }
|
|
|
|
}, (err, s3_doc) ->
|
|
|
|
expect(err).to.exist
|
|
|
|
done()
|
|
|
|
|
|
|
|
describe "without doc lines", ->
|
|
|
|
it "should return an error", (done) ->
|
|
|
|
@DocArchiveManager._mongoDocToS3Doc {}, (err, s3_doc) ->
|
|
|
|
expect(err).to.exist
|
|
|
|
done()
|
|
|
|
|
2017-04-03 10:50:33 -04:00
|
|
|
|