sinon = require "sinon"
chai = require "chai"
should = chai.should()
{db, ObjectId} = require "../../../app/js/mongojs"
async = require "async"
Settings = require "settings-sharelatex"

DocstoreClient = require "./helpers/DocstoreClient"
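
# These acceptance tests cover the docstore's doc-archiving behaviour. They
# only run when the filestore backend is configured as "s3", because
# archiving moves each doc's lines out of Mongo and into S3.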
if Settings.filestore?.backend == "s3"

	describe "Archiving all docs", ->
		beforeEach (done) ->
			@callback = sinon.stub()
			@project_id = ObjectId()
			@docs = [{
				_id: ObjectId()
				lines: ["one", "two", "three"]
				rev: 2
			}, {
				_id: ObjectId()
				lines: ["aaa", "bbb", "ccc"]
				rev: 4
			}, {
				_id: ObjectId()
				lines: ["111", "222", "333"]
				rev: 6
			}]
			jobs = for doc in @docs
				do (doc) =>
					(callback) =>
						DocstoreClient.createDoc @project_id, doc._id, doc.lines, (err) =>
							return callback(err) if err?
							doc.lines[0] = doc.lines[0] + " added"
							DocstoreClient.updateDoc @project_id, doc._id, doc.lines, callback
			async.series jobs, done

		afterEach (done) ->
			db.docs.remove({project_id: @project_id}, done)

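		# Note: archiveAllDoc is the DocstoreClient helper wrapping the
		# archive-all endpoint; the suite below assumes a 204 response means
		# every doc in the project was successfully pushed to S3.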
describe "Archiving all docs", ->
|
|
|
|
beforeEach (done) ->
|
2015-06-02 15:29:32 -04:00
|
|
|
|
2015-08-13 08:00:09 -04:00
|
|
|
DocstoreClient.archiveAllDoc @project_id, (error, @res) =>
|
2015-06-02 19:08:50 -04:00
|
|
|
done()
|
|
|
|
|
2015-08-13 08:00:09 -04:00
|
|
|
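			# 204 No Content is the expected success status for archiving.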
it "should archive all the docs", (done) ->
|
|
|
|
@res.statusCode.should.equal 204
|
|
|
|
done()
|
|
|
|
|
|
|
|
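			# Once archived, the Mongo record should hold only metadata:
			# lines are unset and inS3 is flagged true.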
it "should set inS3 and unset lines in each doc", (done) ->
|
|
|
|
|
|
|
|
jobs = for archiveDoc in @docs
|
|
|
|
do (archiveDoc) =>
|
|
|
|
(callback) =>
|
|
|
|
db.docs.findOne _id: archiveDoc._id, (error, doc) =>
|
|
|
|
should.not.exist doc.lines
|
|
|
|
doc.inS3.should.equal true
|
|
|
|
callback()
|
|
|
|
async.series jobs, done
|
|
|
|
|
|
|
|
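			# getS3Doc reads the archived content straight from S3; it should
			# match the lines each doc had at archive time.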
it "should be able get the same docs back", (done) ->
|
|
|
|
|
|
|
|
jobs = for archiveDoc in @docs
|
|
|
|
do (archiveDoc) =>
|
|
|
|
(callback) =>
|
|
|
|
DocstoreClient.getS3Doc @project_id, archiveDoc._id, (error, res, doc) =>
|
|
|
|
doc.toString().should.equal archiveDoc.lines.toString()
|
|
|
|
callback()
|
|
|
|
async.series jobs, done
|
|
|
|
|
|
|
|
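		# Archiving should be idempotent: a second run over already-archived
		# docs must also succeed with a 204.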
describe "Arching all docs twice", ->
|
|
|
|
beforeEach (done) ->
|
|
|
|
DocstoreClient.archiveAllDoc @project_id, (error, @res) =>
|
|
|
|
@res.statusCode.should.equal 204
|
|
|
|
DocstoreClient.archiveAllDoc @project_id, (error, @res) =>
|
|
|
|
@res.statusCode.should.equal 204
|
|
|
|
done()
|
2015-06-02 19:08:50 -04:00
|
|
|
|
2015-08-13 08:00:09 -04:00
|
|
|
it "should archive all the docs", (done) ->
|
|
|
|
@res.statusCode.should.equal 204
|
|
|
|
done()
|
|
|
|
|
|
|
|
it "should set inS3 and unset lines in each doc", (done) ->
|
|
|
|
|
|
|
|
jobs = for archiveDoc in @docs
|
|
|
|
do (archiveDoc) =>
|
|
|
|
(callback) =>
|
|
|
|
db.docs.findOne _id: archiveDoc._id, (error, doc) =>
|
|
|
|
should.not.exist doc.lines
|
|
|
|
doc.inS3.should.equal true
|
|
|
|
callback()
|
|
|
|
async.series jobs, done
|
|
|
|
|
|
|
|
it "should be able get the same docs back", (done) ->
|
|
|
|
|
|
|
|
jobs = for archiveDoc in @docs
|
|
|
|
do (archiveDoc) =>
|
|
|
|
(callback) =>
|
|
|
|
DocstoreClient.getS3Doc @project_id, archiveDoc._id, (error, res, doc) =>
|
|
|
|
doc.toString().should.equal archiveDoc.lines.toString()
|
|
|
|
callback()
|
|
|
|
async.series jobs, done
|
|
|
|
|
2015-08-13 08:59:40 -04:00
|
|
|
|
|
|
|
|
|
|
|
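		# Stress case: randomBytes(250000).toString("hex") produces a 500,000
		# character string (hex encoding doubles the byte count), so four such
		# lines make this doc roughly 2MB, exercising the archive path with a
		# multi-megabyte payload.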
describe "archiving massive document", (done)->
|
|
|
|
beforeEach (done)->
|
|
|
|
@timeout 1000 * 30
|
|
|
|
quarterMegInBytes = 250000
|
|
|
|
lines = require("crypto").randomBytes(quarterMegInBytes).toString("hex")
|
|
|
|
console.log @project_id, @docs[1]._id, "helllllo"
|
|
|
|
@docs[1].lines = [lines,lines,lines,lines]
|
|
|
|
DocstoreClient.updateDoc @project_id, @docs[1]._id, @docs[1].lines, =>
|
|
|
|
DocstoreClient.archiveAllDoc @project_id, (error, @res) =>
|
|
|
|
done()
|
|
|
|
|
|
|
|
|
|
|
|
it "should archive all the docs", (done) ->
|
|
|
|
@res.statusCode.should.equal 204
|
|
|
|
done()
|
|
|
|
|
|
|
|
it "should set inS3 and unset lines in each doc", (done) ->
|
|
|
|
jobs = for archiveDoc in @docs
|
|
|
|
do (archiveDoc) =>
|
|
|
|
(callback) =>
|
|
|
|
db.docs.findOne _id: archiveDoc._id, (error, doc) =>
|
|
|
|
should.not.exist doc.lines
|
|
|
|
doc.inS3.should.equal true
|
|
|
|
callback()
|
|
|
|
async.series jobs, done
|
|
|
|
|
|
|
|
it "should be able get the same docs back", (done) ->
|
|
|
|
|
|
|
|
jobs = for archiveDoc in @docs
|
|
|
|
do (archiveDoc) =>
|
|
|
|
(callback) =>
|
|
|
|
DocstoreClient.getS3Doc @project_id, archiveDoc._id, (error, res, doc) =>
|
|
|
|
doc.toString().should.equal archiveDoc.lines.toString()
|
|
|
|
callback()
|
|
|
|
async.series jobs, done
|
|
|
|
|
2015-08-13 08:00:09 -04:00
|
|
|
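		# getAllDocs presumably unarchives on read: fetching the project's
		# docs should transparently pull the archived lines back from S3 and
		# return the same content that was stored before archiving.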
describe "Unarchiving all docs", ->
|
|
|
|
|
|
|
|
it "should unarchive all the docs", (done) ->
|
|
|
|
DocstoreClient.archiveAllDoc @project_id, (error, res) =>
|
|
|
|
DocstoreClient.getAllDocs @project_id, (error, res, docs) =>
|
|
|
|
throw error if error?
|
|
|
|
docs.length.should.equal @docs.length
|
|
|
|
for doc, i in docs
|
|
|
|
doc.lines.should.deep.equal @docs[i].lines
|
2015-06-02 19:08:50 -04:00
|
|
|
done()
|