mirror of https://github.com/overleaf/overleaf.git
acceptance tests - work in progress

commit f01bf99682 (parent e2e8292590)

5 changed files with 83 additions and 49 deletions
@@ -48,6 +48,9 @@ app.post "/project/:project_id/flush", HttpController.flushProject
 
 app.post "/project/:project_id/doc/:doc_id/version/:version/restore", HttpController.restore
+app.post '/project/:project_id/doc/:doc_id/push', HttpController.pushDocHistory
+app.post '/project/:project_id/doc/:doc_id/pull', HttpController.pullDocHistory
+
 packWorker = null # use a single packing worker
 
 app.post "/pack", (req, res, next) ->
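For reference, a minimal sketch of how the two new endpoints might be exercised from a script. It uses the same `request` module and localhost:3015 base URL as the TrackChangesClient test helper later in this commit; the ids are placeholders:

	request = require "request"

	project_id = "PROJECT_ID"  # placeholder
	doc_id = "DOC_ID"          # placeholder

	# push all finalised changes for one doc to S3; the handler replies 204 on success
	request.post "http://localhost:3015/project/#{project_id}/doc/#{doc_id}/push", (error, response, body) ->
		throw error if error?
		console.log "push returned", response.statusCode  # expect 204

	# pull the packs back out of S3 into mongo
	request.post "http://localhost:3015/project/#{project_id}/doc/#{doc_id}/pull", (error, response, body) ->
		throw error if error?
		console.log "pull returned", response.statusCode  # expect 204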
@@ -77,6 +77,22 @@ module.exports = HttpController =
 			return next(error) if error?
 			res.send 204
 
+	pushDocHistory: (req, res, next = (error) ->) ->
+		project_id = req.params.project_id
+		doc_id = req.params.doc_id
+		logger.log {project_id, doc_id}, "pushing all finalised changes to s3"
+		PackManager.pushOldPacks project_id, doc_id, (error) ->
+			return next(error) if error?
+			res.send 204
+
+	pullDocHistory: (req, res, next = (error) ->) ->
+		project_id = req.params.project_id
+		doc_id = req.params.doc_id
+		logger.log {project_id, doc_id}, "pulling all packs from s3"
+		PackManager.pullOldPacks project_id, doc_id, (error) ->
+			return next(error) if error?
+			res.send 204
+
 	healthCheck: (req, res)->
 		HealthChecker.check (err)->
 			if err?
@@ -349,6 +349,17 @@ module.exports = PackManager =
 			else
 				logger.err {pack, result, jsondiff: JSON.stringify(pack) is JSON.stringify(result)}, "difference when comparing packs"
 				callback new Error("pack retrieved from s3 does not match pack in mongo")
 
+	# Extra methods to test archive/unarchive for a doc_id
+
+	pushOldPacks: (project_id, doc_id, callback) ->
+		PackManager.findCompletedPacks project_id, doc_id, (err, packs) ->
+			return callback(err) if err?
+			return callback() if not packs?.length
+			PackManager.processOldPack project_id, doc_id, packs[0]._id, callback
+
+	pullOldPacks: (project_id, doc_id, callback) ->
+		PackManager.loadPacksByVersionRange project_id, doc_id, null, null, callback
+
 	# Processing old packs via worker
 
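Note that pushOldPacks archives only the first completed pack (packs[0]) per call, and returns early when none remain. A caller that wanted to drain every completed pack would have to repeat the call until findCompletedPacks comes back empty; a minimal sketch of such a loop, a hypothetical helper built only from the methods shown above (not part of this commit):

	# Hypothetical: push completed packs one at a time until none are left.
	pushAllOldPacks = (project_id, doc_id, callback) ->
		PackManager.findCompletedPacks project_id, doc_id, (err, packs) ->
			return callback(err) if err?
			return callback() if not packs?.length   # done: nothing completed remains
			PackManager.processOldPack project_id, doc_id, packs[0]._id, (err) ->
				return callback(err) if err?
				pushAllOldPacks project_id, doc_id, callback   # recurse for the next pack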
@@ -42,15 +42,15 @@ describe "Archiving updates", ->
 		sinon.spy MockDocStoreApi, "getAllDoc"
 
 		@updates = []
-		for i in [0..9]
+		for i in [0..1024+9]
 			@updates.push {
 				op: [{ i: "a", p: 0 }]
-				meta: { ts: @now - (9 - i) * @hours - 2 * @minutes, user_id: @user_id }
+				meta: { ts: @now - i * @hours, user_id: @user_id }
 				v: 2 * i + 1
 			}
 			@updates.push {
 				op: [{ i: "b", p: 0 }]
-				meta: { ts: @now - (9 - i) * @hours, user_id: @user_id }
+				meta: { ts: @now - i * @hours + 10*@minutes, user_id: @user_id }
 				v: 2 * i + 2
 			}
 
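For scale: the old loop ran i = 0..9 and pushed two updates per iteration, i.e. 20 updates with versions 1..20, which is what the old "twenty doc changes" and lastVersion 20 assertions counted. The new bounds give 2 * (1024 + 10) = 2068 updates, presumably chosen so the fixture spans more than one pack (the unarchive test below expects two). The v_end 100 assertion in the next hunk does not match these bounds, consistent with the work-in-progress commit message. A quick sanity check of the arithmetic:

	# Fixture sizes implied by the loop bounds above.
	old_iterations = 9 + 1            # for i in [0..9]
	new_iterations = 1024 + 9 + 1     # for i in [0..1024+9]
	console.log 2 * old_iterations    # 20 updates, max version 2*9 + 2 = 20
	console.log 2 * new_iterations    # 2068 updates, max version 2068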
@@ -67,49 +67,53 @@ describe "Archiving updates", ->
 
 	describe "archiving a doc's updates", ->
 		before (done) ->
-			TrackChangesClient.archiveProject @project_id, (error) ->
+			TrackChangesClient.pushDocHistory @project_id, @doc_id, (error) ->
 				throw error if error?
 				done()
 
-		it "should remain zero doc change", (done) ->
-			db.docHistory.count { doc_id: ObjectId(@doc_id) }, (error, count) ->
+		it "should have one cached pack", (done) ->
+			db.docHistory.count { doc_id: ObjectId(@doc_id), expiresAt:{$exists:true}}, (error, count) ->
 				throw error if error?
-				count.should.equal 0
+				count.should.equal 1
 				done()
 
-		it "should have docHistoryStats marked as inS3", (done) ->
-			db.docHistoryStats.findOne { doc_id: ObjectId(@doc_id) }, (error, doc) ->
+		it "should have one remaining pack after cache is expired", (done) ->
+			db.docHistory.remove {
+				doc_id: ObjectId(@doc_id),
+				expiresAt:{$exists:true}
+			}, (err, result) =>
 				throw error if error?
-				doc.inS3.should.equal true
 				done()
 
-		it "should have docHistoryStats with the last version", (done) ->
-			db.docHistoryStats.findOne { doc_id: ObjectId(@doc_id) }, (error, doc) ->
-				throw error if error?
-				doc.lastVersion.should.equal 20
-				done()
-
-		it "should store twenty doc changes in S3 in one pack", (done) ->
-			TrackChangesClient.getS3Doc @project_id, @doc_id, (error, res, doc) =>
-				doc.length.should.equal 1
-				doc[0].pack.length.should.equal 20
-				done()
-
-	describe "unarchiving a doc's updates", ->
-		before (done) ->
-			TrackChangesClient.unarchiveProject @project_id, (error) ->
-				throw error if error?
-				done()
-
-		it "should restore doc changes", (done) ->
-			db.docHistory.count { doc_id: ObjectId(@doc_id)}, (error, count) ->
-				throw error if error?
-				count.should.equal 1
-				done()
-
-		it "should remove doc marked as inS3", (done) ->
-			db.docHistoryStats.findOne {doc_id: ObjectId(@doc_id)}, (error, doc) ->
-				throw error if error?
-				doc.should.not.contain.key('inS3')
-				doc.should.not.contain.key('lastVersion')
-				done()
+		it "should have a docHistoryIndex entry marked as inS3", (done) ->
+			db.docHistoryIndex.findOne { _id: ObjectId(@doc_id) }, (error, index) ->
+				throw error if error?
+				index.packs[0].inS3.should.equal true
+				done()
+
+		it "should have a docHistoryIndex entry with the last version", (done) ->
+			db.docHistoryIndex.findOne { _id: ObjectId(@doc_id) }, (error, index) ->
+				throw error if error?
+				index.packs[0].v_end.should.equal 100
+				done()
+
+		# it "should store twenty doc changes in S3 in one pack", (done) ->
+		# 	TrackChangesClient.getS3Doc @project_id, @doc_id, (error, res, doc) =>
+		# 		doc.length.should.equal 1
+		# 		doc[0].pack.length.should.equal 20
+		# 		done()
+
+	describe "unarchiving a doc's updates", ->
+		before (done) ->
+			TrackChangesClient.pullDocHistory @project_id, @doc_id, (error) ->
+				throw error if error?
+				done()
+
+		it "should restore both packs", (done) ->
+			db.docHistory.count { doc_id: ObjectId(@doc_id) }, (error, count) ->
+				throw error if error?
+				count.should.equal 2
+				done()
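From the assertions above one can infer roughly what the new docHistoryIndex collection stores; a hypothetical entry, reconstructed only from the fields the tests read (the commit does not show the actual schema, so other fields are unknown):

	# Hypothetical docHistoryIndex entry, inferred from the assertions above only.
	index =
		_id: ObjectId(doc_id)             # keyed by doc id
		packs: [
			{ inS3: true, v_end: 100 }    # one element per pack; v_end is its last version
		]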
@@ -74,16 +74,16 @@ module.exports = TrackChangesClient =
 			response.statusCode.should.equal 204
 			callback null
 
-	archiveProject: (project_id, callback = (error) ->) ->
+	pushDocHistory: (project_id, doc_id, callback = (error) ->) ->
 		request.post {
-			url: "http://localhost:3015/project/#{project_id}/archive"
+			url: "http://localhost:3015/project/#{project_id}/doc/#{doc_id}/push"
 		}, (error, response, body) =>
 			response.statusCode.should.equal 204
 			callback(error)
 
-	unarchiveProject: (project_id, callback = (error) ->) ->
+	pullDocHistory: (project_id, doc_id, callback = (error) ->) ->
 		request.post {
-			url: "http://localhost:3015/project/#{project_id}/unarchive"
+			url: "http://localhost:3015/project/#{project_id}/doc/#{doc_id}/pull"
 		}, (error, response, body) =>
 			response.statusCode.should.equal 204
 			callback(error)
@@ -91,12 +91,12 @@ module.exports = TrackChangesClient =
 	buildS3Options: (content, key)->
 		return {
 			aws:
-				key: Settings.filestore.s3.key
-				secret: Settings.filestore.s3.secret
-				bucket: Settings.filestore.stores.user_files
+				key: Settings.trackchanges.s3.key
+				secret: Settings.trackchanges.s3.secret
+				bucket: Settings.trackchanges.stores.doc_history
 			timeout: 30 * 1000
 			json: content
-			uri:"https://#{Settings.filestore.stores.user_files}.s3.amazonaws.com/#{key}"
+			uri:"https://#{Settings.trackchanges.stores.doc_history}.s3.amazonaws.com/#{key}"
 		}
 
 	getS3Doc: (project_id, doc_id, callback = (error, res, body) ->) ->
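The helper now reads its S3 credentials and bucket from a trackchanges settings block instead of the filestore one. A hypothetical settings fragment with the shape these accesses assume; only the key paths are taken from the diff, the values are placeholders:

	# Hypothetical settings fragment matching the Settings paths used above.
	module.exports =
		trackchanges:
			s3:
				key: "AWS_ACCESS_KEY_ID"         # placeholder
				secret: "AWS_SECRET_ACCESS_KEY"  # placeholder
			stores:
				doc_history: "example-doc-history-bucket"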