Get acceptance tests running

This commit is contained in:
James Allen 2014-02-26 12:11:45 +00:00
parent 45fe6978af
commit d27872c9bd
9 changed files with 91 additions and 19 deletions

View file

@@ -6,11 +6,13 @@ HttpController = require "./app/js/HttpController"
express = require "express" express = require "express"
app = express() app = express()
app.use express.logger()
app.post "/doc/:doc_id/flush", HttpController.flushUpdatesWithLock app.post "/doc/:doc_id/flush", HttpController.flushUpdatesWithLock
app.use (error, req, res, next) -> app.use (error, req, res, next) ->
logger.error err: error, "an internal error occurred" logger.error err: error, "an internal error occurred"
req.send 500 res.send 500
port = Settings.internal?.history?.port or 3014 port = Settings.internal?.history?.port or 3014
host = Settings.internal?.history?.host or "localhost" host = Settings.internal?.history?.host or "localhost"

View file

@@ -12,6 +12,7 @@ module.exports = HistoryManager =
MongoManager.popLastCompressedUpdate doc_id, (error, lastCompressedUpdate) -> MongoManager.popLastCompressedUpdate doc_id, (error, lastCompressedUpdate) ->
return callback(error) if error? return callback(error) if error?
logger.log doc_id: doc_id, "popped last update"
# Ensure that raw updates start where lastCompressedUpdate left off # Ensure that raw updates start where lastCompressedUpdate left off
if lastCompressedUpdate? if lastCompressedUpdate?
@@ -30,25 +31,31 @@
REDIS_READ_BATCH_SIZE: 100 REDIS_READ_BATCH_SIZE: 100
processUncompressedUpdates: (doc_id, callback = (error) ->) -> processUncompressedUpdates: (doc_id, callback = (error) ->) ->
logger.log "processUncompressedUpdates"
RedisManager.getOldestRawUpdates doc_id, HistoryManager.REDIS_READ_BATCH_SIZE, (error, rawUpdates) -> RedisManager.getOldestRawUpdates doc_id, HistoryManager.REDIS_READ_BATCH_SIZE, (error, rawUpdates) ->
return callback(error) if error? return callback(error) if error?
length = rawUpdates.length length = rawUpdates.length
logger.log doc_id: doc_id, length: length, "got raw updates from redis"
HistoryManager.compressAndSaveRawUpdates doc_id, rawUpdates, (error) -> HistoryManager.compressAndSaveRawUpdates doc_id, rawUpdates, (error) ->
return callback(error) if error? return callback(error) if error?
logger.log doc_id: doc_id, "compressed and saved doc updates"
RedisManager.deleteOldestRawUpdates doc_id, HistoryManager.REDIS_READ_BATCH_SIZE, (error) -> RedisManager.deleteOldestRawUpdates doc_id, HistoryManager.REDIS_READ_BATCH_SIZE, (error) ->
return callback(error) if error? return callback(error) if error?
if length == HistoryManager.REDIS_READ_BATCH_SIZE if length == HistoryManager.REDIS_READ_BATCH_SIZE
# There might be more updates # There might be more updates
logger.log doc_id: doc_id, "continuing processing updates"
setTimeout () -> setTimeout () ->
HistoryManager.processUncompressedUpdates doc_id, callback HistoryManager.processUncompressedUpdates doc_id, callback
, 0 , 0
else else
logger.log doc_id: doc_id, "all raw updates processed"
callback() callback()
processUncompressedUpdatesWithLock: (doc_id, callback = (error) ->) -> processUncompressedUpdatesWithLock: (doc_id, callback = (error) ->) ->
LockManager.runWithLock( LockManager.runWithLock(
"HistoryLock:#{doc_id}", "HistoryLock:#{doc_id}",
HistoryManager.processUncompressedUpdates, (releaseLock) ->
HistoryManager.processUncompressedUpdates doc_id, releaseLock
callback callback
) )

View file

@@ -7,4 +7,5 @@ module.exports = HttpController =
logger.log doc_id: doc_id, "compressing doc history" logger.log doc_id: doc_id, "compressing doc history"
HistoryManager.processUncompressedUpdatesWithLock doc_id, (error) -> HistoryManager.processUncompressedUpdatesWithLock doc_id, (error) ->
return next(error) if error? return next(error) if error?
logger.log "done http request"
res.send 204 res.send 204

View file

@@ -26,6 +26,7 @@ module.exports = UpdateCompressor =
start_ts: update.meta.start_ts or update.meta.ts start_ts: update.meta.start_ts or update.meta.ts
end_ts: update.meta.end_ts or update.meta.ts end_ts: update.meta.end_ts or update.meta.ts
user_id: update.meta.user_id user_id: update.meta.user_id
v: update.v
return normalizedUpdates return normalizedUpdates
compressRawUpdates: (lastPreviousUpdate, rawUpdates) -> compressRawUpdates: (lastPreviousUpdate, rawUpdates) ->
@@ -56,12 +57,14 @@ module.exports = UpdateCompressor =
user_id: firstUpdate.meta.user_id or null user_id: firstUpdate.meta.user_id or null
start_ts: firstUpdate.meta.start_ts or firstUpdate.meta.ts start_ts: firstUpdate.meta.start_ts or firstUpdate.meta.ts
end_ts: firstUpdate.meta.end_ts or firstUpdate.meta.ts end_ts: firstUpdate.meta.end_ts or firstUpdate.meta.ts
v: firstUpdate.v
secondUpdate = secondUpdate =
op: secondUpdate.op op: secondUpdate.op
meta: meta:
user_id: secondUpdate.meta.user_id or null user_id: secondUpdate.meta.user_id or null
start_ts: secondUpdate.meta.start_ts or secondUpdate.meta.ts start_ts: secondUpdate.meta.start_ts or secondUpdate.meta.ts
end_ts: secondUpdate.meta.end_ts or secondUpdate.meta.ts end_ts: secondUpdate.meta.end_ts or secondUpdate.meta.ts
v: secondUpdate.v
if firstUpdate.meta.user_id != secondUpdate.meta.user_id if firstUpdate.meta.user_id != secondUpdate.meta.user_id
return [firstUpdate, secondUpdate] return [firstUpdate, secondUpdate]
@@ -81,6 +84,7 @@ module.exports = UpdateCompressor =
op: op:
p: firstOp.p p: firstOp.p
i: strInject(firstOp.i, secondOp.p - firstOp.p, secondOp.i) i: strInject(firstOp.i, secondOp.p - firstOp.p, secondOp.i)
v: secondUpdate.v
] ]
# Two deletes # Two deletes
else if firstOp.d? and secondOp.d? and secondOp.p <= firstOp.p <= (secondOp.p + secondOp.d.length) else if firstOp.d? and secondOp.d? and secondOp.p <= firstOp.p <= (secondOp.p + secondOp.d.length)
@@ -92,6 +96,7 @@ module.exports = UpdateCompressor =
op: op:
p: secondOp.p p: secondOp.p
d: strInject(secondOp.d, firstOp.p - secondOp.p, firstOp.d) d: strInject(secondOp.d, firstOp.p - secondOp.p, firstOp.d)
v: secondUpdate.v
] ]
# An insert and then a delete # An insert and then a delete
else if firstOp.i? and secondOp.d? and firstOp.p <= secondOp.p <= (firstOp.p + firstOp.i.length) else if firstOp.i? and secondOp.d? and firstOp.p <= secondOp.p <= (firstOp.p + firstOp.i.length)
@@ -99,7 +104,6 @@ module.exports = UpdateCompressor =
insertedText = firstOp.i.slice(offset, offset + secondOp.d.length) insertedText = firstOp.i.slice(offset, offset + secondOp.d.length)
if insertedText == secondOp.d if insertedText == secondOp.d
insert = strRemove(firstOp.i, offset, secondOp.d.length) insert = strRemove(firstOp.i, offset, secondOp.d.length)
return [] if insert == ""
return [ return [
meta: meta:
start_ts: firstUpdate.meta.start_ts start_ts: firstUpdate.meta.start_ts
@@ -108,6 +112,7 @@ module.exports = UpdateCompressor =
op: op:
p: firstOp.p p: firstOp.p
i: insert i: insert
v: secondUpdate.v
] ]
else else
# This shouldn't be possible! # This shouldn't be possible!

View file

@@ -1,6 +1,6 @@
Settings = require "settings-sharelatex" Settings = require "settings-sharelatex"
mongojs = require "mongojs" mongojs = require "mongojs"
db = mongojs.connect(Settings.mongo.url, ["docHistory", "docOps"]) db = mongojs.connect(Settings.mongo.url, ["docHistory"])
module.exports = module.exports =
db: db db: db
ObjectId: mongojs.ObjectId ObjectId: mongojs.ObjectId

View file

@@ -10,6 +10,7 @@
"mongojs": "~0.9.11", "mongojs": "~0.9.11",
"settings": "git+ssh://git@bitbucket.org:sharelatex/settings-sharelatex.git#master", "settings": "git+ssh://git@bitbucket.org:sharelatex/settings-sharelatex.git#master",
"logger": "git+ssh://git@bitbucket.org:sharelatex/logger-sharelatex.git#bunyan", "logger": "git+ssh://git@bitbucket.org:sharelatex/logger-sharelatex.git#bunyan",
"request": "~2.33.0" "request": "~2.33.0",
"redis": "~0.10.1"
} }
} }

View file

@@ -1,11 +1,13 @@
sinon = require "sinon" sinon = require "sinon"
chai = require("chai") chai = require("chai")
chai.should() chai.should()
expect = chai.expect
mongojs = require "../../../app/js/mongojs" mongojs = require "../../../app/js/mongojs"
db = mongojs.db db = mongojs.db
ObjectId = mongojs.ObjectId ObjectId = mongojs.ObjectId
Settings = require "settings-sharelatex" Settings = require "settings-sharelatex"
request = require "request" request = require "request"
rclient = require("redis").createClient() # Only works locally for now
describe "Appending doc ops to the history", -> describe "Appending doc ops to the history", ->
describe "when the history does not exist yet", -> describe "when the history does not exist yet", ->
@@ -15,27 +17,41 @@ describe "Appending doc ops to the history", ->
updates = [{ updates = [{
op: [{ i: "f", p: 3 }] op: [{ i: "f", p: 3 }]
meta: { ts: Date.now(), user_id: @user_id } meta: { ts: Date.now(), user_id: @user_id }
v: 3
}, { }, {
op: [{ i: "o", p: 4 }] op: [{ i: "o", p: 4 }]
meta: { ts: Date.now(), user_id: @user_id } meta: { ts: Date.now(), user_id: @user_id }
v: 4
}, { }, {
op: [{ i: "o", p: 5 }] op: [{ i: "o", p: 5 }]
meta: { ts: Date.now(), user_id: @user_id } meta: { ts: Date.now(), user_id: @user_id }
v: 5
}] }]
@version = 3
rclient.rpush "UncompressedHistoryOps:#{@doc_id}", (JSON.stringify(u) for u in updates)...
request.post { request.post {
url: "http://localhost:#{Settings.port}/doc/#{@doc_id}/history" url: "http://localhost:#{Settings.port}/doc/#{@doc_id}/flush"
json:
version: @version
docOps: updates
}, (@error, @response, @body) => }, (@error, @response, @body) =>
db.docHistory
.find(doc_id: ObjectId(@doc_id))
.sort("meta.end_ts": -1)
.toArray (error, updates) =>
@update = updates[0]
done() done()
it "should return a successful response", -> it "should return a successful response", ->
@response.statusCode.should.equal 204 @response.statusCode.should.equal 204
it "should insert the compressed op into mongo", ->
expect(@update.op).to.deep.equal {
p: 3, i: "foo"
}
it "should insert the correct version number into mongo", ->
expect(@update.v).to.equal 5
###
describe "when the history has already been started", -> describe "when the history has already been started", ->
beforeEach (done) -> beforeEach (done) ->
@doc_id = ObjectId().toString() @doc_id = ObjectId().toString()
@@ -112,4 +128,4 @@ describe "Appending doc ops to the history", ->
it "should return a successful response", -> it "should return a successful response", ->
@response.statusCode.should.equal 204 @response.statusCode.should.equal 204
###

View file

@@ -176,15 +176,14 @@ describe "HistoryManager", ->
describe "processCompressedUpdatesWithLock", -> describe "processCompressedUpdatesWithLock", ->
beforeEach -> beforeEach ->
@HistoryManager.processUncompressedUpdates = sinon.stub() @HistoryManager.processUncompressedUpdates = sinon.stub().callsArg(2)
@LockManager.runWithLock = sinon.stub().callsArg(2) @LockManager.runWithLock = sinon.stub().callsArg(2)
@HistoryManager.processUncompressedUpdatesWithLock @doc_id, @callback @HistoryManager.processUncompressedUpdatesWithLock @doc_id, @callback
it "should run processUncompressedUpdates with the lock", -> it "should run processUncompressedUpdates with the lock", ->
@LockManager.runWithLock @LockManager.runWithLock
.calledWith( .calledWith(
"HistoryLock:#{@doc_id}", "HistoryLock:#{@doc_id}"
@HistoryManager.processUncompressedUpdates
) )
.should.equal true .should.equal true

View file

@@ -18,19 +18,24 @@ describe "UpdateCompressor", ->
expect(@UpdateCompressor.convertRawUpdatesToCompressedFormat [{ expect(@UpdateCompressor.convertRawUpdatesToCompressedFormat [{
op: [ @op1 = { p: 0, i: "Foo" }, @op2 = { p: 6, i: "bar"} ] op: [ @op1 = { p: 0, i: "Foo" }, @op2 = { p: 6, i: "bar"} ]
meta: { ts: @ts1, user_id: @user_id } meta: { ts: @ts1, user_id: @user_id }
v: 42
}, { }, {
op: [ @op3 = { p: 10, i: "baz" } ] op: [ @op3 = { p: 10, i: "baz" } ]
meta: { ts: @ts2, user_id: @other_user_id } meta: { ts: @ts2, user_id: @other_user_id }
v: 43
}]) }])
.to.deep.equal [{ .to.deep.equal [{
op: @op1, op: @op1,
meta: { start_ts: @ts1, end_ts: @ts1, user_id: @user_id } meta: { start_ts: @ts1, end_ts: @ts1, user_id: @user_id },
v: 42
}, { }, {
op: @op2, op: @op2,
meta: { start_ts: @ts1, end_ts: @ts1, user_id: @user_id } meta: { start_ts: @ts1, end_ts: @ts1, user_id: @user_id },
v: 42
}, { }, {
op: @op3, op: @op3,
meta: { start_ts: @ts2, end_ts: @ts2, user_id: @other_user_id } meta: { start_ts: @ts2, end_ts: @ts2, user_id: @other_user_id },
v: 43
}] }]
describe "compress", -> describe "compress", ->
@@ -39,42 +44,52 @@ describe "UpdateCompressor", ->
expect(@UpdateCompressor.compressUpdates [{ expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "foo" } op: { p: 3, i: "foo" }
meta: ts: @ts1, user_id: @user_id meta: ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 6, i: "bar" } op: { p: 6, i: "bar" }
meta: ts: @ts2, user_id: @user_id meta: ts: @ts2, user_id: @user_id
v: 43
}]) }])
.to.deep.equal [{ .to.deep.equal [{
op: { p: 3, i: "foobar" } op: { p: 3, i: "foobar" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
v: 43
}] }]
it "should insert one insert inside the other", -> it "should insert one insert inside the other", ->
expect(@UpdateCompressor.compressUpdates [{ expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "foo" } op: { p: 3, i: "foo" }
meta: ts: @ts1, user_id: @user_id meta: ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 5, i: "bar" } op: { p: 5, i: "bar" }
meta: ts: @ts2, user_id: @user_id meta: ts: @ts2, user_id: @user_id
v: 43
}]) }])
.to.deep.equal [{ .to.deep.equal [{
op: { p: 3, i: "fobaro" } op: { p: 3, i: "fobaro" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
v: 43
}] }]
it "should not append separated inserts", -> it "should not append separated inserts", ->
expect(@UpdateCompressor.compressUpdates [{ expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "foo" } op: { p: 3, i: "foo" }
meta: ts: @ts1, user_id: @user_id meta: ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 9, i: "bar" } op: { p: 9, i: "bar" }
meta: ts: @ts2, user_id: @user_id meta: ts: @ts2, user_id: @user_id
v: 43
}]) }])
.to.deep.equal [{ .to.deep.equal [{
op: { p: 3, i: "foo" } op: { p: 3, i: "foo" }
meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 9, i: "bar" } op: { p: 9, i: "bar" }
meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
v: 43
}] }]
describe "delete - delete", -> describe "delete - delete", ->
@@ -82,42 +97,52 @@ describe "UpdateCompressor", ->
expect(@UpdateCompressor.compressUpdates [{ expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, d: "foo" } op: { p: 3, d: "foo" }
meta: ts: @ts1, user_id: @user_id meta: ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 3, d: "bar" } op: { p: 3, d: "bar" }
meta: ts: @ts2, user_id: @user_id meta: ts: @ts2, user_id: @user_id
v: 43
}]) }])
.to.deep.equal [{ .to.deep.equal [{
op: { p: 3, d: "foobar" } op: { p: 3, d: "foobar" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
v: 43
}] }]
it "should insert one delete inside the other", -> it "should insert one delete inside the other", ->
expect(@UpdateCompressor.compressUpdates [{ expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, d: "foo" } op: { p: 3, d: "foo" }
meta: ts: @ts1, user_id: @user_id meta: ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 1, d: "bar" } op: { p: 1, d: "bar" }
meta: ts: @ts2, user_id: @user_id meta: ts: @ts2, user_id: @user_id
v: 43
}]) }])
.to.deep.equal [{ .to.deep.equal [{
op: { p: 1, d: "bafoor" } op: { p: 1, d: "bafoor" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
v: 43
}] }]
it "should not append separated deletes", -> it "should not append separated deletes", ->
expect(@UpdateCompressor.compressUpdates [{ expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, d: "foo" } op: { p: 3, d: "foo" }
meta: ts: @ts1, user_id: @user_id meta: ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 9, d: "bar" } op: { p: 9, d: "bar" }
meta: ts: @ts2, user_id: @user_id meta: ts: @ts2, user_id: @user_id
v: 43
}]) }])
.to.deep.equal [{ .to.deep.equal [{
op: { p: 3, d: "foo" } op: { p: 3, d: "foo" }
meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 9, d: "bar" } op: { p: 9, d: "bar" }
meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
v: 43
}] }]
describe "insert - delete", -> describe "insert - delete", ->
@@ -125,52 +150,68 @@ describe "UpdateCompressor", ->
expect(@UpdateCompressor.compressUpdates [{ expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "foo" } op: { p: 3, i: "foo" }
meta: ts: @ts1, user_id: @user_id meta: ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 5, d: "o" } op: { p: 5, d: "o" }
meta: ts: @ts2, user_id: @user_id meta: ts: @ts2, user_id: @user_id
v: 43
}]) }])
.to.deep.equal [{ .to.deep.equal [{
op: { p: 3, i: "fo" } op: { p: 3, i: "fo" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
v: 43
}] }]
it "should remove part of an insert from the middle", -> it "should remove part of an insert from the middle", ->
expect(@UpdateCompressor.compressUpdates [{ expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "fobaro" } op: { p: 3, i: "fobaro" }
meta: ts: @ts1, user_id: @user_id meta: ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 5, d: "bar" } op: { p: 5, d: "bar" }
meta: ts: @ts2, user_id: @user_id meta: ts: @ts2, user_id: @user_id
v: 43
}]) }])
.to.deep.equal [{ .to.deep.equal [{
op: { p: 3, i: "foo" } op: { p: 3, i: "foo" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
v: 43
}] }]
it "should cancel out two opposite updates", -> it "should cancel out two opposite updates", ->
expect(@UpdateCompressor.compressUpdates [{ expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "foo" } op: { p: 3, i: "foo" }
meta: ts: @ts1, user_id: @user_id meta: ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 3, d: "foo" } op: { p: 3, d: "foo" }
meta: ts: @ts2, user_id: @user_id meta: ts: @ts2, user_id: @user_id
v: 43
}]) }])
.to.deep.equal [] .to.deep.equal [
op: { p: 3, i: "" }
meta: start_ts: @ts1, end_ts: @ts2, user_id: @user_id
v: 43
]
it "should not combine separated updates", -> it "should not combine separated updates", ->
expect(@UpdateCompressor.compressUpdates [{ expect(@UpdateCompressor.compressUpdates [{
op: { p: 3, i: "foo" } op: { p: 3, i: "foo" }
meta: ts: @ts1, user_id: @user_id meta: ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 9, d: "bar" } op: { p: 9, d: "bar" }
meta: ts: @ts2, user_id: @user_id meta: ts: @ts2, user_id: @user_id
v: 43
}]) }])
.to.deep.equal [{ .to.deep.equal [{
op: { p: 3, i: "foo" } op: { p: 3, i: "foo" }
meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id meta: start_ts: @ts1, end_ts: @ts1, user_id: @user_id
v: 42
}, { }, {
op: { p: 9, d: "bar" } op: { p: 9, d: "bar" }
meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id meta: start_ts: @ts2, end_ts: @ts2, user_id: @user_id
v: 43
}] }]