Mirror of https://github.com/overleaf/overleaf.git (synced 2025-03-30 23:43:53 +00:00)
Fetch updates up to the limit, even after summarising
This commit is contained in:
parent 81811d7cc5
commit d32b0ee12f

4 changed files with 241 additions and 104 deletions
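The gist of the change: summarising merges raw updates that are close in time, so a single fetch can yield fewer summarized entries than the requested limit. The new code keeps extending the batch until the limit is met or the history runs out. A minimal sketch of that loop, simplified from the diff below — `fetchAndSummarize` is a hypothetical stand-in for the real `_extendBatchOfSummarizedUpdates`, and error handling is trimmed:

# Sketch only: `fetchAndSummarize` stands in for _extendBatchOfSummarizedUpdates,
# which over-fetches raw updates and folds them into the summaries so far.
getSummarizedUpdates = (doc_id, to, limit, callback) ->
  summarized = []
  do fetchNextBatch = ->
    fetchAndSummarize doc_id, summarized, to, limit, (error, updates, endOfDatabase) ->
      return callback(error) if error?
      if endOfDatabase or updates.length >= limit
        # Enough summaries (or no more history): return what we have.
        callback null, updates
      else
        # Continue from just before the earliest version summarised so far.
        to = updates[updates.length - 1].fromV - 1
        summarized = updates
        fetchNextBatch()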
@@ -42,7 +42,6 @@ module.exports = HttpController =
      return next(error) if error?
      res.send JSON.stringify updates: updates

  restore: (req, res, next = (error) ->) ->
    {doc_id, project_id, version} = req.params
    user_id = req.headers["x-user-id"]
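For orientation, the getUpdates endpoint touched in the hunk above presumably drives the new summarising path along these lines. This is a hypothetical sketch, not part of the commit: the query-string parsing and the exact call into UpdatesManager are assumptions.

# Hypothetical controller sketch (assumed shape, not the committed code).
getUpdates: (req, res, next = (error) ->) ->
  {doc_id, project_id} = req.params
  # Assume `to` and `limit` arrive as optional query-string parameters.
  to = parseInt(req.query.to, 10) if req.query.to?
  limit = parseInt(req.query.limit, 10) if req.query.limit?
  UpdatesManager.getSummarizedUpdates doc_id, { to: to, limit: limit }, (error, updates) ->
    return next(error) if error?
    res.send JSON.stringify updates: updates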
@@ -77,9 +77,38 @@ module.exports = UpdatesManager =
        callback null, updates

  getSummarizedUpdates: (doc_id, options = {}, callback = (error, updates) ->) ->
    UpdatesManager.getUpdatesWithUserInfo doc_id, options, (error, updates) ->
    options.limit ||= 25
    summarizedUpdates = []
    to = options.to
    do fetchNextBatch = () ->
      UpdatesManager._extendBatchOfSummarizedUpdates doc_id, summarizedUpdates, to, options.limit, (error, updates, endOfDatabase) ->
        return callback(error) if error?
        if endOfDatabase or updates.length >= options.limit
          callback null, updates
        else
          to = updates[updates.length - 1].fromV - 1
          summarizedUpdates = updates
          fetchNextBatch()

  _extendBatchOfSummarizedUpdates: (
    doc_id,
    existingSummarizedUpdates,
    to, desiredLength,
    callback = (error, summarizedUpdates, endOfDatabase) ->
  ) ->
    UpdatesManager.getUpdatesWithUserInfo doc_id, { to: to, limit: 3 * desiredLength }, (error, updates) ->
      return callback(error) if error?
      callback null, UpdatesManager._summarizeUpdates(updates)
      if !updates? or updates.length == 0
        endOfDatabase = true
      else
        endOfDatabase = false
      summarizedUpdates = UpdatesManager._summarizeUpdates(
        updates, existingSummarizedUpdates
      )
      console.log "Summarized", summarizedUpdates
      callback null,
        summarizedUpdates.slice(0, desiredLength),
        endOfDatabase

  fillUserInfo: (updates, callback = (error, updates) ->) ->
    users = {}
@@ -113,23 +142,26 @@ module.exports = UpdatesManager =

  TIME_BETWEEN_DISTINCT_UPDATES: fiveMinutes = 5 * 60 * 1000
  _summarizeUpdates: (updates) ->
    view = []
    for update in updates.slice().reverse()
      lastUpdate = view[view.length - 1]
      if lastUpdate and update.meta.start_ts - lastUpdate.meta.end_ts < @TIME_BETWEEN_DISTINCT_UPDATES
  _summarizeUpdates: (updates, existingSummarizedUpdates = []) ->
    summarizedUpdates = existingSummarizedUpdates.slice()
    for update in updates
      earliestUpdate = summarizedUpdates[summarizedUpdates.length - 1]
      console.log "Considering update", update, earliestUpdate
      if earliestUpdate and earliestUpdate.meta.start_ts - update.meta.end_ts < @TIME_BETWEEN_DISTINCT_UPDATES
        console.log "Concatting"
        if update.meta.user?
          userExists = false
          for user in lastUpdate.meta.users
          for user in earliestUpdate.meta.users
            if user.id == update.meta.user.id
              userExists = true
              break
          if !userExists
            lastUpdate.meta.users.push update.meta.user
        lastUpdate.meta.start_ts = Math.min(lastUpdate.meta.start_ts, update.meta.start_ts)
        lastUpdate.meta.end_ts = Math.max(lastUpdate.meta.end_ts, update.meta.end_ts)
        lastUpdate.toV = update.v
            earliestUpdate.meta.users.push update.meta.user
        earliestUpdate.meta.start_ts = Math.min(earliestUpdate.meta.start_ts, update.meta.start_ts)
        earliestUpdate.meta.end_ts = Math.max(earliestUpdate.meta.end_ts, update.meta.end_ts)
        earliestUpdate.fromV = update.v
      else
        console.log "creating new"
        newUpdate =
          meta:
            users: []

@@ -141,6 +173,7 @@ module.exports = UpdatesManager =
        if update.meta.user?
          newUpdate.meta.users.push update.meta.user

        view.push newUpdate
        summarizedUpdates.push newUpdate

    return summarizedUpdates

    return view.reverse()
@@ -11,12 +11,15 @@ TrackChangesClient = require "./helpers/TrackChangesClient"
MockWebApi = require "./helpers/MockWebApi"

describe "Getting updates", ->
  before ->
  before (done) ->
    @now = Date.now()
    @to = @now
    @user_id = ObjectId().toString()
    @doc_id = ObjectId().toString()
    @project_id = ObjectId().toString()

    @minutes = 60 * 1000
    @days = 24 * 60 * @minutes

    MockWebApi.users[@user_id] = @user =
      email: "user@sharelatex.com"
@@ -25,87 +28,84 @@ describe "Getting updates", ->
      id: @user_id
    sinon.spy MockWebApi, "getUser"

    @updates = []
    for i in [0..9]
      @updates.push {
        op: [{ i: "a", p: 0 }]
        meta: { ts: @now - (9 - i) * @days - 2 * @minutes, user_id: @user_id }
        v: 2 * i + 1
      }
      @updates.push {
        op: [{ i: "b", p: 0 }]
        meta: { ts: @now - (9 - i) * @days, user_id: @user_id }
        v: 2 * i + 2
      }

    TrackChangesClient.pushRawUpdates @doc_id, @updates, (error) =>
      throw error if error?
      done()

  after: () ->
    MockWebApi.getUser.restore()

  describe "getting updates in the middle", ->
  describe "getting updates up to the limit", ->
    before (done) ->
      @doc_id = ObjectId().toString()
      @project_id = ObjectId().toString()
      @updates = [{
        op: [{ i: "one ", p: 0 }]
        meta: { ts: @to - 20 * @minutes, user_id: @user_id }
        v: 3
      }, {
        op: [{ i: "two ", p: 4 }]
        meta: { ts: @to - 10 * @minutes }
        v: 4
      }, {
        op: [{ i: "three ", p: 8 }]
        meta: { ts: @to, user_id: @user_id }
        v: @toVersion = 5
      }, {
        op: [{ i: "four", p: 14 }]
        meta: { ts: @to + 2 * @minutes, user_id: @user_id }
        v: 6
      }]

      TrackChangesClient.pushRawUpdates @doc_id, @updates, (error) =>
        TrackChangesClient.getUpdates @project_id, @doc_id, { to: 20, limit: 3 }, (error, body) =>
          throw error if error?
        TrackChangesClient.getUpdates @project_id, @doc_id, { to: @toVersion, limit: 2 }, (error, body) =>
          throw error if error?
          @updates = body.updates
          done()
          @updates = body.updates
          done()

    it "should fetch the user details from the web api", ->
      MockWebApi.getUser
        .calledWith(@user_id)
        .should.equal true

    it "should return the updates", ->
      expect(@updates).to.deep.equal [{
        meta:
          start_ts: @to
          end_ts: @to
          users: [@user]
        fromV: 5
        toV: 5
      }, {
        meta:
          users: []
          start_ts: @to - 10 * @minutes
          end_ts: @to - 10 * @minutes
        toV: 4
        fromV: 4
      }]

  describe "getting updates that should be combined", ->
    before (done) ->
      @doc_id = ObjectId().toString()
      @project_id = ObjectId().toString()
      @updates = [{
        op: [{ i: "two ", p: 4 }]
        meta: { ts: @to - 2 * @minutes, user_id: @user_id }
        v: 4
      }, {
        op: [{ i: "three ", p: 8 }]
        meta: { ts: @to, user_id: @user_id }
        v: @toVersion = 5
      }]

      TrackChangesClient.pushRawUpdates @doc_id, @updates, (error) =>
        throw error if error?
        TrackChangesClient.getUpdates @project_id, @doc_id, { to: @toVersion, limit: 2 }, (error, body) =>
          throw error if error?
          @updates = body.updates
          done()

    it "should return the updates", ->
    it "should return the same number of summarized updates as the limit", ->
      expect(@updates).to.deep.equal [{
        meta:
          start_ts: @to - 2 * @minutes
          end_ts: @to
          users: [@user]
        fromV: 4
        toV: 5
        toV: 20
        fromV: 19
      }, {
        meta:
          start_ts: @to - 1 * @days - 2 * @minutes
          end_ts: @to - 1 * @days
          users: [@user]
        toV: 18
        fromV: 17
      }, {
        meta:
          start_ts: @to - 2 * @days - 2 * @minutes
          end_ts: @to - 2 * @days
          users: [@user]
        toV: 16
        fromV: 15
      }]

  describe "getting updates beyond the end of the database", ->
    before (done) ->
      TrackChangesClient.getUpdates @project_id, @doc_id, { to: 4, limit: 30 }, (error, body) =>
        throw error if error?
        @updates = body.updates
        done()

    it "should return as many updates as it can", ->
      expect(@updates).to.deep.equal [{
        meta:
          start_ts: @to - 8 * @days - 2 * @minutes
          end_ts: @to - 8 * @days
          users: [@user]
        toV: 4
        fromV: 3
      }, {
        meta:
          start_ts: @to - 9 * @days - 2 * @minutes
          end_ts: @to - 9 * @days
          users: [@user]
        toV: 2
        fromV: 1
      }]
@@ -239,31 +239,107 @@ describe "UpdatesManager", ->
        .calledWith(@updates)
        .should.equal true

    it "shoudl return the updates with the filled details", ->
    it "should return the updates with the filled details", ->
      @callback.calledWith(null, @updatesWithUserInfo).should.equal true

  describe "getSummarizedUpdates", ->
  describe "_extendBatchOfSummarizedUpdates", ->
    beforeEach ->
      @to = 42
      @limit = 10
      @updates = ["mock-updates"]
      @summarizedUpdates = ["summarized-updates"]
      @UpdatesManager._summarizeUpdates = sinon.stub().returns(@summarizedUpdates)
      @UpdatesManager.getUpdatesWithUserInfo = sinon.stub().callsArgWith(2, null, @updates)
      @UpdatesManager.getSummarizedUpdates @doc_id, { to: @to, limit: @limit }, @callback
      @limit = 2
      @existingSummarizedUpdates = ["summarized-updates-3"]
      @summarizedUpdates = ["summarized-updates-3", "summarized-update-2", "summarized-update-1"]

    it "should get the updates", ->
      @UpdatesManager.getUpdatesWithUserInfo
        .calledWith(@doc_id, { to: @to, limit: @limit })
        .should.equal true
    describe "when there are updates to get", ->
      beforeEach ->
        @updates = ["mock-updates"]
        @existingSummarizedUpdates = ["summarized-updates-3"]
        @summarizedUpdates = ["summarized-updates-3", "summarized-update-2", "summarized-update-1"]
        @UpdatesManager._summarizeUpdates = sinon.stub().returns(@summarizedUpdates)
        @UpdatesManager.getUpdatesWithUserInfo = sinon.stub().callsArgWith(2, null, @updates)
        @UpdatesManager._extendBatchOfSummarizedUpdates @doc_id, @existingSummarizedUpdates, @to, @limit, @callback

    it "should summarize the updates", ->
      @UpdatesManager._summarizeUpdates
        .calledWith(@updates)
        .should.equal true
      it "should get the updates", ->
        @UpdatesManager.getUpdatesWithUserInfo
          .calledWith(@doc_id, { to: @to, limit: 3 * @limit })
          .should.equal true

    it "should call the callback with the summarized updates", ->
      @callback.calledWith(null, @summarizedUpdates).should.equal true
      it "should summarize the updates", ->
        @UpdatesManager._summarizeUpdates
          .calledWith(@updates, @existingSummarizedUpdates)
          .should.equal true

      it "should call the callback with the summarized updates and false for end-of-database", ->
        @callback.calledWith(null, @summarizedUpdates.slice(0, @limit), false).should.equal true

    describe "when there are no more updates", ->
      beforeEach ->
        @updates = []
        @UpdatesManager._summarizeUpdates = sinon.stub().returns(@summarizedUpdates)
        @UpdatesManager.getUpdatesWithUserInfo = sinon.stub().callsArgWith(2, null, @updates)
        @UpdatesManager._extendBatchOfSummarizedUpdates @doc_id, @existingSummarizedUpdates, @to, @limit, @callback

      it "should call the callback with the summarized updates and true for end-of-database", ->
        @callback.calledWith(null, @summarizedUpdates.slice(0, @limit), true).should.equal true

  describe "getSummarizedUpdates", ->
    describe "when one batch of updates is enough to meet the limit", ->
      beforeEach ->
        @to = 42
        @limit = 2
        @updates = ["summarized-updates-3", "summarized-updates-2"]
        @UpdatesManager._extendBatchOfSummarizedUpdates = sinon.stub().callsArgWith(4, null, @updates)
        @UpdatesManager.getSummarizedUpdates @doc_id, { to: @to, limit: @limit }, @callback

      it "should get the batch of summarized updates", ->
        @UpdatesManager._extendBatchOfSummarizedUpdates
          .calledWith(@doc_id, [], @to, @limit)
          .should.equal true

      it "should call the callback with the updates", ->
        @callback.calledWith(null, @updates).should.equal true

    describe "when multiple batches are needed to meet the limit", ->
      beforeEach ->
        @to = 6
        @limit = 4
        @firstBatch = [{ toV: 6, fromV: 6 }, { toV: 5, fromV: 5 }]
        @secondBatch = [{ toV: 4, fromV: 4 }, { toV: 3, fromV: 3 }]
        @UpdatesManager._extendBatchOfSummarizedUpdates = (doc_id, existingUpdates, to, limit, callback) =>
          if existingUpdates.length == 0
            callback null, @firstBatch, false
          else
            callback null, @firstBatch.concat(@secondBatch), false
        sinon.spy @UpdatesManager, "_extendBatchOfSummarizedUpdates"
        @UpdatesManager.getSummarizedUpdates @doc_id, { to: @to, limit: @limit }, @callback

      it "should get the first batch of summarized updates", ->
        @UpdatesManager._extendBatchOfSummarizedUpdates
          .calledWith(@doc_id, [], @to, @limit)
          .should.equal true

      it "should get the second batch of summarized updates", ->
        @UpdatesManager._extendBatchOfSummarizedUpdates
          .calledWith(@doc_id, @firstBatch, 4, @limit)
          .should.equal true

      it "should call the callback with all the updates", ->
        @callback.calledWith(null, @firstBatch.concat(@secondBatch)).should.equal true

    describe "when the end of the database is hit", ->
      beforeEach ->
        @to = 6
        @limit = 4
        @updates = [{ toV: 6, fromV: 6 }, { toV: 5, fromV: 5 }]
        @UpdatesManager._extendBatchOfSummarizedUpdates = sinon.stub().callsArgWith(4, null, @updates, true)
        @UpdatesManager.getSummarizedUpdates @doc_id, { to: @to, limit: @limit }, @callback

      it "should get the batch of summarized updates", ->
        @UpdatesManager._extendBatchOfSummarizedUpdates
          .calledWith(@doc_id, [], @to, @limit)
          .should.equal true

      it "should call the callback with the updates", ->
        @callback.calledWith(null, @updates).should.equal true

  describe "fillUserInfo", ->
    describe "with valid users", ->
@@ -360,13 +436,13 @@ describe "UpdatesManager", ->
    it "should concat updates that are close in time", ->
      expect(@UpdatesManager._summarizeUpdates [{
        meta:
          user: @user_2 = { id: "mock-user-2" }
          user: @user_1 = { id: "mock-user-1" }
          start_ts: @now + 20
          end_ts: @now + 30
        v: 5
      }, {
        meta:
          user: @user_1 = { id: "mock-user-1" }
          user: @user_2 = { id: "mock-user-2" }
          start_ts: @now
          end_ts: @now + 10
        v: 4
@@ -409,4 +485,33 @@ describe "UpdatesManager", ->
        toV: 4
      }]

    it "should concat onto existing summarized updates", ->
      @user_1 = { id: "mock-user-1" }
      @user_2 = { id: "mock-user-2" }
      expect(@UpdatesManager._summarizeUpdates [{
        meta:
          user: @user_1
          start_ts: @now + 20
          end_ts: @now + 30
        v: 5
      }, {
        meta:
          user: @user_2
          start_ts: @now
          end_ts: @now + 10
        v: 4
      }], [{
        meta:
          users: [@user_1]
          start_ts: @now + 40
          end_ts: @now + 50
        fromV: 6
        toV: 8
      }]).to.deep.equal [{
        meta:
          users: [@user_1, @user_2]
          start_ts: @now
          end_ts: @now + 50
        fromV: 4
        toV: 8
      }]