Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-29 04:13:41 -05:00
Merge branch 'master' into sk-upgrade-dependencies

Commit f935c392bc: 14 changed files with 87 additions and 42 deletions
@@ -1,10 +1,14 @@
 Settings = require('settings-sharelatex')
 projectHistoryKeys = Settings.redis?.project_history?.key_schema
-rclient = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
+rclient = require("redis-sharelatex").createClient(Settings.redis.project_history)
 logger = require('logger-sharelatex')
+metrics = require('./Metrics')
 
 module.exports = ProjectHistoryRedisManager =
 	queueOps: (project_id, ops..., callback = (error, projectUpdateCount) ->) ->
+		# Record metric for ops pushed onto queue
+		for op in ops
+			metrics.summary "redis.projectHistoryOps", op.length, {status: "push"}
 		multi = rclient.multi()
 		# Push the ops onto the project history queue
 		multi.rpush projectHistoryKeys.projectHistoryOps({project_id}), ops...
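The hunk above does two things: it moves the client from the documentupdater Redis to the dedicated project_history instance, and it records the byte length of every op before the rpush, labelled status: "push". A minimal sketch of that instrumentation pattern, assuming only the summary(name, value, labels) signature and the redis-sharelatex client used in the diff (queueWithMetric and the key argument are illustrative names, not part of the commit):

metrics = require('./Metrics')

# record the serialized size of each payload entering a Redis list, then push
queueWithMetric = (rclient, key, ops, callback) ->
	for op in ops
		metrics.summary "redis.projectHistoryOps", op.length, {status: "push"}
	rclient.rpush key, ops..., callback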
@@ -5,6 +5,7 @@ Keys = Settings.redis.documentupdater.key_schema
 logger = require('logger-sharelatex')
 os = require "os"
 crypto = require "crypto"
+metrics = require('./Metrics')
 
 HOST = os.hostname()
 RND = crypto.randomBytes(4).toString('hex') # generate a random key for this process

@@ -20,6 +21,9 @@ module.exports = RealTimeRedisManager =
 		multi.exec (error, replys) ->
 			return callback(error) if error?
 			jsonUpdates = replys[0]
+			for jsonUpdate in jsonUpdates
+				# record metric for each update removed from queue
+				metrics.summary "redis.pendingUpdates", jsonUpdate.length, {status: "pop"}
 			updates = []
 			for jsonUpdate in jsonUpdates
 				try

@@ -36,9 +40,13 @@ module.exports = RealTimeRedisManager =
 		# create a unique message id using a counter
 		message_id = "doc:#{HOST}:#{RND}-#{COUNT++}"
 		data?._id = message_id
+
+		blob = JSON.stringify(data)
+		metrics.summary "redis.publish.applied-ops", blob.length
+
 		# publish on separate channels for individual projects and docs when
 		# configured (needs realtime to be configured for this too).
 		if Settings.publishOnIndividualChannels
-			pubsubClient.publish "applied-ops:#{data.doc_id}", JSON.stringify(data)
+			pubsubClient.publish "applied-ops:#{data.doc_id}", blob
 		else
-			pubsubClient.publish "applied-ops", JSON.stringify(data)
+			pubsubClient.publish "applied-ops", blob
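RealTimeRedisManager gains the matching "pop" metric when updates are drained, and its publish path now serializes each message exactly once: the JSON string is kept in blob, its size is recorded, and the same string is reused for whichever channel is chosen. A sketch of that serialize-once pattern, with pubsubClient and data standing in for the real client and message:

sendApplied = (pubsubClient, data) ->
	blob = JSON.stringify(data)                               # serialize once
	metrics.summary "redis.publish.applied-ops", blob.length  # record payload size
	pubsubClient.publish "applied-ops", blob                  # reuse the same string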
@@ -23,7 +23,7 @@ MEGABYTES = 1024 * 1024
 MAX_RANGES_SIZE = 3 * MEGABYTES
 
 keys = Settings.redis.documentupdater.key_schema
-historyKeys = Settings.redis.history.key_schema
+historyKeys = Settings.redis.history.key_schema # note: this is track changes, not project-history
 
 module.exports = RedisManager =
 	rclient: rclient

@@ -41,6 +41,8 @@ module.exports = RedisManager =
 				logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message
 				return callback(error)
 			docHash = RedisManager._computeHash(docLines)
+			# record bytes sent to redis
+			metrics.summary "redis.docLines", docLines.length, {status: "set"}
 			logger.log {project_id, doc_id, version, docHash, pathname, projectHistoryId}, "putting doc in redis"
 			RedisManager._serializeRanges ranges, (error, ranges) ->
 				if error?

@@ -73,6 +75,7 @@ module.exports = RedisManager =
 			_callback()
 
 		multi = rclient.multi()
+		multi.strlen keys.docLines(doc_id:doc_id)
 		multi.del keys.docLines(doc_id:doc_id)
 		multi.del keys.projectKey(doc_id:doc_id)
 		multi.del keys.docVersion(doc_id:doc_id)

@@ -84,8 +87,12 @@ module.exports = RedisManager =
 		multi.del keys.unflushedTime(doc_id:doc_id)
 		multi.del keys.lastUpdatedAt(doc_id: doc_id)
 		multi.del keys.lastUpdatedBy(doc_id: doc_id)
-		multi.exec (error) ->
+		multi.exec (error, response) ->
 			return callback(error) if error?
+			length = response?[0]
+			if length > 0
+				# record bytes freed in redis
+				metrics.summary "redis.docLines", length, {status: "del"}
 			multi = rclient.multi()
 			multi.srem keys.docsInProject(project_id:project_id), doc_id
 			multi.del keys.projectState(project_id:project_id)

@@ -125,6 +132,9 @@ module.exports = RedisManager =
 			if timeSpan > MAX_REDIS_REQUEST_LENGTH
 				error = new Error("redis getDoc exceeded timeout")
 				return callback(error)
+			# record bytes loaded from redis
+			if docLines?
+				metrics.summary "redis.docLines", docLines.length, {status: "get"}
 			# check sha1 hash value if present
 			if docLines? and storedHash?
 				computedHash = RedisManager._computeHash(docLines)

@@ -240,7 +250,8 @@ module.exports = RedisManager =
 
 		opVersions = appliedOps.map (op) -> op?.v
 		logger.log doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis"
+		# record bytes sent to redis in update
+		metrics.summary "redis.docLines", newDocLines.length, {status: "update"}
 		RedisManager._serializeRanges ranges, (error, ranges) ->
 			if error?
 				logger.error {err: error, doc_id}, error.message
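The removeDocFromMemory change is the one subtle hunk: a strlen is queued ahead of the del inside the same multi, so the first entry of the transaction's reply array is the byte count of the doclines value just before it is deleted, and the callback (now taking response as well as error) records it with status "del". A sketch of the pattern, mirroring the reply-array convention the diff itself relies on (deleteWithMetric is an illustrative name):

deleteWithMetric = (rclient, key, callback) ->
	multi = rclient.multi()
	multi.strlen key   # reply [0]: size of the value about to be removed
	multi.del key      # reply [1]: number of keys deleted
	multi.exec (error, response) ->
		return callback(error) if error?
		length = response?[0]
		if length > 0
			# record bytes freed in redis
			metrics.summary "redis.docLines", length, {status: "del"}
		callback()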
@@ -37,9 +37,9 @@ module.exports =
 			docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}"
 
 	project_history:
-		port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379"
-		host: process.env["HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost"
-		password: process.env["HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or ""
+		port: process.env["NEW_HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379"
+		host: process.env["NEW_HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost"
+		password: process.env["NEW_HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or ""
 		maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20")
 		key_schema:
 			projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}"
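Renaming the variables from HISTORY_REDIS_* to NEW_HISTORY_REDIS_* keeps the fallback chain intact: the specific variable wins, then the generic REDIS_* value, then a hard-coded default. For example, with only NEW_HISTORY_REDIS_HOST=new_redis set, the project-history client connects to new_redis:6379 while everything else stays on the shared instance:

# fallback chain as in the settings above: specific, then generic, then default
host = process.env["NEW_HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost"
port = process.env["NEW_HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379"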
@@ -12,13 +12,13 @@ services:
     environment:
       NODE_ENV: test
 
 
   test_acceptance:
     build: .
     image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
     environment:
       ELASTIC_SEARCH_DSN: es:9200
       REDIS_HOST: redis
+      NEW_HISTORY_REDIS_HOST: new_redis
       MONGO_HOST: mongo
       POSTGRES_HOST: postgres
       MOCHA_GREP: ${MOCHA_GREP}

@@ -28,10 +28,11 @@ services:
         condition: service_healthy
       redis:
         condition: service_healthy
+      new_redis:
+        condition: service_healthy
     user: node
     command: npm run test:acceptance:_run
 
 
   tar:
     build: .
     image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER

@@ -42,5 +43,8 @@ services:
   redis:
     image: redis
 
+  new_redis:
+    image: redis
+
   mongo:
     image: mongo:3.6
@@ -24,6 +24,7 @@ services:
     environment:
       ELASTIC_SEARCH_DSN: es:9200
       REDIS_HOST: redis
+      NEW_HISTORY_REDIS_HOST: new_redis
       MONGO_HOST: mongo
       POSTGRES_HOST: postgres
       MOCHA_GREP: ${MOCHA_GREP}

@@ -35,11 +36,15 @@ services:
         condition: service_healthy
       redis:
         condition: service_healthy
+      new_redis:
+        condition: service_healthy
     command: npm run test:acceptance
 
   redis:
     image: redis
 
+  new_redis:
+    image: redis
 
   mongo:
     image: mongo:3.6
services/document-updater/package-lock.json (generated, 14 changed lines)
@@ -2179,9 +2179,9 @@
       "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
     },
     "ioredis": {
-      "version": "4.14.1",
-      "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.14.1.tgz",
-      "integrity": "sha512-94W+X//GHM+1GJvDk6JPc+8qlM7Dul+9K+lg3/aHixPN7ZGkW6qlvX0DG6At9hWtH2v3B32myfZqWoANUJYGJA==",
+      "version": "4.16.1",
+      "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.16.1.tgz",
+      "integrity": "sha512-g76Mm9dE7BLuewncu1MimGZw5gDDjDwjoRony/VoSxSJEKAhuYncDEwYKYjtHi2NWsTNIB6XXRjE64uVa/wpKQ==",
       "requires": {
         "cluster-key-slot": "^1.1.0",
         "debug": "^4.1.1",

@@ -3320,13 +3320,13 @@
       }
     },
     "redis-sharelatex": {
-      "version": "1.0.11",
-      "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.11.tgz",
-      "integrity": "sha512-rKXPVLmFC9ycpRc5e4rULOwi9DB0LqRcWEiUxQuJNSVgcqCxpGqVw+zwivo+grk3G2tGpduh3/8y+4KVHWOntw==",
+      "version": "1.0.12",
+      "resolved": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.12.tgz",
+      "integrity": "sha512-Z+LDGaRNgZ+NiDaCC/R0N3Uy6SCtbKXqiXlvCwAbIQRSZUc69OVx/cQ3i5qDF7zeERhh+pnTd+zGs8nVfa5p+Q==",
       "requires": {
         "async": "^2.5.0",
         "coffee-script": "1.8.0",
-        "ioredis": "~4.14.1",
+        "ioredis": "~4.16.1",
         "redis-sentinel": "0.1.1",
         "underscore": "1.7.0"
       },
@@ -29,7 +29,7 @@
     "logger-sharelatex": "^1.9.1",
     "metrics-sharelatex": "^2.6.2",
     "mongojs": "^3.1.0",
-    "redis-sharelatex": "^1.0.11",
+    "redis-sharelatex": "^1.0.12",
     "request": "^2.88.2",
     "requestretry": "^4.1.0",
     "settings-sharelatex": "^1.1.0"
@@ -4,7 +4,8 @@ chai.should()
 expect = chai.expect
 async = require "async"
 Settings = require('settings-sharelatex')
-rclient_history = require("redis-sharelatex").createClient(Settings.redis.history)
+rclient_history = require("redis-sharelatex").createClient(Settings.redis.history) # note: this is track changes, not project-history
+rclient_project_history = require("redis-sharelatex").createClient(Settings.redis.project_history)
 rclient_du = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
 Keys = Settings.redis.documentupdater.key_schema
 HistoryKeys = Settings.redis.history.key_schema

@@ -65,14 +66,14 @@ describe "Applying updates to a doc", ->
 			return null
 
 		it "should push the applied updates to the project history changes api", (done) ->
-			rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+			rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
 				throw error if error?
 				JSON.parse(updates[0]).op.should.deep.equal @update.op
 				done()
 			return null
 
 		it "should set the first op timestamp", (done) ->
-			rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) =>
+			rclient_project_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) =>
 				throw error if error?
 				result.should.be.within(@startTime, Date.now())
 				@firstOpTimestamp = result

@@ -90,7 +91,7 @@ describe "Applying updates to a doc", ->
 			return null
 
 		it "should not change the first op timestamp", (done) ->
-			rclient_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) =>
+			rclient_project_history.get ProjectHistoryKeys.projectHistoryFirstOpTimestamp({@project_id}), (error, result) =>
 				throw error if error?
 				result.should.equal @firstOpTimestamp
 				done()

@@ -130,7 +131,7 @@ describe "Applying updates to a doc", ->
 			return null
 
 		it "should push the applied updates to the project history changes api", (done) ->
-			rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+			rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
 				JSON.parse(updates[0]).op.should.deep.equal @update.op
 				done()
 			return null

@@ -164,7 +165,7 @@ describe "Applying updates to a doc", ->
 			return null
 
 		it "should push the applied updates to the project history changes api", (done) ->
-			rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+			rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
 				JSON.parse(updates[0]).op.should.deep.equal @update.op
 				done()
 			return null
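The acceptance tests now hold two clients side by side: rclient_history still points at the track-changes Redis, while rclient_project_history points at the new project_history instance, and every assertion on ProjectHistoryKeys moves to the latter. A sketch of the split, using the same createClient calls as the tests (project_id is an illustrative value):

Settings = require('settings-sharelatex')
rclient_history = require("redis-sharelatex").createClient(Settings.redis.history)                  # track changes
rclient_project_history = require("redis-sharelatex").createClient(Settings.redis.project_history)  # project history
ProjectHistoryKeys = Settings.redis.project_history.key_schema
project_id = "project-id-123"

# queue reads go to the project-history client now
rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({project_id}), 0, -1, (error, updates) ->
	throw error if error?
	console.log JSON.parse(updates[0]).op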
@@ -2,7 +2,7 @@ sinon = require "sinon"
 chai = require("chai")
 chai.should()
 Settings = require('settings-sharelatex')
-rclient_history = require("redis-sharelatex").createClient(Settings.redis.history)
+rclient_project_history = require("redis-sharelatex").createClient(Settings.redis.project_history)
 ProjectHistoryKeys = Settings.redis.project_history.key_schema
 
 MockProjectHistoryApi = require "./helpers/MockProjectHistoryApi"

@@ -30,7 +30,7 @@ describe "Applying updates to a project's structure", ->
 			setTimeout done, 200
 
 		it "should push the applied file renames to the project history api", (done) ->
-			rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+			rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
 				throw error if error?
 
 				update = JSON.parse(updates[0])

@@ -61,7 +61,7 @@ describe "Applying updates to a project's structure", ->
 			return null
 
 		it "should push the applied doc renames to the project history api", (done) ->
-			rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+			rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
 				throw error if error?
 
 				update = JSON.parse(updates[0])

@@ -97,7 +97,7 @@ describe "Applying updates to a project's structure", ->
 			return null
 
 		it "should push the applied doc renames to the project history api", (done) ->
-			rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+			rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
 				throw error if error?
 
 				update = JSON.parse(updates[0])

@@ -141,7 +141,7 @@ describe "Applying updates to a project's structure", ->
 			return null
 
 		it "should push the applied doc renames to the project history api", (done) ->
-			rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+			rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
 				throw error if error?
 
 				update = JSON.parse(updates[0])

@@ -194,7 +194,7 @@ describe "Applying updates to a project's structure", ->
 			return null
 
 		it "should push the file addition to the project history api", (done) ->
-			rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+			rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
 				throw error if error?
 
 				update = JSON.parse(updates[0])

@@ -222,7 +222,7 @@ describe "Applying updates to a project's structure", ->
 			return null
 
 		it "should push the doc addition to the project history api", (done) ->
-			rclient_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
+			rclient_project_history.lrange ProjectHistoryKeys.projectHistoryOps({@project_id}), 0, -1, (error, updates) =>
 				throw error if error?
 
 				update = JSON.parse(updates[0])
@@ -26,6 +26,7 @@ describe "ProjectHistoryRedisManager", ->
 				createClient: () => @rclient
 			"logger-sharelatex":
 				log:->
+			"./Metrics": @metrics = { summary: sinon.stub()}
 		globals:
 			JSON: @JSON = JSON
 
@@ -25,6 +25,7 @@ describe "RealTimeRedisManager", ->
 			"logger-sharelatex": { log: () -> }
 			"crypto": @crypto = { randomBytes: sinon.stub().withArgs(4).returns(Buffer.from([0x1, 0x2, 0x3, 0x4])) }
 			"os": @os = {hostname: sinon.stub().returns("somehost")}
+			"./Metrics": @metrics = { summary: sinon.stub()}
 
 		@doc_id = "doc-id-123"
 		@project_id = "project-id-123"

@@ -89,3 +90,6 @@ describe "RealTimeRedisManager", ->
 
 	it "should send the op with a message id", ->
 		@pubsubClient.publish.calledWith("applied-ops", JSON.stringify({op:"thisop",_id:@message_id})).should.equal true
+
+	it "should track the payload size", ->
+		@metrics.summary.calledWith("redis.publish.applied-ops", JSON.stringify({op:"thisop",_id:@message_id}).length).should.equal true
@@ -48,6 +48,7 @@ describe "RedisManager", ->
 				createClient: () => @rclient
 			"./Metrics": @metrics =
 				inc: sinon.stub()
+				summary: sinon.stub()
 				Timer: class Timer
 					constructor: () ->
 						this.start = new Date()

@@ -670,11 +671,17 @@ describe "RedisManager", ->
 
 	describe "removeDocFromMemory", ->
 		beforeEach (done) ->
+			@multi.strlen = sinon.stub()
 			@multi.del = sinon.stub()
 			@multi.srem = sinon.stub()
 			@multi.exec.yields()
 			@RedisManager.removeDocFromMemory @project_id, @doc_id, done
 
+		it "should check the length of the current doclines", ->
+			@multi.strlen
+				.calledWith("doclines:#{@doc_id}")
+				.should.equal true
+
 		it "should delete the lines", ->
 			@multi.del
 				.calledWith("doclines:#{@doc_id}")
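The new unit test follows the same sinon stub-and-assert shape as the existing del assertions: strlen is stubbed in beforeEach, the code under test runs, and the call is checked against the doclines key. A self-contained sketch of that shape, outside the real test scaffolding (the key string is illustrative):

sinon = require "sinon"
chai = require "chai"
chai.should()

multi = { strlen: sinon.stub() }   # stand-in for the stubbed multi object
multi.strlen "doclines:doc-id-123" # the code under test would issue this call
multi.strlen.calledWith("doclines:doc-id-123").should.equal true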