Merge branch 'master' into bg-use-separate-redis-for-project-history

commit 0419039d4d
Brian Gough, 2020-02-21 14:13:33 +00:00, committed by GitHub
14 changed files with 108 additions and 69 deletions

View file

@@ -1 +1 @@
-6.9.5
+10.19.0

View file

@@ -1,11 +0,0 @@
-language: node_js
-
-before_install:
-  - npm install -g grunt-cli
-
-install:
-  - npm install
-  - grunt install
-
-script:
-  - grunt test:unit

View file

@@ -16,6 +16,7 @@ pipeline {
   }
   stages {
     stage('Install') {
       steps {
         withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {

View file

@@ -1,7 +1,7 @@
 # This file was auto-generated, do not edit it directly.
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
-# Version: 1.1.24
+# Version: 1.3.5
 BUILD_NUMBER ?= local
 BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
@@ -28,14 +28,20 @@ test_unit:
 test_acceptance: test_clean test_acceptance_pre_run test_acceptance_run
+test_acceptance_debug: test_clean test_acceptance_pre_run test_acceptance_run_debug
 test_acceptance_run:
 	@[ ! -d test/acceptance ] && echo "document-updater has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance
+test_acceptance_run_debug:
+	@[ ! -d test/acceptance ] && echo "document-updater has no acceptance tests" || $(DOCKER_COMPOSE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk
 test_clean:
 	$(DOCKER_COMPOSE) down -v -t 0
 test_acceptance_pre_run:
 	@[ ! -f test/acceptance/js/scripts/pre-run ] && echo "document-updater has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
 build:
 	docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
 		--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
@@ -48,4 +54,5 @@ publish:
 	docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
 .PHONY: clean test test_unit test_acceptance test_clean build publish

View file

@@ -12,6 +12,21 @@ request = (require("requestretry")).defaults({
 # hold us up, and need to bail out quickly if there is a problem.
 MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds
 
+updateMetric = (method, error, response) ->
+	# find the status, with special handling for connection timeouts
+	# https://github.com/request/request#timeouts
+	status = if error?.connect is true
+		"#{error.code} (connect)"
+	else if error?
+		error.code
+	else if response?
+		response.statusCode
+	Metrics.inc method, 1, {status: status}
+	if error?.attempts > 1
+		Metrics.inc "#{method}-retries", 1, {status: 'error'}
+	if response?.attempts > 1
+		Metrics.inc "#{method}-retries", 1, {status: 'success'}
+
 module.exports = PersistenceManager =
 	getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) ->) ->
 		timer = new Metrics.Timer("persistenceManager.getDoc")
@@ -32,6 +47,7 @@ module.exports = PersistenceManager =
 			jar: false
 			timeout: MAX_HTTP_REQUEST_LENGTH
 		}, (error, res, body) ->
+			updateMetric('getDoc', error, res)
 			return callback(error) if error?
 			if res.statusCode >= 200 and res.statusCode < 300
 				try
@@ -73,6 +89,7 @@ module.exports = PersistenceManager =
 			jar: false
 			timeout: MAX_HTTP_REQUEST_LENGTH
 		}, (error, res, body) ->
+			updateMetric('setDoc', error, res)
 			return callback(error) if error?
 			if res.statusCode >= 200 and res.statusCode < 300
 				return callback null
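
Note: the updateMetric helper added above labels each getDoc/setDoc call with the HTTP status code on success, the error code on failure, and an "<code> (connect)" label when requestretry flags a connection timeout; retried calls are counted separately via the attempts property. A standalone sketch of that classification logic; the helper name statusFor and the example error objects are illustrative, not part of the commit:

    # Sketch only: mirrors the status-label logic of updateMetric above.
    statusFor = (error, response) ->
      if error?.connect is true
        "#{error.code} (connect)"   # the TCP connection itself timed out
      else if error?
        error.code                  # e.g. ESOCKETTIMEDOUT, ECONNREFUSED
      else if response?
        response.statusCode         # normal HTTP outcome

    # invented example inputs
    connectError = new Error("connect timeout")
    connectError.code = "ETIMEDOUT"
    connectError.connect = true

    console.log statusFor(connectError, null)      # "ETIMEDOUT (connect)"
    console.log statusFor(null, statusCode: 200)   # 200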

View file

@@ -36,6 +36,8 @@ module.exports = RedisManager =
 		docLines = JSON.stringify(docLines)
 		if docLines.indexOf("\u0000") != -1
 			error = new Error("null bytes found in doc lines")
+			# this check was added to catch memory corruption in JSON.stringify.
+			# It sometimes returned null bytes at the end of the string.
 			logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message
 			return callback(error)
 		docHash = RedisManager._computeHash(docLines)
@@ -224,12 +226,14 @@
 			for op in jsonOps
 				if op.indexOf("\u0000") != -1
 					error = new Error("null bytes found in jsonOps")
+					# this check was added to catch memory corruption in JSON.stringify
 					logger.error {err: error, doc_id: doc_id, jsonOps: jsonOps}, error.message
 					return callback(error)
 			newDocLines = JSON.stringify(docLines)
 			if newDocLines.indexOf("\u0000") != -1
 				error = new Error("null bytes found in doc lines")
+				# this check was added to catch memory corruption in JSON.stringify
 				logger.error {err: error, doc_id: doc_id, newDocLines: newDocLines}, error.message
 				return callback(error)
 			newHash = RedisManager._computeHash(newDocLines)
@@ -243,6 +247,7 @@
 					return callback(error)
 			if ranges? and ranges.indexOf("\u0000") != -1
 				error = new Error("null bytes found in ranges")
+				# this check was added to catch memory corruption in JSON.stringify
 				logger.error err: error, doc_id: doc_id, ranges: ranges, error.message
 				return callback(error)
 			multi = rclient.multi()
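
Note: the comments added in this file all describe the same defensive check: JSON.stringify was occasionally observed to return strings containing null bytes (suspected memory corruption), so the manager refuses to write such a payload to Redis and reports an error instead. A minimal standalone sketch of that guard; the helper name is invented for illustration:

    # Sketch only: stringify a value and reject it if the result contains
    # a null byte, mirroring the checks added above.
    serializeOrError = (value) ->
      json = JSON.stringify(value)
      if json.indexOf("\u0000") != -1
        return [new Error("null bytes found in serialized value"), null]
      [null, json]

    [err, json] = serializeOrError(["hello", "world"])
    console.log err, json   # null '["hello","world"]'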

View file

@@ -1,10 +1,10 @@
 document-updater
+--public-repo=True
 --language=coffeescript
---node-version=6.9.5
+--env-add=
+--node-version=10.19.0
 --acceptance-creds=None
 --dependencies=mongo,redis
 --docker-repos=gcr.io/overleaf-ops
---build-target=docker
---script-version=1.1.24
 --env-pass-through=
---public-repo=True
+--script-version=1.3.5

View file

@@ -20,13 +20,6 @@
 "pify": "^4.0.1",
 "retry-request": "^4.0.0",
 "teeny-request": "^3.11.3"
-},
-"dependencies": {
-"extend": {
-"version": "3.0.2",
-"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
-"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
-}
 }
 },
 "@google-cloud/debug-agent": {
@@ -453,7 +446,6 @@
 "version": "1.0.1",
 "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz",
 "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=",
-"optional": true,
 "requires": {
 "tweetnacl": "^0.14.3"
 }
@@ -934,7 +926,6 @@
 "version": "0.1.1",
 "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz",
 "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=",
-"optional": true,
 "requires": {
 "jsbn": "~0.1.0"
 }
@@ -1088,9 +1079,9 @@
 }
 },
 "extend": {
-"version": "3.0.1",
-"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz",
-"integrity": "sha1-p1Xqe8Gt/MWjHOfnYtuq3F5jZEQ="
+"version": "3.0.2",
+"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
+"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
 },
 "extsprintf": {
 "version": "1.3.0",
@@ -1204,13 +1195,6 @@
 "extend": "^3.0.2",
 "https-proxy-agent": "^2.2.1",
 "node-fetch": "^2.3.0"
-},
-"dependencies": {
-"extend": {
-"version": "3.0.2",
-"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
-"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
-}
 }
 },
 "gcp-metadata": {
@@ -1442,8 +1426,7 @@
 "jsbn": {
 "version": "0.1.1",
 "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
-"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=",
-"optional": true
+"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM="
 },
 "json-bigint": {
 "version": "0.3.0",
@@ -1511,9 +1494,9 @@
 "integrity": "sha1-SjGI1CkbZrT2XtuZ+AaqmuKTWSo="
 },
 "lodash": {
-"version": "4.17.4",
-"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz",
-"integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4="
+"version": "4.17.13",
+"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.13.tgz",
+"integrity": "sha512-vm3/XWXfWtRua0FkUyEHBZy8kCPjErNBT9fJx8Zvs+U6zjqPbTUOpkaoum3O5uiA8sm+yNMHXfYkTUHFoMxFNA=="
 },
 "lodash.defaults": {
 "version": "4.2.0",
@@ -1594,11 +1577,6 @@
 "nan": "^2.10.0"
 }
 },
-"extend": {
-"version": "3.0.2",
-"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
-"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
-},
 "fast-deep-equal": {
 "version": "2.0.1",
 "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz",
@@ -2657,6 +2635,11 @@
 "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==",
 "optional": true
 },
+"safer-buffer": {
+"version": "2.1.2",
+"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
+"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
+},
 "sandboxed-module": {
 "version": "0.2.2",
 "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.2.2.tgz",
@@ -2788,9 +2771,9 @@
 }
 },
 "sshpk": {
-"version": "1.13.1",
-"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.13.1.tgz",
-"integrity": "sha1-US322mKHFEMW3EwY/hzx2UBzm+M=",
+"version": "1.16.1",
+"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
+"integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==",
 "requires": {
 "asn1": "~0.2.3",
 "assert-plus": "^1.0.0",
@@ -2799,13 +2782,17 @@
 "ecc-jsbn": "~0.1.1",
 "getpass": "^0.1.1",
 "jsbn": "~0.1.0",
+"safer-buffer": "^2.0.2",
 "tweetnacl": "~0.14.0"
 },
 "dependencies": {
 "asn1": {
-"version": "0.2.3",
-"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz",
-"integrity": "sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y="
+"version": "0.2.4",
+"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz",
+"integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==",
+"requires": {
+"safer-buffer": "~2.1.0"
+}
 },
 "assert-plus": {
 "version": "1.0.0",
@@ -2874,9 +2861,9 @@
 }
 },
 "stringstream": {
-"version": "0.0.5",
-"resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz",
-"integrity": "sha1-TkhM1N5aC7vuGORjB3EKioFiGHg="
+"version": "0.0.6",
+"resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.6.tgz",
+"integrity": "sha512-87GEBAkegbBcweToUrdzf3eLhWNg06FJTebl4BVJz/JgWy8CvEr9dRtX5qWphiynMSQlxxi+QqN0z5T32SLlhA=="
 },
 "tdigest": {
 "version": "0.1.1",
@@ -2949,8 +2936,7 @@
 "tweetnacl": {
 "version": "0.14.5",
 "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
-"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=",
-"optional": true
+"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q="
 },
 "type-is": {
 "version": "1.3.1",

View file

@@ -23,7 +23,7 @@
 "async": "^2.5.0",
 "coffee-script": "~1.7.0",
 "express": "3.11.0",
-"lodash": "^4.17.4",
+"lodash": "^4.17.13",
 "logger-sharelatex": "^1.7.0",
 "lynx": "0.0.11",
 "metrics-sharelatex": "^2.4.0",

View file

@@ -33,7 +33,17 @@ module.exports = DocUpdaterClient =
 			do (update) ->
 				jobs.push (callback) ->
 					DocUpdaterClient.sendUpdate project_id, doc_id, update, callback
-		async.series jobs, callback
+		async.series jobs, (err) ->
+			DocUpdaterClient.waitForPendingUpdates project_id, doc_id, callback
+
+	waitForPendingUpdates: (project_id, doc_id, callback) ->
+		async.retry {times: 30, interval: 100}, (cb) ->
+			rclient.llen keys.pendingUpdates({doc_id}), (err, length) ->
+				if length > 0
+					cb(new Error("updates still pending"))
+				else
+					cb()
+		, callback
 
 	getDoc: (project_id, doc_id, callback = (error, res, body) ->) ->
 		request.get "http://localhost:3003/project/#{project_id}/doc/#{doc_id}", (error, res, body) ->

View file

@@ -13,7 +13,7 @@
 		"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), warn: sinon.stub() }
 		"settings-sharelatex": @settings =
 			redis:
-				realtime: {}
+				documentupdater: {}
 		"redis-sharelatex": @redis = {}
 		"./RateLimitManager": {}
 		"./Errors": Errors

View file

@@ -15,6 +15,7 @@ describe "PersistenceManager", ->
 		"./Metrics": @Metrics =
 			Timer: class Timer
 				done: sinon.stub()
+			inc: sinon.stub()
 		"logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()}
 	@project_id = "project-id-123"
 	@projectHistoryId = "history-id-123"
@@ -71,9 +72,14 @@
 		it "should time the execution", ->
 			@Metrics.Timer::done.called.should.equal true
 
+		it "should increment the metric", ->
+			@Metrics.inc.calledWith("getDoc", 1, {status: 200}).should.equal true
+
 	describe "when request returns an error", ->
 		beforeEach ->
-			@request.callsArgWith(1, @error = new Error("oops"), null, null)
+			@error = new Error("oops")
+			@error.code = "EOOPS"
+			@request.callsArgWith(1, @error, null, null)
 			@PersistenceManager.getDoc(@project_id, @doc_id, @callback)
 
 		it "should return the error", ->
@@ -82,6 +88,9 @@
 		it "should time the execution", ->
 			@Metrics.Timer::done.called.should.equal true
 
+		it "should increment the metric", ->
+			@Metrics.inc.calledWith("getDoc", 1, {status: "EOOPS"}).should.equal true
+
 	describe "when the request returns 404", ->
 		beforeEach ->
 			@request.callsArgWith(1, null, {statusCode: 404}, "")
@@ -93,6 +102,9 @@
 		it "should time the execution", ->
 			@Metrics.Timer::done.called.should.equal true
 
+		it "should increment the metric", ->
+			@Metrics.inc.calledWith("getDoc", 1, {status: 404}).should.equal true
+
 	describe "when the request returns an error status code", ->
 		beforeEach ->
 			@request.callsArgWith(1, null, {statusCode: 500}, "")
@@ -104,6 +116,9 @@
 		it "should time the execution", ->
 			@Metrics.Timer::done.called.should.equal true
 
+		it "should increment the metric", ->
+			@Metrics.inc.calledWith("getDoc", 1, {status: 500}).should.equal true
+
 	describe "when request returns an doc without lines", ->
 		beforeEach ->
 			delete @webResponse.lines
@@ -163,9 +178,14 @@
 		it "should time the execution", ->
 			@Metrics.Timer::done.called.should.equal true
 
+		it "should increment the metric", ->
+			@Metrics.inc.calledWith("setDoc", 1, {status: 200}).should.equal true
+
 	describe "when request returns an error", ->
 		beforeEach ->
-			@request.callsArgWith(1, @error = new Error("oops"), null, null)
+			@error = new Error("oops")
+			@error.code = "EOOPS"
+			@request.callsArgWith(1, @error, null, null)
 			@PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, @lastUpdatedBy, @callback)
 
 		it "should return the error", ->
@@ -174,6 +194,9 @@
 		it "should time the execution", ->
 			@Metrics.Timer::done.called.should.equal true
 
+		it "should increment the metric", ->
+			@Metrics.inc.calledWith("setDoc", 1, {status: "EOOPS"}).should.equal true
+
 	describe "when the request returns 404", ->
 		beforeEach ->
 			@request.callsArgWith(1, null, {statusCode: 404}, "")
@@ -185,6 +208,9 @@
 		it "should time the execution", ->
 			@Metrics.Timer::done.called.should.equal true
 
+		it "should increment the metric", ->
+			@Metrics.inc.calledWith("setDoc", 1, {status: 404}).should.equal true
+
 	describe "when the request returns an error status code", ->
 		beforeEach ->
 			@request.callsArgWith(1, null, {statusCode: 500}, "")
@@ -196,3 +222,5 @@
 		it "should time the execution", ->
 			@Metrics.Timer::done.called.should.equal true
 
+		it "should increment the metric", ->
+			@Metrics.inc.calledWith("setDoc", 1, {status: 500}).should.equal true

View file

@@ -9,9 +9,7 @@
 	beforeEach ->
 		@RateLimitManager = SandboxedModule.require modulePath, requires:
 			"logger-sharelatex": @logger = { log: sinon.stub() }
-			"settings-sharelatex": @settings =
-				redis:
-					realtime: {}
+			"settings-sharelatex": @settings = {}
 			"./Metrics": @Metrics =
 				Timer: class Timer
 					done: sinon.stub()

View file

@@ -445,13 +445,12 @@ describe "RedisManager", ->
 	describe "with project history disabled", ->
 		beforeEach ->
-			@rclient.rpush = sinon.stub()
 			@settings.apis.project_history.enabled = false
 			@RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length)
 			@RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback
 
 		it "should not push the updates into the project history ops list", ->
-			@rclient.rpush.called.should.equal false
+			@ProjectHistoryRedisManager.queueOps.called.should.equal false
 
 		it "should call the callback", ->
 			@callback
@@ -493,7 +492,6 @@
 	describe "with no updates", ->
 		beforeEach ->
-			@rclient.rpush = sinon.stub().callsArgWith(1, null, @project_update_list_length)
 			@RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version)
 			@RedisManager.updateDocument @project_id, @doc_id, @lines, @version, [], @ranges, @updateMeta, @callback
@@ -503,7 +501,7 @@
 				.should.equal false
 
 		it "should not try to enqueue project updates", ->
-			@rclient.rpush
+			@ProjectHistoryRedisManager.queueOps
 				.called
 				.should.equal false