Merge branch 'master' into bg-use-separate-redis-for-project-history

commit 0419039d4d
Brian Gough, 2020-02-21 14:13:33 +00:00, committed by GitHub
14 changed files with 108 additions and 69 deletions

View file

@@ -1 +1 @@
6.9.5
10.19.0

View file

@@ -1,11 +0,0 @@
language: node_js
before_install:
- npm install -g grunt-cli
install:
- npm install
- grunt install
script:
- grunt test:unit

View file

@@ -16,6 +16,7 @@ pipeline {
}
stages {
stage('Install') {
steps {
withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {

View file

@@ -1,7 +1,7 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.24
# Version: 1.3.5
BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
@@ -28,14 +28,20 @@ test_unit:
test_acceptance: test_clean test_acceptance_pre_run test_acceptance_run
test_acceptance_debug: test_clean test_acceptance_pre_run test_acceptance_run_debug
test_acceptance_run:
@[ ! -d test/acceptance ] && echo "document-updater has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance
test_acceptance_run_debug:
@[ ! -d test/acceptance ] && echo "document-updater has no acceptance tests" || $(DOCKER_COMPOSE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk
test_clean:
$(DOCKER_COMPOSE) down -v -t 0
test_acceptance_pre_run:
@[ ! -f test/acceptance/js/scripts/pre-run ] && echo "document-updater has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
build:
docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
@@ -48,4 +54,5 @@ publish:
docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
.PHONY: clean test test_unit test_acceptance test_clean build publish

View file

@@ -12,6 +12,21 @@ request = (require("requestretry")).defaults({
# hold us up, and need to bail out quickly if there is a problem.
MAX_HTTP_REQUEST_LENGTH = 5000 # 5 seconds
updateMetric = (method, error, response) ->
# find the status, with special handling for connection timeouts
# https://github.com/request/request#timeouts
status = if error?.connect is true
"#{error.code} (connect)"
else if error?
error.code
else if response?
response.statusCode
Metrics.inc method, 1, {status: status}
if error?.attempts > 1
Metrics.inc "#{method}-retries", 1, {status: 'error'}
if response?.attempts > 1
Metrics.inc "#{method}-retries", 1, {status: 'success'}
module.exports = PersistenceManager =
getDoc: (project_id, doc_id, _callback = (error, lines, version, ranges, pathname, projectHistoryId, projectHistoryType) ->) ->
timer = new Metrics.Timer("persistenceManager.getDoc")
@@ -32,6 +47,7 @@ module.exports = PersistenceManager =
jar: false
timeout: MAX_HTTP_REQUEST_LENGTH
}, (error, res, body) ->
updateMetric('getDoc', error, res)
return callback(error) if error?
if res.statusCode >= 200 and res.statusCode < 300
try
@@ -73,6 +89,7 @@ module.exports = PersistenceManager =
jar: false
timeout: MAX_HTTP_REQUEST_LENGTH
}, (error, res, body) ->
updateMetric('setDoc', error, res)
return callback(error) if error?
if res.statusCode >= 200 and res.statusCode < 300
return callback null

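For context (not part of the diff): a minimal, self-contained sketch of how the new updateMetric helper labels its counters, using only a stand-in Metrics object. Per the request timeouts documentation linked above, a connection timeout sets error.connect = true, and the code above also reads an attempts count from the error or the response (as set by requestretry).

  # Sketch only: stand-in Metrics that prints instead of incrementing StatsD counters.
  Metrics = inc: (name, delta, opts) -> console.log name, delta, opts

  updateMetric = (method, error, response) ->
    # connection timeouts are labelled "<code> (connect)", other errors by error code,
    # and completed requests by HTTP status code
    status = if error?.connect is true
      "#{error.code} (connect)"
    else if error?
      error.code
    else if response?
      response.statusCode
    Metrics.inc method, 1, {status: status}
    Metrics.inc "#{method}-retries", 1, {status: 'error'} if error?.attempts > 1
    Metrics.inc "#{method}-retries", 1, {status: 'success'} if response?.attempts > 1

  # A connection timeout after three attempts:
  err = new Error("connect ETIMEDOUT")
  err.code = "ETIMEDOUT"
  err.connect = true
  err.attempts = 3
  updateMetric "getDoc", err, null
  # -> getDoc 1 { status: 'ETIMEDOUT (connect)' }
  # -> getDoc-retries 1 { status: 'error' }

  # A response that succeeded on the first attempt:
  updateMetric "setDoc", null, {statusCode: 200, attempts: 1}
  # -> setDoc 1 { status: 200 }
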
View file

@@ -36,6 +36,8 @@ module.exports = RedisManager =
docLines = JSON.stringify(docLines)
if docLines.indexOf("\u0000") != -1
error = new Error("null bytes found in doc lines")
# this check was added to catch memory corruption in JSON.stringify.
# It sometimes returned null bytes at the end of the string.
logger.error {err: error, doc_id: doc_id, docLines: docLines}, error.message
return callback(error)
docHash = RedisManager._computeHash(docLines)
@@ -224,12 +226,14 @@ module.exports = RedisManager =
for op in jsonOps
if op.indexOf("\u0000") != -1
error = new Error("null bytes found in jsonOps")
# this check was added to catch memory corruption in JSON.stringify
logger.error {err: error, doc_id: doc_id, jsonOps: jsonOps}, error.message
return callback(error)
newDocLines = JSON.stringify(docLines)
if newDocLines.indexOf("\u0000") != -1
error = new Error("null bytes found in doc lines")
# this check was added to catch memory corruption in JSON.stringify
logger.error {err: error, doc_id: doc_id, newDocLines: newDocLines}, error.message
return callback(error)
newHash = RedisManager._computeHash(newDocLines)
@@ -243,6 +247,7 @@ module.exports = RedisManager =
return callback(error)
if ranges? and ranges.indexOf("\u0000") != -1
error = new Error("null bytes found in ranges")
# this check was added to catch memory corruption in JSON.stringify
logger.error err: error, doc_id: doc_id, ranges: ranges, error.message
return callback(error)
multi = rclient.multi()

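Aside (not from the diff): the new comments explain that these guards were added after JSON.stringify occasionally produced strings with trailing null bytes, suspected memory corruption. A hedged sketch of the same guard pattern, applied to a deliberately corrupted string:

  # Sketch only: the defensive null-byte check used above, with a simulated
  # corrupted JSON string (the real corruption appeared at the end of the output).
  checkForNullBytes = (label, json) ->
    if json.indexOf("\u0000") != -1
      return new Error("null bytes found in #{label}")
    null

  good = JSON.stringify(["hello", "world"])
  bad = '["hello","world"]' + "\u0000"   # simulated corruption, not real JSON.stringify output

  console.log checkForNullBytes("doc lines", good)   # -> null
  console.log checkForNullBytes("doc lines", bad)    # -> Error: null bytes found in doc lines
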
View file

@@ -1,10 +1,10 @@
document-updater
--public-repo=True
--language=coffeescript
--node-version=6.9.5
--env-add=
--node-version=10.19.0
--acceptance-creds=None
--dependencies=mongo,redis
--docker-repos=gcr.io/overleaf-ops
--build-target=docker
--script-version=1.1.24
--env-pass-through=
--public-repo=True
--script-version=1.3.5

View file

@@ -20,13 +20,6 @@
"pify": "^4.0.1",
"retry-request": "^4.0.0",
"teeny-request": "^3.11.3"
},
"dependencies": {
"extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
}
}
},
"@google-cloud/debug-agent": {
@@ -453,7 +446,6 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz",
"integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=",
"optional": true,
"requires": {
"tweetnacl": "^0.14.3"
}
@@ -934,7 +926,6 @@
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz",
"integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=",
"optional": true,
"requires": {
"jsbn": "~0.1.0"
}
@@ -1088,9 +1079,9 @@
}
},
"extend": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz",
"integrity": "sha1-p1Xqe8Gt/MWjHOfnYtuq3F5jZEQ="
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
},
"extsprintf": {
"version": "1.3.0",
@@ -1204,13 +1195,6 @@
"extend": "^3.0.2",
"https-proxy-agent": "^2.2.1",
"node-fetch": "^2.3.0"
},
"dependencies": {
"extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
}
}
},
"gcp-metadata": {
@@ -1442,8 +1426,7 @@
"jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=",
"optional": true
"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM="
},
"json-bigint": {
"version": "0.3.0",
@@ -1511,9 +1494,9 @@
"integrity": "sha1-SjGI1CkbZrT2XtuZ+AaqmuKTWSo="
},
"lodash": {
"version": "4.17.4",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz",
"integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4="
"version": "4.17.13",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.13.tgz",
"integrity": "sha512-vm3/XWXfWtRua0FkUyEHBZy8kCPjErNBT9fJx8Zvs+U6zjqPbTUOpkaoum3O5uiA8sm+yNMHXfYkTUHFoMxFNA=="
},
"lodash.defaults": {
"version": "4.2.0",
@@ -1594,11 +1577,6 @@
"nan": "^2.10.0"
}
},
"extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
},
"fast-deep-equal": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz",
@@ -2657,6 +2635,11 @@
"integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==",
"optional": true
},
"safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"sandboxed-module": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.2.2.tgz",
@@ -2788,9 +2771,9 @@
}
},
"sshpk": {
"version": "1.13.1",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.13.1.tgz",
"integrity": "sha1-US322mKHFEMW3EwY/hzx2UBzm+M=",
"version": "1.16.1",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
"integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==",
"requires": {
"asn1": "~0.2.3",
"assert-plus": "^1.0.0",
@@ -2799,13 +2782,17 @@
"ecc-jsbn": "~0.1.1",
"getpass": "^0.1.1",
"jsbn": "~0.1.0",
"safer-buffer": "^2.0.2",
"tweetnacl": "~0.14.0"
},
"dependencies": {
"asn1": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz",
"integrity": "sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y="
"version": "0.2.4",
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz",
"integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==",
"requires": {
"safer-buffer": "~2.1.0"
}
},
"assert-plus": {
"version": "1.0.0",
@@ -2874,9 +2861,9 @@
}
},
"stringstream": {
"version": "0.0.5",
"resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz",
"integrity": "sha1-TkhM1N5aC7vuGORjB3EKioFiGHg="
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.6.tgz",
"integrity": "sha512-87GEBAkegbBcweToUrdzf3eLhWNg06FJTebl4BVJz/JgWy8CvEr9dRtX5qWphiynMSQlxxi+QqN0z5T32SLlhA=="
},
"tdigest": {
"version": "0.1.1",
@@ -2949,8 +2936,7 @@
"tweetnacl": {
"version": "0.14.5",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=",
"optional": true
"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q="
},
"type-is": {
"version": "1.3.1",

View file

@@ -23,7 +23,7 @@
"async": "^2.5.0",
"coffee-script": "~1.7.0",
"express": "3.11.0",
"lodash": "^4.17.4",
"lodash": "^4.17.13",
"logger-sharelatex": "^1.7.0",
"lynx": "0.0.11",
"metrics-sharelatex": "^2.4.0",

View file

@@ -33,7 +33,17 @@ module.exports = DocUpdaterClient =
do (update) ->
jobs.push (callback) ->
DocUpdaterClient.sendUpdate project_id, doc_id, update, callback
async.series jobs, callback
async.series jobs, (err) ->
DocUpdaterClient.waitForPendingUpdates project_id, doc_id, callback
waitForPendingUpdates: (project_id, doc_id, callback) ->
async.retry {times: 30, interval: 100}, (cb) ->
rclient.llen keys.pendingUpdates({doc_id}), (err, length) ->
if length > 0
cb(new Error("updates still pending"))
else
cb()
, callback
getDoc: (project_id, doc_id, callback = (error, res, body) ->) ->
request.get "http://localhost:3003/project/#{project_id}/doc/#{doc_id}", (error, res, body) ->

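For context (not part of the diff): the new waitForPendingUpdates helper polls the length of the doc's pending-updates list in Redis, retrying up to 30 times at 100ms intervals, so acceptance tests only read back a doc once the queue has drained. A minimal sketch of the same polling pattern, assuming a node-redis/ioredis-style client and the async library already used above; the client wiring and key name are placeholders:

  async = require "async"

  # Sketch only: poll until a Redis list is empty, then call back.
  waitForEmptyList = (rclient, key, callback) ->
    async.retry {times: 30, interval: 100}, (cb) ->
      rclient.llen key, (err, length) ->
        return cb(err) if err?
        if length > 0
          cb(new Error("updates still pending"))
        else
          cb()
    , callback

  # Usage: block an acceptance test until all queued updates have been applied.
  # waitForEmptyList rclient, "PendingUpdates:#{doc_id}", (err) ->
  #   throw err if err?
  #   runAssertions()
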
View file

@@ -13,7 +13,7 @@ describe "DispatchManager", ->
"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), warn: sinon.stub() }
"settings-sharelatex": @settings =
redis:
realtime: {}
documentupdater: {}
"redis-sharelatex": @redis = {}
"./RateLimitManager": {}
"./Errors": Errors

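Aside (not part of the diff): the stubbed settings above switch from redis.realtime to redis.documentupdater, in line with the branch's goal of giving each consumer its own Redis configuration block. A hedged sketch of what such a settings shape could look like; everything beyond the two block names is an illustrative assumption, not taken from the real config:

  # Sketch only: separate Redis connection settings per feature, so project
  # history traffic can be pointed at its own Redis instance.
  settings =
    redis:
      documentupdater:      # used by DispatchManager / RedisManager
        host: "localhost"
        port: 6379
      project_history:      # used by the project-history queue (assumed block name)
        host: "localhost"
        port: 6380
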
View file

@@ -15,6 +15,7 @@ describe "PersistenceManager", ->
"./Metrics": @Metrics =
Timer: class Timer
done: sinon.stub()
inc: sinon.stub()
"logger-sharelatex": @logger = {log: sinon.stub(), err: sinon.stub()}
@project_id = "project-id-123"
@projectHistoryId = "history-id-123"
@@ -71,9 +72,14 @@ describe "PersistenceManager", ->
it "should time the execution", ->
@Metrics.Timer::done.called.should.equal true
it "should increment the metric", ->
@Metrics.inc.calledWith("getDoc", 1, {status: 200}).should.equal true
describe "when request returns an error", ->
beforeEach ->
@request.callsArgWith(1, @error = new Error("oops"), null, null)
@error = new Error("oops")
@error.code = "EOOPS"
@request.callsArgWith(1, @error, null, null)
@PersistenceManager.getDoc(@project_id, @doc_id, @callback)
it "should return the error", ->
@@ -82,6 +88,9 @@ describe "PersistenceManager", ->
it "should time the execution", ->
@Metrics.Timer::done.called.should.equal true
it "should increment the metric", ->
@Metrics.inc.calledWith("getDoc", 1, {status: "EOOPS"}).should.equal true
describe "when the request returns 404", ->
beforeEach ->
@request.callsArgWith(1, null, {statusCode: 404}, "")
@@ -93,6 +102,9 @@ describe "PersistenceManager", ->
it "should time the execution", ->
@Metrics.Timer::done.called.should.equal true
it "should increment the metric", ->
@Metrics.inc.calledWith("getDoc", 1, {status: 404}).should.equal true
describe "when the request returns an error status code", ->
beforeEach ->
@request.callsArgWith(1, null, {statusCode: 500}, "")
@@ -104,6 +116,9 @@ describe "PersistenceManager", ->
it "should time the execution", ->
@Metrics.Timer::done.called.should.equal true
it "should increment the metric", ->
@Metrics.inc.calledWith("getDoc", 1, {status: 500}).should.equal true
describe "when request returns an doc without lines", ->
beforeEach ->
delete @webResponse.lines
@@ -163,9 +178,14 @@ describe "PersistenceManager", ->
it "should time the execution", ->
@Metrics.Timer::done.called.should.equal true
it "should increment the metric", ->
@Metrics.inc.calledWith("setDoc", 1, {status: 200}).should.equal true
describe "when request returns an error", ->
beforeEach ->
@request.callsArgWith(1, @error = new Error("oops"), null, null)
@error = new Error("oops")
@error.code = "EOOPS"
@request.callsArgWith(1, @error, null, null)
@PersistenceManager.setDoc(@project_id, @doc_id, @lines, @version, @ranges, @lastUpdatedAt, @lastUpdatedBy, @callback)
it "should return the error", ->
@@ -174,6 +194,9 @@ describe "PersistenceManager", ->
it "should time the execution", ->
@Metrics.Timer::done.called.should.equal true
it "should increment the metric", ->
@Metrics.inc.calledWith("setDoc", 1, {status: "EOOPS"}).should.equal true
describe "when the request returns 404", ->
beforeEach ->
@request.callsArgWith(1, null, {statusCode: 404}, "")
@@ -185,6 +208,9 @@ describe "PersistenceManager", ->
it "should time the execution", ->
@Metrics.Timer::done.called.should.equal true
it "should increment the metric", ->
@Metrics.inc.calledWith("setDoc", 1, {status: 404}).should.equal true
describe "when the request returns an error status code", ->
beforeEach ->
@request.callsArgWith(1, null, {statusCode: 500}, "")
@@ -196,3 +222,5 @@ describe "PersistenceManager", ->
it "should time the execution", ->
@Metrics.Timer::done.called.should.equal true
it "should increment the metric", ->
@Metrics.inc.calledWith("setDoc", 1, {status: 500}).should.equal true

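Aside (not from the diff): the new assertions rely on sinon's deep-equality matching of the label object passed to Metrics.inc, which is why a freshly constructed {status: 200} matches the object the code under test created. A tiny self-contained illustration of that pattern, assuming sinon and chai as in the surrounding tests:

  sinon = require "sinon"
  require("chai").should()

  # Sketch only: stub the metric, invoke it, then assert on the exact label object.
  Metrics = inc: sinon.stub()
  Metrics.inc "getDoc", 1, {status: "EOOPS"}
  Metrics.inc.calledWith("getDoc", 1, {status: "EOOPS"}).should.equal true
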
View file

@@ -9,9 +9,7 @@ describe "RateLimitManager", ->
beforeEach ->
@RateLimitManager = SandboxedModule.require modulePath, requires:
"logger-sharelatex": @logger = { log: sinon.stub() }
"settings-sharelatex": @settings =
redis:
realtime: {}
"settings-sharelatex": @settings = {}
"./Metrics": @Metrics =
Timer: class Timer
done: sinon.stub()

View file

@@ -445,13 +445,12 @@ describe "RedisManager", ->
describe "with project history disabled", ->
beforeEach ->
@rclient.rpush = sinon.stub()
@settings.apis.project_history.enabled = false
@RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version - @ops.length)
@RedisManager.updateDocument @project_id, @doc_id, @lines, @version, @ops, @ranges, @updateMeta, @callback
it "should not push the updates into the project history ops list", ->
@rclient.rpush.called.should.equal false
@ProjectHistoryRedisManager.queueOps.called.should.equal false
it "should call the callback", ->
@callback
@@ -493,7 +492,6 @@ describe "RedisManager", ->
describe "with no updates", ->
beforeEach ->
@rclient.rpush = sinon.stub().callsArgWith(1, null, @project_update_list_length)
@RedisManager.getDocVersion.withArgs(@doc_id).yields(null, @version)
@RedisManager.updateDocument @project_id, @doc_id, @lines, @version, [], @ranges, @updateMeta, @callback
@@ -503,7 +501,7 @@
.should.equal false
it "should not try to enqueue project updates", ->
@rclient.rpush
@ProjectHistoryRedisManager.queueOps
.called
.should.equal false
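
Closing aside (not part of the diff): the test changes above replace direct rclient.rpush assertions with checks on ProjectHistoryRedisManager.queueOps, matching the branch's move of project-history queues onto their own Redis connection. A hedged sketch of that indirection; the key format, method body, and stubbed client are assumptions for illustration, not the real implementation:

  # Sketch only: project history ops go through a manager that owns a dedicated
  # Redis client, instead of rpush-ing on the shared document-updater client.
  fakeRedis =   # stand-in for the dedicated project-history Redis connection
    rpush: (key, args..., cb) ->
      console.log "RPUSH", key, args
      cb(null, args.length)

  ProjectHistoryRedisManager =
    rclient: fakeRedis
    queueOps: (project_id, ops..., callback) ->
      @rclient.rpush "ProjectHistory:Ops:#{project_id}", ops..., callback

  ProjectHistoryRedisManager.queueOps "project-id-123", '{"op":"insert"}', (err, length) ->
    console.log "queued, list length is now", length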