Mirror of https://github.com/overleaf/overleaf.git
Commit 8bc3ff69e1
23 changed files with 2356 additions and 170 deletions
services/real-time/.dockerignore (Normal file, 9 lines)

@@ -0,0 +1,9 @@
node_modules/*
gitrev
.git
.gitignore
.npm
.nvmrc
nodemon.json
app.js
**/js/*
services/real-time/.github/ISSUE_TEMPLATE.md (vendored, Normal file, 38 lines)

@@ -0,0 +1,38 @@
<!-- BUG REPORT TEMPLATE -->

## Steps to Reproduce
<!-- Describe the steps leading up to when / where you found the bug. -->
<!-- Screenshots may be helpful here. -->

1.
2.
3.

## Expected Behaviour
<!-- What should have happened when you completed the steps above? -->

## Observed Behaviour
<!-- What actually happened when you completed the steps above? -->
<!-- Screenshots may be helpful here. -->

## Context
<!-- How has this issue affected you? What were you trying to accomplish? -->

## Technical Info
<!-- Provide any technical details that may be applicable (or N/A if not applicable). -->

* URL:
* Browser Name and version:
* Operating System and version (desktop or mobile):
* Signed in as:
* Project and/or file:

## Analysis
<!--- Optionally, document investigation of / suggest a fix for the bug, e.g. 'comes from this line / commit' -->

## Who Needs to Know?
<!-- If you want to bring this to the attention of particular people, @-mention them below. -->
<!-- If a user reported this bug and should be notified when it is fixed, provide the Front conversation link. -->

-
-
services/real-time/.github/PULL_REQUEST_TEMPLATE.md (vendored, Normal file, 45 lines)

@@ -0,0 +1,45 @@
<!-- Please review https://github.com/overleaf/write_latex/blob/master/.github/CONTRIBUTING.md for guidance on what is expected in each section. -->

### Description

#### Screenshots

#### Related Issues / PRs

### Review

#### Potential Impact

#### Manual Testing Performed

- [ ]
- [ ]

#### Accessibility

### Deployment

#### Deployment Checklist

- [ ] Update documentation not included in the PR (if any)
- [ ]

#### Metrics and Monitoring

#### Who Needs to Know?
services/real-time/.gitignore (vendored, 1 line changed)

@@ -4,3 +4,4 @@ app.js
app/js
test/unit/js
test/acceptance/js
**/*.map
services/real-time/Dockerfile (Normal file, 22 lines)

@@ -0,0 +1,22 @@
FROM node:6.15.1 as app

WORKDIR /app

#wildcard as some files may not be in all repos
COPY package*.json npm-shrink*.json /app/

RUN npm install --quiet

COPY . /app

RUN npm run compile:all

FROM node:6.15.1

COPY --from=app /app /app

WORKDIR /app
USER node

CMD ["node", "--expose-gc", "app.js"]
Deleted file (75 lines):

@@ -1,75 +0,0 @@
module.exports = (grunt) ->
  grunt.initConfig
    forever:
      app:
        options:
          index: "app.js"
    coffee:
      app_src:
        expand: true,
        flatten: true,
        cwd: "app"
        src: ['coffee/*.coffee'],
        dest: 'app/js/',
        ext: '.js'

      app:
        src: "app.coffee"
        dest: "app.js"

      unit_tests:
        expand: true
        cwd: "test/unit/coffee"
        src: ["**/*.coffee"]
        dest: "test/unit/js/"
        ext: ".js"

      acceptance_tests:
        expand: true
        cwd: "test/acceptance/coffee"
        src: ["**/*.coffee"]
        dest: "test/acceptance/js/"
        ext: ".js"
    clean:
      app: ["app/js/"]
      unit_tests: ["test/unit/js"]
      acceptance_tests: ["test/acceptance/js"]
      smoke_tests: ["test/smoke/js"]

    execute:
      app:
        src: "app.js"

    mochaTest:
      unit:
        options:
          reporter: grunt.option('reporter') or 'spec'
          grep: grunt.option("grep")
        src: ["test/unit/js/**/*.js"]
      acceptance:
        options:
          reporter: grunt.option('reporter') or 'spec'
          timeout: 40000
          grep: grunt.option("grep")
        src: ["test/acceptance/js/**/*.js"]

  grunt.loadNpmTasks 'grunt-contrib-coffee'
  grunt.loadNpmTasks 'grunt-contrib-clean'
  grunt.loadNpmTasks 'grunt-mocha-test'
  grunt.loadNpmTasks 'grunt-shell'
  grunt.loadNpmTasks 'grunt-execute'
  grunt.loadNpmTasks 'grunt-bunyan'
  grunt.loadNpmTasks 'grunt-forever'

  grunt.registerTask 'compile:app', ['clean:app', 'coffee:app', 'coffee:app_src']
  grunt.registerTask 'run', ['compile:app', 'bunyan', 'execute']

  grunt.registerTask 'compile:unit_tests', ['clean:unit_tests', 'coffee:unit_tests']
  grunt.registerTask 'test:unit', ['compile:app', 'compile:unit_tests', 'mochaTest:unit']

  grunt.registerTask 'compile:acceptance_tests', ['clean:acceptance_tests', 'coffee:acceptance_tests']
  grunt.registerTask 'test:acceptance', ['compile:acceptance_tests', 'mochaTest:acceptance']

  grunt.registerTask 'install', 'compile:app'

  grunt.registerTask 'default', ['run']
services/real-time/Jenkinsfile (vendored, 109 lines changed)

@@ -1,87 +1,122 @@
String cron_string = BRANCH_NAME == "master" ? "@daily" : ""

pipeline {

  agent any

  environment {
    HOME = "/tmp"
  environment {
    GIT_PROJECT = "real-time-sharelatex"
    JENKINS_WORKFLOW = "real-time-sharelatex"
    TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline"
    GIT_API_URL = "https://api.github.com/repos/sharelatex/${GIT_PROJECT}/statuses/$GIT_COMMIT"
  }

  triggers {
    pollSCM('* * * * *')
    cron('@daily')
    cron(cron_string)
  }

  stages {
    stage('Install') {
      agent {
        docker {
          image 'node:6.15.1'
          args "-v /var/lib/jenkins/.npm:/tmp/.npm -e HOME=/tmp"
          reuseNode true
      steps {
        withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
          sh "curl $GIT_API_URL \
            --data '{ \
            \"state\" : \"pending\", \
            \"target_url\": \"$TARGET_URL\", \
            \"description\": \"Your build is underway\", \
            \"context\": \"ci/jenkins\" }' \
            -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
        }
      }
    }

    stage('Build') {
      steps {
        // we need to disable logallrefupdates, else git clones during the npm install will require git to lookup the user id
        // which does not exist in the container's /etc/passwd file, causing the clone to fail.
        sh 'git config --global core.logallrefupdates false'
        sh 'rm -fr node_modules'
        sh 'npm install'
        sh 'npm rebuild'
        sh 'npm install --quiet grunt-cli'
        sh 'make build'
      }
    }
    stage('Compile and Test') {
      agent {
        docker {
          image 'node:6.15.1'
          args "-v /var/lib/jenkins/.npm:/tmp/.npm -e HOME=/tmp"
          reuseNode true
        }
      }

    stage('Unit Tests') {
      steps {
        sh 'node_modules/.bin/grunt install'
        sh 'node_modules/.bin/grunt compile:acceptance_tests'
        sh 'node_modules/.bin/grunt test:unit'
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'
      }
    }

    stage('Acceptance Tests') {
      steps {
        sh 'docker pull sharelatex/acceptance-test-runner'
        sh 'docker run --rm -v $(pwd):/app sharelatex/acceptance-test-runner'
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
      }
    }
    stage('Package') {

    stage('Package and docker push') {
      steps {
        sh 'echo ${BUILD_NUMBER} > build_number.txt'
        sh 'touch build.tar.gz' // Avoid tar warning about files changing during read
        sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .'
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar'

        withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) {
          sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}'
        }
        sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish'
        sh 'docker logout https://gcr.io/overleaf-ops'

      }
    }
    stage('Publish') {

    stage('Publish to s3') {
      steps {
        sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt'
        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz")
        }
        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          // The deployment process uses this file to figure out the latest build
          s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest")
        }
      }
    }
  }

  post {
    always {
      sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean'
      sh 'make clean'
    }

    success {
      withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
        sh "curl $GIT_API_URL \
          --data '{ \
          \"state\" : \"success\", \
          \"target_url\": \"$TARGET_URL\", \
          \"description\": \"Your build succeeded!\", \
          \"context\": \"ci/jenkins\" }' \
          -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
      }
    }

    failure {
      mail(from: "${EMAIL_ALERT_FROM}",
        to: "${EMAIL_ALERT_TO}",
      mail(from: "${EMAIL_ALERT_FROM}",
        to: "${EMAIL_ALERT_TO}",
        subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
        body: "Build: ${BUILD_URL}")
      withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
        sh "curl $GIT_API_URL \
          --data '{ \
          \"state\" : \"failure\", \
          \"target_url\": \"$TARGET_URL\", \
          \"description\": \"Your build failed\", \
          \"context\": \"ci/jenkins\" }' \
          -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
      }
    }
  }

  // The options directive is for configuration that applies to the whole job.
  options {
    // we'd like to make sure remove old builds, so we don't fill up our storage!
    buildDiscarder(logRotator(numToKeepStr:'50'))

    // And we'd really like to be sure that this build doesn't hang forever, so let's time it out after:
    timeout(time: 30, unit: 'MINUTES')
  }
services/real-time/Makefile (Normal file, 49 lines)

@@ -0,0 +1,49 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.12

BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = real-time
DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
	BRANCH_NAME=$(BRANCH_NAME) \
	PROJECT_NAME=$(PROJECT_NAME) \
	MOCHA_GREP=${MOCHA_GREP} \
	docker-compose ${DOCKER_COMPOSE_FLAGS}

clean:
	docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	rm -f app.js
	rm -rf app/js
	rm -rf test/unit/js
	rm -rf test/acceptance/js

test: test_unit test_acceptance

test_unit:
	@[ ! -d test/unit ] && echo "real-time has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit

test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run
	@[ ! -d test/acceptance ] && echo "real-time has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance

test_clean:
	$(DOCKER_COMPOSE) down -v -t 0

test_acceptance_pre_run:
	@[ ! -f test/acceptance/scripts/pre-run ] && echo "real-time has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run

build:
	docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		.

tar:
	$(DOCKER_COMPOSE) up tar

publish:
	docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)

.PHONY: clean test test_unit test_acceptance test_clean build publish
@@ -1,10 +1,16 @@
Metrics = require("metrics-sharelatex")
Settings = require "settings-sharelatex"
Metrics.initialize(Settings.appName or "real-time")

console.log Settings.redis

logger = require "logger-sharelatex"
logger.initialize("real-time-sharelatex")
logger.initialize("real-time")
Metrics.event_loop.monitor(logger)

express = require("express")
session = require("express-session")
redis = require("redis-sharelatex")
Settings = require "settings-sharelatex"
if Settings.sentry?.dsn?
  logger.initializeErrorReporting(Settings.sentry.dsn)

@@ -14,19 +20,18 @@ RedisStore = require('connect-redis')(session)
SessionSockets = require('session.socket.io')
CookieParser = require("cookie-parser")

Metrics = require("metrics-sharelatex")
Metrics.initialize(Settings.appName or "real-time")
Metrics.event_loop.monitor(logger)

DrainManager = require("./app/js/DrainManager")

# Set up socket.io server
app = express()
Metrics.injectMetricsRoute(app)
server = require('http').createServer(app)
io = require('socket.io').listen(server)

# Bind to sessions
sessionStore = new RedisStore(client: sessionRedisClient)
cookieParser = CookieParser(Settings.security.sessionSecret)

sessionSockets = new SessionSockets(io, sessionStore, cookieParser, Settings.cookieName)

io.configure ->

@@ -43,6 +48,9 @@ io.configure ->
  io.set('transports', ['websocket', 'flashsocket', 'htmlfile', 'xhr-polling', 'jsonp-polling'])
  io.set('log level', 1)

app.get "/", (req, res, next) ->
  res.send "real-time-sharelatex is alive"

app.get "/status", (req, res, next) ->
  res.send "real-time-sharelatex is alive"

@@ -55,6 +63,8 @@ app.get "/health_check/redis", (req, res, next) ->
  else
    res.sendStatus 200

Metrics.injectMetricsRoute(app)

Router = require "./app/js/Router"
Router.configure(app, io, sessionSockets)

@@ -73,3 +83,37 @@ server.listen port, host, (error) ->

# Stop huge stack traces in logs from all the socket.io parsing steps.
Error.stackTraceLimit = 10

shutdownCleanly = (signal) ->
  connectedClients = io.sockets.clients()?.length
  if connectedClients == 0
    logger.log("no clients connected, exiting")
    process.exit()
  else
    logger.log {connectedClients}, "clients still connected, not shutting down yet"
    setTimeout () ->
      shutdownCleanly(signal)
    , 10000

forceDrain = ->
  logger.log {delay_ms:Settings.forceDrainMsDelay}, "starting force drain after timeout"
  setTimeout ()->
    logger.log "starting drain"
    DrainManager.startDrain(io, 4)
  , Settings.forceDrainMsDelay

shutDownInProgress = false
if Settings.forceDrainMsDelay?
  Settings.forceDrainMsDelay = parseInt(Settings.forceDrainMsDelay, 10)
  logger.log forceDrainMsDelay: Settings.forceDrainMsDelay,"forceDrainMsDelay enabled"
  for signal in ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT']
    process.on signal, ->
      if shutDownInProgress
        logger.log signal: signal, "shutdown already in progress, ignoring signal"
        return
      else
        shutDownInProgress = true
        logger.log signal: signal, "received interrupt, cleaning up"
        shutdownCleanly(signal)
        forceDrain()
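The shutdown path above delegates to DrainManager.startDrain(io, 4), which is not part of this diff. As a rough mental model only (the real module may work differently), a drain manager of this shape asks a few connected clients per second to reconnect elsewhere instead of dropping every socket at once; the module and event names below are hypothetical.

# Illustrative sketch only, not the real DrainManager module. It assumes a
# socket.io 0.9-style server (io.sockets.clients() lists connected sockets)
# and a client-side handler for a hypothetical "reconnectGracefully" event.
module.exports = DrainManagerSketch =
  startDrain: (io, rate) ->
    # Ask `rate` clients per second to move to another instance, rather than
    # disconnecting everyone at the same moment.
    setInterval ->
      for client in io.sockets.clients().slice(0, rate)
        client.emit "reconnectGracefully"  # hypothetical event name
    , 1000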
@@ -12,7 +12,6 @@ FOUR_DAYS_IN_S = ONE_DAY_IN_S * 4
USER_TIMEOUT_IN_S = ONE_HOUR_IN_S / 4

module.exports =

  # Use the same method for when a user connects, and when a user sends a cursor
  # update. This way we don't care if the connected_user key has expired when
  # we receive a cursor update.
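The comment in this hunk captures a design choice: the same write path runs both when a user connects and when they send a cursor update, so if the connected_user key has already expired it is simply recreated and its TTL refreshed. The sketch below only illustrates that pattern; the function name updateUserPosition and the stored field are assumptions, not the real module's API.

# Illustrative sketch of the "same method for connect and cursor update" idea.
# The key schema and timeout mirror values shown in this diff; the function
# name and stored field are hypothetical.
Settings = require "settings-sharelatex"
redis = require "redis-sharelatex"
rclient = redis.createClient(Settings.redis.realtime)
USER_TIMEOUT_IN_S = 60 * 60 / 4  # matches USER_TIMEOUT_IN_S in the hunk above

updateUserPosition = (project_id, client_id, cursorData, callback = (error) ->) ->
  key = Settings.redis.realtime.key_schema.connectedUser({project_id, client_id})
  multi = rclient.multi()
  multi.hset key, "cursorData", JSON.stringify(cursorData)  # creates the hash if it expired
  multi.expire key, USER_TIMEOUT_IN_S                       # refresh the TTL on every call
  multi.exec callback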
@@ -197,8 +197,6 @@ module.exports = WebsocketController =
    update.meta.source = client.id
    update.meta.user_id = user_id
    metrics.inc "editor.doc-update", 0.3
    metrics.set "editor.active-projects", project_id, 0.3
    metrics.set "editor.active-users", user_id, 0.3

    logger.log {user_id, doc_id, project_id, client_id: client.id, version: update.v}, "sending update to doc updater"
services/real-time/buildscript.txt (Normal file, 8 lines)

@@ -0,0 +1,8 @@
real-time
--language=coffeescript
--node-version=6.15.1
--acceptance-creds=None
--dependencies=mongo,redis
--docker-repos=gcr.io/overleaf-ops
--build-target=docker
--script-version=1.1.12
@@ -1,24 +1,24 @@
module.exports =
settings =
  redis:
    realtime:
      host: process.env['REDIS_HOST'] or "localhost"
      port: "6379"
      password: ""
      host: process.env['REAL_TIME_REDIS_HOST'] or process.env['REDIS_HOST'] or "localhost"
      port: process.env['REAL_TIME_REDIS_PORT'] or process.env['REDIS_PORT'] or "6379"
      password: process.env["REAL_TIME_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or ""
      key_schema:
        clientsInProject: ({project_id}) -> "clients_in_project:#{project_id}"
        connectedUser: ({project_id, client_id})-> "connected_user:#{project_id}:#{client_id}"
        clientsInProject: ({project_id}) -> "clients_in_project:{#{project_id}}"
        connectedUser: ({project_id, client_id})-> "connected_user:{#{project_id}}:#{client_id}"

    documentupdater:
      host: process.env['REDIS_HOST'] or "localhost"
      port: "6379"
      password: ""
      host: process.env['DOC_UPDATER_REDIS_HOST'] or process.env['REDIS_HOST'] or "localhost"
      port: process.env['DOC_UPDATER_REDIS_PORT'] or process.env['REDIS_PORT'] or "6379"
      password: process.env["DOC_UPDATER_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or ""
      key_schema:
        pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}"
        pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}"

    websessions:
      host: process.env['REDIS_HOST'] or "localhost"
      port: "6379"
      password: ""
    websessions:
      host: process.env['WEB_REDIS_HOST'] or process.env['REDIS_HOST'] or "localhost"
      port: process.env['WEB_REDIS_PORT'] or process.env['REDIS_PORT'] or "6379"
      password: process.env["WEB_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or ""

  internal:
    realTime:

@@ -29,15 +29,21 @@ module.exports =

  apis:
    web:
      url: "http://#{process.env['WEB_HOST'] or "localhost"}:#{process.env['WEB_PORT'] or 3000}"
      user: "sharelatex"
      pass: "password"
      url: "http://#{process.env['WEB_API_HOST'] or process.env['WEB_HOST'] or "localhost"}:#{process.env['WEB_API_PORT'] or process.env['WEB_PORT'] or 3000}"
      user: process.env['WEB_API_USER'] or "sharelatex"
      pass: process.env['WEB_API_PASSWORD'] or "password"
    documentupdater:
      url: "http://#{process.env['DOCUPDATER_HOST'] or "localhost"}:3003"
      url: "http://#{process.env['DOCUMENT_UPDATER_HOST'] or process.env['DOCUPDATER_HOST'] or "localhost"}:3003"

  security:
    sessionSecret: "secret-please-change"
    sessionSecret: process.env['SESSION_SECRET'] or "secret-please-change"

  cookieName: "sharelatex.sid"
  cookieName: process.env['COOKIE_NAME'] or "sharelatex.sid"

  max_doc_length: 2 * 1024 * 1024 # 2mb
  max_doc_length: 2 * 1024 * 1024 # 2mb

  forceDrainMsDelay: process.env['FORCE_DRAIN_MS_DELAY'] or false

# console.log settings.redis
module.exports = settings
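One detail worth noting in the new key_schema above: the project and doc ids are now wrapped in curly braces (for example "clients_in_project:{#{project_id}}"). Redis Cluster treats the braced part as a hash tag, so all keys for the same id map to the same slot. A small illustration of how these key builders are used follows; the client, doc id and payload are placeholders rather than values from this commit.

# Illustration only: how the key_schema builders above are used.
Settings = require "settings-sharelatex"
redis = require "redis-sharelatex"
rclient = redis.createClient(Settings.redis.documentupdater)

keySchema = Settings.redis.documentupdater.key_schema
key = keySchema.pendingUpdates({doc_id: "example-doc-id"})
# key == "PendingUpdates:{example-doc-id}"; the {...} hash tag pins every key
# for this doc to one Redis Cluster slot.
rclient.rpush key, JSON.stringify({op: []}), (error) ->
  throw error if error?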
services/real-time/docker-compose.ci.yml (Normal file, 44 lines)

@@ -0,0 +1,44 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.12

version: "2"

services:
  test_unit:
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    user: node
    command: npm run test:unit:_run

  test_acceptance:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      REDIS_HOST: redis
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      MOCHA_GREP: ${MOCHA_GREP}
    depends_on:
      - mongo
      - redis
    user: node
    command: npm run test:acceptance:_run

  tar:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    volumes:
      - ./:/tmp/build/
    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
    user: root

  redis:
    image: redis

  mongo:
    image: mongo:3.4
services/real-time/docker-compose.yml (Normal file, 51 lines)

@@ -0,0 +1,51 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.12

version: "2"

services:
  test_unit:
    build: .
    volumes:
      - .:/app
    working_dir: /app
    environment:
      MOCHA_GREP: ${MOCHA_GREP}
    command: npm run test:unit
    user: node

  test_acceptance:
    build: .
    volumes:
      - .:/app
    working_dir: /app
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      REDIS_HOST: redis
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      MOCHA_GREP: ${MOCHA_GREP}
    user: node
    depends_on:
      - mongo
      - redis
    command: npm run test:acceptance

  tar:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    volumes:
      - ./:/tmp/build/
    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
    user: root

  redis:
    image: redis

  mongo:
    image: mongo:3.4
services/real-time/nodemon.json (Normal file, 19 lines)

@@ -0,0 +1,19 @@
{
  "ignore": [
    ".git",
    "node_modules/"
  ],
  "verbose": true,
  "legacyWatch": true,
  "execMap": {
    "js": "npm run start"
  },

  "watch": [
    "app/coffee/",
    "app.coffee",
    "config/"
  ],
  "ext": "coffee"

}
services/real-time/npm-shrinkwrap.json (generated, Normal file, 1871 lines)

File diff suppressed because it is too large.
@@ -8,8 +8,17 @@
    "url": "https://github.com/sharelatex/real-time-sharelatex.git"
  },
  "scripts": {
    "compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee",
    "start": "npm run compile:app && node app.js"
    "compile:app": "([ -e app/coffee ] && coffee -m $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee -m $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')",
    "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js",
    "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js",
    "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
    "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js",
    "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP",
    "compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee",
    "compile:acceptance_tests": "[ ! -e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee",
    "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests",
    "nodemon": "nodemon --config nodemon.json",
    "compile:smoke_tests": "[ ! -e test/smoke/coffee ] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee"
  },
  "dependencies": {
    "async": "^0.9.0",

@@ -19,12 +28,12 @@
    "cookie-parser": "^1.3.3",
    "express": "^4.10.1",
    "express-session": "^1.9.1",
    "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.6",
    "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.4.0",
    "redis-sharelatex": "git+https://github.com/sharelatex/redis-sharelatex.git#v1.0.4",
    "logger-sharelatex": "^1.6.0",
    "metrics-sharelatex": "^2.1.1",
    "redis-sharelatex": "https://registry.npmjs.org/redis-sharelatex/-/redis-sharelatex-1.0.5.tgz",
    "request": "~2.34.0",
    "session.socket.io": "^0.1.6",
    "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0",
    "settings-sharelatex": "^1.1.0",
    "socket.io": "0.9.16",
    "socket.io-client": "^0.9.16"
  },

@@ -32,16 +41,9 @@
    "bunyan": "~0.22.3",
    "chai": "~1.9.1",
    "cookie-signature": "^1.0.5",
    "grunt": "~0.4.4",
    "grunt-bunyan": "~0.5.0",
    "grunt-contrib-clean": "~0.5.0",
    "grunt-contrib-coffee": "~0.10.1",
    "grunt-execute": "~0.2.1",
    "grunt-forever": "~0.4.4",
    "grunt-mocha-test": "~0.10.2",
    "grunt-shell": "~0.7.0",
    "sandboxed-module": "~0.3.0",
    "sinon": "~1.5.2",
    "mocha": "^4.0.1",
    "uid-safe": "^1.0.1",
    "timekeeper": "0.0.4"
  }
@@ -8,7 +8,9 @@ FixturesManager = require "./helpers/FixturesManager"

settings = require "settings-sharelatex"
redis = require "redis-sharelatex"
rclient = redis.createClient(settings.redis.web)
rclient = redis.createClient(settings.redis.websessions)

redisSettings = settings.redis

describe "applyOtUpdate", ->
  before ->

@@ -48,7 +50,7 @@ describe "applyOtUpdate", ->
      done()

  it "should push the update into redis", (done) ->
    rclient.lrange "PendingUpdates:#{@doc_id}", 0, -1, (error, [update]) =>
    rclient.lrange redisSettings.documentupdater.key_schema.pendingUpdates({@doc_id}), 0, -1, (error, [update]) =>
      update = JSON.parse(update)
      update.op.should.deep.equal @update.op
      update.meta.should.deep.equal {

@@ -61,7 +63,7 @@ describe "applyOtUpdate", ->
    async.series [
      (cb) => rclient.del "pending-updates-list", cb
      (cb) => rclient.del "DocsWithPendingUpdates", "#{@project_id}:#{@doc_id}", cb
      (cb) => rclient.del "PendingUpdates:#{@doc_id}", cb
      (cb) => rclient.del redisSettings.documentupdater.key_schema.pendingUpdates(@doc_id), cb
    ], done

  describe "when authorized to read-only with an edit update", ->

@@ -102,7 +104,7 @@ describe "applyOtUpdate", ->
    , 300

  it "should not put the update in redis", (done) ->
    rclient.llen "PendingUpdates:#{@doc_id}", (error, len) =>
    rclient.llen redisSettings.documentupdater.key_schema.pendingUpdates({@doc_id}), (error, len) =>
      len.should.equal 0
      done()

@@ -142,7 +144,7 @@ describe "applyOtUpdate", ->
      done()

  it "should push the update into redis", (done) ->
    rclient.lrange "PendingUpdates:#{@doc_id}", 0, -1, (error, [update]) =>
    rclient.lrange redisSettings.documentupdater.key_schema.pendingUpdates({@doc_id}), 0, -1, (error, [update]) =>
      update = JSON.parse(update)
      update.op.should.deep.equal @comment_update.op
      update.meta.should.deep.equal {

@@ -155,5 +157,5 @@ describe "applyOtUpdate", ->
    async.series [
      (cb) => rclient.del "pending-updates-list", cb
      (cb) => rclient.del "DocsWithPendingUpdates", "#{@project_id}:#{@doc_id}", cb
      (cb) => rclient.del "PendingUpdates:#{@doc_id}", cb
      (cb) => rclient.del redisSettings.documentupdater.key_schema.pendingUpdates({@doc_id}), cb
    ], done
@@ -10,7 +10,7 @@ async = require "async"

settings = require "settings-sharelatex"
redis = require "redis-sharelatex"
rclient = redis.createClient(settings.redis.web)
rclient = redis.createClient(settings.redis.websessions)

describe "receiveUpdate", ->
  before (done) ->
@@ -4,7 +4,7 @@ io = require("socket.io-client")
request = require "request"
Settings = require "settings-sharelatex"
redis = require "redis-sharelatex"
rclient = redis.createClient(Settings.redis.web)
rclient = redis.createClient(Settings.redis.websessions)

uid = require('uid-safe').sync
signature = require("cookie-signature")
New file (24 lines):

@@ -0,0 +1,24 @@
app = require('../../../../app')
require("logger-sharelatex").logger.level("info")
logger = require("logger-sharelatex")
Settings = require("settings-sharelatex")

module.exports =
  running: false
  initing: false
  callbacks: []
  ensureRunning: (callback = (error) ->) ->
    if @running
      return callback()
    else if @initing
      @callbacks.push callback
    else
      @initing = true
      @callbacks.push callback
      app.listen Settings.internal?.realtime?.port, "localhost", (error) =>
        throw error if error?
        @running = true
        logger.log("clsi running in dev mode")

        for callback in @callbacks
          callback()
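The helper above starts the app once and fans the result out to every queued callback, so concurrent test files do not race to boot the server. A typical way a test might consume it is sketched below; the require path is an assumption, since the new file's name is not shown in this view.

# Hypothetical usage from an acceptance test; the path "./helpers/RealTimeApp"
# is assumed, not taken from this diff.
RealTimeApp = require "./helpers/RealTimeApp"

describe "some real-time behaviour", ->
  before (done) ->
    # Boots the app on Settings.internal.realtime.port the first time it is
    # called; later callers just wait for the same startup to finish.
    RealTimeApp.ensureRunning done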
@@ -559,12 +559,6 @@ describe 'WebsocketController', ->

  it "should call the callback", ->
    @callback.called.should.equal true

  it "should update the active users metric", ->
    @metrics.set.calledWith("editor.active-users", @user_id).should.equal true

  it "should update the active projects metric", ->
    @metrics.set.calledWith("editor.active-projects", @project_id).should.equal true

  it "should increment the doc updates", ->
    @metrics.inc.calledWith("editor.doc-update").should.equal true