Provide hosts and sibling containers as environment settings and add npm run start script

wip: acceptance tests run, but don't all pass

wip

removed npm-debug from git
James Allen 2017-12-29 08:08:19 +00:00 committed by Henry Oswald
parent adaf742a7b
commit df21b9de53
14 changed files with 3431 additions and 94 deletions


@@ -1 +1 @@
6.11.2
6.9.5

services/clsi/Dockerfile (new file, +19 lines)

@@ -0,0 +1,19 @@
FROM node:6.9.5
RUN wget -qO- https://get.docker.com/ | sh
# ---- Copy Files/Build ----
WORKDIR /app
COPY ./ /app
# Build react/vue/angular bundle static files
# RUN npm run build
RUN npm install
RUN npm run compile
EXPOSE 3013
ENV SHARELATEX_CONFIG /app/config/settings.production.coffee
ENV NODE_ENV production
CMD ["node","/app/app.js"]
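
As a rough local sketch (the "clsi" image tag below is only an illustrative choice, not something defined in this commit), the image could be built and run like this; the Docker socket mount mirrors what docker-compose.yml in this commit does, so sibling compile containers can be started:

# Build the service image from this directory (tag name is illustrative)
docker build -t clsi .
# Run it, exposing the port from the Dockerfile and sharing the host's Docker socket
docker run --rm -p 3013:3013 \
  -v /var/run/docker.sock:/var/run/docker.sock \
  clsi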


@@ -1,79 +1,72 @@
pipeline {
String cron_string = BRANCH_NAME == "master" ? "@daily" : ""
pipeline {
  agent any
  triggers {
    pollSCM('* * * * *')
    cron('@daily')
    cron(cron_string)
  }
  stages {
    stage('Clean') {
      steps {
        // This is a terrible hack to set the file ownership to jenkins:jenkins so we can cleanup the directory
        sh 'docker run -v $(pwd):/app --rm busybox /bin/chown -R 111:119 /app'
        sh 'rm -fr node_modules'
      }
    }
    stage('Install') {
      agent {
        docker {
          image 'node:6.11.2'
          image 'node:6.9.5'
          args "-v /var/lib/jenkins/.npm:/tmp/.npm -e HOME=/tmp"
          reuseNode true
        }
      }
      steps {
        // we need to disable logallrefupdates, else git clones
        // during the npm install will require git to lookup the
        // user id which does not exist in the container's
        // /etc/passwd file, causing the clone to fail.
        sh 'git config --global core.logallrefupdates false'
        sh 'rm -fr node_modules'
        checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: '_docker-runner'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/docker-runner-sharelatex']]])
        sh 'npm install ./_docker-runner'
        sh 'rm -fr ./_docker-runner ./_docker-runner@tmp'
        sh 'npm install'
        sh 'npm rebuild'
        sh 'npm install --quiet grunt-cli'
        sh 'rm -rf node_modules'
        sh 'npm install && npm rebuild'
      }
    }
    stage('Compile and Test') {
    stage('Compile') {
      agent {
        docker {
          image 'node:6.11.2'
          image 'node:6.9.5'
          reuseNode true
        }
      }
      steps {
        sh 'node_modules/.bin/grunt compile:app'
        sh 'node_modules/.bin/grunt compile:acceptance_tests'
        sh 'NODE_ENV=development node_modules/.bin/grunt test:unit'
        sh 'npm run compile:all'
      }
    }
    stage('Acceptance Tests') {
      environment {
        TEXLIVE_IMAGE="quay.io/sharelatex/texlive-full:2017.1"
      }
    stage('Unit Tests') {
      steps {
        sh 'mkdir -p compiles cache'
        // Not yet running, due to volumes/sibling containers
        sh 'docker container prune -f'
        sh 'docker pull $TEXLIVE_IMAGE'
        sh 'docker pull sharelatex/acceptance-test-runner:clsi-6.11.2'
        sh 'docker run --rm -e SIBLING_CONTAINER_USER=root -e SANDBOXED_COMPILES_HOST_DIR=$(pwd)/compiles -e SANDBOXED_COMPILES_SIBLING_CONTAINERS=true -e TEXLIVE_IMAGE=$TEXLIVE_IMAGE -v /var/run/docker.sock:/var/run/docker.sock -v $(pwd):/app sharelatex/acceptance-test-runner:clsi-6.11.2'
        // This is a terrible hack to set the file ownership to jenkins:jenkins so we can cleanup the directory
        sh 'docker run -v $(pwd):/app --rm busybox /bin/chown -R 111:119 /app'
        sh 'rm -r compiles cache server.log db.sqlite config/settings.defaults.coffee'
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'
      }
    }
    stage('Package') {
    stage('Acceptance Tests') {
      steps {
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
      }
    }
    stage('Package and publish build') {
      steps {
        sh 'echo ${BUILD_NUMBER} > build_number.txt'
        sh 'touch build.tar.gz' // Avoid tar warning about files changing during read
        sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .'
      }
    }
    stage('Publish') {
      steps {
        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz")
        }
      }
    }
    stage('Publish build number') {
      steps {
        sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt'
        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          // The deployment process uses this file to figure out the latest build
          s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest")
        }
@@ -82,6 +75,10 @@ pipeline {
  }
  post {
    always {
      sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean'
    }
    failure {
      mail(from: "${EMAIL_ALERT_FROM}",
           to: "${EMAIL_ALERT_TO}",

services/clsi/Makefile (new file, +29 lines)

@@ -0,0 +1,29 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.0.1
BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = clsi
DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
DOCKER_COMPOSE := docker-compose ${DOCKER_COMPOSE_FLAGS}
clean:
	rm -f app.js
	rm -rf app/js
	rm -rf test/unit/js
	rm -rf test/acceptance/js

test: test_unit test_acceptance

test_unit:
	@[ -d test/unit ] && $(DOCKER_COMPOSE) run --rm test_unit -- ${MOCHA_ARGS} || echo "clsi has no unit tests"

test_acceptance: test_clean # clear the database before each acceptance test run
	@[ -d test/acceptance ] && $(DOCKER_COMPOSE) run --rm test_acceptance -- ${MOCHA_ARGS} || echo "clsi has no acceptance tests"

test_clean:
	$(DOCKER_COMPOSE) down

.PHONY: clean test test_unit test_acceptance test_clean build publish
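
As a usage sketch, the targets above can be run against the default compose file or the CI one by overriding DOCKER_COMPOSE_FLAGS, which is how the Jenkinsfile in this commit invokes them:

# Local run, using docker-compose.yml
make test_unit
make test_acceptance
# CI variant, as called from the Jenkinsfile
DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance
# Tear the compose services down again
make test_clean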


@@ -132,6 +132,7 @@ resCacher =
if Settings.smokeTest
  do runSmokeTest = ->
    logger.log("running smoke tests")
    console.log(__dirname, __filename)
    smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))({}, resCacher)
    setTimeout(runSmokeTest, 30 * 1000)
@@ -139,6 +140,10 @@ app.get "/health_check", (req, res)->
  res.contentType(resCacher?.setContentType)
  res.status(resCacher?.code).send(resCacher?.body)
app.get "/smoke_test_force", (req, res)->
  smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))(req, res)
profiler = require "v8-profiler"
app.get "/profile", (req, res) ->
  time = parseInt(req.query.time || "1000")
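
A quick way to poke the endpoints touched above, assuming the service is reachable on its default port 3013 on localhost:

# Health check served from the cached result of the periodic smoke test
curl -i localhost:3013/health_check
# Force an immediate smoke test run
curl -i localhost:3013/smoke_test_force
# CPU profile for ~5 seconds (the time query parameter is in milliseconds)
curl "localhost:3013/profile?time=5000"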


@@ -0,0 +1,4 @@
#!/bin/bash
set -e;
MOCHA="node_modules/.bin/mocha --recursive --reporter spec --timeout 15000"
$MOCHA "$@"
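
A usage sketch for the wrapper above; its path is not shown in this hunk, so the file name below is hypothetical. Whatever arguments are given are forwarded to mocha via "$@":

# Hypothetical location for the wrapper script shown above
./bin/run_mocha test/acceptance/js
# Extra mocha flags pass straight through
./bin/run_mocha test/acceptance/js --grep "compile"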


@@ -16,27 +16,28 @@ module.exports =
  clsiCacheDir: Path.resolve(__dirname + "/../cache")
  synctexBaseDir: (project_id) -> Path.join(@compilesDir, project_id)
  # clsi:
  #   commandRunner: "docker-runner-sharelatex"
  #   docker:
  #     image: "quay.io/sharelatex/texlive-full:2017.1"
  #     env:
  #       HOME: "/tmp"
  #     socketPath: "/var/run/docker.sock"
  #     user: "tex"
  #   expireProjectAfterIdleMs: 24 * 60 * 60 * 1000
  #   checkProjectsIntervalMs: 10 * 60 * 1000
  internal:
    clsi:
      port: 3013
      host: "localhost"
      host: process.env["LISTEN_ADDRESS"] or "0.0.0.0"
  apis:
    clsi:
      url: "http://localhost:3013"
      url: "http://#{process.env['CLSI_HOST'] or 'localhost'}:3013"
  smokeTest: false
  project_cache_length_ms: 1000 * 60 * 60 * 24
  parallelFileDownloads: 1

if process.env["COMMAND_RUNNER"]
  module.exports.clsi =
    commandRunner: process.env["COMMAND_RUNNER"]
    docker:
      image: process.env["TEXLIVE_IMAGE"] or "quay.io/sharelatex/texlive-full:2017.1"
      env:
        HOME: "/tmp"
      socketPath: "/var/run/docker.sock"
      user: process.env["TEXLIVE_IMAGE_USER"] or "tex"
    expireProjectAfterIdleMs: 24 * 60 * 60 * 1000
    checkProjectsIntervalMs: 10 * 60 * 1000
  module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"]
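
A sketch of exercising the new environment switches when starting the service directly; every variable below is one introduced in this file, and the values are only illustrative:

# Enable sandboxed compiles in sibling docker containers
export COMMAND_RUNNER=docker-runner-sharelatex
export TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1
export TEXLIVE_IMAGE_USER=tex
export COMPILES_HOST_DIR="$PWD/compiles"
# Where to listen, and the hostname other services use to reach the CLSI
export LISTEN_ADDRESS=0.0.0.0
export CLSI_HOST=localhost
npm run start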


@@ -0,0 +1,33 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.0.1
version: "2"

services:
  test_unit:
    image: node:6.9.5
    volumes:
      - .:/app
    working_dir: /app
    entrypoint: npm run test:unit:_run

  test_acceptance:
    image: node:6.9.5
    volumes:
      - .:/app
    working_dir: /app
    environment:
      REDIS_HOST: redis
      MONGO_HOST: mongo
    depends_on:
      - redis
      - mongo
    entrypoint: npm run test:acceptance:_run

  redis:
    image: redis

  mongo:
    image: mongo:3.4
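
Since the Jenkinsfile selects this compose file via DOCKER_COMPOSE_FLAGS, the equivalent direct invocation would look something like this (the docker-compose.ci.yml name is taken from the Jenkinsfile in this commit):

docker-compose -f docker-compose.ci.yml run --rm test_unit
docker-compose -f docker-compose.ci.yml run --rm test_acceptance
docker-compose -f docker-compose.ci.yml down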


@@ -0,0 +1,51 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.0.1
version: "2"

services:
  test_unit:
    image: node:6.9.5
    volumes:
      - .:/app
    working_dir: /app
    entrypoint: npm run test:unit

  test_acceptance:
    image: node:6.9.5
    volumes:
      - .:/app
    environment:
      REDIS_HOST: redis
      MONGO_HOST: mongo
      CLSI_HOST: clsi
    depends_on:
      - clsi
      - redis
      - mongo
    working_dir: /app
    entrypoint: npm run test:acceptance

  redis:
    image: redis

  mongo:
    image: mongo:3.4

  clsi:
    image: gcr.io/henry-terraform-admin/clsi
    build: .
    environment:
      TEXLIVE_IMAGE: quay.io/sharelatex/texlive-small:latest
      TEXLIVE_IMAGE_USER: root # Not ideal, but makes running in dev very simple
      COMMAND_RUNNER: docker-runner-sharelatex
      SHARELATEX_CONFIG: /app/config/settings.defaults.coffee
      COMPILES_HOST_DIR: $PWD/compiles
    volumes:
      - .:/app:cached
      - /var/run/docker.sock:/var/run/docker.sock
      - ./docker-runner:/app/node_modules/docker-runner-sharelatex
    ports:
      - 3013:3013
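
A sketch of how this compose file might be used in development, assuming docker-compose is installed:

# Run the acceptance tests; depends_on brings up clsi, redis and mongo first
docker-compose run --rm test_acceptance
# Or just run the CLSI itself for manual testing on port 3013
docker-compose up --build clsi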

@@ -0,0 +1 @@
Subproject commit f861a1c810ad844a6e00e82f2ebedac26dcad8b7
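
The subproject pointer above is a git submodule reference; its path is not shown in this hunk, but given the ./docker-runner volume mount in docker-compose.yml it would presumably be fetched with something like:

# Clone with submodules, or initialise them after the fact
git clone --recursive https://github.com/sharelatex/clsi-sharelatex.git
git submodule update --init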


@@ -0,0 +1,15 @@
{
  "ignore": [
    ".git",
    "node_modules/"
  ],
  "verbose": true,
  "execMap": {
    "js": "npm run start"
  },
  "watch": [
    "app/coffee/",
    "app.coffee"
  ],
  "ext": "coffee"
}
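
This config is wired up by the "nodemon" script added to package.json below, so a development loop would simply be:

# Watches app/coffee/ and app.coffee, restarting via "npm run start" (which recompiles first)
npm run nodemon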

services/clsi/package-lock.json (generated new file, +3162 lines); diff suppressed because it is too large.


@@ -6,10 +6,29 @@
    "type": "git",
    "url": "https://github.com/sharelatex/clsi-sharelatex.git"
  },
  "scripts": {
    "test:acceptance:wait_for_app": "echo 'Waiting for app to be accessible' && while (! curl -s -o /dev/null localhost:3013/status) do sleep 1; done",
    "test:acceptance:run": "mocha --recursive --reporter spec --timeout 15000",
    "test:acceptance:dir": "npm -q run test:acceptance:wait_for_app && npm -q run test:acceptance:run -- $@",
    "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- $@",
    "compile:test:acceptance": "coffee -o test/acceptance/js -c test/acceptance/coffee",
    "compile:test:smoke": "coffee -o test/smoke/js -c test/smoke/coffee",
    "compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee",
    "compile": "npm run compile:app && npm run compile:test:acceptance && npm run compile:test:smoke",
    "start": "npm run compile:app && node app.js",
    "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 $@ test/acceptance/js",
    "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
    "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- $@",
    "compile:unit_tests": "[ -e test/unit ] && coffee -o test/unit/js -c test/unit/coffee || echo 'No unit tests to compile'",
    "compile:acceptance_tests": "[ -e test/acceptance ] && coffee -o test/acceptance/js -c test/acceptance/coffee || echo 'No acceptance tests to compile'",
    "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:test:smoke",
    "nodemon": "nodemon --config nodemon.json"
  },
  "author": "James Allen <james@sharelatex.com>",
  "dependencies": {
    "async": "0.2.9",
    "body-parser": "^1.2.0",
    "dockerode": "^2.5.3",
    "express": "^4.2.0",
    "fs-extra": "^0.16.3",
    "grunt-mkdir": "^1.0.0",

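A sketch of the day-to-day commands these scripts enable, run from the service directory:

npm run compile          # compile the app plus acceptance and smoke tests
npm run start            # compile the app and start node app.js
npm run test:unit        # compile and run the unit tests
npm run test:acceptance  # compile and run the acceptance tests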

@@ -11,6 +11,7 @@ module.exports = Client =
    Math.random().toString(16).slice(2)

  compile: (project_id, data, callback = (error, res, body) ->) ->
    console.log("#{@host}/project/#{project_id}/compile")
    request.post {
      url: "#{@host}/project/#{project_id}/compile"
      json: