Merge pull request #47 from sharelatex/ho-docker

Move to docker build process
Henry Oswald, 2019-02-06 10:06:13 +00:00 (committed by GitHub)
commit f4602d0b38
36 changed files with 1581 additions and 1710 deletions

.dockerignore (new file)

@ -0,0 +1,9 @@
node_modules/*
gitrev
.git
.gitignore
.npm
.nvmrc
nodemon.json
app.js
**/js/*

.github/ISSUE_TEMPLATE.md (new file)

@ -0,0 +1,38 @@
<!-- BUG REPORT TEMPLATE -->
## Steps to Reproduce
<!-- Describe the steps leading up to when / where you found the bug. -->
<!-- Screenshots may be helpful here. -->
1.
2.
3.
## Expected Behaviour
<!-- What should have happened when you completed the steps above? -->
## Observed Behaviour
<!-- What actually happened when you completed the steps above? -->
<!-- Screenshots may be helpful here. -->
## Context
<!-- How has this issue affected you? What were you trying to accomplish? -->
## Technical Info
<!-- Provide any technical details that may be applicable (or N/A if not applicable). -->
* URL:
* Browser Name and version:
* Operating System and version (desktop or mobile):
* Signed in as:
* Project and/or file:
## Analysis
<!--- Optionally, document investigation of / suggest a fix for the bug, e.g. 'comes from this line / commit' -->
## Who Needs to Know?
<!-- If you want to bring this to the attention of particular people, @-mention them below. -->
<!-- If a user reported this bug and should be notified when it is fixed, provide the Front conversation link. -->
-
-

.github/PULL_REQUEST_TEMPLATE.md (new file)

@ -0,0 +1,45 @@
<!-- Please review https://github.com/overleaf/write_latex/blob/master/.github/CONTRIBUTING.md for guidance on what is expected in each section. -->
### Description
#### Screenshots
#### Related Issues / PRs
### Review
#### Potential Impact
#### Manual Testing Performed
- [ ]
- [ ]
#### Accessibility
### Deployment
#### Deployment Checklist
- [ ] Update documentation not included in the PR (if any)
- [ ]
#### Metrics and Monitoring
#### Who Needs to Know?

.gitignore

@ -40,6 +40,7 @@ test/IntergrationTests/js/*
data/*/*
app.js
**/*.map
cluster.js
app/js/*
test/IntergrationTests/js/*
@ -56,6 +57,7 @@ public/minjs/
test/unit/js/
test/acceptence/js
cluster.js
test/acceptance/js/
user_files/*
template_files/*

Dockerfile

@ -1,4 +1,23 @@
FROM node:6.9.5 as app
WORKDIR /app
#wildcard as some files may not be in all repos
COPY package*.json npm-shrink*.json /app/
RUN npm install --quiet
COPY . /app
RUN npm run compile:all
FROM node:6.9.5
# we also need imagemagick but it is already in the node docker image
RUN apt-get update && apt-get install -y --no-install-recommends ghostscript optipng
COPY --from=app /app /app
WORKDIR /app
RUN chmod 0755 ./install_deps.sh && ./install_deps.sh
USER node
CMD ["node", "--expose-gc", "app.js"]

Gruntfile.coffee (deleted)

@ -1,97 +0,0 @@
module.exports = (grunt) ->
# Project configuration.
grunt.initConfig
forever:
app:
options:
index: "app.js"
coffee:
server:
expand: true,
flatten: false,
cwd: 'app/coffee',
src: ['**/*.coffee'],
dest: 'app/js/',
ext: '.js'
app_server:
expand: true,
flatten: false,
src: ['app.coffee', 'cluster.coffee'],
dest: './',
ext: '.js'
server_tests:
expand: true,
flatten: false,
cwd: 'test/acceptence/coffee',
src: ['*.coffee', '**/*.coffee'],
dest: 'test/acceptence/js/',
ext: '.js'
server_acc_tests:
expand: true,
flatten: false,
cwd: 'test/unit/coffee',
src: ['*.coffee', '**/*.coffee'],
dest: 'test/unit/js/',
ext: '.js'
watch:
server_coffee:
files: ['app/*.coffee','app/**/*.coffee', 'test/unit/coffee/**/*.coffee', 'test/unit/coffee/*.coffee', "app.coffee", "cluster.coffee"]
tasks: ["clean", 'coffee', 'mochaTest']
clean: ["app/js", "test/unit/js", "app.js"]
nodemon:
dev:
script: 'app.js'
options:
ext:"*.coffee"
execute:
app:
src: "app.js"
concurrent:
dev:
tasks: ['nodemon', 'watch']
options:
logConcurrentOutput: true
mochaTest:
unit:
src: ["test/unit/js/#{grunt.option('feature') or '**'}/*.js"]
options:
reporter: grunt.option('reporter') or 'spec'
grep: grunt.option("grep")
acceptence:
src: ["test/acceptence/js/#{grunt.option('feature') or '**'}/*.js"]
options:
reporter: grunt.option('reporter') or 'spec'
grep: grunt.option("grep")
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-watch'
grunt.loadNpmTasks 'grunt-nodemon'
grunt.loadNpmTasks 'grunt-contrib-clean'
grunt.loadNpmTasks 'grunt-concurrent'
grunt.loadNpmTasks 'grunt-mocha-test'
grunt.loadNpmTasks 'grunt-forever'
grunt.loadNpmTasks 'grunt-bunyan'
grunt.loadNpmTasks 'grunt-execute'
grunt.registerTask "test:unit", ["coffee", "mochaTest:unit"]
grunt.registerTask "test:acceptence", ["coffee", "mochaTest:acceptence"]
grunt.registerTask "test:acceptance", ["test:acceptence"]
grunt.registerTask "ci", "test:unit"
grunt.registerTask 'default', ['coffee', 'bunyan','execute']
grunt.registerTask "compile", "coffee"
grunt.registerTask "install", "compile"

Jenkinsfile

@ -1,79 +1,122 @@
pipeline {
agent {
docker {
image 'node:6.9.5'
args "-v /var/lib/jenkins/.npm:/tmp/.npm"
}
}
String cron_string = BRANCH_NAME == "master" ? "@daily" : ""
environment {
HOME = "/tmp"
pipeline {
agent any
environment {
GIT_PROJECT = "filestore-sharelatex"
JENKINS_WORKFLOW = "filestore-sharelatex"
TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline"
GIT_API_URL = "https://api.github.com/repos/sharelatex/${GIT_PROJECT}/statuses/$GIT_COMMIT"
}
triggers {
pollSCM('* * * * *')
cron('@daily')
cron(cron_string)
}
stages {
stage('Set up') {
steps {
// we need to disable logallrefupdates, else git clones during the npm install will require git to lookup the user id
// which does not exist in the container's /etc/passwd file, causing the clone to fail.
sh 'git config --global core.logallrefupdates false'
}
}
stage('Install') {
steps {
sh 'rm -fr node_modules'
sh 'npm install'
sh 'npm rebuild'
sh 'npm install --quiet grunt-cli'
withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
sh "curl $GIT_API_URL \
--data '{ \
\"state\" : \"pending\", \
\"target_url\": \"$TARGET_URL\", \
\"description\": \"Your build is underway\", \
\"context\": \"ci/jenkins\" }' \
-u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
}
}
}
stage('Compile') {
stage('Build') {
steps {
sh 'node_modules/.bin/grunt compile'
sh 'make build'
}
}
stage('Test') {
stage('Unit Tests') {
steps {
sh 'NODE_ENV=development node_modules/.bin/grunt test:unit'
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'
}
}
stage('Package') {
stage('Acceptance Tests') {
steps {
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
}
}
stage('Package and docker push') {
steps {
sh 'echo ${BUILD_NUMBER} > build_number.txt'
sh 'touch build.tar.gz' // Avoid tar warning about files changing during read
sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .'
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar'
withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) {
sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}'
}
sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish'
sh 'docker logout https://gcr.io/overleaf-ops'
}
}
stage('Publish') {
stage('Publish to s3') {
steps {
sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt'
withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz")
}
withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
// The deployment process uses this file to figure out the latest build
s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest")
}
}
}
}
post {
always {
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean'
sh 'make clean'
}
success {
withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
sh "curl $GIT_API_URL \
--data '{ \
\"state\" : \"success\", \
\"target_url\": \"$TARGET_URL\", \
\"description\": \"Your build succeeded!\", \
\"context\": \"ci/jenkins\" }' \
-u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
}
}
failure {
mail(from: "${EMAIL_ALERT_FROM}",
to: "${EMAIL_ALERT_TO}",
mail(from: "${EMAIL_ALERT_FROM}",
to: "${EMAIL_ALERT_TO}",
subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
body: "Build: ${BUILD_URL}")
withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
sh "curl $GIT_API_URL \
--data '{ \
\"state\" : \"failure\", \
\"target_url\": \"$TARGET_URL\", \
\"description\": \"Your build failed\", \
\"context\": \"ci/jenkins\" }' \
-u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
}
}
}
// The options directive is for configuration that applies to the whole job.
options {
// we'd like to make sure remove old builds, so we don't fill up our storage!
buildDiscarder(logRotator(numToKeepStr:'50'))
// And we'd really like to be sure that this build doesn't hang forever, so let's time it out after:
timeout(time: 30, unit: 'MINUTES')
}

Makefile (new file)

@ -0,0 +1,49 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.12
BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = filestore
DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
BRANCH_NAME=$(BRANCH_NAME) \
PROJECT_NAME=$(PROJECT_NAME) \
MOCHA_GREP=${MOCHA_GREP} \
docker-compose ${DOCKER_COMPOSE_FLAGS}
clean:
docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
rm -f app.js
rm -rf app/js
rm -rf test/unit/js
rm -rf test/acceptance/js
test: test_unit test_acceptance
test_unit:
@[ ! -d test/unit ] && echo "filestore has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit
test_acceptance: test_clean test_acceptance_pre_run # clear the database before each acceptance test run
@[ ! -d test/acceptance ] && echo "filestore has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance
test_clean:
$(DOCKER_COMPOSE) down -v -t 0
test_acceptance_pre_run:
@[ ! -f test/acceptance/scripts/pre-run ] && echo "filestore has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/scripts/pre-run
build:
docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
.
tar:
$(DOCKER_COMPOSE) up tar
publish:
docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
.PHONY: clean test test_unit test_acceptance test_clean build publish
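
The intended workflow, as a sketch (docker and docker-compose assumed installed; DOCKER_COMPOSE_FLAGS defaults to the local docker-compose.yml):

make build            # docker build, tagged ci/filestore:<branch>-<build_number>
make test_unit        # docker-compose run --rm test_unit
make test_acceptance  # test_clean + pre-run script, then the acceptance suite
make clean            # remove the tagged images and compiled js

# CI substitutes the CI compose file, exactly as the Jenkinsfile does:
DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance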

app.coffee

@ -1,4 +1,7 @@
Metrics = require "metrics-sharelatex"
Metrics.initialize("filestore")
express = require('express')
bodyParser = require "body-parser"
logger = require('logger-sharelatex')
logger.initialize("filestore")
settings = require("settings-sharelatex")
@ -14,24 +17,11 @@ app = express()
if settings.sentry?.dsn?
logger.initializeErrorReporting(settings.sentry.dsn)
Metrics = require "metrics-sharelatex"
Metrics.initialize("filestore")
Metrics.open_sockets.monitor(logger)
Metrics.event_loop?.monitor(logger)
Metrics.memory.monitor(logger)
app.configure ->
app.use Metrics.http.monitor(logger)
app.configure 'development', ->
console.log "Development Enviroment"
app.use express.errorHandler({ dumpExceptions: true, showStack: true })
app.configure 'production', ->
console.log "Production Enviroment"
app.use express.errorHandler()
Metrics.inc "startup"
app.use Metrics.http.monitor(logger)
app.use (req, res, next)->
Metrics.inc "http-request"
@ -71,10 +61,12 @@ app.use (req, res, next) ->
res.set 'Connection', 'close'
next()
Metrics.injectMetricsRoute(app)
app.get "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.getFile
app.post "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.insertFile
app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, express.bodyParser(), fileController.copyFile
app.put "/project/:project_id/file/:file_id", keyBuilder.userFileKey, bodyParser.json(), fileController.copyFile
app.del "/project/:project_id/file/:file_id", keyBuilder.userFileKey, fileController.deleteFile
app.get "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey, fileController.getFile
@ -85,7 +77,7 @@ app.post "/template/:template_id/v/:version/:format", keyBuilder.templateFileKey
app.get "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.getFile
app.post "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.insertFile
app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, express.bodyParser(), fileController.copyFile
app.put "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, bodyParser.json(), fileController.copyFile
app.del "/project/:project_id/public/:public_file_id", keyBuilder.publicFileKey, fileController.deleteFile
app.get "/project/:project_id/size", keyBuilder.publicProjectKey, fileController.directorySize
@ -114,9 +106,8 @@ app.get "/health_check", healthCheckController.check
app.get '*', (req, res)->
res.send 404
server = require('http').createServer(app)
port = settings.internal.filestore.port or 3009
host = settings.internal.filestore.host or "localhost"
beginShutdown = () ->
if appIsOk
@ -126,14 +117,22 @@ beginShutdown = () ->
process.exit 1
, 120*1000
killTimer.unref?() # prevent timer from keeping process alive
server.close () ->
app.close () ->
logger.log "closed all connections"
Metrics.close()
process.disconnect?()
logger.log "server will stop accepting connections"
server.listen port, host, ->
logger.info "Filestore starting up, listening on #{host}:#{port}"
port = settings.internal.filestore.port or 3009
host = "0.0.0.0"
if !module.parent # Called directly
app.listen port, host, (error) ->
logger.info "Filestore starting up, listening on #{host}:#{port}"
module.exports = app
process.on 'SIGTERM', () ->
logger.log("filestore got SIGTERM, shutting down gracefully")

app/coffee/FSPersistorManager.coffee

@ -49,13 +49,13 @@ module.exports =
sourceStream.on 'error', (err) ->
logger.err err:err, location:location, name:name, "Error reading from file"
if err.code == 'ENOENT'
callback new Errors.NotFoundError(err.message), null
return callback new Errors.NotFoundError(err.message), null
else
callback err, null
return callback err, null
sourceStream.on 'readable', () ->
# This can be called multiple times, but the callback wrapper
# ensures the callback is only called once
callback null, sourceStream
return callback null, sourceStream
copyFile: (location, fromName, toName, callback = (err)->)->

app/coffee/FileConverter.coffee

@ -34,11 +34,11 @@ module.exports =
callback(err, destPath)
thumbnail: (sourcePath, callback)->
logger.log sourcePath:sourcePath, "thumbnail convert file"
destPath = "#{sourcePath}.png"
sourcePath = "#{sourcePath}[0]"
width = "260x"
command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", "pdf:fit-page=#{width}", sourcePath, "-resize", width, destPath]
logger.log sourcePath:sourcePath, destPath:destPath, command:command, "thumbnail convert file"
command = Settings.commands.convertCommandPrefix.concat(command)
safe_exec command, childProcessOpts, (err, stdout, stderr)->
if err?

app/coffee/FileHandler.coffee

@ -95,6 +95,8 @@ module.exports = FileHandler =
LocalFileWriter.deleteFile originalFsPath, ->
callback(err, destPath, originalFsPath)
logger.log opts:opts, "converting file depending on opts"
if opts.format?
FileConverter.convert originalFsPath, opts.format, done
else if opts.style == "thumbnail"

app/coffee/HealthCheckController.coffee

@ -33,6 +33,8 @@ checkCanStoreFiles = (callback)->
callback(err)
checkFileConvert = (callback)->
if !settings.enableConversions
return callback()
imgPath = path.join(settings.path.uploadFolder, "/tiny.pdf")
async.waterfall [
(cb)->

app/coffee/ImageOptimiser.coffee

@ -1,5 +1,6 @@
exec = require('child_process').exec
logger = require("logger-sharelatex")
Settings = require "settings-sharelatex"
module.exports =
@ -10,6 +11,9 @@ module.exports =
opts =
timeout: 30 * 1000
killSignal: "SIGKILL"
if !Settings.enableConversions
error = new Error("Image conversions are disabled")
return callback(error)
exec args, opts,(err, stdout, stderr)->
if err? and err.signal == 'SIGKILL'
logger.warn {err: err, stderr: stderr, localPath: localPath}, "optimiser timeout reached"

app/coffee/PersistorManager.coffee

@ -2,11 +2,10 @@ settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
# assume s3 if none specified
settings.filestore.backend ||= "s3"
settings?.filestore?.backend ||= "s3"
logger.log backend:settings.filestore.backend, "Loading backend"
module.exports = switch settings.filestore.backend
logger.log backend:settings?.filestore?.backend, "Loading backend"
module.exports = switch settings?.filestore?.backend
when "aws-sdk"
require "./AWSSDKPersistorManager"
when "s3"

app/coffee/S3PersistorManager.coffee

@ -88,10 +88,11 @@ module.exports =
# permission to list the bucket contents.
logger.log bucketName:bucketName, key:key, "file not found in s3"
return callback new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}"), null
if res.statusCode not in [200, 206]
else if res.statusCode not in [200, 206]
logger.log bucketName:bucketName, key:key, "error getting file from s3: #{res.statusCode}"
return callback new Error("Got non-200 response from S3: #{res.statusCode}"), null
callback null, res
else
return callback null, res
s3Stream.on 'error', (err) ->
logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3"
callback err

app/coffee/SafeExec.coffee

@ -1,6 +1,7 @@
_ = require("underscore")
logger = require("logger-sharelatex")
child_process = require('child_process')
Settings = require "settings-sharelatex"
# execute a command in the same way as 'exec' but with a timeout that
# kills all child processes
@ -9,6 +10,10 @@ child_process = require('child_process')
# group, then we can kill everything in that process group.
module.exports = (command, options, callback = (err, stdout, stderr) ->) ->
if !Settings.enableConversions
error = new Error("Image conversions are disabled")
return callback(error)
# options are {timeout: number-of-milliseconds, killSignal: signal-name}
[cmd, args...] = command

buildscript.txt (new file)

@ -0,0 +1,8 @@
filestore
--language=coffeescript
--node-version=6.9.5
--acceptance-creds=None
--dependencies=mongo,redis
--docker-repos=gcr.io/overleaf-ops
--build-target=docker
--script-version=1.1.12
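
These flags record how the build scripts were generated; the generated files (Makefile, Jenkinsfile, docker-compose*.yml) all say to edit here and re-run the generator rather than editing them directly. A sketch of regenerating, assuming a sibling checkout of the generator repo:

# bin/update_build_scripts comes from sharelatex-dev-environment, per the
# headers in the generated files; the exact invocation below is an assumption.
git clone https://github.com/sharelatex/sharelatex-dev-environment ../sharelatex-dev-environment
../sharelatex-dev-environment/bin/update_build_scripts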

config/settings.defaults.coffee

@ -1,6 +1,6 @@
Path = require "path"
module.exports =
settings =
internal:
filestore:
port: 3009
@ -11,31 +11,37 @@ module.exports =
# Choices are
# s3 - Amazon S3
# fs - local filesystem
backend: "fs"
stores:
# where to store user and template binary files
if process.env['AWS_KEY']? or process.env['S3_BUCKET_CREDENTIALS']?
backend: "s3"
s3:
key: process.env['AWS_KEY']
secret: process.env['AWS_SECRET']
stores:
user_files: process.env['AWS_S3_USER_FILES_BUCKET_NAME']
template_files: process.env['AWS_S3_TEMPLATE_FILES_BUCKET_NAME']
public_files: process.env['AWS_S3_PUBLIC_FILES_BUCKET_NAME']
# if you are using S3, then fill in your S3 details below,
# or use env var with the same structure.
# s3:
# key: "" # default
# secret: "" # default
#
# For Amazon S3 this is the bucket name to store binary files in.
#
# For local filesystem this is the directory to store the files in.
# Must contain full path, e.g. "/var/lib/sharelatex/data".
# This path must exist, not be tmpfs and be writable to by the user sharelatex is run as.
user_files: Path.resolve(__dirname + "/../user_files")
public_files: Path.resolve(__dirname + "/../public_files")
template_files: Path.resolve(__dirname + "/../template_files")
# if you are using S3, then fill in your S3 details below,
# or use env var with the same structure.
# s3:
# key: "" # default
# secret: "" # default
#
# s3BucketCreds:
# bucketname1: # secrets for bucketname1
# auth_key: ""
# auth_secret: ""
# bucketname2: # secrets for bucketname2...
s3BucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']
# s3BucketCreds:
# bucketname1: # secrets for bucketname1
# auth_key: ""
# auth_secret: ""
# bucketname2: # secrets for bucketname2...
s3BucketCreds: JSON.parse process.env['S3_BUCKET_CREDENTIALS'] if process.env['S3_BUCKET_CREDENTIALS']?
else
backend: "fs"
stores:
#
# For local filesystem this is the directory to store the files in.
# Must contain full path, e.g. "/var/lib/sharelatex/data".
# This path must exist, not be tmpfs and be writable to by the user sharelatex is run as.
user_files: Path.resolve(__dirname + "/../user_files")
public_files: Path.resolve(__dirname + "/../public_files")
template_files: Path.resolve(__dirname + "/../template_files")
path:
uploadFolder: Path.resolve(__dirname + "/../uploads")
@ -44,9 +50,14 @@ module.exports =
# Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"]
convertCommandPrefix: []
# Filestore health check
# ----------------------
# Project and file details to check in persistor when calling /health_check
# health_check:
# project_id: ""
# file_id: ""
enableConversions: if process.env['ENABLE_CONVERSIONS'] == 'true' then true else false
# Filestore health check
# ----------------------
# Project and file details to check in persistor when calling /health_check
if process.env['HEALTH_CHECK_PROJECT_ID']? and process.env['HEALTH_CHECK_FILE_ID']?
settings.health_check =
project_id: process.env['HEALTH_CHECK_PROJECT_ID']
file_id: process.env['HEALTH_CHECK_FILE_ID']
module.exports = settings
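
The backend is now chosen from the environment: setting AWS_KEY (or S3_BUCKET_CREDENTIALS) selects "s3", otherwise "fs" with the local directories is used. A sketch of an S3-backed start; bucket names are assumed examples, credentials elided:

export AWS_KEY=...
export AWS_SECRET=...
export AWS_S3_USER_FILES_BUCKET_NAME=my-user-files
export AWS_S3_TEMPLATE_FILES_BUCKET_NAME=my-template-files
export AWS_S3_PUBLIC_FILES_BUCKET_NAME=my-public-files
export ENABLE_CONVERSIONS=true    # conversions stay disabled unless exactly 'true'
export HEALTH_CHECK_PROJECT_ID=... HEALTH_CHECK_FILE_ID=...
npm start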

docker-compose.ci.yml (new file)

@ -0,0 +1,45 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.12
version: "2"
services:
test_unit:
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
user: node
command: npm run test:unit:_run
test_acceptance:
build: .
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
environment:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
MONGO_HOST: mongo
POSTGRES_HOST: postgres
MOCHA_GREP: ${MOCHA_GREP}
ENABLE_CONVERSIONS: "true"
depends_on:
- mongo
- redis
user: node
command: npm run test:acceptance:_run
tar:
build: .
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
volumes:
- ./:/tmp/build/
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root
redis:
image: redis
mongo:
image: mongo:3.4

docker-compose.yml (new file)

@ -0,0 +1,52 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.12
version: "2"
services:
test_unit:
build: .
volumes:
- .:/app
working_dir: /app
environment:
MOCHA_GREP: ${MOCHA_GREP}
command: npm run test:unit
user: node
test_acceptance:
build: .
volumes:
- .:/app
working_dir: /app
environment:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
MONGO_HOST: mongo
POSTGRES_HOST: postgres
MOCHA_GREP: ${MOCHA_GREP}
ENABLE_CONVERSIONS: "true"
user: node
depends_on:
- mongo
- redis
command: npm run test:acceptance
tar:
build: .
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
volumes:
- ./:/tmp/build/
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root
redis:
image: redis
mongo:
image: mongo:3.4
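
The compose files rely on variables the Makefile injects; run by hand, `make test_unit` expands to roughly this (BUILD_NUMBER defaults to "local" in the Makefile):

BUILD_NUMBER=local \
BRANCH_NAME="$(git rev-parse --abbrev-ref HEAD)" \
PROJECT_NAME=filestore \
docker-compose -f docker-compose.yml run --rm test_unit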

install_deps.sh (new file)

@ -0,0 +1,15 @@
#!/bin/sh
apt-get update
apt-get install vim imagemagick optipng --yes
wget -q https://s3.amazonaws.com/sl-public-dev-assets/ghostscript-9.15.tar.gz -O /tmp/ghostscript-9.15.tar.gz
cd /tmp
tar -xvf /tmp/ghostscript-9.15.tar.gz
cd /tmp/ghostscript-9.15 && ./configure && make && make install
npm rebuild
mkdir /app/user_files/ /app/uploads/ /app/template_files/
chown -R node:node /app/user_files
chown -R node:node /app/uploads
chown -R node:node /app/template_files
ls -al /app
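
Since ghostscript is built from a pinned 9.15 tarball rather than installed from apt, a post-build sanity check of the conversion toolchain is worth doing; a sketch against the assumed local tag:

docker run --rm ci/filestore:master-local sh -c 'convert -version && gs --version && optipng -v'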

nodemon.json (new file)

@ -0,0 +1,19 @@
{
"ignore": [
".git",
"node_modules/"
],
"verbose": true,
"legacyWatch": true,
"execMap": {
"js": "npm run start"
},
"watch": [
"app/coffee/",
"app.coffee",
"config/"
],
"ext": "coffee"
}
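
This config backs the new "nodemon" npm script: edits under app/coffee/, app.coffee or config/ re-run "npm run start" via the execMap, so each change recompiles the CoffeeScript before relaunching.

# Dev loop, outside docker:
npm run nodemon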

File diff suppressed because it is too large.

package.json

@ -7,19 +7,31 @@
"url": "https://github.com/sharelatex/filestore-sharelatex.git"
},
"scripts": {
"compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee",
"start": "npm run compile:app && node app.js"
"test:acceptance:run": "mocha --recursive --reporter spec --timeout 15000 $@ test/acceptance/js",
"test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
"test:unit:run": "mocha --recursive --reporter spec $@ test/unit/js",
"test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP",
"compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee",
"compile:acceptance_tests": "[ ! -e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee",
"compile:app": "([ -e app/coffee ] && coffee -m $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee -m $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')",
"compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests",
"start": "npm run compile:app && node $NODE_APP_OPTIONS app.js",
"nodemon": "nodemon --config nodemon.json",
"test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js",
"test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js",
"compile:smoke_tests": "[ ! -e test/smoke/coffee ] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee"
},
"dependencies": {
"async": "~0.2.10",
"aws-sdk": "^2.1.39",
"coffee-script": "~1.7.1",
"express": "~3.4.8",
"body-parser": "^1.2.0",
"express": "^4.2.0",
"fs-extra": "^1.0.0",
"heapdump": "^0.3.2",
"knox": "~0.9.1",
"logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.8",
"metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.3.0",
"logger-sharelatex": "^1.6.0",
"metrics-sharelatex": "^2.1.1",
"mocha": "5.2.0",
"node-transloadit": "0.0.4",
"node-uuid": "~1.4.1",
"pngcrush": "0.0.3",
@ -28,26 +40,15 @@
"request": "2.14.0",
"response": "0.14.0",
"rimraf": "2.2.8",
"settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0",
"settings-sharelatex": "^1.1.0",
"stream-browserify": "^2.0.1",
"stream-buffers": "~0.2.5",
"underscore": "~1.5.2"
},
"devDependencies": {
"sinon": "",
"chai": "",
"sandboxed-module": "",
"bunyan": "^1.3.5",
"grunt": "0.4.1",
"grunt-bunyan": "^0.5.0",
"grunt-execute": "^0.2.2",
"grunt-mocha-test": "~0.8.2",
"grunt-contrib-requirejs": "0.4.1",
"grunt-contrib-coffee": "0.7.0",
"grunt-contrib-watch": "0.5.3",
"grunt-nodemon": "0.2.1",
"grunt-contrib-clean": "0.5.0",
"grunt-concurrent": "0.4.2",
"grunt-forever": "~0.4.4"
"chai": "4.2.0",
"sandboxed-module": "2.0.3",
"sinon": "7.1.1"
}
}
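
With the grunt tasks gone, the npm scripts are the single entry point, and MOCHA_GREP is the test filter that the compose files pass through. Typical local invocations:

npm run compile:all                          # app, unit, acceptance and smoke test compilation
npm run test:unit                            # compile, then mocha over test/unit/js
MOCHA_GREP=FileController npm run test:unit  # narrow to matching describe/it names
npm run test:acceptance                      # assumes reachable mongo/redis, as in the compose files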

test/acceptance/coffee/FilestoreApp.coffee (new file)

@ -0,0 +1,24 @@
app = require('../../../app')
require("logger-sharelatex").logger.level("info")
logger = require("logger-sharelatex")
Settings = require("settings-sharelatex")
module.exports =
running: false
initing: false
callbacks: []
ensureRunning: (callback = (error) ->) ->
if @running
return callback()
else if @initing
@callbacks.push callback
else
@initing = true
@callbacks.push callback
app.listen Settings.internal?.filestore?.port, "localhost", (error) =>
throw error if error?
@running = true
logger.log("filestore running in dev mode")
for callback in @callbacks
callback()

test/acceptance/coffee/FilestoreTests.coffee

@ -1,4 +1,3 @@
assert = require("chai").assert
sinon = require('sinon')
chai = require('chai')
@ -9,6 +8,7 @@ SandboxedModule = require('sandboxed-module')
fs = require("fs")
request = require("request")
settings = require("settings-sharelatex")
FilestoreApp = require "./FilestoreApp"
describe "Filestore", ->
@ -26,8 +26,9 @@ describe "Filestore", ->
@filestoreUrl = "http://localhost:#{settings.internal.filestore.port}"
beforeEach (done)->
fs.unlink @localFileWritePath, =>
done()
FilestoreApp.ensureRunning =>
fs.unlink @localFileWritePath, ->
done()
@ -135,18 +136,18 @@ describe "Filestore", ->
describe "getting the preview image", ->
beforeEach ->
@fileUrl = @fileUrl + '?style=preview'
@previewFileUrl = "#{@fileUrl}?style=preview"
it "should not time out", (done) ->
@timeout(1000 * 20)
request.get @fileUrl, (err, response, body) =>
request.get @previewFileUrl, (err, response, body) =>
expect(response).to.not.equal null
done()
it "should respond with image data", (done) ->
# note: this test relies of the imagemagick conversion working
@timeout(1000 * 20)
request.get @fileUrl, (err, response, body) =>
request.get @previewFileUrl, (err, response, body) =>
expect(response.statusCode).to.equal 200
expect(body.length).to.be.greaterThan 400
done()

test/unit/coffee/BucketControllerTests.coffee

@ -35,6 +35,8 @@ describe "BucketController", ->
"./FileHandler": @FileHandler
"./PersistorManager":@PersistorManager
"settings-sharelatex": @settings
"metrics-sharelatex":
inc:->
"logger-sharelatex":
log:->
err:->

test/unit/coffee/FSPersistorManagerTests.coffee

@ -121,20 +121,16 @@ describe "FSPersistorManagerTests", ->
describe "error conditions", ->
beforeEach ->
@fakeCode = 'ENOENT'
@Fs.createReadStream.returns(
on: (key, callback) =>
err = new Error()
err.code = @fakeCode
callback(err, null)
)
describe "when the file does not exist", ->
beforeEach ->
@fakeCode = 'ENOENT'
@Fs.createReadStream.returns(
on: (key, callback) =>
err = new Error()
err.code = @fakeCode
callback(err, null)
)
it "should give a NotFoundError", (done) ->
@FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=>
expect(res).to.equal null
@ -146,6 +142,12 @@ describe "FSPersistorManagerTests", ->
beforeEach ->
@fakeCode = 'SOMETHINGHORRIBLE'
@Fs.createReadStream.returns(
on: (key, callback) =>
err = new Error()
err.code = @fakeCode
callback(err, null)
)
it "should give an Error", (done) ->
@FSPersistorManager.getFileStream @location, @name1, @opts, (err,res)=>

test/unit/coffee/FileControllerTests.coffee

@ -31,6 +31,8 @@ describe "FileController", ->
"./Errors": @Errors =
NotFoundError: sinon.stub()
"settings-sharelatex": @settings
"metrics-sharelatex":
inc:->
"logger-sharelatex":
log:->
err:->

test/unit/coffee/FileConverterTests.coffee

@ -16,6 +16,10 @@ describe "FileConverter", ->
"logger-sharelatex":
log:->
err:->
"metrics-sharelatex":
inc:->
Timer:->
done:->
"settings-sharelatex": @Settings =
commands:
convertCommandPrefix: []

test/unit/coffee/ImageOptimiserTests.coffee

@ -11,13 +11,16 @@ describe "ImageOptimiser", ->
beforeEach ->
@child_process =
exec : sinon.stub()
@settings =
enableConversions:true
@optimiser = SandboxedModule.require modulePath, requires:
'child_process': @child_process
"logger-sharelatex":
log:->
err:->
warn:->
"settings-sharelatex": @settings
@sourcePath = "/this/path/here.eps"
@error = "Error"
@ -33,18 +36,29 @@ describe "ImageOptimiser", ->
done()
it "should return the errro the file", (done)->
it "should return the error", (done)->
@child_process.exec.callsArgWith(2, @error)
@optimiser.compressPng @sourcePath, (err)=>
err.should.equal @error
done()
describe 'when optimiser is sigkilled', ->
describe 'when enableConversions is disabled', ->
it 'should not produce an error', (done) ->
@error = new Error('woops')
@error.signal = 'SIGKILL'
@child_process.exec.callsArgWith(2, @error)
@optimiser.compressPng @sourcePath, (err)=>
expect(err).to.equal(null)
done()
it 'should produce an error', (done) ->
@settings.enableConversions = false
@child_process.exec.callsArgWith(2)
@optimiser.compressPng @sourcePath, (err)=>
@child_process.exec.called.should.equal false
expect(err).to.exist
done()
describe 'when optimiser is sigkilled', ->
it 'should not produce an error', (done) ->
@error = new Error('woops')
@error.signal = 'SIGKILL'
@child_process.exec.callsArgWith(2, @error)
@optimiser.compressPng @sourcePath, (err)=>
expect(err).to.equal(null)
done()

test/unit/coffee/LocalFileWriterTests.coffee

@ -30,6 +30,11 @@ describe "LocalFileWriter", ->
log:->
err:->
"settings-sharelatex":@settings
"metrics-sharelatex":
inc:->
Timer:->
done:->
@stubbedFsPath = "something/uploads/eio2k1j3"
describe "writeStrem", ->

test/unit/coffee/PersistorManagerTests.coffee

@ -66,14 +66,14 @@ describe "PersistorManagerTests", ->
describe "test unspecified mixins", ->
it "should load s3 when no wrapper specified", (done) ->
@settings =
@settings = {filestore:{}}
@requires =
"./S3PersistorManager": @S3PersistorManager
"settings-sharelatex": @settings
"logger-sharelatex":
log:->
err:->
@PersistorManager=SandboxedModule.require modulePath, requires: @requires
@PersistorManager = SandboxedModule.require modulePath, requires: @requires
@PersistorManager.should.respondTo("getFileStream")
@PersistorManager.getFileStream()
@S3PersistorManager.getFileStream.calledOnce.should.equal true

test/unit/coffee/S3PersistorManagerTests.coffee

@ -110,21 +110,17 @@ describe "S3PersistorManagerTests", ->
describe "error conditions", ->
beforeEach ->
@fakeResponse =
statusCode: 500
@stubbedKnoxClient.get.returns(
on: (key, callback) =>
if key == 'response'
callback(@fakeResponse)
end: ->
)
describe "when the file doesn't exist", ->
beforeEach ->
@fakeResponse =
statusCode: 404
@stubbedKnoxClient.get.returns(
on: (key, callback) =>
if key == 'response'
callback(@fakeResponse)
end: ->
)
it "should produce a NotFoundError", (done) ->
@S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback
@ -145,6 +141,12 @@ describe "S3PersistorManagerTests", ->
beforeEach ->
@fakeResponse =
statusCode: 500
@stubbedKnoxClient.get.returns(
on: (key, callback) =>
if key == 'response'
callback(@fakeResponse)
end: ->
)
it "should produce an error", (done) ->
@S3PersistorManager.getFileStream @bucketName, @key, @opts, (err, stream)=> # empty callback
@ -314,4 +316,4 @@ describe "S3PersistorManagerTests", ->
@stubbedKnoxClient.list.callsArgWith(1, null, data)
@S3PersistorManager.directorySize @bucketName, @key, (err, totalSize)=>
totalSize.should.equal 3072
done()
done()

test/unit/coffee/SafeExecTests.coffee

@ -9,11 +9,13 @@ SandboxedModule = require('sandboxed-module')
describe "SafeExec", ->
beforeEach ->
@settings =
enableConversions:true
@safe_exec = SandboxedModule.require modulePath, requires:
"logger-sharelatex":
log:->
err:->
"settings-sharelatex": @settings
@options = {timeout: 10*1000, killSignal: "SIGTERM" }
describe "safe_exec", ->
@ -24,6 +26,12 @@ describe "SafeExec", ->
should.not.exist(err)
done()
it "should error when conversions are disabled", (done) ->
@settings.enableConversions = false
@safe_exec ["/bin/echo", "hello"], @options, (err, stdout, stderr) =>
expect(err).to.exist
done()
it "should execute a command with non-zero exit status", (done) ->
@safe_exec ["/usr/bin/env", "false"], @options, (err, stdout, stderr) =>
stdout.should.equal ""