Merge branch 'master' into bg-support-other-runtimes

port coffee changes to js
Henry Oswald 2020-04-23 13:56:33 +01:00
commit e1c90ec234
137 changed files with 18817 additions and 10212 deletions

View file

@@ -5,5 +5,6 @@ gitrev
 .npm
 .nvmrc
 nodemon.json
-app.js
-**/js/*
+cache/
+compiles/
+db/

services/clsi/.eslintrc Normal file
View file

@@ -0,0 +1,64 @@
// this file was auto-generated, do not edit it directly.
// instead run bin/update_build_scripts from
// https://github.com/sharelatex/sharelatex-dev-environment
{
"extends": [
"standard",
"prettier",
"prettier/standard"
],
"parserOptions": {
"ecmaVersion": 2017
},
"plugins": [
"mocha",
"chai-expect",
"chai-friendly"
],
"env": {
"node": true,
"mocha": true
},
"rules": {
// Swap the no-unused-expressions rule with a more chai-friendly one
"no-unused-expressions": 0,
"chai-friendly/no-unused-expressions": "error"
},
"overrides": [
{
// Test specific rules
"files": ["test/**/*.js"],
"globals": {
"expect": true
},
"rules": {
// mocha-specific rules
"mocha/handle-done-callback": "error",
"mocha/no-exclusive-tests": "error",
"mocha/no-global-tests": "error",
"mocha/no-identical-title": "error",
"mocha/no-nested-tests": "error",
"mocha/no-pending-tests": "error",
"mocha/no-skipped-tests": "error",
"mocha/no-mocha-arrows": "error",
// chai-specific rules
"chai-expect/missing-assertion": "error",
"chai-expect/terminating-properties": "error",
// prefer-arrow-callback applies to all callbacks, not just ones in mocha tests.
// we don't enforce this at the top-level - just in tests to manage `this` scope
// based on mocha's context mechanism
"mocha/prefer-arrow-callback": "error"
}
},
{
// Backend specific rules
"files": ["app/**/*.js", "app.js", "index.js"],
"rules": {
// don't allow console.log in backend code
"no-console": "error"
}
}
]
}
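
Reviewer note: the no-unused-expressions swap in the rules above exists because chai property assertions parse as bare expression statements. A minimal sketch of the case it unblocks (hypothetical spec file, assuming the mocha/chai setup these tests use):

// hypothetical test/unit/js/Example.test.js
const { expect } = require('chai')

describe('chai-friendly/no-unused-expressions', function() {
  it('permits property-style assertions', function() {
    // eslint core flags this line as an unused expression; chai-friendly allows it
    expect([]).to.be.empty
  })
})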

View file

@@ -1,13 +1,7 @@
 **.swp
 node_modules
-app/js
-test/unit/js
-test/smoke/js
-test/acceptance/js
 test/acceptance/fixtures/tmp
 compiles
-app.js
-**/*.map
 .DS_Store
 *~
 cache

View file

@@ -1 +1 @@
-10.15.0
+10.19.0

View file

@@ -0,0 +1,7 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
{
"semi": false,
"singleQuote": true
}

View file

@@ -1,15 +0,0 @@
language: node_js
before_install:
- npm install -g grunt-cli
install:
- npm install
- grunt install
script:
- grunt test:unit
services:
- redis-server
- mongodb

View file

@@ -1,6 +1,16 @@
-FROM gcr.io/overleaf-ops/node:10.19.0 as app
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/sharelatex/sharelatex-dev-environment
+FROM node:10.19.0 as base
 WORKDIR /app
+COPY install_deps.sh /app
+RUN chmod 0755 ./install_deps.sh && ./install_deps.sh
+ENTRYPOINT ["/bin/sh", "entrypoint.sh"]
+COPY entrypoint.sh /app
+FROM base as app
 #wildcard as some files may not be in all repos
 COPY package*.json npm-shrink*.json /app/
@@ -10,14 +20,11 @@ RUN npm install --quiet
 COPY . /app
-RUN npm run compile:all
-FROM gcr.io/overleaf-ops/node:10.19.0
+FROM base
 COPY --from=app /app /app
-WORKDIR /app
-RUN chmod 0755 ./install_deps.sh && ./install_deps.sh
-ENTRYPOINT ["/bin/sh", "entrypoint.sh"]
+RUN mkdir -p cache compiles db \
+	&& chown node:node cache compiles db
 CMD ["node", "--expose-gc", "app.js"]

View file

@@ -16,6 +16,7 @@ pipeline {
   }
   stages {
     stage('Install') {
       steps {
         withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
@@ -36,6 +37,13 @@ pipeline {
       }
     }
+    stage('Linting') {
+      steps {
+        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format'
+        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint'
+      }
+    }
     stage('Unit Tests') {
       steps {
         sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'

View file

@@ -1,11 +1,12 @@
 # This file was auto-generated, do not edit it directly.
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
-# Version: 1.1.24
 BUILD_NUMBER ?= local
 BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
 PROJECT_NAME = clsi
+BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]')
 DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
 DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
 	BRANCH_NAME=$(BRANCH_NAME) \
@@ -13,29 +14,64 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
 	MOCHA_GREP=${MOCHA_GREP} \
 	docker-compose ${DOCKER_COMPOSE_FLAGS}
+DOCKER_COMPOSE_TEST_ACCEPTANCE = \
+	COMPOSE_PROJECT_NAME=test_acceptance_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)
+DOCKER_COMPOSE_TEST_UNIT = \
+	COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)
 clean:
 	docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
 	docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
-	rm -f app.js
-	rm -rf app/js
-	rm -rf test/unit/js
-	rm -rf test/acceptance/js
-test: test_unit test_acceptance
+format:
+	$(DOCKER_COMPOSE) run --rm test_unit npm run format
+format_fix:
+	$(DOCKER_COMPOSE) run --rm test_unit npm run format:fix
+lint:
+	$(DOCKER_COMPOSE) run --rm test_unit npm run lint
+test: format lint test_unit test_acceptance
 test_unit:
-	@[ ! -d test/unit ] && echo "clsi has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit
+ifneq (,$(wildcard test/unit))
+	$(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit
+	$(MAKE) test_unit_clean
+endif
-test_acceptance: test_clean test_acceptance_pre_run test_acceptance_run
+test_clean: test_unit_clean
+test_unit_clean:
+ifneq (,$(wildcard test/unit))
+	$(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0
+endif
+test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run
+	$(MAKE) test_acceptance_clean
+test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug
+	$(MAKE) test_acceptance_clean
 test_acceptance_run:
-	@[ ! -d test/acceptance ] && echo "clsi has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance
+ifneq (,$(wildcard test/acceptance))
+	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance
+endif
-test_clean:
-	$(DOCKER_COMPOSE) down -v -t 0
+test_acceptance_run_debug:
+ifneq (,$(wildcard test/acceptance))
+	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk
+endif
+test_clean: test_acceptance_clean
+test_acceptance_clean:
+	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0
 test_acceptance_pre_run:
-	@[ ! -f test/acceptance/js/scripts/pre-run ] && echo "clsi has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
+ifneq (,$(wildcard test/acceptance/js/scripts/pre-run))
+	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
+endif
 build:
 	docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
 		--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
@@ -48,4 +84,5 @@ publish:
 	docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
 .PHONY: clean test test_unit test_acceptance test_clean build publish

View file

@@ -1,244 +0,0 @@
Metrics = require "metrics-sharelatex"
Metrics.initialize("clsi")
CompileController = require "./app/js/CompileController"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
logger.initialize("clsi")
if Settings.sentry?.dsn?
logger.initializeErrorReporting(Settings.sentry.dsn)
smokeTest = require "smoke-test-sharelatex"
ContentTypeMapper = require "./app/js/ContentTypeMapper"
Errors = require './app/js/Errors'
Path = require "path"
fs = require "fs"
Metrics.open_sockets.monitor(logger)
Metrics.memory.monitor(logger)
ProjectPersistenceManager = require "./app/js/ProjectPersistenceManager"
OutputCacheManager = require "./app/js/OutputCacheManager"
require("./app/js/db").sync()
express = require "express"
bodyParser = require "body-parser"
app = express()
Metrics.injectMetricsRoute(app)
app.use Metrics.http.monitor(logger)
# Compile requests can take longer than the default two
# minutes (including file download time), so bump up the
# timeout a bit.
TIMEOUT = 10 * 60 * 1000
app.use (req, res, next) ->
req.setTimeout TIMEOUT
res.setTimeout TIMEOUT
res.removeHeader("X-Powered-By")
next()
app.param 'project_id', (req, res, next, project_id) ->
if project_id?.match /^[a-zA-Z0-9_-]+$/
next()
else
next new Error("invalid project id")
app.param 'user_id', (req, res, next, user_id) ->
if user_id?.match /^[0-9a-f]{24}$/
next()
else
next new Error("invalid user id")
app.param 'build_id', (req, res, next, build_id) ->
if build_id?.match OutputCacheManager.BUILD_REGEX
next()
else
next new Error("invalid build id #{build_id}")
app.post "/project/:project_id/compile", bodyParser.json(limit: Settings.compileSizeLimit), CompileController.compile
app.post "/project/:project_id/compile/stop", CompileController.stopCompile
app.delete "/project/:project_id", CompileController.clearCache
app.get "/project/:project_id/sync/code", CompileController.syncFromCode
app.get "/project/:project_id/sync/pdf", CompileController.syncFromPdf
app.get "/project/:project_id/wordcount", CompileController.wordcount
app.get "/project/:project_id/status", CompileController.status
# Per-user containers
app.post "/project/:project_id/user/:user_id/compile", bodyParser.json(limit: Settings.compileSizeLimit), CompileController.compile
app.post "/project/:project_id/user/:user_id/compile/stop", CompileController.stopCompile
app.delete "/project/:project_id/user/:user_id", CompileController.clearCache
app.get "/project/:project_id/user/:user_id/sync/code", CompileController.syncFromCode
app.get "/project/:project_id/user/:user_id/sync/pdf", CompileController.syncFromPdf
app.get "/project/:project_id/user/:user_id/wordcount", CompileController.wordcount
ForbidSymlinks = require "./app/js/StaticServerForbidSymlinks"
# create a static server which does not allow access to any symlinks
# avoids possible mismatch of root directory between middleware check
# and serving the files
staticServer = ForbidSymlinks express.static, Settings.path.compilesDir, setHeaders: (res, path, stat) ->
if Path.basename(path) == "output.pdf"
# Calculate an etag in the same way as nginx
# https://github.com/tj/send/issues/65
etag = (path, stat) ->
'"' + Math.ceil(+stat.mtime / 1000).toString(16) +
'-' + Number(stat.size).toString(16) + '"'
res.set("Etag", etag(path, stat))
res.set("Content-Type", ContentTypeMapper.map(path))
app.get "/project/:project_id/user/:user_id/build/:build_id/output/*", (req, res, next) ->
# for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url = "/#{req.params.project_id}-#{req.params.user_id}/" + OutputCacheManager.path(req.params.build_id, "/#{req.params[0]}")
staticServer(req, res, next)
app.get "/project/:project_id/build/:build_id/output/*", (req, res, next) ->
# for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url = "/#{req.params.project_id}/" + OutputCacheManager.path(req.params.build_id, "/#{req.params[0]}")
staticServer(req, res, next)
app.get "/project/:project_id/user/:user_id/output/*", (req, res, next) ->
# for specific user get the path to the top level file
req.url = "/#{req.params.project_id}-#{req.params.user_id}/#{req.params[0]}"
staticServer(req, res, next)
app.get "/project/:project_id/output/*", (req, res, next) ->
if req.query?.build? && req.query.build.match(OutputCacheManager.BUILD_REGEX)
# for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url = "/#{req.params.project_id}/" + OutputCacheManager.path(req.query.build, "/#{req.params[0]}")
else
req.url = "/#{req.params.project_id}/#{req.params[0]}"
staticServer(req, res, next)
app.get "/oops", (req, res, next) ->
logger.error {err: "hello"}, "test error"
res.send "error\n"
app.get "/status", (req, res, next) ->
res.send "CLSI is alive\n"
resCacher =
contentType:(@setContentType)->
send:(@code, @body)->
#default the server to be down
code:500
body:{}
setContentType:"application/json"
if Settings.smokeTest
do runSmokeTest = ->
logger.log("running smoke tests")
smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))({}, resCacher)
setTimeout(runSmokeTest, 30 * 1000)
app.get "/health_check", (req, res)->
res.contentType(resCacher?.setContentType)
res.status(resCacher?.code).send(resCacher?.body)
app.get "/smoke_test_force", (req, res)->
smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))(req, res)
profiler = require "v8-profiler-node8"
app.get "/profile", (req, res) ->
time = parseInt(req.query.time || "1000")
profiler.startProfiling("test")
setTimeout () ->
profile = profiler.stopProfiling("test")
res.json(profile)
, time
app.get "/heapdump", (req, res)->
require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.clsi.heapsnapshot', (err, filename)->
res.send filename
app.use (error, req, res, next) ->
if error instanceof Errors.NotFoundError
logger.warn {err: error, url: req.url}, "not found error"
return res.sendStatus(404)
else
logger.error {err: error, url: req.url}, "server error"
res.sendStatus(error?.statusCode || 500)
net = require "net"
os = require "os"
STATE = "up"
loadTcpServer = net.createServer (socket) ->
socket.on "error", (err)->
if err.code == "ECONNRESET"
# this always comes up, we don't know why
return
logger.err err:err, "error with socket on load check"
socket.destroy()
if STATE == "up" and Settings.internal.load_balancer_agent.report_load
currentLoad = os.loadavg()[0]
# staging clis's have 1 cpu core only
if os.cpus().length == 1
availableWorkingCpus = 1
else
availableWorkingCpus = os.cpus().length - 1
freeLoad = availableWorkingCpus - currentLoad
freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100)
if freeLoadPercentage <= 0
freeLoadPercentage = 1 # when its 0 the server is set to drain and will move projects to different servers
socket.write("up, #{freeLoadPercentage}%\n", "ASCII")
socket.end()
else
socket.write("#{STATE}\n", "ASCII")
socket.end()
loadHttpServer = express()
loadHttpServer.post "/state/up", (req, res, next) ->
STATE = "up"
logger.info "getting message to set server to down"
res.sendStatus 204
loadHttpServer.post "/state/down", (req, res, next) ->
STATE = "down"
logger.info "getting message to set server to down"
res.sendStatus 204
loadHttpServer.post "/state/maint", (req, res, next) ->
STATE = "maint"
logger.info "getting message to set server to maint"
res.sendStatus 204
port = (Settings.internal?.clsi?.port or 3013)
host = (Settings.internal?.clsi?.host or "localhost")
load_tcp_port = Settings.internal.load_balancer_agent.load_port
load_http_port = Settings.internal.load_balancer_agent.local_port
if !module.parent # Called directly
app.listen port, host, (error) ->
logger.info "CLSI starting up, listening on #{host}:#{port}"
loadTcpServer.listen load_tcp_port, host, (error) ->
throw error if error?
logger.info "Load tcp agent listening on load port #{load_tcp_port}"
loadHttpServer.listen load_http_port, host, (error) ->
throw error if error?
logger.info "Load http agent listening on load port #{load_http_port}"
module.exports = app
setInterval () ->
ProjectPersistenceManager.clearExpiredProjects()
, tenMinutes = 10 * 60 * 1000
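
Reviewer note: the load agent above writes its status as soon as a TCP connection opens, with no request payload. A minimal probe sketch (host and port are placeholders for Settings.internal.load_balancer_agent.load_port):

// probe.js - sketch of reading the CLSI load agent's status line
const net = require('net')
const probe = net.connect({ host: 'localhost', port: 3048 }, () => {
  // nothing to send; the server writes immediately
})
probe.on('data', chunk => {
  console.log(chunk.toString()) // e.g. "up, 85%\n" while serving, or "down\n" / "maint\n"
  probe.end()
})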

services/clsi/app.js Normal file
View file

@@ -0,0 +1,371 @@
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let tenMinutes
const Metrics = require('metrics-sharelatex')
Metrics.initialize('clsi')
const CompileController = require('./app/js/CompileController')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
logger.initialize('clsi')
if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
logger.initializeErrorReporting(Settings.sentry.dsn)
}
const smokeTest = require('smoke-test-sharelatex')
const ContentTypeMapper = require('./app/js/ContentTypeMapper')
const Errors = require('./app/js/Errors')
const Path = require('path')
Metrics.open_sockets.monitor(logger)
Metrics.memory.monitor(logger)
const ProjectPersistenceManager = require('./app/js/ProjectPersistenceManager')
const OutputCacheManager = require('./app/js/OutputCacheManager')
require('./app/js/db').sync()
const express = require('express')
const bodyParser = require('body-parser')
const app = express()
Metrics.injectMetricsRoute(app)
app.use(Metrics.http.monitor(logger))
// Compile requests can take longer than the default two
// minutes (including file download time), so bump up the
// timeout a bit.
const TIMEOUT = 10 * 60 * 1000
app.use(function(req, res, next) {
req.setTimeout(TIMEOUT)
res.setTimeout(TIMEOUT)
res.removeHeader('X-Powered-By')
return next()
})
app.param('project_id', function(req, res, next, project_id) {
if (project_id != null ? project_id.match(/^[a-zA-Z0-9_-]+$/) : undefined) {
return next()
} else {
return next(new Error('invalid project id'))
}
})
app.param('user_id', function(req, res, next, user_id) {
if (user_id != null ? user_id.match(/^[0-9a-f]{24}$/) : undefined) {
return next()
} else {
return next(new Error('invalid user id'))
}
})
app.param('build_id', function(req, res, next, build_id) {
if (
build_id != null
? build_id.match(OutputCacheManager.BUILD_REGEX)
: undefined
) {
return next()
} else {
return next(new Error(`invalid build id ${build_id}`))
}
})
app.post(
'/project/:project_id/compile',
bodyParser.json({ limit: Settings.compileSizeLimit }),
CompileController.compile
)
app.post('/project/:project_id/compile/stop', CompileController.stopCompile)
app.delete('/project/:project_id', CompileController.clearCache)
app.get('/project/:project_id/sync/code', CompileController.syncFromCode)
app.get('/project/:project_id/sync/pdf', CompileController.syncFromPdf)
app.get('/project/:project_id/wordcount', CompileController.wordcount)
app.get('/project/:project_id/status', CompileController.status)
// Per-user containers
app.post(
'/project/:project_id/user/:user_id/compile',
bodyParser.json({ limit: Settings.compileSizeLimit }),
CompileController.compile
)
app.post(
'/project/:project_id/user/:user_id/compile/stop',
CompileController.stopCompile
)
app.delete('/project/:project_id/user/:user_id', CompileController.clearCache)
app.get(
'/project/:project_id/user/:user_id/sync/code',
CompileController.syncFromCode
)
app.get(
'/project/:project_id/user/:user_id/sync/pdf',
CompileController.syncFromPdf
)
app.get(
'/project/:project_id/user/:user_id/wordcount',
CompileController.wordcount
)
const ForbidSymlinks = require('./app/js/StaticServerForbidSymlinks')
// create a static server which does not allow access to any symlinks
// avoids possible mismatch of root directory between middleware check
// and serving the files
const staticServer = ForbidSymlinks(express.static, Settings.path.compilesDir, {
setHeaders(res, path, stat) {
if (Path.basename(path) === 'output.pdf') {
// Calculate an etag in the same way as nginx
// https://github.com/tj/send/issues/65
const etag = (path, stat) =>
`"${Math.ceil(+stat.mtime / 1000).toString(16)}` +
'-' +
Number(stat.size).toString(16) +
'"'
res.set('Etag', etag(path, stat))
}
return res.set('Content-Type', ContentTypeMapper.map(path))
}
})
app.get('/project/:project_id/user/:user_id/build/:build_id/output/*', function(
req,
res,
next
) {
// for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url =
`/${req.params.project_id}-${req.params.user_id}/` +
OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
return staticServer(req, res, next)
})
app.get('/project/:project_id/build/:build_id/output/*', function(
req,
res,
next
) {
// for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url =
`/${req.params.project_id}/` +
OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
return staticServer(req, res, next)
})
app.get('/project/:project_id/user/:user_id/output/*', function(
req,
res,
next
) {
// for specific user get the path to the top level file
req.url = `/${req.params.project_id}-${req.params.user_id}/${req.params[0]}`
return staticServer(req, res, next)
})
app.get('/project/:project_id/output/*', function(req, res, next) {
if (
(req.query != null ? req.query.build : undefined) != null &&
req.query.build.match(OutputCacheManager.BUILD_REGEX)
) {
// for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url =
`/${req.params.project_id}/` +
OutputCacheManager.path(req.query.build, `/${req.params[0]}`)
} else {
req.url = `/${req.params.project_id}/${req.params[0]}`
}
return staticServer(req, res, next)
})
app.get('/oops', function(req, res, next) {
logger.error({ err: 'hello' }, 'test error')
return res.send('error\n')
})
app.get('/status', (req, res, next) => res.send('CLSI is alive\n'))
const resCacher = {
contentType(setContentType) {
this.setContentType = setContentType
},
send(code, body) {
this.code = code
this.body = body
},
// default the server to be down
code: 500,
body: {},
setContentType: 'application/json'
}
let shutdownTime
if (Settings.processLifespanLimitMs) {
Settings.processLifespanLimitMs +=
Settings.processLifespanLimitMs * (Math.random() / 10)
shutdownTime = Date.now() + Settings.processLifespanLimitMs
logger.info('Lifespan limited to ', shutdownTime)
}
const checkIfProcessIsTooOld = function(cont) {
if (shutdownTime && shutdownTime < Date.now()) {
logger.log('shutting down, process is too old')
resCacher.send = function() {}
resCacher.code = 500
resCacher.body = { processToOld: true }
} else {
cont()
}
}
if (Settings.smokeTest) {
const runSmokeTest = function() {
checkIfProcessIsTooOld(function() {
logger.log('running smoke tests')
smokeTest.run(
require.resolve(__dirname + '/test/smoke/js/SmokeTests.js')
)({}, resCacher)
return setTimeout(runSmokeTest, 30 * 1000)
})
}
runSmokeTest()
}
app.get('/health_check', function(req, res) {
res.contentType(resCacher.setContentType)
return res.status(resCacher.code).send(resCacher.body)
})
app.get('/smoke_test_force', (req, res) =>
smokeTest.run(require.resolve(__dirname + '/test/smoke/js/SmokeTests.js'))(
req,
res
)
)
app.use(function(error, req, res, next) {
if (error instanceof Errors.NotFoundError) {
logger.warn({ err: error, url: req.url }, 'not found error')
return res.sendStatus(404)
} else {
logger.error({ err: error, url: req.url }, 'server error')
return res.sendStatus((error != null ? error.statusCode : undefined) || 500)
}
})
const net = require('net')
const os = require('os')
let STATE = 'up'
const loadTcpServer = net.createServer(function(socket) {
socket.on('error', function(err) {
if (err.code === 'ECONNRESET') {
// this always comes up, we don't know why
return
}
logger.err({ err }, 'error with socket on load check')
return socket.destroy()
})
if (STATE === 'up' && Settings.internal.load_balancer_agent.report_load) {
let availableWorkingCpus
const currentLoad = os.loadavg()[0]
// staging clis's have 1 cpu core only
if (os.cpus().length === 1) {
availableWorkingCpus = 1
} else {
availableWorkingCpus = os.cpus().length - 1
}
const freeLoad = availableWorkingCpus - currentLoad
let freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100)
if (freeLoadPercentage <= 0) {
freeLoadPercentage = 1 // when its 0 the server is set to drain and will move projects to different servers
}
socket.write(`up, ${freeLoadPercentage}%\n`, 'ASCII')
return socket.end()
} else {
socket.write(`${STATE}\n`, 'ASCII')
return socket.end()
}
})
const loadHttpServer = express()
loadHttpServer.post('/state/up', function(req, res, next) {
STATE = 'up'
logger.info('getting message to set server to down')
return res.sendStatus(204)
})
loadHttpServer.post('/state/down', function(req, res, next) {
STATE = 'down'
logger.info('getting message to set server to down')
return res.sendStatus(204)
})
loadHttpServer.post('/state/maint', function(req, res, next) {
STATE = 'maint'
logger.info('getting message to set server to maint')
return res.sendStatus(204)
})
const port =
__guard__(
Settings.internal != null ? Settings.internal.clsi : undefined,
x => x.port
) || 3013
const host =
__guard__(
Settings.internal != null ? Settings.internal.clsi : undefined,
x1 => x1.host
) || 'localhost'
const load_tcp_port = Settings.internal.load_balancer_agent.load_port
const load_http_port = Settings.internal.load_balancer_agent.local_port
if (!module.parent) {
// Called directly
app.listen(port, host, error =>
logger.info(`CLSI starting up, listening on ${host}:${port}`)
)
loadTcpServer.listen(load_tcp_port, host, function(error) {
if (error != null) {
throw error
}
return logger.info(`Load tcp agent listening on load port ${load_tcp_port}`)
})
loadHttpServer.listen(load_http_port, host, function(error) {
if (error != null) {
throw error
}
return logger.info(
`Load http agent listening on load port ${load_http_port}`
)
})
}
module.exports = app
setInterval(
() => ProjectPersistenceManager.clearExpiredProjects(),
(tenMinutes = 10 * 60 * 1000)
)
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
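
Reviewer note: __guard__ is decaffeinate's stand-in for CoffeeScript's soak operator, so the port lookup above is the mechanical translation of Settings.internal?.clsi?.port. A sketch of the equivalence (portFallback is illustrative, not part of the port):

// CoffeeScript: port = (Settings.internal?.clsi?.port or 3013)
// Decaffeinated: __guard__(Settings.internal != null ? Settings.internal.clsi : undefined, x => x.port) || 3013
// Hand-written Node 10 equivalent, without the helper:
const portFallback =
  (Settings.internal && Settings.internal.clsi && Settings.internal.clsi.port) || 3013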

View file

@@ -1,11 +0,0 @@
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
if Settings.clsi?.dockerRunner == true
commandRunnerPath = "./DockerRunner"
else
commandRunnerPath = "./LocalCommandRunner"
logger.info commandRunnerPath:commandRunnerPath, "selecting command runner for clsi"
CommandRunner = require(commandRunnerPath)
module.exports = CommandRunner

View file

@@ -1,119 +0,0 @@
RequestParser = require "./RequestParser"
CompileManager = require "./CompileManager"
Settings = require "settings-sharelatex"
Metrics = require "./Metrics"
ProjectPersistenceManager = require "./ProjectPersistenceManager"
logger = require "logger-sharelatex"
Errors = require "./Errors"
module.exports = CompileController =
compile: (req, res, next = (error) ->) ->
timer = new Metrics.Timer("compile-request")
RequestParser.parse req.body, (error, request) ->
return next(error) if error?
request.project_id = req.params.project_id
request.user_id = req.params.user_id if req.params.user_id?
ProjectPersistenceManager.markProjectAsJustAccessed request.project_id, (error) ->
return next(error) if error?
CompileManager.doCompileWithLock request, (error, outputFiles = []) ->
if error instanceof Errors.AlreadyCompilingError
code = 423 # Http 423 Locked
status = "compile-in-progress"
else if error instanceof Errors.FilesOutOfSyncError
code = 409 # Http 409 Conflict
status = "retry"
else if error?.terminated
status = "terminated"
else if error?.validate
status = "validation-#{error.validate}"
else if error?.timedout
status = "timedout"
logger.log err: error, project_id: request.project_id, "timeout running compile"
else if error?
status = "error"
code = 500
logger.warn err: error, project_id: request.project_id, "error running compile"
else
status = "failure"
for file in outputFiles
if file.path?.match(/output\.pdf$/)
status = "success"
if status == "failure"
logger.warn project_id: request.project_id, outputFiles:outputFiles, "project failed to compile successfully, no output.pdf generated"
# log an error if any core files are found
for file in outputFiles
if file.path is "core"
logger.error project_id:request.project_id, req:req, outputFiles:outputFiles, "core file found in output"
if error?
outputFiles = error.outputFiles || []
timer.done()
res.status(code or 200).send {
compile:
status: status
error: error?.message or error
outputFiles: outputFiles.map (file) ->
url:
"#{Settings.apis.clsi.url}/project/#{request.project_id}" +
(if request.user_id? then "/user/#{request.user_id}" else "") +
(if file.build? then "/build/#{file.build}" else "") +
"/output/#{file.path}"
path: file.path
type: file.type
build: file.build
}
stopCompile: (req, res, next) ->
{project_id, user_id} = req.params
CompileManager.stopCompile project_id, user_id, (error) ->
return next(error) if error?
res.sendStatus(204)
clearCache: (req, res, next = (error) ->) ->
ProjectPersistenceManager.clearProject req.params.project_id, req.params.user_id, (error) ->
return next(error) if error?
res.sendStatus(204) # No content
syncFromCode: (req, res, next = (error) ->) ->
file = req.query.file
line = parseInt(req.query.line, 10)
column = parseInt(req.query.column, 10)
project_id = req.params.project_id
user_id = req.params.user_id
CompileManager.syncFromCode project_id, user_id, file, line, column, (error, pdfPositions) ->
return next(error) if error?
res.json {
pdf: pdfPositions
}
syncFromPdf: (req, res, next = (error) ->) ->
page = parseInt(req.query.page, 10)
h = parseFloat(req.query.h)
v = parseFloat(req.query.v)
project_id = req.params.project_id
user_id = req.params.user_id
CompileManager.syncFromPdf project_id, user_id, page, h, v, (error, codePositions) ->
return next(error) if error?
res.json {
code: codePositions
}
wordcount: (req, res, next = (error) ->) ->
file = req.query.file || "main.tex"
project_id = req.params.project_id
user_id = req.params.user_id
image = req.query.image
logger.log {image, file, project_id}, "word count request"
CompileManager.wordcount project_id, user_id, file, image, (error, result) ->
return next(error) if error?
res.json {
texcount: result
}
status: (req, res, next = (error)-> )->
res.send("OK")
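
Reviewer note: for reference, the body assembled by the compile handler above has this shape (values illustrative; the error field carries the message only on failure):

// Illustrative response from POST /project/:project_id/compile
{
  "compile": {
    "status": "success",
    "outputFiles": [
      {
        "url": "<Settings.apis.clsi.url>/project/<project_id>/build/<build>/output/output.pdf",
        "path": "output.pdf",
        "type": "pdf",
        "build": "<build>"
      }
    ]
  }
}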

View file

@@ -1,345 +0,0 @@
ResourceWriter = require "./ResourceWriter"
LatexRunner = require "./LatexRunner"
OutputFileFinder = require "./OutputFileFinder"
OutputCacheManager = require "./OutputCacheManager"
Settings = require("settings-sharelatex")
Path = require "path"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
child_process = require "child_process"
DraftModeManager = require "./DraftModeManager"
TikzManager = require "./TikzManager"
LockManager = require "./LockManager"
fs = require("fs")
fse = require "fs-extra"
os = require("os")
async = require "async"
Errors = require './Errors'
CommandRunner = require "./CommandRunner"
getCompileName = (project_id, user_id) ->
if user_id? then "#{project_id}-#{user_id}" else project_id
getCompileDir = (project_id, user_id) ->
Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id))
module.exports = CompileManager =
doCompileWithLock: (request, callback = (error, outputFiles) ->) ->
compileDir = getCompileDir(request.project_id, request.user_id)
lockFile = Path.join(compileDir, ".project-lock")
# use a .project-lock file in the compile directory to prevent
# simultaneous compiles
fse.ensureDir compileDir, (error) ->
return callback(error) if error?
LockManager.runWithLock lockFile, (releaseLock) ->
CompileManager.doCompile(request, releaseLock)
, callback
doCompile: (request, callback = (error, outputFiles) ->) ->
compileDir = getCompileDir(request.project_id, request.user_id)
timer = new Metrics.Timer("write-to-disk")
logger.log project_id: request.project_id, user_id: request.user_id, "syncing resources to disk"
ResourceWriter.syncResourcesToDisk request, compileDir, (error, resourceList) ->
# NOTE: resourceList is insecure, it should only be used to exclude files from the output list
if error? and error instanceof Errors.FilesOutOfSyncError
logger.warn project_id: request.project_id, user_id: request.user_id, "files out of sync, please retry"
return callback(error)
else if error?
logger.err err:error, project_id: request.project_id, user_id: request.user_id, "error writing resources to disk"
return callback(error)
logger.log project_id: request.project_id, user_id: request.user_id, time_taken: Date.now() - timer.start, "written files to disk"
timer.done()
injectDraftModeIfRequired = (callback) ->
if request.draft
DraftModeManager.injectDraftMode Path.join(compileDir, request.rootResourcePath), callback
else
callback()
createTikzFileIfRequired = (callback) ->
TikzManager.checkMainFile compileDir, request.rootResourcePath, resourceList, (error, needsMainFile) ->
return callback(error) if error?
if needsMainFile
TikzManager.injectOutputFile compileDir, request.rootResourcePath, callback
else
callback()
# set up environment variables for chktex
env = {}
# only run chktex on LaTeX files (not knitr .Rtex files or any others)
isLaTeXFile = request.rootResourcePath?.match(/\.tex$/i)
if request.check? and isLaTeXFile
env['CHKTEX_OPTIONS'] = '-nall -e9 -e10 -w15 -w16'
env['CHKTEX_ULIMIT_OPTIONS'] = '-t 5 -v 64000'
if request.check is 'error'
env['CHKTEX_EXIT_ON_ERROR'] = 1
if request.check is 'validate'
env['CHKTEX_VALIDATE'] = 1
# apply a series of file modifications/creations for draft mode and tikz
async.series [injectDraftModeIfRequired, createTikzFileIfRequired], (error) ->
return callback(error) if error?
timer = new Metrics.Timer("run-compile")
# find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite)
tag = request.imageName?.match(/:(.*)/)?[1]?.replace(/\./g,'-') or "default"
tag = "other" if not request.project_id.match(/^[0-9a-f]{24}$/) # exclude smoke test
Metrics.inc("compiles")
Metrics.inc("compiles-with-image.#{tag}")
compileName = getCompileName(request.project_id, request.user_id)
LatexRunner.runLatex compileName, {
directory: compileDir
mainFile: request.rootResourcePath
compiler: request.compiler
timeout: request.timeout
image: request.imageName
flags: request.flags
environment: env
}, (error, output, stats, timings) ->
# request was for validation only
if request.check is "validate"
result = if error?.code then "fail" else "pass"
error = new Error("validation")
error.validate = result
# request was for compile, and failed on validation
if request.check is "error" and error?.message is 'exited'
error = new Error("compilation")
error.validate = "fail"
# compile was killed by user, was a validation, or a compile which failed validation
if error?.terminated or error?.validate or error?.timedout
OutputFileFinder.findOutputFiles resourceList, compileDir, (err, outputFiles) ->
return callback(err) if err?
error.outputFiles = outputFiles # return output files so user can check logs
callback(error)
return
# compile completed normally
return callback(error) if error?
Metrics.inc("compiles-succeeded")
for metric_key, metric_value of stats or {}
Metrics.count(metric_key, metric_value)
for metric_key, metric_value of timings or {}
Metrics.timing(metric_key, metric_value)
loadavg = os.loadavg?()
Metrics.gauge("load-avg", loadavg[0]) if loadavg?
ts = timer.done()
logger.log {project_id: request.project_id, user_id: request.user_id, time_taken: ts, stats:stats, timings:timings, loadavg:loadavg}, "done compile"
if stats?["latex-runs"] > 0
Metrics.timing("run-compile-per-pass", ts / stats["latex-runs"])
if stats?["latex-runs"] > 0 and timings?["cpu-time"] > 0
Metrics.timing("run-compile-cpu-time-per-pass", timings["cpu-time"] / stats["latex-runs"])
OutputFileFinder.findOutputFiles resourceList, compileDir, (error, outputFiles) ->
return callback(error) if error?
OutputCacheManager.saveOutputFiles outputFiles, compileDir, (error, newOutputFiles) ->
callback null, newOutputFiles
stopCompile: (project_id, user_id, callback = (error) ->) ->
compileName = getCompileName(project_id, user_id)
LatexRunner.killLatex compileName, callback
clearProject: (project_id, user_id, _callback = (error) ->) ->
callback = (error) ->
_callback(error)
_callback = () ->
compileDir = getCompileDir(project_id, user_id)
CompileManager._checkDirectory compileDir, (err, exists) ->
return callback(err) if err?
return callback() if not exists # skip removal if no directory present
proc = child_process.spawn "rm", ["-r", compileDir]
proc.on "error", callback
stderr = ""
proc.stderr.on "data", (chunk) -> stderr += chunk.toString()
proc.on "close", (code) ->
if code == 0
return callback(null)
else
return callback(new Error("rm -r #{compileDir} failed: #{stderr}"))
_findAllDirs: (callback = (error, allDirs) ->) ->
root = Settings.path.compilesDir
fs.readdir root, (err, files) ->
return callback(err) if err?
allDirs = (Path.join(root, file) for file in files)
callback(null, allDirs)
clearExpiredProjects: (max_cache_age_ms, callback = (error) ->) ->
now = Date.now()
# action for each directory
expireIfNeeded = (checkDir, cb) ->
fs.stat checkDir, (err, stats) ->
return cb() if err? # ignore errors checking directory
age = now - stats.mtime
hasExpired = (age > max_cache_age_ms)
if hasExpired then fse.remove(checkDir, cb) else cb()
# iterate over all project directories
CompileManager._findAllDirs (error, allDirs) ->
return callback() if error?
async.eachSeries allDirs, expireIfNeeded, callback
_checkDirectory: (compileDir, callback = (error, exists) ->) ->
fs.lstat compileDir, (err, stats) ->
if err?.code is 'ENOENT'
return callback(null, false) # directory does not exist
else if err?
logger.err {dir: compileDir, err:err}, "error on stat of project directory for removal"
return callback(err)
else if not stats?.isDirectory()
logger.err {dir: compileDir, stats:stats}, "bad project directory for removal"
return callback new Error("project directory is not directory")
else
callback(null, true) # directory exists
syncFromCode: (project_id, user_id, file_name, line, column, callback = (error, pdfPositions) ->) ->
# If LaTeX was run in a virtual environment, the file path that synctex expects
# might not match the file path on the host. The .synctex.gz file however, will be accessed
# wherever it is on the host.
compileName = getCompileName(project_id, user_id)
base_dir = Settings.path.synctexBaseDir(compileName)
file_path = base_dir + "/" + file_name
compileDir = getCompileDir(project_id, user_id)
synctex_path = "#{base_dir}/output.pdf"
command = ["code", synctex_path, file_path, line, column]
fse.ensureDir compileDir, (error) ->
if error?
logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync from code"
return callback(error)
CompileManager._runSynctex project_id, user_id, command, (error, stdout) ->
return callback(error) if error?
logger.log project_id: project_id, user_id:user_id, file_name: file_name, line: line, column: column, command:command, stdout: stdout, "synctex code output"
callback null, CompileManager._parseSynctexFromCodeOutput(stdout)
syncFromPdf: (project_id, user_id, page, h, v, callback = (error, filePositions) ->) ->
compileName = getCompileName(project_id, user_id)
compileDir = getCompileDir(project_id, user_id)
base_dir = Settings.path.synctexBaseDir(compileName)
synctex_path = "#{base_dir}/output.pdf"
command = ["pdf", synctex_path, page, h, v]
fse.ensureDir compileDir, (error) ->
if error?
logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync to code"
return callback(error)
CompileManager._runSynctex project_id, user_id, command, (error, stdout) ->
return callback(error) if error?
logger.log project_id: project_id, user_id:user_id, page: page, h: h, v:v, stdout: stdout, "synctex pdf output"
callback null, CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)
_checkFileExists: (path, callback = (error) ->) ->
synctexDir = Path.dirname(path)
synctexFile = Path.join(synctexDir, "output.synctex.gz")
fs.stat synctexDir, (error, stats) ->
if error?.code is 'ENOENT'
return callback(new Errors.NotFoundError("called synctex with no output directory"))
return callback(error) if error?
fs.stat synctexFile, (error, stats) ->
if error?.code is 'ENOENT'
return callback(new Errors.NotFoundError("called synctex with no output file"))
return callback(error) if error?
return callback(new Error("not a file")) if not stats?.isFile()
callback()
_runSynctex: (project_id, user_id, command, callback = (error, stdout) ->) ->
seconds = 1000
command.unshift("/opt/synctex")
directory = getCompileDir(project_id, user_id)
timeout = 60 * 1000 # increased to allow for large projects
compileName = getCompileName(project_id, user_id)
CommandRunner.run compileName, command, directory, Settings.clsi?.docker.image, timeout, {}, (error, output) ->
if error?
logger.err err:error, command:command, project_id:project_id, user_id:user_id, "error running synctex"
return callback(error)
callback(null, output.stdout)
_parseSynctexFromCodeOutput: (output) ->
results = []
for line in output.split("\n")
[node, page, h, v, width, height] = line.split("\t")
if node == "NODE"
results.push {
page: parseInt(page, 10)
h: parseFloat(h)
v: parseFloat(v)
height: parseFloat(height)
width: parseFloat(width)
}
return results
_parseSynctexFromPdfOutput: (output, base_dir) ->
results = []
for line in output.split("\n")
[node, file_path, line, column] = line.split("\t")
if node == "NODE"
file = file_path.slice(base_dir.length + 1)
results.push {
file: file
line: parseInt(line, 10)
column: parseInt(column, 10)
}
return results
wordcount: (project_id, user_id, file_name, image, callback = (error, pdfPositions) ->) ->
logger.log project_id:project_id, user_id:user_id, file_name:file_name, image:image, "running wordcount"
file_path = "$COMPILE_DIR/" + file_name
command = [ "texcount", '-nocol', '-inc', file_path, "-out=" + file_path + ".wc"]
compileDir = getCompileDir(project_id, user_id)
timeout = 60 * 1000
compileName = getCompileName(project_id, user_id)
fse.ensureDir compileDir, (error) ->
if error?
logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync from code"
return callback(error)
CommandRunner.run compileName, command, compileDir, image, timeout, {}, (error) ->
return callback(error) if error?
fs.readFile compileDir + "/" + file_name + ".wc", "utf-8", (err, stdout) ->
if err?
#call it node_err so sentry doesn't use random path error as unique id so it can't be ignored
logger.err node_err:err, command:command, compileDir:compileDir, project_id:project_id, user_id:user_id, "error reading word count output"
return callback(err)
results = CompileManager._parseWordcountFromOutput(stdout)
logger.log project_id:project_id, user_id:user_id, wordcount: results, "word count results"
callback null, results
_parseWordcountFromOutput: (output) ->
results = {
encode: ""
textWords: 0
headWords: 0
outside: 0
headers: 0
elements: 0
mathInline: 0
mathDisplay: 0
errors: 0
messages: ""
}
for line in output.split("\n")
[data, info] = line.split(":")
if data.indexOf("Encoding") > -1
results['encode'] = info.trim()
if data.indexOf("in text") > -1
results['textWords'] = parseInt(info, 10)
if data.indexOf("in head") > -1
results['headWords'] = parseInt(info, 10)
if data.indexOf("outside") > -1
results['outside'] = parseInt(info, 10)
if data.indexOf("of head") > -1
results['headers'] = parseInt(info, 10)
if data.indexOf("Number of floats/tables/figures") > -1
results['elements'] = parseInt(info, 10)
if data.indexOf("Number of math inlines") > -1
results['mathInline'] = parseInt(info, 10)
if data.indexOf("Number of math displayed") > -1
results['mathDisplay'] = parseInt(info, 10)
if data is "(errors" # errors reported as (errors:123)
results['errors'] = parseInt(info, 10)
if line.indexOf("!!! ") > -1 # errors logged as !!! message !!!
results['messages'] += line + "\n"
return results
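
Reviewer note: _parseWordcountFromOutput keys off texcount's summary lines. A worked example, assuming typical texcount -nocol -inc output:

// Input the parser sees (abridged, illustrative):
//   Encoding: ascii
//   Words in text: 465
//   Words in headers: 7
//   Words outside text (captions, etc.): 3
//   Number of headers: 4
//   Number of floats/tables/figures: 1
//   Number of math inlines: 11
//   Number of math displayed: 2
// Parsed result:
//   { encode: 'ascii', textWords: 465, headWords: 7, outside: 3, headers: 4,
//     elements: 1, mathInline: 11, mathDisplay: 2, errors: 0, messages: '' }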

View file

@@ -1,24 +0,0 @@
Path = require 'path'
# here we coerce html, css and js to text/plain,
# otherwise choose correct mime type based on file extension,
# falling back to octet-stream
module.exports = ContentTypeMapper =
map: (path) ->
switch Path.extname(path)
when '.txt', '.html', '.js', '.css', '.svg'
return 'text/plain'
when '.csv'
return 'text/csv'
when '.pdf'
return 'application/pdf'
when '.png'
return 'image/png'
when '.jpg', '.jpeg'
return 'image/jpeg'
when '.tiff'
return 'image/tiff'
when '.gif'
return 'image/gif'
else
return 'application/octet-stream'
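
Reviewer note: coercing html, js and css to text/plain is deliberate, so user-generated output is never executable in the origin that serves it. Usage sketch:

ContentTypeMapper.map('output/report.html') // => 'text/plain'
ContentTypeMapper.map('figures/plot.png')   // => 'image/png'
ContentTypeMapper.map('data.unknown')       // => 'application/octet-stream'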

View file

@@ -1,13 +0,0 @@
async = require "async"
Settings = require "settings-sharelatex"
logger = require("logger-sharelatex")
queue = async.queue((task, cb)->
task(cb)
, Settings.parallelSqlQueryLimit)
queue.drain = ()->
logger.debug('all items have been processed')
module.exports =
queue: queue
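
Reviewer note: callers push SQL work onto this queue as functions that receive the worker's callback; async.queue caps concurrency at Settings.parallelSqlQueryLimit. Usage sketch (require path and runSomeQuery are hypothetical stand-ins):

const { queue } = require('./db/queue')
queue.push(cb => {
  runSomeQuery(err => cb(err)) // cb must be invoked so the worker slot is released
})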

View file

@@ -1,56 +0,0 @@
logger = require "logger-sharelatex"
LockState = {} # locks for docker container operations, by container name
module.exports = LockManager =
MAX_LOCK_HOLD_TIME: 15000 # how long we can keep a lock
MAX_LOCK_WAIT_TIME: 10000 # how long we wait for a lock
LOCK_TEST_INTERVAL: 1000 # retry time
tryLock: (key, callback = (err, gotLock) ->) ->
existingLock = LockState[key]
if existingLock? # the lock is already taken, check how old it is
lockAge = Date.now() - existingLock.created
if lockAge < LockManager.MAX_LOCK_HOLD_TIME
return callback(null, false) # we didn't get the lock, bail out
else
logger.error {key: key, lock: existingLock, age:lockAge}, "taking old lock by force"
# take the lock
LockState[key] = lockValue = {created: Date.now()}
callback(null, true, lockValue)
getLock: (key, callback = (error, lockValue) ->) ->
startTime = Date.now()
do attempt = () ->
LockManager.tryLock key, (error, gotLock, lockValue) ->
return callback(error) if error?
if gotLock
callback(null, lockValue)
else if Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME
e = new Error("Lock timeout")
e.key = key
return callback(e)
else
setTimeout attempt, LockManager.LOCK_TEST_INTERVAL
releaseLock: (key, lockValue, callback = (error) ->) ->
existingLock = LockState[key]
if existingLock is lockValue # lockValue is an object, so we can test by reference
delete LockState[key] # our lock, so we can free it
callback()
else if existingLock? # lock exists but doesn't match ours
logger.error {key:key, lock: existingLock}, "tried to release lock taken by force"
callback()
else
logger.error {key:key, lock: existingLock}, "tried to release lock that has gone"
callback()
runWithLock: (key, runner = ( (releaseLock = (error) ->) -> ), callback = ( (error) -> )) ->
LockManager.getLock key, (error, lockValue) ->
return callback(error) if error?
runner (error1, args...) ->
LockManager.releaseLock key, lockValue, (error2) ->
error = error1 or error2
return callback(error) if error?
callback(null, args...)
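
Reviewer note: usage sketch for the lock API above, serialising container operations on one key (doContainerWork is a hypothetical stand-in):

LockManager.runWithLock('project-1234abcd-<fingerprint>', releaseLock => {
  doContainerWork(err => releaseLock(err))
}, error => {
  if (error) console.error('lock wait or container work failed', error)
})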

View file

@@ -1,360 +0,0 @@
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
Docker = require("dockerode")
dockerode = new Docker()
crypto = require "crypto"
async = require "async"
LockManager = require "./DockerLockManager"
fs = require "fs"
Path = require 'path'
_ = require "underscore"
logger.info "using docker runner"
usingSiblingContainers = () ->
Settings?.path?.sandboxedCompilesHostDir?
module.exports = DockerRunner =
ERR_NOT_DIRECTORY: new Error("not a directory")
ERR_TERMINATED: new Error("terminated")
ERR_EXITED: new Error("exited")
ERR_TIMED_OUT: new Error("container timed out")
run: (project_id, command, directory, image, timeout, environment, callback = (error, output) ->) ->
if usingSiblingContainers()
_newPath = Settings.path.sandboxedCompilesHostDir
logger.log {path: _newPath}, "altering bind path for sibling containers"
# Server Pro, example:
# '/var/lib/sharelatex/data/compiles/<project-id>'
# ... becomes ...
# '/opt/sharelatex_data/data/compiles/<project-id>'
directory = Path.join(Settings.path.sandboxedCompilesHostDir, Path.basename(directory))
volumes = {}
volumes[directory] = "/compile"
command = (arg.toString().replace?('$COMPILE_DIR', "/compile") for arg in command)
if !image?
image = Settings.clsi.docker.image
if Settings.texliveImageNameOveride?
img = image.split("/")
image = "#{Settings.texliveImageNameOveride}/#{img[2]}"
options = DockerRunner._getContainerOptions(command, image, volumes, timeout, environment)
fingerprint = DockerRunner._fingerprintContainer(options)
options.name = name = "project-#{project_id}-#{fingerprint}"
# logOptions = _.clone(options)
# logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
logger.log project_id: project_id, "running docker container"
DockerRunner._runAndWaitForContainer options, volumes, timeout, (error, output) ->
if error?.message?.match("HTTP code is 500")
logger.log err: error, project_id: project_id, "error running container so destroying and retrying"
DockerRunner.destroyContainer name, null, true, (error) ->
return callback(error) if error?
DockerRunner._runAndWaitForContainer options, volumes, timeout, callback
else
callback(error, output)
return name # pass back the container name to allow it to be killed
kill: (container_id, callback = (error) ->) ->
logger.log container_id: container_id, "sending kill signal to container"
container = dockerode.getContainer(container_id)
container.kill (error) ->
if error? and error?.message?.match?(/Cannot kill container .* is not running/)
logger.warn err: error, container_id: container_id, "container not running, continuing"
error = null
if error?
logger.error err: error, container_id: container_id, "error killing container"
return callback(error)
else
callback()
_runAndWaitForContainer: (options, volumes, timeout, _callback = (error, output) ->) ->
callback = (args...) ->
_callback(args...)
# Only call the callback once
_callback = () ->
name = options.name
streamEnded = false
containerReturned = false
output = {}
callbackIfFinished = () ->
if streamEnded and containerReturned
callback(null, output)
attachStreamHandler = (error, _output) ->
return callback(error) if error?
output = _output
streamEnded = true
callbackIfFinished()
DockerRunner.startContainer options, volumes, attachStreamHandler, (error, containerId) ->
return callback(error) if error?
DockerRunner.waitForContainer name, timeout, (error, exitCode) ->
return callback(error) if error?
if exitCode is 137 # exit status from kill -9
err = DockerRunner.ERR_TERMINATED
err.terminated = true
return callback(err)
if exitCode is 1 # exit status from chktex
err = DockerRunner.ERR_EXITED
err.code = exitCode
return callback(err)
containerReturned = true
options?.HostConfig?.SecurityOpt = null #small log line
logger.log err:err, exitCode:exitCode, options:options, "docker container has exited"
callbackIfFinished()
_getContainerOptions: (command, image, volumes, timeout, environment) ->
timeoutInSeconds = timeout / 1000
dockerVolumes = {}
for hostVol, dockerVol of volumes
dockerVolumes[dockerVol] = {}
if volumes[hostVol].slice(-3).indexOf(":r") == -1
volumes[hostVol] = "#{dockerVol}:rw"
# merge settings and environment parameter
env = {}
for src in [Settings.clsi.docker.env, environment or {}]
env[key] = value for key, value of src
# set the path based on the image year
if m = image.match /:([0-9]+)\.[0-9]+/
year = m[1]
else
year = "2014"
env['PATH'] = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/#{year}/bin/x86_64-linux/"
options =
"Cmd" : command,
"Image" : image
"Volumes" : dockerVolumes
"WorkingDir" : "/compile"
"NetworkDisabled" : true
"Memory" : 1024 * 1024 * 1024 * 1024 # 1 Gb
"User" : Settings.clsi.docker.user
"Env" : ("#{key}=#{value}" for key, value of env) # convert the environment hash to an array
"HostConfig" :
"Binds": ("#{hostVol}:#{dockerVol}" for hostVol, dockerVol of volumes)
"LogConfig": {"Type": "none", "Config": {}}
"Ulimits": [{'Name': 'cpu', 'Soft': timeoutInSeconds+5, 'Hard': timeoutInSeconds+10}]
"CapDrop": "ALL"
"SecurityOpt": ["no-new-privileges"]
if Settings.path?.synctexBinHostPath?
options["HostConfig"]["Binds"].push("#{Settings.path.synctexBinHostPath}:/opt/synctex:ro")
if Settings.clsi.docker.runtime?
options["HostConfig"]["Runtime"] = Settings.clsi.docker.runtime
if Settings.clsi.docker.seccomp_profile?
options.HostConfig.SecurityOpt.push "seccomp=#{Settings.clsi.docker.seccomp_profile}"
return options
_fingerprintContainer: (containerOptions) ->
# Yay, Hashing!
json = JSON.stringify(containerOptions)
return crypto.createHash("md5").update(json).digest("hex")
startContainer: (options, volumes, attachStreamHandler, callback) ->
LockManager.runWithLock options.name, (releaseLock) ->
# Check that volumes exist before starting the container.
# When a container is started with volume pointing to a
# non-existent directory then docker creates the directory but
# with root ownership.
DockerRunner._checkVolumes options, volumes, (err) ->
return releaseLock(err) if err?
DockerRunner._startContainer options, volumes, attachStreamHandler, releaseLock
, callback
# Check that volumes exist and are directories
_checkVolumes: (options, volumes, callback = (error, containerName) ->) ->
if usingSiblingContainers()
# Server Pro, with sibling-containers active, skip checks
return callback(null)
checkVolume = (path, cb) ->
fs.stat path, (err, stats) ->
return cb(err) if err?
return cb(DockerRunner.ERR_NOT_DIRECTORY) if not stats?.isDirectory()
cb()
jobs = []
for vol of volumes
do (vol) ->
jobs.push (cb) -> checkVolume(vol, cb)
async.series jobs, callback
_startContainer: (options, volumes, attachStreamHandler, callback = ((error, output) ->)) ->
callback = _.once(callback)
name = options.name
logger.log {container_name: name}, "starting container"
container = dockerode.getContainer(name)
createAndStartContainer = ->
dockerode.createContainer options, (error, container) ->
return callback(error) if error?
startExistingContainer()
startExistingContainer = ->
DockerRunner.attachToContainer options.name, attachStreamHandler, (error)->
return callback(error) if error?
container.start (error) ->
if error? and error?.statusCode != 304 #already running
return callback(error)
else
callback()
container.inspect (error, stats)->
if error?.statusCode == 404
createAndStartContainer()
else if error?
logger.err {container_name: name, error:error}, "unable to inspect container to start"
return callback(error)
else
startExistingContainer()
attachToContainer: (containerId, attachStreamHandler, attachStartCallback) ->
container = dockerode.getContainer(containerId)
container.attach {stdout: 1, stderr: 1, stream: 1}, (error, stream) ->
if error?
logger.error err: error, container_id: containerId, "error attaching to container"
return attachStartCallback(error)
else
attachStartCallback()
logger.log container_id: containerId, "attached to container"
MAX_OUTPUT = 1024 * 1024 # limit output to 1MB
createStringOutputStream = (name) ->
return {
data: ""
overflowed: false
write: (data) ->
return if @overflowed
if @data.length < MAX_OUTPUT
@data += data
else
logger.error container_id: containerId, length: @data.length, maxLen: MAX_OUTPUT, "#{name} exceeds max size"
@data += "(...truncated at #{MAX_OUTPUT} chars...)"
@overflowed = true
# kill container if too much output
# docker.containers.kill(containerId, () ->)
}
stdout = createStringOutputStream "stdout"
stderr = createStringOutputStream "stderr"
container.modem.demuxStream(stream, stdout, stderr)
stream.on "error", (err) ->
logger.error err: err, container_id: containerId, "error reading from container stream"
stream.on "end", () ->
attachStreamHandler null, {stdout: stdout.data, stderr: stderr.data}
waitForContainer: (containerId, timeout, _callback = (error, exitCode) ->) ->
callback = (args...) ->
_callback(args...)
# Only call the callback once
_callback = () ->
container = dockerode.getContainer(containerId)
timedOut = false
timeoutId = setTimeout () ->
timedOut = true
logger.log container_id: containerId, "timeout reached, killing container"
container.kill(() ->)
, timeout
logger.log container_id: containerId, "waiting for docker container"
container.wait (error, res) ->
if error?
clearTimeout timeoutId
logger.error err: error, container_id: containerId, "error waiting for container"
return callback(error)
if timedOut
logger.log container_id: containerId, "docker container timed out"
error = DockerRunner.ERR_TIMED_OUT
error.timedout = true
callback error
else
clearTimeout timeoutId
logger.log container_id: containerId, exitCode: res.StatusCode, "docker container returned"
callback null, res.StatusCode
destroyContainer: (containerName, containerId, shouldForce, callback = (error) ->) ->
# We want the containerName for the lock and, ideally, the
# containerId to delete. There is a bug in the docker.io module
# where if you delete by name and there is an error, it throws an
# async exception, but if you delete by id it just does a normal
# error callback. We fall back to deleting by name if no id is
# supplied.
LockManager.runWithLock containerName, (releaseLock) ->
DockerRunner._destroyContainer containerId or containerName, shouldForce, releaseLock
, callback
_destroyContainer: (containerId, shouldForce, callback = (error) ->) ->
logger.log container_id: containerId, "destroying docker container"
container = dockerode.getContainer(containerId)
container.remove {force: shouldForce == true}, (error) ->
if error? and error?.statusCode == 404
logger.warn err: error, container_id: containerId, "container not found, continuing"
error = null
if error?
logger.error err: error, container_id: containerId, "error destroying container"
else
logger.log container_id: containerId, "destroyed container"
callback(error)
# handle expiry of docker containers
MAX_CONTAINER_AGE: Settings.clsi.docker.maxContainerAge or oneHour = 60 * 60 * 1000
examineOldContainer: (container, callback = (error, name, id, ttl)->) ->
name = container.Name or container.Names?[0]
created = container.Created * 1000 # creation time is returned in seconds
now = Date.now()
age = now - created
maxAge = DockerRunner.MAX_CONTAINER_AGE
ttl = maxAge - age
logger.log {containerName: name, created: created, now: now, age: age, maxAge: maxAge, ttl: ttl}, "checking whether to destroy container"
callback(null, name, container.Id, ttl)
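# e.g. (illustrative numbers) a container created 90 minutes ago with a
# one-hour maxAge has ttl = -30 minutes, so destroyOldContainers below
# will destroy it if it is a /project-* container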
destroyOldContainers: (callback = (error) ->) ->
dockerode.listContainers all: true, (error, containers) ->
return callback(error) if error?
jobs = []
for container in containers or []
do (container) ->
DockerRunner.examineOldContainer container, (err, name, id, ttl) ->
if name.slice(0, 9) == '/project-' && ttl <= 0
jobs.push (cb) ->
DockerRunner.destroyContainer name, id, false, () -> cb()
# Ignore errors because some containers get stuck but
# will be destroyed next time
async.series jobs, callback
startContainerMonitor: () ->
logger.log {maxAge: DockerRunner.MAX_CONTAINER_AGE}, "starting container expiry"
# randomise the start time
randomDelay = Math.floor(Math.random() * 5 * 60 * 1000)
setTimeout () ->
setInterval () ->
DockerRunner.destroyOldContainers()
, oneHour = 60 * 60 * 1000
, randomDelay
DockerRunner.startContainerMonitor()

View file

@ -1,24 +0,0 @@
fs = require "fs"
logger = require "logger-sharelatex"
module.exports = DraftModeManager =
injectDraftMode: (filename, callback = (error) ->) ->
fs.readFile filename, "utf8", (error, content) ->
return callback(error) if error?
# avoid adding draft mode more than once
if content?.indexOf("\\documentclass\[draft") >= 0
return callback()
modified_content = DraftModeManager._injectDraftOption content
logger.log {
content: content.slice(0,1024), # \documentclass is normally v near the top
modified_content: modified_content.slice(0,1024),
filename
}, "injected draft class"
fs.writeFile filename, modified_content, callback
_injectDraftOption: (content) ->
content
# With existing options (must be first, otherwise both are applied)
.replace(/\\documentclass\[/g, "\\documentclass[draft,")
# Without existing options
.replace(/\\documentclass\{/g, "\\documentclass[draft]{")
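# Illustrative before/after on a hypothetical document:
#   \documentclass[12pt]{article} -> \documentclass[draft,12pt]{article}
#   \documentclass{report}        -> \documentclass[draft]{report}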

View file

@ -1,25 +0,0 @@
NotFoundError = (message) ->
error = new Error(message)
error.name = "NotFoundError"
error.__proto__ = NotFoundError.prototype
return error
NotFoundError.prototype.__proto__ = Error.prototype
FilesOutOfSyncError = (message) ->
error = new Error(message)
error.name = "FilesOutOfSyncError"
error.__proto__ = FilesOutOfSyncError.prototype
return error
FilesOutOfSyncError.prototype.__proto__ = Error.prototype
AlreadyCompilingError = (message) ->
error = new Error(message)
error.name = "AlreadyCompilingError"
error.__proto__ = AlreadyCompilingError.prototype
return error
AlreadyCompilingError.prototype.__proto__ = Error.prototype
module.exports = Errors =
NotFoundError: NotFoundError
FilesOutOfSyncError: FilesOutOfSyncError
AlreadyCompilingError: AlreadyCompilingError
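# Usage sketch (hypothetical caller); the prototype wiring above is what
# makes instanceof checks work on these custom errors:
#   error = new NotFoundError("no such project")
#   error instanceof NotFoundError # => true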

View file

@ -1,95 +0,0 @@
Path = require "path"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
CommandRunner = require "./CommandRunner"
ProcessTable = {} # table of currently running jobs (pids or docker container names)
module.exports = LatexRunner =
runLatex: (project_id, options, callback = (error) ->) ->
{directory, mainFile, compiler, timeout, image, environment, flags} = options
compiler ||= "pdflatex"
timeout ||= 60000 # milliseconds
logger.log directory: directory, compiler: compiler, timeout: timeout, mainFile: mainFile, environment: environment, flags:flags, "starting compile"
# We want to run latexmk on the tex file which we will automatically
# generate from the Rtex/Rmd/md file.
mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".tex")
if compiler == "pdflatex"
command = LatexRunner._pdflatexCommand mainFile, flags
else if compiler == "latex"
command = LatexRunner._latexCommand mainFile, flags
else if compiler == "xelatex"
command = LatexRunner._xelatexCommand mainFile, flags
else if compiler == "lualatex"
command = LatexRunner._lualatexCommand mainFile, flags
else
return callback new Error("unknown compiler: #{compiler}")
if Settings.clsi?.strace
command = ["strace", "-o", "strace", "-ff"].concat(command)
id = "#{project_id}" # record running project under this id
ProcessTable[id] = CommandRunner.run project_id, command, directory, image, timeout, environment, (error, output) ->
delete ProcessTable[id]
return callback(error) if error?
runs = output?.stderr?.match(/^Run number \d+ of .*latex/mg)?.length or 0
failed = if output?.stdout?.match(/^Latexmk: Errors/m)? then 1 else 0
# counters from latexmk output
stats = {}
stats["latexmk-errors"] = failed
stats["latex-runs"] = runs
stats["latex-runs-with-errors"] = if failed then runs else 0
stats["latex-runs-#{runs}"] = 1
stats["latex-runs-with-errors-#{runs}"] = if failed then 1 else 0
# timing information from /usr/bin/time
timings = {}
stderr = output?.stderr
timings["cpu-percent"] = stderr?.match(/Percent of CPU this job got: (\d+)/m)?[1] or 0
timings["cpu-time"] = stderr?.match(/User time.*: (\d+.\d+)/m)?[1] or 0
timings["sys-time"] = stderr?.match(/System time.*: (\d+.\d+)/m)?[1] or 0
callback error, output, stats, timings
killLatex: (project_id, callback = (error) ->) ->
id = "#{project_id}"
logger.log {id:id}, "killing running compile"
if not ProcessTable[id]?
logger.warn {id}, "no such project to kill"
return callback(null)
else
CommandRunner.kill ProcessTable[id], callback
_latexmkBaseCommand: (flags) ->
args = ["latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR", "-synctex=1","-interaction=batchmode"]
if flags
args = args.concat(flags)
(Settings?.clsi?.latexmkCommandPrefix || []).concat(args)
_pdflatexCommand: (mainFile, flags) ->
LatexRunner._latexmkBaseCommand(flags).concat [
"-pdf",
Path.join("$COMPILE_DIR", mainFile)
]
_latexCommand: (mainFile, flags) ->
LatexRunner._latexmkBaseCommand(flags).concat [
"-pdfdvi",
Path.join("$COMPILE_DIR", mainFile)
]
_xelatexCommand: (mainFile, flags) ->
LatexRunner._latexmkBaseCommand(flags).concat [
"-xelatex",
Path.join("$COMPILE_DIR", mainFile)
]
_lualatexCommand: (mainFile, flags) ->
LatexRunner._latexmkBaseCommand(flags).concat [
"-lualatex",
Path.join("$COMPILE_DIR", mainFile)
]

View file

@ -1,48 +0,0 @@
spawn = require("child_process").spawn
logger = require "logger-sharelatex"
logger.info "using standard command runner"
module.exports = CommandRunner =
run: (project_id, command, directory, image, timeout, environment, callback = (error) ->) ->
command = (arg.toString().replace('$COMPILE_DIR', directory) for arg in command)
logger.log project_id: project_id, command: command, directory: directory, "running command"
logger.warn "timeouts and sandboxing are not enabled with CommandRunner"
# merge environment settings
env = {}
env[key] = value for key, value of process.env
env[key] = value for key, value of environment
# run command as detached process so it has its own process group (which can be killed if needed)
proc = spawn command[0], command.slice(1), cwd: directory, env: env, detached: true
stdout = ""
proc.stdout.on "data", (data)->
stdout += data
proc.on "error", (err)->
logger.err err:err, project_id:project_id, command: command, directory: directory, "error running command"
callback(err)
proc.on "close", (code, signal) ->
logger.info code:code, signal:signal, project_id:project_id, "command exited"
if signal is 'SIGTERM' # signal from kill method below
err = new Error("terminated")
err.terminated = true
return callback(err)
else if code is 1 # exit status from chktex
err = new Error("exited")
err.code = code
return callback(err)
else
callback(null, {"stdout": stdout})
return proc.pid # return process id to allow job to be killed if necessary
kill: (pid, callback = (error) ->) ->
try
process.kill -pid # kill all processes in group
catch err
return callback(err)
callback()

View file

@ -1,31 +0,0 @@
Settings = require('settings-sharelatex')
logger = require "logger-sharelatex"
Lockfile = require('lockfile') # from https://github.com/npm/lockfile
Errors = require "./Errors"
fs = require("fs")
Path = require("path")
module.exports = LockManager =
LOCK_TEST_INTERVAL: 1000 # 1s between each test of the lock
MAX_LOCK_WAIT_TIME: 15000 # 15s maximum time to spend trying to get the lock
LOCK_STALE: 5*60*1000 # 5 mins time until lock auto expires
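# Usage sketch (hypothetical lock path; real callers pass a lock file
# inside the compile directory):
#   LockManager.runWithLock "/compiles/#{project_id}/.project-lock", (releaseLock) ->
#     doCompile(releaseLock)
#   , callback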
runWithLock: (path, runner = ((releaseLock = (error) ->) ->), callback = ((error) ->)) ->
lockOpts =
wait: @MAX_LOCK_WAIT_TIME
pollPeriod: @LOCK_TEST_INTERVAL
stale: @LOCK_STALE
Lockfile.lock path, lockOpts, (error) ->
if error?.code is 'EEXIST'
return callback new Errors.AlreadyCompilingError("compile in progress")
else if error?
fs.lstat path, (statLockErr, statLock)->
fs.lstat Path.dirname(path), (statDirErr, statDir)->
fs.readdir Path.dirname(path), (readdirErr, readdirDir)->
logger.err error:error, path:path, statLock:statLock, statLockErr:statLockErr, statDir:statDir, statDirErr: statDirErr, readdirErr:readdirErr, readdirDir:readdirDir, "unable to get lock"
return callback(error)
else
runner (error1, args...) ->
Lockfile.unlock path, (error2) ->
error = error1 or error2
return callback(error) if error?
callback(null, args...)

View file

@ -1,2 +0,0 @@
module.exports = require "metrics-sharelatex"

View file

@ -1,199 +0,0 @@
async = require "async"
fs = require "fs"
fse = require "fs-extra"
Path = require "path"
logger = require "logger-sharelatex"
_ = require "underscore"
Settings = require "settings-sharelatex"
crypto = require "crypto"
OutputFileOptimiser = require "./OutputFileOptimiser"
module.exports = OutputCacheManager =
CACHE_SUBDIR: '.cache/clsi'
ARCHIVE_SUBDIR: '.archive/clsi'
# build id is HEXDATE-HEXRANDOM from Date.now() and random bytes
# for backwards compatibility, make the randombytes part optional
BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/
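# e.g. "171a2b3c4d5-0f1e2d3c4b5a6978" (illustrative: hex millisecond
# timestamp, a dash, then 8 random bytes in hex)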
CACHE_LIMIT: 2 # maximum number of cache directories
CACHE_AGE: 60*60*1000 # up to one hour old
path: (buildId, file) ->
# used by static server, given build id return '.cache/clsi/buildId'
if buildId.match OutputCacheManager.BUILD_REGEX
return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file)
else
# for invalid build id, return top level
return file
generateBuildId: (callback = (error, buildId) ->) ->
# generate a secure build id from Date.now() and 8 random bytes in hex
crypto.randomBytes 8, (err, buf) ->
return callback(err) if err?
random = buf.toString('hex')
date = Date.now().toString(16)
callback err, "#{date}-#{random}"
saveOutputFiles: (outputFiles, compileDir, callback = (error) ->) ->
OutputCacheManager.generateBuildId (err, buildId) ->
return callback(err) if err?
OutputCacheManager.saveOutputFilesInBuildDir outputFiles, compileDir, buildId, callback
saveOutputFilesInBuildDir: (outputFiles, compileDir, buildId, callback = (error) ->) ->
# make a compileDir/CACHE_SUBDIR/build_id directory and
# copy all the output files into it
cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
# Put the files into a new cache subdirectory
cacheDir = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR, buildId)
# Is it a per-user compile? check if compile directory is PROJECTID-USERID
perUser = Path.basename(compileDir).match(/^[0-9a-f]{24}-[0-9a-f]{24}$/)
# Archive logs in background
if Settings.clsi?.archive_logs or Settings.clsi?.strace
OutputCacheManager.archiveLogs outputFiles, compileDir, buildId, (err) ->
if err?
logger.warn err:err, "error archiving log files"
# make the new cache directory
fse.ensureDir cacheDir, (err) ->
if err?
logger.error err: err, directory: cacheDir, "error creating cache directory"
callback(err, outputFiles)
else
# copy all the output files into the new cache directory
results = []
async.mapSeries outputFiles, (file, cb) ->
# don't send dot files as output, express doesn't serve them
if OutputCacheManager._fileIsHidden(file.path)
logger.debug compileDir: compileDir, path: file.path, "ignoring dotfile in output"
return cb()
# copy other files into cache directory if valid
newFile = _.clone(file)
[src, dst] = [Path.join(compileDir, file.path), Path.join(cacheDir, file.path)]
OutputCacheManager._checkFileIsSafe src, (err, isSafe) ->
return cb(err) if err?
if !isSafe
return cb()
OutputCacheManager._checkIfShouldCopy src, (err, shouldCopy) ->
return cb(err) if err?
if !shouldCopy
return cb()
OutputCacheManager._copyFile src, dst, (err) ->
return cb(err) if err?
newFile.build = buildId # attach a build id if we cached the file
results.push newFile
cb()
, (err) ->
if err?
# pass back the original files if we encountered *any* error
callback(err, outputFiles)
# clean up the directory we just created
fse.remove cacheDir, (err) ->
if err?
logger.error err: err, dir: cacheDir, "error removing cache dir after failure"
else
# pass back the list of new files in the cache
callback(err, results)
# let file expiry run in the background, expire all previous files if per-user
OutputCacheManager.expireOutputFiles cacheRoot, {keep: buildId, limit: if perUser then 1 else null}
archiveLogs: (outputFiles, compileDir, buildId, callback = (error) ->) ->
archiveDir = Path.join(compileDir, OutputCacheManager.ARCHIVE_SUBDIR, buildId)
logger.log {dir: archiveDir}, "archiving log files for project"
fse.ensureDir archiveDir, (err) ->
return callback(err) if err?
async.mapSeries outputFiles, (file, cb) ->
[src, dst] = [Path.join(compileDir, file.path), Path.join(archiveDir, file.path)]
OutputCacheManager._checkFileIsSafe src, (err, isSafe) ->
return cb(err) if err?
return cb() if !isSafe
OutputCacheManager._checkIfShouldArchive src, (err, shouldArchive) ->
return cb(err) if err?
return cb() if !shouldArchive
OutputCacheManager._copyFile src, dst, cb
, callback
expireOutputFiles: (cacheRoot, options, callback = (error) ->) ->
# look in compileDir for build dirs and delete if > N or age of mod time > T
fs.readdir cacheRoot, (err, results) ->
if err?
return callback(null) if err.code == 'ENOENT' # cache directory does not exist yet
logger.error err: err, project_id: cacheRoot, "error clearing cache"
return callback(err)
dirs = results.sort().reverse()
currentTime = Date.now()
isExpired = (dir, index) ->
return false if options?.keep == dir
# remove any directories over the requested (non-null) limit
return true if options?.limit? and index > options.limit
# remove any directories over the hard limit
return true if index > OutputCacheManager.CACHE_LIMIT
# we can get the build time from the first part of the directory name DDDD-RRRR
# DDDD is date and RRRR is random bytes
dirTime = parseInt(dir.split('-')?[0], 16)
age = currentTime - dirTime
return age > OutputCacheManager.CACHE_AGE
toRemove = _.filter(dirs, isExpired)
removeDir = (dir, cb) ->
fse.remove Path.join(cacheRoot, dir), (err, result) ->
logger.log cache: cacheRoot, dir: dir, "removed expired cache dir"
if err?
logger.error err: err, dir: dir, "cache remove error"
cb(err, result)
async.eachSeries toRemove, (dir, cb) ->
removeDir dir, cb
, callback
_fileIsHidden: (path) ->
return path?.match(/^\.|\/\./)?
_checkFileIsSafe: (src, callback = (error, isSafe) ->) ->
# check if we have a valid file to copy into the cache
fs.stat src, (err, stats) ->
if err?.code is 'ENOENT'
logger.warn err: err, file: src, "file has disappeared before copying to build cache"
callback(err, false)
else if err?
# some other problem reading the file
logger.error err: err, file: src, "stat error for file in cache"
callback(err, false)
else if not stats.isFile()
# other filetype - reject it
logger.warn src: src, stat: stats, "nonfile output - refusing to copy to cache"
callback(null, false)
else
# it's a plain file, ok to copy
callback(null, true)
_copyFile: (src, dst, callback) ->
# copy output file into the cache
fse.copy src, dst, (err) ->
if err?.code is 'ENOENT'
logger.warn err: err, file: src, "file has disappeared when copying to build cache"
callback(err, false)
else if err?
logger.error err: err, src: src, dst: dst, "copy error for file in cache"
callback(err)
else
if Settings.clsi?.optimiseInDocker
# don't run any optimisations on the pdf when they are done
# in the docker container
callback()
else
# call the optimiser for the file too
OutputFileOptimiser.optimiseFile src, dst, callback
_checkIfShouldCopy: (src, callback = (err, shouldCopy) ->) ->
return callback(null, !Path.basename(src).match(/^strace/))
_checkIfShouldArchive: (src, callback = (err, shouldArchive) ->) ->
if Path.basename(src).match(/^strace/)
return callback(null, true)
if Settings.clsi?.archive_logs and Path.basename(src) in ["output.log", "output.blg"]
return callback(null, true)
return callback(null, false)

View file

@ -1,50 +0,0 @@
async = require "async"
fs = require "fs"
Path = require "path"
spawn = require("child_process").spawn
logger = require "logger-sharelatex"
module.exports = OutputFileFinder =
findOutputFiles: (resources, directory, callback = (error, outputFiles, allFiles) ->) ->
incomingResources = {}
for resource in resources
incomingResources[resource.path] = true
OutputFileFinder._getAllFiles directory, (error, allFiles = []) ->
if error?
logger.err err:error, "error finding all output files"
return callback(error)
outputFiles = []
for file in allFiles
if !incomingResources[file]
outputFiles.push {
path: file
type: file.match(/\.([^\.]+)$/)?[1]
}
callback null, outputFiles, allFiles
_getAllFiles: (directory, _callback = (error, fileList) ->) ->
callback = (error, fileList) ->
_callback(error, fileList)
_callback = () ->
# don't include clsi-specific files/directories in the output list
EXCLUDE_DIRS = ["-name", ".cache", "-o", "-name", ".archive", "-o", "-name", ".project-*"]
args = [directory, "(", EXCLUDE_DIRS..., ")", "-prune", "-o", "-type", "f", "-print"]
logger.log args: args, "running find command"
proc = spawn("find", args)
stdout = ""
proc.stdout.on "data", (chunk) ->
stdout += chunk.toString()
proc.on "error", callback
proc.on "close", (code) ->
if code != 0
logger.warn {directory, code}, "find returned error, directory likely doesn't exist"
return callback null, []
fileList = stdout.trim().split("\n")
fileList = fileList.map (file) ->
# Strip leading directory; the relative path is the map callback's return value
Path.relative(directory, file)
return callback null, fileList

View file

@ -1,55 +0,0 @@
fs = require "fs"
Path = require "path"
spawn = require("child_process").spawn
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
_ = require "underscore"
module.exports = OutputFileOptimiser =
optimiseFile: (src, dst, callback = (error) ->) ->
# check output file (src) and see if we can optimise it, storing
# the result in the build directory (dst)
if src.match(/\/output\.pdf$/)
OutputFileOptimiser.checkIfPDFIsOptimised src, (err, isOptimised) ->
return callback(null) if err? or isOptimised
OutputFileOptimiser.optimisePDF src, dst, callback
else
callback (null)
checkIfPDFIsOptimised: (file, callback) ->
SIZE = 16*1024 # check the header of the pdf
result = new Buffer(SIZE)
result.fill(0) # prevent leakage of uninitialised buffer
fs.open file, "r", (err, fd) ->
return callback(err) if err?
fs.read fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) ->
fs.close fd, (errClose) ->
return callback(errRead) if errRead?
return callback(errClose) if errClose?
isOptimised = buffer.toString('ascii').indexOf("/Linearized 1") >= 0
callback(null, isOptimised)
optimisePDF: (src, dst, callback = (error) ->) ->
tmpOutput = dst + '.opt'
args = ["--linearize", src, tmpOutput]
logger.log args: args, "running qpdf command"
timer = new Metrics.Timer("qpdf")
proc = spawn("qpdf", args)
stdout = ""
proc.stdout.on "data", (chunk) ->
stdout += chunk.toString()
callback = _.once(callback) # avoid double call back for error and close event
proc.on "error", (err) ->
logger.warn {err, args}, "qpdf failed"
callback(null) # ignore the error
proc.on "close", (code) ->
timer.done()
if code != 0
logger.warn {code, args}, "qpdf returned error"
return callback(null) # ignore the error
fs.rename tmpOutput, dst, (err) ->
if err?
logger.warn {tmpOutput, dst}, "failed to rename output of qpdf command"
callback(null) # ignore the error

View file

@ -1,84 +0,0 @@
UrlCache = require "./UrlCache"
CompileManager = require "./CompileManager"
db = require "./db"
dbQueue = require "./DbQueue"
async = require "async"
logger = require "logger-sharelatex"
oneDay = 24 * 60 * 60 * 1000
Settings = require "settings-sharelatex"
module.exports = ProjectPersistenceManager =
EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5
markProjectAsJustAccessed: (project_id, callback = (error) ->) ->
job = (cb)->
db.Project.findOrCreate(where: {project_id: project_id})
.spread(
(project, created) ->
project.updateAttributes(lastAccessed: new Date())
.then(() -> cb())
.error cb
)
.error cb
dbQueue.queue.push(job, callback)
clearExpiredProjects: (callback = (error) ->) ->
ProjectPersistenceManager._findExpiredProjectIds (error, project_ids) ->
return callback(error) if error?
logger.log project_ids: project_ids, "clearing expired projects"
jobs = for project_id in (project_ids or [])
do (project_id) ->
(callback) ->
ProjectPersistenceManager.clearProjectFromCache project_id, (err) ->
if err?
logger.error err: err, project_id: project_id, "error clearing project"
callback()
async.series jobs, (error) ->
return callback(error) if error?
CompileManager.clearExpiredProjects ProjectPersistenceManager.EXPIRY_TIMEOUT, (error) ->
callback() # ignore any errors from deleting directories
clearProject: (project_id, user_id, callback = (error) ->) ->
logger.log project_id: project_id, user_id:user_id, "clearing project for user"
CompileManager.clearProject project_id, user_id, (error) ->
return callback(error) if error?
ProjectPersistenceManager.clearProjectFromCache project_id, (error) ->
return callback(error) if error?
callback()
clearProjectFromCache: (project_id, callback = (error) ->) ->
logger.log project_id: project_id, "clearing project from cache"
UrlCache.clearProject project_id, (error) ->
if error?
logger.err error:error, project_id: project_id, "error clearing project from cache"
return callback(error)
ProjectPersistenceManager._clearProjectFromDatabase project_id, (error) ->
if error?
logger.err error:error, project_id:project_id, "error clearing project from database"
callback(error)
_clearProjectFromDatabase: (project_id, callback = (error) ->) ->
logger.log project_id:project_id, "clearing project from database"
job = (cb)->
db.Project.destroy(where: {project_id: project_id})
.then(() -> cb())
.error cb
dbQueue.queue.push(job, callback)
_findExpiredProjectIds: (callback = (error, project_ids) ->) ->
job = (cb)->
keepProjectsFrom = new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT)
q = {}
q[db.op.lt] = keepProjectsFrom
db.Project.findAll(where:{lastAccessed:q})
.then((projects) ->
cb null, projects.map((project) -> project.project_id)
).error cb
dbQueue.queue.push(job, callback)
logger.log {EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT}, "project assets kept timeout"

View file

@ -1,128 +0,0 @@
settings = require("settings-sharelatex")
module.exports = RequestParser =
VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"]
MAX_TIMEOUT: 600
parse: (body, callback = (error, data) ->) ->
response = {}
if !body.compile?
return callback "top level object should have a compile attribute"
compile = body.compile
compile.options ||= {}
try
response.compiler = @_parseAttribute "compiler",
compile.options.compiler,
validValues: @VALID_COMPILERS
default: "pdflatex"
type: "string"
response.timeout = @_parseAttribute "timeout",
compile.options.timeout
default: RequestParser.MAX_TIMEOUT
type: "number"
response.imageName = @_parseAttribute "imageName",
compile.options.imageName,
type: "string"
response.draft = @_parseAttribute "draft",
compile.options.draft,
default: false,
type: "boolean"
response.check = @_parseAttribute "check",
compile.options.check,
type: "string"
response.flags = @_parseAttribute "flags",
compile.options.flags,
default: [],
type: "object"
# The syncType specifies whether the request contains all
# resources (full) or only those resources to be updated
# in-place (incremental).
response.syncType = @_parseAttribute "syncType",
compile.options.syncType,
validValues: ["full", "incremental"]
type: "string"
# The syncState is an identifier passed in with the request
# which has the property that it changes when any resource is
# added, deleted, moved or renamed.
#
# on syncType full the syncState identifier is passed in and
# stored
#
# on syncType incremental the syncState identifier must match
# the stored value
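# illustrative request fragment (hypothetical values):
#   { "compile": { "options": { "syncType": "incremental", "syncState": "abc123" } } }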
response.syncState = @_parseAttribute "syncState",
compile.options.syncState,
type: "string"
if response.timeout > RequestParser.MAX_TIMEOUT
response.timeout = RequestParser.MAX_TIMEOUT
response.timeout = response.timeout * 1000 # milliseconds
response.resources = (@_parseResource(resource) for resource in (compile.resources or []))
rootResourcePath = @_parseAttribute "rootResourcePath",
compile.rootResourcePath
default: "main.tex"
type: "string"
originalRootResourcePath = rootResourcePath
sanitizedRootResourcePath = RequestParser._sanitizePath(rootResourcePath)
response.rootResourcePath = RequestParser._checkPath(sanitizedRootResourcePath)
for resource in response.resources
if resource.path == originalRootResourcePath
resource.path = sanitizedRootResourcePath
catch error
return callback error
callback null, response
_parseResource: (resource) ->
if !resource.path? or typeof resource.path != "string"
throw "all resources should have a path attribute"
if resource.modified?
modified = new Date(resource.modified)
if isNaN(modified.getTime())
throw "resource modified date could not be understood: #{resource.modified}"
if !resource.url? and !resource.content?
throw "all resources should have either a url or content attribute"
if resource.content? and typeof resource.content != "string"
throw "content attribute should be a string"
if resource.url? and typeof resource.url != "string"
throw "url attribute should be a string"
return {
path: resource.path
modified: modified
url: resource.url
content: resource.content
}
_parseAttribute: (name, attribute, options) ->
if attribute?
if options.validValues?
if options.validValues.indexOf(attribute) == -1
throw "#{name} attribute should be one of: #{options.validValues.join(", ")}"
if options.type?
if typeof attribute != options.type
throw "#{name} attribute should be a #{options.type}"
else
return options.default if options.default?
return attribute
_sanitizePath: (path) ->
# See http://php.net/manual/en/function.escapeshellcmd.php
path.replace(/[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g, "")
_checkPath: (path) ->
# check that the request does not use a relative path
for dir in path.split('/')
if dir == '..'
throw "relative path in root resource"
return path

View file

@ -1,72 +0,0 @@
Path = require "path"
fs = require "fs"
logger = require "logger-sharelatex"
settings = require("settings-sharelatex")
Errors = require "./Errors"
SafeReader = require "./SafeReader"
module.exports = ResourceStateManager =
# The sync state is an identifier which must match for an
# incremental update to be allowed.
#
# The initial value is passed in and stored on a full
# compile, along with the list of resources.
#
# Subsequent incremental compiles must come with the same value - if
# not they will be rejected with a 409 Conflict response. The
# previous list of resources is returned.
#
# An incremental compile can only update existing files with new
# content. The sync state identifier must change if any docs or
# files are moved, added, deleted or renamed.
SYNC_STATE_FILE: ".project-sync-state"
SYNC_STATE_MAX_SIZE: 128*1024
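# Illustrative .project-sync-state contents as written by saveProjectState
# below (hypothetical paths); one resource path per line, then the hash:
#   main.tex
#   chapters/chapter1.tex
#   stateHash:abc123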
saveProjectState: (state, resources, basePath, callback = (error) ->) ->
stateFile = Path.join(basePath, @SYNC_STATE_FILE)
if not state? # remove the file if no state passed in
logger.log state:state, basePath:basePath, "clearing sync state"
fs.unlink stateFile, (err) ->
if err? and err.code isnt 'ENOENT'
return callback(err)
else
return callback()
else
logger.log state:state, basePath:basePath, "writing sync state"
resourceList = (resource.path for resource in resources)
fs.writeFile stateFile, [resourceList..., "stateHash:#{state}"].join("\n"), callback
checkProjectStateMatches: (state, basePath, callback = (error, resources) ->) ->
stateFile = Path.join(basePath, @SYNC_STATE_FILE)
size = @SYNC_STATE_MAX_SIZE
SafeReader.readFile stateFile, size, 'utf8', (err, result, bytesRead) ->
return callback(err) if err?
if bytesRead is size
logger.error file:stateFile, size:size, bytesRead:bytesRead, "project state file truncated"
[resourceList..., oldState] = result?.toString()?.split("\n") or []
newState = "stateHash:#{state}"
logger.log state:state, oldState: oldState, basePath:basePath, stateMatches: (newState is oldState), "checking sync state"
if newState isnt oldState
return callback new Errors.FilesOutOfSyncError("invalid state for incremental update")
else
resources = ({path: path} for path in resourceList)
callback(null, resources)
checkResourceFiles: (resources, allFiles, basePath, callback = (error) ->) ->
# check the paths are all relative to current directory
for file in resources or []
for dir in file?.path?.split('/')
if dir == '..'
return callback new Error("relative path in resource file list")
# check if any of the input files are not present in list of files
seenFile = {}
for file in allFiles
seenFile[file] = true
missingFiles = (resource.path for resource in resources when not seenFile[resource.path])
if missingFiles?.length > 0
logger.err missingFiles:missingFiles, basePath:basePath, allFiles:allFiles, resources:resources, "missing input files for project"
return callback new Errors.FilesOutOfSyncError("resource files missing in incremental update")
else
callback()

View file

@ -1,142 +0,0 @@
UrlCache = require "./UrlCache"
Path = require "path"
fs = require "fs"
async = require "async"
mkdirp = require "mkdirp"
OutputFileFinder = require "./OutputFileFinder"
ResourceStateManager = require "./ResourceStateManager"
Metrics = require "./Metrics"
logger = require "logger-sharelatex"
settings = require("settings-sharelatex")
parallelFileDownloads = settings.parallelFileDownloads or 1
module.exports = ResourceWriter =
syncResourcesToDisk: (request, basePath, callback = (error, resourceList) ->) ->
if request.syncType is "incremental"
logger.log project_id: request.project_id, user_id: request.user_id, "incremental sync"
ResourceStateManager.checkProjectStateMatches request.syncState, basePath, (error, resourceList) ->
return callback(error) if error?
ResourceWriter._removeExtraneousFiles resourceList, basePath, (error, outputFiles, allFiles) ->
return callback(error) if error?
ResourceStateManager.checkResourceFiles resourceList, allFiles, basePath, (error) ->
return callback(error) if error?
ResourceWriter.saveIncrementalResourcesToDisk request.project_id, request.resources, basePath, (error) ->
return callback(error) if error?
callback(null, resourceList)
else
logger.log project_id: request.project_id, user_id: request.user_id, "full sync"
@saveAllResourcesToDisk request.project_id, request.resources, basePath, (error) ->
return callback(error) if error?
ResourceStateManager.saveProjectState request.syncState, request.resources, basePath, (error) ->
return callback(error) if error?
callback(null, request.resources)
saveIncrementalResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) ->
@_createDirectory basePath, (error) =>
return callback(error) if error?
jobs = for resource in resources
do (resource) =>
(callback) => @_writeResourceToDisk(project_id, resource, basePath, callback)
async.parallelLimit jobs, parallelFileDownloads, callback
saveAllResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) ->
@_createDirectory basePath, (error) =>
return callback(error) if error?
@_removeExtraneousFiles resources, basePath, (error) =>
return callback(error) if error?
jobs = for resource in resources
do (resource) =>
(callback) => @_writeResourceToDisk(project_id, resource, basePath, callback)
async.parallelLimit jobs, parallelFileDownloads, callback
_createDirectory: (basePath, callback = (error) ->) ->
fs.mkdir basePath, (err) ->
if err?
if err.code is 'EEXIST'
return callback()
else
logger.log {err: err, dir:basePath}, "error creating directory"
return callback(err)
else
return callback()
_removeExtraneousFiles: (resources, basePath, _callback = (error, outputFiles, allFiles) ->) ->
timer = new Metrics.Timer("unlink-output-files")
callback = (error, result...) ->
timer.done()
_callback(error, result...)
OutputFileFinder.findOutputFiles resources, basePath, (error, outputFiles, allFiles) ->
return callback(error) if error?
jobs = []
for file in outputFiles or []
do (file) ->
path = file.path
should_delete = true
if path.match(/^output\./) or path.match(/\.aux$/) or path.match(/^cache\//) # knitr cache
should_delete = false
if path.match(/^output-.*/) # Tikz cached figures (default case)
should_delete = false
if path.match(/\.(pdf|dpth|md5)$/) # Tikz cached figures (by extension)
should_delete = false
if path.match(/\.(pygtex|pygstyle)$/) or path.match(/(^|\/)_minted-[^\/]+\//) # minted files/directory
should_delete = false
if path.match(/\.md\.tex$/) or path.match(/(^|\/)_markdown_[^\/]+\//) # markdown files/directory
should_delete = false
if path.match(/-eps-converted-to\.pdf$/) # Epstopdf generated files
should_delete = false
if path == "output.pdf" or path == "output.dvi" or path == "output.log" or path == "output.xdv"
should_delete = true
if path == "output.tex" # created by TikzManager if present in output files
should_delete = true
if should_delete
jobs.push (callback) -> ResourceWriter._deleteFileIfNotDirectory Path.join(basePath, path), callback
async.series jobs, (error) ->
return callback(error) if error?
callback(null, outputFiles, allFiles)
_deleteFileIfNotDirectory: (path, callback = (error) ->) ->
fs.stat path, (error, stat) ->
if error? and error.code is 'ENOENT'
return callback()
else if error?
logger.err {err: error, path: path}, "error stating file in deleteFileIfNotDirectory"
return callback(error)
else if stat.isFile()
fs.unlink path, (error) ->
if error?
logger.err {err: error, path: path}, "error removing file in deleteFileIfNotDirectory"
callback(error)
else
callback()
else
callback()
_writeResourceToDisk: (project_id, resource, basePath, callback = (error) ->) ->
ResourceWriter.checkPath basePath, resource.path, (error, path) ->
return callback(error) if error?
mkdirp Path.dirname(path), (error) ->
return callback(error) if error?
# TODO: Don't overwrite file if it hasn't been modified
if resource.url?
UrlCache.downloadUrlToFile project_id, resource.url, path, resource.modified, (err)->
if err?
logger.err err:err, project_id:project_id, path:path, resource_url:resource.url, modified:resource.modified, "error downloading file for resources"
callback() #try and continue compiling even if http resource can not be downloaded at this time
else
fs.writeFile path, resource.content, callback
checkPath: (basePath, resourcePath, callback) ->
path = Path.normalize(Path.join(basePath, resourcePath))
if (path.slice(0, basePath.length + 1) != basePath + "/")
return callback new Error("resource path is outside root directory")
else
return callback(null, path)

View file

@ -1,25 +0,0 @@
fs = require "fs"
logger = require "logger-sharelatex"
module.exports = SafeReader =
# safely read up to size bytes from a file and return result as a
# string
readFile: (file, size, encoding, callback = (error, result) ->) ->
fs.open file, 'r', (err, fd) ->
return callback() if err? and err.code is 'ENOENT'
return callback(err) if err?
# safely return always closing the file
callbackWithClose = (err, result...) ->
fs.close fd, (err1) ->
return callback(err) if err?
return callback(err1) if err1?
callback(null, result...)
buff = Buffer.alloc(size) # zero-filled buffer, avoids leaking uninitialised memory
fs.read fd, buff, 0, buff.length, 0, (err, bytesRead, buffer) ->
return callbackWithClose(err) if err?
result = buffer.toString(encoding, 0, bytesRead)
callbackWithClose(null, result, bytesRead)

View file

@ -1,41 +0,0 @@
Path = require("path")
fs = require("fs")
Settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
url = require "url"
module.exports = ForbidSymlinks = (staticFn, root, options) ->
expressStatic = staticFn root, options
basePath = Path.resolve(root)
return (req, res, next) ->
path = url.parse(req.url)?.pathname
# check that the path is of the form /project_id_or_name/path/to/file.log
if result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/)
project_id = result[1]
file = result[2]
else
logger.warn path: path, "unrecognized file request"
return res.sendStatus(404)
# check that the file does not use a relative path
for dir in file.split('/')
if dir == '..'
logger.warn path: path, "attempt to use a relative path"
return res.sendStatus(404)
# check that the requested path is normalized
requestedFsPath = "#{basePath}/#{project_id}/#{file}"
if requestedFsPath != Path.normalize(requestedFsPath)
logger.error path: requestedFsPath, "requestedFsPath is not normalized"
return res.sendStatus(404)
# check that the requested path is not a symlink
fs.realpath requestedFsPath, (err, realFsPath)->
if err?
if err.code == 'ENOENT'
return res.sendStatus(404)
else
logger.error err:err, requestedFsPath:requestedFsPath, realFsPath:realFsPath, path: req.params[0], project_id: req.params.project_id, "error checking file access"
return res.sendStatus(500)
else if requestedFsPath != realFsPath
logger.warn requestedFsPath:requestedFsPath, realFsPath:realFsPath, path: req.params[0], project_id: req.params.project_id, "trying to access a different file (symlink), aborting"
return res.sendStatus(404)
else
expressStatic(req, res, next)

View file

@ -1,37 +0,0 @@
fs = require "fs"
Path = require "path"
ResourceWriter = require "./ResourceWriter"
SafeReader = require "./SafeReader"
logger = require "logger-sharelatex"
# for \tikzexternalize or pstool to work the main file needs to match the
# jobname. Since we set the -jobname to output, we have to create a
# copy of the main file as 'output.tex'.
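# e.g. with -jobname=output the externalisation re-compile derives the
# main file name from \jobname ("output"), so a copy must exist as
# output.tex (a sketch of the rationale; see checkMainFile below)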
module.exports = TikzManager =
checkMainFile: (compileDir, mainFile, resources, callback = (error, needsMainFile) ->) ->
# if there's already an output.tex file, we don't want to touch it
for resource in resources
if resource.path is "output.tex"
logger.log compileDir: compileDir, mainFile: mainFile, "output.tex already in resources"
return callback(null, false)
# if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
ResourceWriter.checkPath compileDir, mainFile, (error, path) ->
return callback(error) if error?
SafeReader.readFile path, 65536, "utf8", (error, content) ->
return callback(error) if error?
usesTikzExternalize = content?.indexOf("\\tikzexternalize") >= 0
usesPsTool = content?.indexOf("{pstool}") >= 0
logger.log compileDir: compileDir, mainFile: mainFile, usesTikzExternalize:usesTikzExternalize, usesPsTool: usesPsTool, "checked for packages needing main file as output.tex"
needsMainFile = (usesTikzExternalize || usesPsTool)
callback null, needsMainFile
injectOutputFile: (compileDir, mainFile, callback = (error) ->) ->
ResourceWriter.checkPath compileDir, mainFile, (error, path) ->
return callback(error) if error?
fs.readFile path, "utf8", (error, content) ->
return callback(error) if error?
logger.log compileDir: compileDir, mainFile: mainFile, "copied file to output.tex as project uses packages which require it"
# use wx flag to ensure that output file does not already exist
fs.writeFile Path.join(compileDir, "output.tex"), content, {flag:'wx'}, callback

View file

@ -1,134 +0,0 @@
db = require("./db")
dbQueue = require "./DbQueue"
UrlFetcher = require("./UrlFetcher")
Settings = require("settings-sharelatex")
crypto = require("crypto")
fs = require("fs")
logger = require "logger-sharelatex"
async = require "async"
module.exports = UrlCache =
downloadUrlToFile: (project_id, url, destPath, lastModified, callback = (error) ->) ->
UrlCache._ensureUrlIsInCache project_id, url, lastModified, (error, pathToCachedUrl) =>
return callback(error) if error?
UrlCache._copyFile pathToCachedUrl, destPath, (error) ->
if error?
UrlCache._clearUrlDetails project_id, url, () ->
callback(error)
else
callback(error)
clearProject: (project_id, callback = (error) ->) ->
UrlCache._findAllUrlsInProject project_id, (error, urls) ->
logger.log project_id: project_id, url_count: urls.length, "clearing project URLs"
return callback(error) if error?
jobs = for url in (urls or [])
do (url) ->
(callback) ->
UrlCache._clearUrlFromCache project_id, url, (error) ->
if error?
logger.error err: error, project_id: project_id, url: url, "error clearing project URL"
callback()
async.series jobs, callback
_ensureUrlIsInCache: (project_id, url, lastModified, callback = (error, pathOnDisk) ->) ->
if lastModified?
# MySQL only stores dates to an accuracy of a second, but the incoming lastModified may have milliseconds.
# So round down to seconds
lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
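# e.g. 2020-04-23T13:56:33.789Z rounds down to 2020-04-23T13:56:33.000Z (illustrative)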
UrlCache._doesUrlNeedDownloading project_id, url, lastModified, (error, needsDownloading) =>
return callback(error) if error?
if needsDownloading
logger.log url: url, lastModified: lastModified, "downloading URL"
UrlFetcher.pipeUrlToFile url, UrlCache._cacheFilePathForUrl(project_id, url), (error) =>
return callback(error) if error?
UrlCache._updateOrCreateUrlDetails project_id, url, lastModified, (error) =>
return callback(error) if error?
callback null, UrlCache._cacheFilePathForUrl(project_id, url)
else
logger.log url: url, lastModified: lastModified, "URL is up to date in cache"
callback null, UrlCache._cacheFilePathForUrl(project_id, url)
_doesUrlNeedDownloading: (project_id, url, lastModified, callback = (error, needsDownloading) ->) ->
if !lastModified?
return callback null, true
UrlCache._findUrlDetails project_id, url, (error, urlDetails) ->
return callback(error) if error?
if !urlDetails? or !urlDetails.lastModified? or urlDetails.lastModified.getTime() < lastModified.getTime()
return callback null, true
else
return callback null, false
_cacheFileNameForUrl: (project_id, url) ->
project_id + ":" + crypto.createHash("md5").update(url).digest("hex")
_cacheFilePathForUrl: (project_id, url) ->
"#{Settings.path.clsiCacheDir}/#{UrlCache._cacheFileNameForUrl(project_id, url)}"
_copyFile: (from, to, _callback = (error) ->) ->
callbackOnce = (error) ->
if error?
logger.error err: error, from:from, to:to, "error copying file from cache"
_callback(error)
_callback = () ->
writeStream = fs.createWriteStream(to)
readStream = fs.createReadStream(from)
writeStream.on "error", callbackOnce
readStream.on "error", callbackOnce
writeStream.on "close", callbackOnce
writeStream.on "open", () ->
readStream.pipe(writeStream)
_clearUrlFromCache: (project_id, url, callback = (error) ->) ->
UrlCache._clearUrlDetails project_id, url, (error) ->
return callback(error) if error?
UrlCache._deleteUrlCacheFromDisk project_id, url, (error) ->
return callback(error) if error?
callback null
_deleteUrlCacheFromDisk: (project_id, url, callback = (error) ->) ->
fs.unlink UrlCache._cacheFilePathForUrl(project_id, url), (error) ->
if error? and error.code != 'ENOENT' # no error if the file isn't present
return callback(error)
else
return callback()
_findUrlDetails: (project_id, url, callback = (error, urlDetails) ->) ->
job = (cb)->
db.UrlCache.find(where: { url: url, project_id: project_id })
.then((urlDetails) -> cb null, urlDetails)
.error cb
dbQueue.queue.push job, callback
_updateOrCreateUrlDetails: (project_id, url, lastModified, callback = (error) ->) ->
job = (cb)->
db.UrlCache.findOrCreate(where: {url: url, project_id: project_id})
.spread(
(urlDetails, created) ->
urlDetails.updateAttributes(lastModified: lastModified)
.then(() -> cb())
.error(cb)
)
.error cb
dbQueue.queue.push(job, callback)
_clearUrlDetails: (project_id, url, callback = (error) ->) ->
job = (cb)->
db.UrlCache.destroy(where: {url: url, project_id: project_id})
.then(() -> cb null)
.error cb
dbQueue.queue.push(job, callback)
_findAllUrlsInProject: (project_id, callback = (error, urls) ->) ->
job = (cb)->
db.UrlCache.findAll(where: { project_id: project_id })
.then(
(urlEntries) ->
cb null, urlEntries.map((entry) -> entry.url)
)
.error cb
dbQueue.queue.push(job, callback)

View file

@ -1,70 +0,0 @@
request = require("request").defaults(jar: false)
fs = require("fs")
logger = require "logger-sharelatex"
settings = require("settings-sharelatex")
URL = require('url')
oneMinute = 60 * 1000
module.exports = UrlFetcher =
pipeUrlToFile: (url, filePath, _callback = (error) ->) ->
callbackOnce = (error) ->
clearTimeout timeoutHandler if timeoutHandler?
_callback(error)
_callback = () ->
if settings.filestoreDomainOveride?
p = URL.parse(url).path
url = "#{settings.filestoreDomainOveride}#{p}"
timeoutHandler = setTimeout () ->
timeoutHandler = null
logger.error url:url, filePath: filePath, "Timed out downloading file to cache"
callbackOnce(new Error("Timed out downloading file to cache #{url}"))
# FIXME: maybe need to close fileStream here
, 3 * oneMinute
logger.log url:url, filePath: filePath, "started downloading url to cache"
urlStream = request.get({url: url, timeout: oneMinute})
urlStream.pause() # stop data flowing until we are ready
# attach handlers before setting up pipes
urlStream.on "error", (error) ->
logger.error err: error, url:url, filePath: filePath, "error downloading url"
callbackOnce(error or new Error("Something went wrong downloading the URL #{url}"))
urlStream.on "end", () ->
logger.log url:url, filePath: filePath, "finished downloading file into cache"
urlStream.on "response", (res) ->
if res.statusCode >= 200 and res.statusCode < 300
fileStream = fs.createWriteStream(filePath)
# attach handlers before setting up pipes
fileStream.on 'error', (error) ->
logger.error err: error, url:url, filePath: filePath, "error writing file into cache"
fs.unlink filePath, (err) ->
if err?
logger.err err: err, filePath: filePath, "error deleting file from cache"
callbackOnce(error)
fileStream.on 'finish', () ->
logger.log url:url, filePath: filePath, "finished writing file into cache"
callbackOnce()
fileStream.on 'pipe', () ->
logger.log url:url, filePath: filePath, "piping into filestream"
urlStream.pipe(fileStream)
urlStream.resume() # now we are ready to handle the data
else
logger.error statusCode: res.statusCode, url:url, filePath: filePath, "unexpected status code downloading url to cache"
# https://nodejs.org/api/http.html#http_class_http_clientrequest
# If you add a 'response' event handler, then you must consume
# the data from the response object, either by calling
# response.read() whenever there is a 'readable' event, or by
# adding a 'data' handler, or by calling the .resume()
# method. Until the data is consumed, the 'end' event will not
# fire. Also, until the data is read it will consume memory
# that can eventually lead to a 'process out of memory' error.
urlStream.resume() # discard the data
callbackOnce(new Error("URL returned non-success status code: #{res.statusCode} #{url}"))

View file

@ -1,55 +0,0 @@
Sequelize = require("sequelize")
Settings = require("settings-sharelatex")
_ = require("underscore")
logger = require "logger-sharelatex"
options = _.extend {logging:false}, Settings.mysql.clsi
logger.log dbPath:Settings.mysql.clsi.storage, "connecting to db"
sequelize = new Sequelize(
Settings.mysql.clsi.database,
Settings.mysql.clsi.username,
Settings.mysql.clsi.password,
options
)
if Settings.mysql.clsi.dialect == "sqlite"
logger.log "running PRAGMA journal_mode=WAL;"
sequelize.query("PRAGMA journal_mode=WAL;")
sequelize.query("PRAGMA synchronous=OFF;")
sequelize.query("PRAGMA read_uncommitted = true;")
module.exports =
UrlCache: sequelize.define("UrlCache", {
url: Sequelize.STRING
project_id: Sequelize.STRING
lastModified: Sequelize.DATE
}, {
indexes: [
{fields: ['url', 'project_id']},
{fields: ['project_id']}
]
})
Project: sequelize.define("Project", {
project_id: {type: Sequelize.STRING, primaryKey: true}
lastAccessed: Sequelize.DATE
}, {
indexes: [
{fields: ['lastAccessed']}
]
})
op: Sequelize.Op
sync: () ->
logger.log dbPath:Settings.mysql.clsi.storage, "syncing db schema"
sequelize.sync()
.then(->
logger.log "db sync complete"
).catch((err)->
logger.err err: err, "error syncing"
)

View file

@ -0,0 +1,20 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
* decaffeinate suggestions:
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let commandRunnerPath
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
if ((Settings.clsi != null ? Settings.clsi.dockerRunner : undefined) === true) {
commandRunnerPath = './DockerRunner'
} else {
commandRunnerPath = './LocalCommandRunner'
}
logger.info({ commandRunnerPath }, 'selecting command runner for clsi')
const CommandRunner = require(commandRunnerPath)
module.exports = CommandRunner

View file

@ -0,0 +1,238 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let CompileController
const RequestParser = require('./RequestParser')
const CompileManager = require('./CompileManager')
const Settings = require('settings-sharelatex')
const Metrics = require('./Metrics')
const ProjectPersistenceManager = require('./ProjectPersistenceManager')
const logger = require('logger-sharelatex')
const Errors = require('./Errors')
module.exports = CompileController = {
compile(req, res, next) {
if (next == null) {
next = function(error) {}
}
const timer = new Metrics.Timer('compile-request')
return RequestParser.parse(req.body, function(error, request) {
if (error != null) {
return next(error)
}
request.project_id = req.params.project_id
if (req.params.user_id != null) {
request.user_id = req.params.user_id
}
return ProjectPersistenceManager.markProjectAsJustAccessed(
request.project_id,
function(error) {
if (error != null) {
return next(error)
}
return CompileManager.doCompileWithLock(request, function(
error,
outputFiles
) {
let code, status
if (outputFiles == null) {
outputFiles = []
}
if (error instanceof Errors.AlreadyCompilingError) {
code = 423 // Http 423 Locked
status = 'compile-in-progress'
} else if (error instanceof Errors.FilesOutOfSyncError) {
code = 409 // Http 409 Conflict
status = 'retry'
} else if (error != null ? error.terminated : undefined) {
status = 'terminated'
} else if (error != null ? error.validate : undefined) {
status = `validation-${error.validate}`
} else if (error != null ? error.timedout : undefined) {
status = 'timedout'
logger.log(
{ err: error, project_id: request.project_id },
'timeout running compile'
)
} else if (error != null) {
status = 'error'
code = 500
logger.warn(
{ err: error, project_id: request.project_id },
'error running compile'
)
} else {
let file
status = 'failure'
for (file of Array.from(outputFiles)) {
if (
file.path != null
? file.path.match(/output\.pdf$/)
: undefined
) {
status = 'success'
}
}
if (status === 'failure') {
logger.warn(
{ project_id: request.project_id, outputFiles },
'project failed to compile successfully, no output.pdf generated'
)
}
// log an error if any core files are found
for (file of Array.from(outputFiles)) {
if (file.path === 'core') {
logger.error(
{ project_id: request.project_id, req, outputFiles },
'core file found in output'
)
}
}
}
if (error != null) {
outputFiles = error.outputFiles || []
}
timer.done()
return res.status(code || 200).send({
compile: {
status,
error: (error != null ? error.message : undefined) || error,
outputFiles: outputFiles.map(file => ({
url:
`${Settings.apis.clsi.url}/project/${request.project_id}` +
(request.user_id != null
? `/user/${request.user_id}`
: '') +
(file.build != null ? `/build/${file.build}` : '') +
`/output/${file.path}`,
path: file.path,
type: file.type,
build: file.build
}))
}
})
})
}
)
})
},
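// Illustrative success response (hypothetical ids and build token):
//   { "compile": { "status": "success", "error": null,
//     "outputFiles": [ { "url": "<clsi-url>/project/<id>/build/<build>/output/output.pdf",
//       "path": "output.pdf", "type": "pdf", "build": "<build>" } ] } }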
stopCompile(req, res, next) {
const { project_id, user_id } = req.params
return CompileManager.stopCompile(project_id, user_id, function(error) {
if (error != null) {
return next(error)
}
return res.sendStatus(204)
})
},
clearCache(req, res, next) {
if (next == null) {
next = function(error) {}
}
return ProjectPersistenceManager.clearProject(
req.params.project_id,
req.params.user_id,
function(error) {
if (error != null) {
return next(error)
}
return res.sendStatus(204) // 204 No Content
}
)
},
syncFromCode(req, res, next) {
if (next == null) {
next = function(error) {}
}
const { file } = req.query
const line = parseInt(req.query.line, 10)
const column = parseInt(req.query.column, 10)
const { project_id } = req.params
const { user_id } = req.params
return CompileManager.syncFromCode(
project_id,
user_id,
file,
line,
column,
function(error, pdfPositions) {
if (error != null) {
return next(error)
}
return res.json({
pdf: pdfPositions
})
}
)
},
syncFromPdf(req, res, next) {
if (next == null) {
next = function(error) {}
}
const page = parseInt(req.query.page, 10)
const h = parseFloat(req.query.h)
const v = parseFloat(req.query.v)
const { project_id } = req.params
const { user_id } = req.params
return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(
error,
codePositions
) {
if (error != null) {
return next(error)
}
return res.json({
code: codePositions
})
})
},
wordcount(req, res, next) {
if (next == null) {
next = function(error) {}
}
const file = req.query.file || 'main.tex'
const { project_id } = req.params
const { user_id } = req.params
const { image } = req.query
logger.log({ image, file, project_id }, 'word count request')
return CompileManager.wordcount(project_id, user_id, file, image, function(
error,
result
) {
if (error != null) {
return next(error)
}
return res.json({
texcount: result
})
})
},
status(req, res, next) {
if (next == null) {
next = function(error) {}
}
return res.send('OK')
}
}

View file

@ -0,0 +1,705 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
no-undef,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let CompileManager
const ResourceWriter = require('./ResourceWriter')
const LatexRunner = require('./LatexRunner')
const OutputFileFinder = require('./OutputFileFinder')
const OutputCacheManager = require('./OutputCacheManager')
const Settings = require('settings-sharelatex')
const Path = require('path')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const child_process = require('child_process')
const DraftModeManager = require('./DraftModeManager')
const TikzManager = require('./TikzManager')
const LockManager = require('./LockManager')
const fs = require('fs')
const fse = require('fs-extra')
const os = require('os')
const async = require('async')
const Errors = require('./Errors')
const CommandRunner = require('./CommandRunner')
const getCompileName = function(project_id, user_id) {
if (user_id != null) {
return `${project_id}-${user_id}`
} else {
return project_id
}
}
const getCompileDir = (project_id, user_id) =>
Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id))
module.exports = CompileManager = {
doCompileWithLock(request, callback) {
if (callback == null) {
callback = function(error, outputFiles) {}
}
const compileDir = getCompileDir(request.project_id, request.user_id)
const lockFile = Path.join(compileDir, '.project-lock')
// use a .project-lock file in the compile directory to prevent
// simultaneous compiles
return fse.ensureDir(compileDir, function(error) {
if (error != null) {
return callback(error)
}
return LockManager.runWithLock(
lockFile,
releaseLock => CompileManager.doCompile(request, releaseLock),
callback
)
})
},
doCompile(request, callback) {
if (callback == null) {
callback = function(error, outputFiles) {}
}
const compileDir = getCompileDir(request.project_id, request.user_id)
let timer = new Metrics.Timer('write-to-disk')
logger.log(
{ project_id: request.project_id, user_id: request.user_id },
'syncing resources to disk'
)
return ResourceWriter.syncResourcesToDisk(request, compileDir, function(
error,
resourceList
) {
// NOTE: resourceList is insecure, it should only be used to exclude files from the output list
if (error != null && error instanceof Errors.FilesOutOfSyncError) {
logger.warn(
{ project_id: request.project_id, user_id: request.user_id },
'files out of sync, please retry'
)
return callback(error)
} else if (error != null) {
logger.err(
{
err: error,
project_id: request.project_id,
user_id: request.user_id
},
'error writing resources to disk'
)
return callback(error)
}
logger.log(
{
project_id: request.project_id,
user_id: request.user_id,
time_taken: Date.now() - timer.start
},
'written files to disk'
)
timer.done()
const injectDraftModeIfRequired = function(callback) {
if (request.draft) {
return DraftModeManager.injectDraftMode(
Path.join(compileDir, request.rootResourcePath),
callback
)
} else {
return callback()
}
}
const createTikzFileIfRequired = callback =>
TikzManager.checkMainFile(
compileDir,
request.rootResourcePath,
resourceList,
function(error, needsMainFile) {
if (error != null) {
return callback(error)
}
if (needsMainFile) {
return TikzManager.injectOutputFile(
compileDir,
request.rootResourcePath,
callback
)
} else {
return callback()
}
}
)
// set up environment variables for chktex
const env = {}
// only run chktex on LaTeX files (not knitr .Rtex files or any others)
const isLaTeXFile =
request.rootResourcePath != null
? request.rootResourcePath.match(/\.tex$/i)
: undefined
if (request.check != null && isLaTeXFile) {
env.CHKTEX_OPTIONS = '-nall -e9 -e10 -w15 -w16'
env.CHKTEX_ULIMIT_OPTIONS = '-t 5 -v 64000'
if (request.check === 'error') {
env.CHKTEX_EXIT_ON_ERROR = 1
}
if (request.check === 'validate') {
env.CHKTEX_VALIDATE = 1
}
}
// apply a series of file modifications/creations for draft mode and tikz
return async.series(
[injectDraftModeIfRequired, createTikzFileIfRequired],
function(error) {
if (error != null) {
return callback(error)
}
timer = new Metrics.Timer('run-compile')
// find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite)
let tag =
__guard__(
__guard__(
request.imageName != null
? request.imageName.match(/:(.*)/)
: undefined,
x1 => x1[1]
),
x => x.replace(/\./g, '-')
) || 'default'
if (!request.project_id.match(/^[0-9a-f]{24}$/)) {
                tag = 'other' // exclude smoke test
              }
Metrics.inc('compiles')
Metrics.inc(`compiles-with-image.${tag}`)
const compileName = getCompileName(
request.project_id,
request.user_id
)
return LatexRunner.runLatex(
compileName,
{
directory: compileDir,
mainFile: request.rootResourcePath,
compiler: request.compiler,
timeout: request.timeout,
image: request.imageName,
flags: request.flags,
environment: env
},
function(error, output, stats, timings) {
// request was for validation only
let metric_key, metric_value
if (request.check === 'validate') {
const result = (error != null
? error.code
: undefined)
? 'fail'
: 'pass'
error = new Error('validation')
error.validate = result
}
// request was for compile, and failed on validation
if (
request.check === 'error' &&
(error != null ? error.message : undefined) === 'exited'
) {
error = new Error('compilation')
error.validate = 'fail'
}
// compile was killed by user, was a validation, or a compile which failed validation
if (
(error != null ? error.terminated : undefined) ||
(error != null ? error.validate : undefined) ||
(error != null ? error.timedout : undefined)
) {
OutputFileFinder.findOutputFiles(
resourceList,
compileDir,
function(err, outputFiles) {
if (err != null) {
return callback(err)
}
error.outputFiles = outputFiles // return output files so user can check logs
return callback(error)
}
)
return
}
// compile completed normally
if (error != null) {
return callback(error)
}
Metrics.inc('compiles-succeeded')
const object = stats || {}
for (metric_key in object) {
metric_value = object[metric_key]
Metrics.count(metric_key, metric_value)
}
const object1 = timings || {}
for (metric_key in object1) {
metric_value = object1[metric_key]
Metrics.timing(metric_key, metric_value)
}
const loadavg =
typeof os.loadavg === 'function' ? os.loadavg() : undefined
if (loadavg != null) {
Metrics.gauge('load-avg', loadavg[0])
}
const ts = timer.done()
logger.log(
{
project_id: request.project_id,
user_id: request.user_id,
time_taken: ts,
stats,
timings,
loadavg
},
'done compile'
)
if ((stats != null ? stats['latex-runs'] : undefined) > 0) {
Metrics.timing('run-compile-per-pass', ts / stats['latex-runs'])
}
if (
(stats != null ? stats['latex-runs'] : undefined) > 0 &&
(timings != null ? timings['cpu-time'] : undefined) > 0
) {
Metrics.timing(
'run-compile-cpu-time-per-pass',
timings['cpu-time'] / stats['latex-runs']
)
}
return OutputFileFinder.findOutputFiles(
resourceList,
compileDir,
function(error, outputFiles) {
if (error != null) {
return callback(error)
}
return OutputCacheManager.saveOutputFiles(
outputFiles,
compileDir,
(error, newOutputFiles) => callback(null, newOutputFiles)
)
}
)
}
)
}
)
})
},
stopCompile(project_id, user_id, callback) {
if (callback == null) {
callback = function(error) {}
}
const compileName = getCompileName(project_id, user_id)
return LatexRunner.killLatex(compileName, callback)
},
clearProject(project_id, user_id, _callback) {
if (_callback == null) {
_callback = function(error) {}
}
const callback = function(error) {
_callback(error)
return (_callback = function() {})
}
const compileDir = getCompileDir(project_id, user_id)
return CompileManager._checkDirectory(compileDir, function(err, exists) {
if (err != null) {
return callback(err)
}
      if (!exists) {
        return callback() // skip removal if no directory present
      }
const proc = child_process.spawn('rm', ['-r', compileDir])
proc.on('error', callback)
let stderr = ''
proc.stderr.on('data', chunk => (stderr += chunk.toString()))
return proc.on('close', function(code) {
if (code === 0) {
return callback(null)
} else {
return callback(new Error(`rm -r ${compileDir} failed: ${stderr}`))
}
})
})
},
_findAllDirs(callback) {
if (callback == null) {
callback = function(error, allDirs) {}
}
const root = Settings.path.compilesDir
return fs.readdir(root, function(err, files) {
if (err != null) {
return callback(err)
}
const allDirs = Array.from(files).map(file => Path.join(root, file))
return callback(null, allDirs)
})
},
clearExpiredProjects(max_cache_age_ms, callback) {
if (callback == null) {
callback = function(error) {}
}
const now = Date.now()
// action for each directory
const expireIfNeeded = (checkDir, cb) =>
fs.stat(checkDir, function(err, stats) {
if (err != null) {
return cb()
} // ignore errors checking directory
const age = now - stats.mtime
const hasExpired = age > max_cache_age_ms
if (hasExpired) {
return fse.remove(checkDir, cb)
} else {
return cb()
}
})
// iterate over all project directories
return CompileManager._findAllDirs(function(error, allDirs) {
if (error != null) {
return callback()
}
return async.eachSeries(allDirs, expireIfNeeded, callback)
})
},
_checkDirectory(compileDir, callback) {
if (callback == null) {
callback = function(error, exists) {}
}
return fs.lstat(compileDir, function(err, stats) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
return callback(null, false) // directory does not exist
} else if (err != null) {
logger.err(
{ dir: compileDir, err },
'error on stat of project directory for removal'
)
return callback(err)
} else if (!(stats != null ? stats.isDirectory() : undefined)) {
logger.err(
{ dir: compileDir, stats },
'bad project directory for removal'
)
return callback(new Error('project directory is not directory'))
} else {
        return callback(null, true) // directory exists
      }
    })
  },
syncFromCode(project_id, user_id, file_name, line, column, callback) {
// If LaTeX was run in a virtual environment, the file path that synctex expects
// might not match the file path on the host. The .synctex.gz file however, will be accessed
// wherever it is on the host.
if (callback == null) {
callback = function(error, pdfPositions) {}
}
const compileName = getCompileName(project_id, user_id)
const base_dir = Settings.path.synctexBaseDir(compileName)
const file_path = base_dir + '/' + file_name
const compileDir = getCompileDir(project_id, user_id)
const synctex_path = `${base_dir}/output.pdf`
const command = ['code', synctex_path, file_path, line, column]
return fse.ensureDir(compileDir, function(error) {
if (error != null) {
logger.err(
{ error, project_id, user_id, file_name },
'error ensuring dir for sync from code'
)
return callback(error)
}
return CompileManager._runSynctex(project_id, user_id, command, function(
error,
stdout
) {
if (error != null) {
return callback(error)
}
logger.log(
{ project_id, user_id, file_name, line, column, command, stdout },
'synctex code output'
)
return callback(
null,
CompileManager._parseSynctexFromCodeOutput(stdout)
)
})
})
},
syncFromPdf(project_id, user_id, page, h, v, callback) {
if (callback == null) {
callback = function(error, filePositions) {}
}
const compileName = getCompileName(project_id, user_id)
const compileDir = getCompileDir(project_id, user_id)
const base_dir = Settings.path.synctexBaseDir(compileName)
const synctex_path = `${base_dir}/output.pdf`
const command = ['pdf', synctex_path, page, h, v]
return fse.ensureDir(compileDir, function(error) {
if (error != null) {
logger.err(
          { error, project_id, user_id },
'error ensuring dir for sync to code'
)
return callback(error)
}
return CompileManager._runSynctex(project_id, user_id, command, function(
error,
stdout
) {
if (error != null) {
return callback(error)
}
logger.log(
{ project_id, user_id, page, h, v, stdout },
'synctex pdf output'
)
return callback(
null,
CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)
)
})
})
},
_checkFileExists(path, callback) {
if (callback == null) {
callback = function(error) {}
}
const synctexDir = Path.dirname(path)
const synctexFile = Path.join(synctexDir, 'output.synctex.gz')
return fs.stat(synctexDir, function(error, stats) {
if ((error != null ? error.code : undefined) === 'ENOENT') {
return callback(
new Errors.NotFoundError('called synctex with no output directory')
)
}
if (error != null) {
return callback(error)
}
return fs.stat(synctexFile, function(error, stats) {
if ((error != null ? error.code : undefined) === 'ENOENT') {
return callback(
new Errors.NotFoundError('called synctex with no output file')
)
}
if (error != null) {
return callback(error)
}
if (!(stats != null ? stats.isFile() : undefined)) {
return callback(new Error('not a file'))
}
return callback()
})
})
},
_runSynctex(project_id, user_id, command, callback) {
if (callback == null) {
callback = function(error, stdout) {}
}
const seconds = 1000
command.unshift('/opt/synctex')
const directory = getCompileDir(project_id, user_id)
    const timeout = 60 * seconds // increased to allow for large projects
const compileName = getCompileName(project_id, user_id)
return CommandRunner.run(
compileName,
command,
directory,
Settings.clsi != null ? Settings.clsi.docker.image : undefined,
timeout,
{},
function(error, output) {
if (error != null) {
logger.err(
{ err: error, command, project_id, user_id },
'error running synctex'
)
return callback(error)
}
return callback(null, output.stdout)
}
)
},
_parseSynctexFromCodeOutput(output) {
const results = []
for (const line of Array.from(output.split('\n'))) {
const [node, page, h, v, width, height] = Array.from(line.split('\t'))
if (node === 'NODE') {
results.push({
page: parseInt(page, 10),
h: parseFloat(h),
v: parseFloat(v),
height: parseFloat(height),
width: parseFloat(width)
})
}
}
return results
},
_parseSynctexFromPdfOutput(output, base_dir) {
const results = []
for (let line of Array.from(output.split('\n'))) {
let column, file_path, node
;[node, file_path, line, column] = Array.from(line.split('\t'))
if (node === 'NODE') {
const file = file_path.slice(base_dir.length + 1)
results.push({
file,
line: parseInt(line, 10),
column: parseInt(column, 10)
})
}
}
return results
},
wordcount(project_id, user_id, file_name, image, callback) {
if (callback == null) {
      callback = function(error, results) {}
}
logger.log({ project_id, user_id, file_name, image }, 'running wordcount')
const file_path = `$COMPILE_DIR/${file_name}`
const command = [
'texcount',
'-nocol',
'-inc',
file_path,
`-out=${file_path}.wc`
]
const compileDir = getCompileDir(project_id, user_id)
const timeout = 60 * 1000
const compileName = getCompileName(project_id, user_id)
return fse.ensureDir(compileDir, function(error) {
if (error != null) {
logger.err(
{ error, project_id, user_id, file_name },
'error ensuring dir for sync from code'
)
return callback(error)
}
return CommandRunner.run(
compileName,
command,
compileDir,
image,
timeout,
{},
function(error) {
if (error != null) {
return callback(error)
}
return fs.readFile(
compileDir + '/' + file_name + '.wc',
'utf-8',
function(err, stdout) {
if (err != null) {
                  // log it as node_err so Sentry doesn't treat the random
                  // path in the error as a unique id, which would make the
                  // event impossible to ignore
logger.err(
{ node_err: err, command, compileDir, project_id, user_id },
'error reading word count output'
)
return callback(err)
}
const results = CompileManager._parseWordcountFromOutput(stdout)
logger.log(
{ project_id, user_id, wordcount: results },
'word count results'
)
return callback(null, results)
}
)
}
)
})
},
_parseWordcountFromOutput(output) {
const results = {
encode: '',
textWords: 0,
headWords: 0,
outside: 0,
headers: 0,
elements: 0,
mathInline: 0,
mathDisplay: 0,
errors: 0,
messages: ''
}
for (const line of Array.from(output.split('\n'))) {
const [data, info] = Array.from(line.split(':'))
if (data.indexOf('Encoding') > -1) {
results.encode = info.trim()
}
if (data.indexOf('in text') > -1) {
results.textWords = parseInt(info, 10)
}
if (data.indexOf('in head') > -1) {
results.headWords = parseInt(info, 10)
}
if (data.indexOf('outside') > -1) {
results.outside = parseInt(info, 10)
}
if (data.indexOf('of head') > -1) {
results.headers = parseInt(info, 10)
}
if (data.indexOf('Number of floats/tables/figures') > -1) {
results.elements = parseInt(info, 10)
}
if (data.indexOf('Number of math inlines') > -1) {
results.mathInline = parseInt(info, 10)
}
if (data.indexOf('Number of math displayed') > -1) {
results.mathDisplay = parseInt(info, 10)
}
if (data === '(errors') {
// errors reported as (errors:123)
results.errors = parseInt(info, 10)
}
if (line.indexOf('!!! ') > -1) {
// errors logged as !!! message !!!
results.messages += line + '\n'
}
}
return results
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
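// Illustrative (not part of this module): _parseSynctexFromCodeOutput expects
// tab-separated NODE records on stdout, one per match, e.g.
//
//   CompileManager._parseSynctexFromCodeOutput('NODE\t1\t100.5\t200.25\t50\t10\n')
//   // => [{ page: 1, h: 100.5, v: 200.25, height: 10, width: 50 }]
//
// _parseSynctexFromPdfOutput works the same way on NODE\tfile\tline\tcolumn
// records, stripping base_dir from the front of the file path.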

View file

@ -0,0 +1,38 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let ContentTypeMapper
const Path = require('path')
// here we coerce html, css, js and svg to text/plain,
// otherwise choose the correct mime type based on the file extension,
// falling back to octet-stream
module.exports = ContentTypeMapper = {
map(path) {
switch (Path.extname(path)) {
case '.txt':
case '.html':
case '.js':
case '.css':
case '.svg':
return 'text/plain'
case '.csv':
return 'text/csv'
case '.pdf':
return 'application/pdf'
case '.png':
return 'image/png'
case '.jpg':
case '.jpeg':
return 'image/jpeg'
case '.tiff':
return 'image/tiff'
case '.gif':
return 'image/gif'
default:
return 'application/octet-stream'
}
}
}
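// Illustrative usage (not part of this module): the mapping is driven purely
// by Path.extname, and the switch is case-sensitive, so:
//
//   ContentTypeMapper.map('out/main.js') // => 'text/plain'
//   ContentTypeMapper.map('figure.png')  // => 'image/png'
//   ContentTypeMapper.map('figure.PNG')  // => 'application/octet-stream'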

View file

@ -0,0 +1,18 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const async = require('async')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const queue = async.queue(
(task, cb) => task(cb),
Settings.parallelSqlQueryLimit
)
queue.drain = () => logger.debug('all items have been processed')
module.exports = { queue }
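// Illustrative usage (hypothetical task, not part of this module): callers
// push node-style tasks, and at most Settings.parallelSqlQueryLimit of them
// run concurrently:
//
//   queue.push(cb => db.query('SELECT 1', cb)) // db.query is an assumption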

View file

@ -0,0 +1,113 @@
/* eslint-disable
handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let LockManager
const logger = require('logger-sharelatex')
const LockState = {} // locks for docker container operations, by container name
module.exports = LockManager = {
MAX_LOCK_HOLD_TIME: 15000, // how long we can keep a lock
MAX_LOCK_WAIT_TIME: 10000, // how long we wait for a lock
LOCK_TEST_INTERVAL: 1000, // retry time
tryLock(key, callback) {
let lockValue
if (callback == null) {
callback = function(err, gotLock) {}
}
const existingLock = LockState[key]
if (existingLock != null) {
// the lock is already taken, check how old it is
const lockAge = Date.now() - existingLock.created
if (lockAge < LockManager.MAX_LOCK_HOLD_TIME) {
return callback(null, false) // we didn't get the lock, bail out
} else {
logger.error(
{ key, lock: existingLock, age: lockAge },
'taking old lock by force'
)
}
}
// take the lock
LockState[key] = lockValue = { created: Date.now() }
return callback(null, true, lockValue)
},
getLock(key, callback) {
let attempt
if (callback == null) {
callback = function(error, lockValue) {}
}
const startTime = Date.now()
return (attempt = () =>
LockManager.tryLock(key, function(error, gotLock, lockValue) {
if (error != null) {
return callback(error)
}
if (gotLock) {
return callback(null, lockValue)
} else if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) {
const e = new Error('Lock timeout')
e.key = key
return callback(e)
} else {
return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL)
}
}))()
},
releaseLock(key, lockValue, callback) {
if (callback == null) {
callback = function(error) {}
}
const existingLock = LockState[key]
if (existingLock === lockValue) {
// lockValue is an object, so we can test by reference
delete LockState[key] // our lock, so we can free it
return callback()
} else if (existingLock != null) {
// lock exists but doesn't match ours
logger.error(
{ key, lock: existingLock },
'tried to release lock taken by force'
)
return callback()
} else {
logger.error(
{ key, lock: existingLock },
'tried to release lock that has gone'
)
return callback()
}
},
runWithLock(key, runner, callback) {
if (callback == null) {
callback = function(error) {}
}
return LockManager.getLock(key, function(error, lockValue) {
if (error != null) {
return callback(error)
}
return runner((error1, ...args) =>
LockManager.releaseLock(key, lockValue, function(error2) {
error = error1 || error2
if (error != null) {
return callback(error)
}
return callback(null, ...Array.from(args))
})
)
})
}
}
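// Illustrative usage (hypothetical work function, not part of this module):
//
//   LockManager.runWithLock('project-abc123-fingerprint', releaseLock => {
//     doContainerWork(err => releaseLock(err)) // doContainerWork is an assumption
//   }, finalCallback)
//
// If another caller holds the lock, getLock retries every LOCK_TEST_INTERVAL ms
// and gives up with a 'Lock timeout' error after MAX_LOCK_WAIT_TIME ms.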

View file

@ -0,0 +1,695 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let DockerRunner, oneHour
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Docker = require('dockerode')
const dockerode = new Docker()
const crypto = require('crypto')
const async = require('async')
const LockManager = require('./DockerLockManager')
const fs = require('fs')
const Path = require('path')
const _ = require('underscore')
logger.info('using docker runner')
const usingSiblingContainers = () =>
__guard__(
Settings != null ? Settings.path : undefined,
x => x.sandboxedCompilesHostDir
) != null
let containerMonitorTimeout
let containerMonitorInterval
module.exports = DockerRunner = {
ERR_NOT_DIRECTORY: new Error('not a directory'),
ERR_TERMINATED: new Error('terminated'),
ERR_EXITED: new Error('exited'),
ERR_TIMED_OUT: new Error('container timed out'),
run(project_id, command, directory, image, timeout, environment, callback) {
let name
if (callback == null) {
callback = function(error, output) {}
}
if (usingSiblingContainers()) {
const _newPath = Settings.path.sandboxedCompilesHostDir
logger.log(
{ path: _newPath },
'altering bind path for sibling containers'
)
// Server Pro, example:
// '/var/lib/sharelatex/data/compiles/<project-id>'
// ... becomes ...
// '/opt/sharelatex_data/data/compiles/<project-id>'
directory = Path.join(
Settings.path.sandboxedCompilesHostDir,
Path.basename(directory)
)
}
const volumes = {}
volumes[directory] = '/compile'
command = Array.from(command).map(arg =>
__guardMethod__(arg.toString(), 'replace', o =>
o.replace('$COMPILE_DIR', '/compile')
)
)
if (image == null) {
;({ image } = Settings.clsi.docker)
}
if (Settings.texliveImageNameOveride != null) {
const img = image.split('/')
image = `${Settings.texliveImageNameOveride}/${img[2]}`
}
const options = DockerRunner._getContainerOptions(
command,
image,
volumes,
timeout,
environment
)
const fingerprint = DockerRunner._fingerprintContainer(options)
options.name = name = `project-${project_id}-${fingerprint}`
// logOptions = _.clone(options)
// logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
logger.log({ project_id }, 'running docker container')
DockerRunner._runAndWaitForContainer(options, volumes, timeout, function(
error,
output
) {
if (error && error.statusCode === 500) {
logger.log(
{ err: error, project_id },
'error running container so destroying and retrying'
)
return DockerRunner.destroyContainer(name, null, true, function(error) {
if (error != null) {
return callback(error)
}
return DockerRunner._runAndWaitForContainer(
options,
volumes,
timeout,
callback
)
})
} else {
return callback(error, output)
}
})
    return name // pass back the container name to allow it to be killed
  },
kill(container_id, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ container_id }, 'sending kill signal to container')
const container = dockerode.getContainer(container_id)
return container.kill(function(error) {
if (
error != null &&
__guardMethod__(error != null ? error.message : undefined, 'match', o =>
o.match(/Cannot kill container .* is not running/)
)
) {
logger.warn(
{ err: error, container_id },
'container not running, continuing'
)
error = null
}
if (error != null) {
logger.error({ err: error, container_id }, 'error killing container')
return callback(error)
} else {
return callback()
}
})
},
_runAndWaitForContainer(options, volumes, timeout, _callback) {
if (_callback == null) {
_callback = function(error, output) {}
}
const callback = function(...args) {
_callback(...Array.from(args || []))
// Only call the callback once
return (_callback = function() {})
}
const { name } = options
let streamEnded = false
let containerReturned = false
let output = {}
const callbackIfFinished = function() {
if (streamEnded && containerReturned) {
return callback(null, output)
}
}
const attachStreamHandler = function(error, _output) {
if (error != null) {
return callback(error)
}
output = _output
streamEnded = true
return callbackIfFinished()
}
return DockerRunner.startContainer(
options,
volumes,
attachStreamHandler,
function(error, containerId) {
if (error != null) {
return callback(error)
}
return DockerRunner.waitForContainer(name, timeout, function(
error,
exitCode
) {
let err
if (error != null) {
return callback(error)
}
if (exitCode === 137) {
// exit status from kill -9
err = DockerRunner.ERR_TERMINATED
err.terminated = true
return callback(err)
}
if (exitCode === 1) {
// exit status from chktex
err = DockerRunner.ERR_EXITED
err.code = exitCode
return callback(err)
}
containerReturned = true
__guard__(
options != null ? options.HostConfig : undefined,
x => (x.SecurityOpt = null)
          ) // blank out SecurityOpt to keep the log line small
logger.log({ err, exitCode, options }, 'docker container has exited')
return callbackIfFinished()
})
}
)
},
_getContainerOptions(command, image, volumes, timeout, environment) {
let m, year
let key, value, hostVol, dockerVol
const timeoutInSeconds = timeout / 1000
const dockerVolumes = {}
for (hostVol in volumes) {
dockerVol = volumes[hostVol]
dockerVolumes[dockerVol] = {}
if (volumes[hostVol].slice(-3).indexOf(':r') === -1) {
volumes[hostVol] = `${dockerVol}:rw`
}
}
// merge settings and environment parameter
const env = {}
for (const src of [Settings.clsi.docker.env, environment || {}]) {
for (key in src) {
value = src[key]
env[key] = value
}
}
// set the path based on the image year
if ((m = image.match(/:([0-9]+)\.[0-9]+/))) {
year = m[1]
} else {
year = '2014'
}
env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`
const options = {
Cmd: command,
Image: image,
Volumes: dockerVolumes,
WorkingDir: '/compile',
NetworkDisabled: true,
      Memory: 1024 * 1024 * 1024 * 1024, // 1 TiB (1024^4 bytes)
User: Settings.clsi.docker.user,
Env: (() => {
const result = []
for (key in env) {
value = env[key]
result.push(`${key}=${value}`)
}
return result
})(), // convert the environment hash to an array
HostConfig: {
Binds: (() => {
const result1 = []
for (hostVol in volumes) {
dockerVol = volumes[hostVol]
result1.push(`${hostVol}:${dockerVol}`)
}
return result1
})(),
LogConfig: { Type: 'none', Config: {} },
Ulimits: [
{
Name: 'cpu',
Soft: timeoutInSeconds + 5,
Hard: timeoutInSeconds + 10
}
],
CapDrop: 'ALL',
SecurityOpt: ['no-new-privileges']
}
}
if (
(Settings.path != null ? Settings.path.synctexBinHostPath : undefined) !=
null
) {
options.HostConfig.Binds.push(
`${Settings.path.synctexBinHostPath}:/opt/synctex:ro`
)
}
if (Settings.clsi.docker.seccomp_profile != null) {
options.HostConfig.SecurityOpt.push(
`seccomp=${Settings.clsi.docker.seccomp_profile}`
)
}
if (Settings.clsi.docker.runtime) {
options.HostConfig.Runtime = Settings.clsi.docker.runtime
}
return options
},
_fingerprintContainer(containerOptions) {
// Yay, Hashing!
const json = JSON.stringify(containerOptions)
return crypto
.createHash('md5')
.update(json)
.digest('hex')
},
startContainer(options, volumes, attachStreamHandler, callback) {
return LockManager.runWithLock(
options.name,
releaseLock =>
// Check that volumes exist before starting the container.
// When a container is started with volume pointing to a
// non-existent directory then docker creates the directory but
// with root ownership.
DockerRunner._checkVolumes(options, volumes, function(err) {
if (err != null) {
return releaseLock(err)
}
return DockerRunner._startContainer(
options,
volumes,
attachStreamHandler,
releaseLock
)
}),
callback
)
},
// Check that volumes exist and are directories
_checkVolumes(options, volumes, callback) {
if (callback == null) {
callback = function(error, containerName) {}
}
if (usingSiblingContainers()) {
// Server Pro, with sibling-containers active, skip checks
return callback(null)
}
const checkVolume = (path, cb) =>
fs.stat(path, function(err, stats) {
if (err != null) {
return cb(err)
}
if (!(stats != null ? stats.isDirectory() : undefined)) {
return cb(DockerRunner.ERR_NOT_DIRECTORY)
}
return cb()
})
const jobs = []
for (const vol in volumes) {
;(vol => jobs.push(cb => checkVolume(vol, cb)))(vol)
}
return async.series(jobs, callback)
},
_startContainer(options, volumes, attachStreamHandler, callback) {
if (callback == null) {
callback = function(error, output) {}
}
callback = _.once(callback)
const { name } = options
logger.log({ container_name: name }, 'starting container')
const container = dockerode.getContainer(name)
const createAndStartContainer = () =>
dockerode.createContainer(options, function(error, container) {
if (error != null) {
return callback(error)
}
return startExistingContainer()
})
var startExistingContainer = () =>
DockerRunner.attachToContainer(
options.name,
attachStreamHandler,
function(error) {
if (error != null) {
return callback(error)
}
return container.start(function(error) {
if (
error != null &&
(error != null ? error.statusCode : undefined) !== 304
) {
// already running
return callback(error)
} else {
return callback()
}
})
}
)
return container.inspect(function(error, stats) {
if ((error != null ? error.statusCode : undefined) === 404) {
return createAndStartContainer()
} else if (error != null) {
logger.err(
{ container_name: name, error },
'unable to inspect container to start'
)
return callback(error)
} else {
return startExistingContainer()
}
})
},
attachToContainer(containerId, attachStreamHandler, attachStartCallback) {
const container = dockerode.getContainer(containerId)
return container.attach({ stdout: 1, stderr: 1, stream: 1 }, function(
error,
stream
) {
if (error != null) {
logger.error(
{ err: error, container_id: containerId },
'error attaching to container'
)
return attachStartCallback(error)
} else {
attachStartCallback()
}
logger.log({ container_id: containerId }, 'attached to container')
const MAX_OUTPUT = 1024 * 1024 // limit output to 1MB
const createStringOutputStream = function(name) {
return {
data: '',
overflowed: false,
write(data) {
if (this.overflowed) {
return
}
if (this.data.length < MAX_OUTPUT) {
return (this.data += data)
} else {
logger.error(
{
container_id: containerId,
length: this.data.length,
maxLen: MAX_OUTPUT
},
`${name} exceeds max size`
)
this.data += `(...truncated at ${MAX_OUTPUT} chars...)`
return (this.overflowed = true)
}
}
// kill container if too much output
// docker.containers.kill(containerId, () ->)
}
}
const stdout = createStringOutputStream('stdout')
const stderr = createStringOutputStream('stderr')
container.modem.demuxStream(stream, stdout, stderr)
stream.on('error', err =>
logger.error(
{ err, container_id: containerId },
'error reading from container stream'
)
)
return stream.on('end', () =>
attachStreamHandler(null, { stdout: stdout.data, stderr: stderr.data })
)
})
},
waitForContainer(containerId, timeout, _callback) {
if (_callback == null) {
_callback = function(error, exitCode) {}
}
const callback = function(...args) {
_callback(...Array.from(args || []))
// Only call the callback once
return (_callback = function() {})
}
const container = dockerode.getContainer(containerId)
let timedOut = false
const timeoutId = setTimeout(function() {
timedOut = true
logger.log(
{ container_id: containerId },
'timeout reached, killing container'
)
return container.kill(function() {})
}, timeout)
logger.log({ container_id: containerId }, 'waiting for docker container')
return container.wait(function(error, res) {
if (error != null) {
clearTimeout(timeoutId)
logger.error(
{ err: error, container_id: containerId },
'error waiting for container'
)
return callback(error)
}
if (timedOut) {
logger.log({ containerId }, 'docker container timed out')
error = DockerRunner.ERR_TIMED_OUT
error.timedout = true
return callback(error)
} else {
clearTimeout(timeoutId)
logger.log(
{ container_id: containerId, exitCode: res.StatusCode },
'docker container returned'
)
return callback(null, res.StatusCode)
}
})
},
destroyContainer(containerName, containerId, shouldForce, callback) {
// We want the containerName for the lock and, ideally, the
// containerId to delete. There is a bug in the docker.io module
// where if you delete by name and there is an error, it throws an
// async exception, but if you delete by id it just does a normal
// error callback. We fall back to deleting by name if no id is
// supplied.
if (callback == null) {
callback = function(error) {}
}
return LockManager.runWithLock(
containerName,
releaseLock =>
DockerRunner._destroyContainer(
containerId || containerName,
shouldForce,
releaseLock
),
callback
)
},
_destroyContainer(containerId, shouldForce, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ container_id: containerId }, 'destroying docker container')
const container = dockerode.getContainer(containerId)
return container.remove({ force: shouldForce === true }, function(error) {
if (
error != null &&
(error != null ? error.statusCode : undefined) === 404
) {
logger.warn(
{ err: error, container_id: containerId },
'container not found, continuing'
)
error = null
}
if (error != null) {
logger.error(
{ err: error, container_id: containerId },
'error destroying container'
)
} else {
logger.log({ container_id: containerId }, 'destroyed container')
}
return callback(error)
})
},
// handle expiry of docker containers
MAX_CONTAINER_AGE:
Settings.clsi.docker.maxContainerAge || (oneHour = 60 * 60 * 1000),
examineOldContainer(container, callback) {
if (callback == null) {
callback = function(error, name, id, ttl) {}
}
const name =
container.Name ||
(container.Names != null ? container.Names[0] : undefined)
const created = container.Created * 1000 // creation time is returned in seconds
const now = Date.now()
const age = now - created
const maxAge = DockerRunner.MAX_CONTAINER_AGE
const ttl = maxAge - age
logger.log(
{ containerName: name, created, now, age, maxAge, ttl },
'checking whether to destroy container'
)
return callback(null, name, container.Id, ttl)
},
destroyOldContainers(callback) {
if (callback == null) {
callback = function(error) {}
}
return dockerode.listContainers({ all: true }, function(error, containers) {
if (error != null) {
return callback(error)
}
const jobs = []
for (const container of Array.from(containers || [])) {
;(container =>
DockerRunner.examineOldContainer(container, function(
err,
name,
id,
ttl
) {
if (name.slice(0, 9) === '/project-' && ttl <= 0) {
return jobs.push(cb =>
DockerRunner.destroyContainer(name, id, false, () => cb())
)
}
}))(container)
}
// Ignore errors because some containers get stuck but
// will be destroyed next time
return async.series(jobs, callback)
})
},
startContainerMonitor() {
logger.log(
{ maxAge: DockerRunner.MAX_CONTAINER_AGE },
'starting container expiry'
)
// guarantee only one monitor is running
DockerRunner.stopContainerMonitor()
// randomise the start time
const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000)
containerMonitorTimeout = setTimeout(() => {
containerMonitorInterval = setInterval(
() => DockerRunner.destroyOldContainers(),
(oneHour = 60 * 60 * 1000)
)
}, randomDelay)
},
stopContainerMonitor() {
if (containerMonitorTimeout) {
clearTimeout(containerMonitorTimeout)
containerMonitorTimeout = undefined
}
    if (containerMonitorInterval) {
      clearInterval(containerMonitorInterval)
      containerMonitorInterval = undefined
}
}
}
DockerRunner.startContainerMonitor()
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
function __guardMethod__(obj, methodName, transform) {
if (
typeof obj !== 'undefined' &&
obj !== null &&
typeof obj[methodName] === 'function'
) {
return transform(obj, methodName)
} else {
return undefined
}
}
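// Illustrative (not part of this module): _getContainerOptions derives the
// TeX Live PATH from the image tag year, e.g. for
// 'quay.io/sharelatex/texlive-full:2017.1' the match /:([0-9]+)\.[0-9]+/
// captures '2017', so PATH ends in /usr/local/texlive/2017/bin/x86_64-linux/;
// images without a numeric year tag fall back to '2014'.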

View file

@ -0,0 +1,57 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-useless-escape,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let DraftModeManager
const fs = require('fs')
const logger = require('logger-sharelatex')
module.exports = DraftModeManager = {
injectDraftMode(filename, callback) {
if (callback == null) {
callback = function(error) {}
}
return fs.readFile(filename, 'utf8', function(error, content) {
if (error != null) {
return callback(error)
}
// avoid adding draft mode more than once
if (
(content != null
? content.indexOf('\\documentclass[draft')
: undefined) >= 0
) {
return callback()
}
const modified_content = DraftModeManager._injectDraftOption(content)
logger.log(
{
content: content.slice(0, 1024), // \documentclass is normally v near the top
modified_content: modified_content.slice(0, 1024),
filename
},
'injected draft class'
)
return fs.writeFile(filename, modified_content, callback)
})
},
_injectDraftOption(content) {
return (
content
// With existing options (must be first, otherwise both are applied)
.replace(/\\documentclass\[/g, '\\documentclass[draft,')
// Without existing options
.replace(/\\documentclass\{/g, '\\documentclass[draft]{')
)
}
}
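// Illustrative (not part of this module): the two regexes in
// _injectDraftOption transform, respectively:
//
//   \documentclass[12pt]{article}  ->  \documentclass[draft,12pt]{article}
//   \documentclass{article}        ->  \documentclass[draft]{article}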

View file

@ -0,0 +1,36 @@
/* eslint-disable
no-proto,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let Errors
var NotFoundError = function(message) {
const error = new Error(message)
error.name = 'NotFoundError'
error.__proto__ = NotFoundError.prototype
return error
}
NotFoundError.prototype.__proto__ = Error.prototype
var FilesOutOfSyncError = function(message) {
const error = new Error(message)
error.name = 'FilesOutOfSyncError'
error.__proto__ = FilesOutOfSyncError.prototype
return error
}
FilesOutOfSyncError.prototype.__proto__ = Error.prototype
var AlreadyCompilingError = function(message) {
const error = new Error(message)
error.name = 'AlreadyCompilingError'
error.__proto__ = AlreadyCompilingError.prototype
return error
}
AlreadyCompilingError.prototype.__proto__ = Error.prototype
module.exports = Errors = {
NotFoundError,
FilesOutOfSyncError,
AlreadyCompilingError
}
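// Illustrative (not part of this module): the __proto__ chaining keeps
// instanceof working through the custom constructors:
//
//   const err = new Errors.NotFoundError('no output directory')
//   err instanceof Errors.NotFoundError // => true
//   err instanceof Error                // => true
//   err.name                            // => 'NotFoundError'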

View file

@ -0,0 +1,204 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let LatexRunner
const Path = require('path')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const CommandRunner = require('./CommandRunner')
const ProcessTable = {} // table of currently running jobs (pids or docker container names)
module.exports = LatexRunner = {
runLatex(project_id, options, callback) {
let command
if (callback == null) {
callback = function(error) {}
}
let {
directory,
mainFile,
compiler,
timeout,
image,
environment,
flags
} = options
if (!compiler) {
compiler = 'pdflatex'
}
if (!timeout) {
timeout = 60000
} // milliseconds
logger.log(
{ directory, compiler, timeout, mainFile, environment, flags },
'starting compile'
)
// We want to run latexmk on the tex file which we will automatically
// generate from the Rtex/Rmd/md file.
mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, '.tex')
if (compiler === 'pdflatex') {
command = LatexRunner._pdflatexCommand(mainFile, flags)
} else if (compiler === 'latex') {
command = LatexRunner._latexCommand(mainFile, flags)
} else if (compiler === 'xelatex') {
command = LatexRunner._xelatexCommand(mainFile, flags)
} else if (compiler === 'lualatex') {
command = LatexRunner._lualatexCommand(mainFile, flags)
} else {
return callback(new Error(`unknown compiler: ${compiler}`))
}
if (Settings.clsi != null ? Settings.clsi.strace : undefined) {
command = ['strace', '-o', 'strace', '-ff'].concat(command)
}
const id = `${project_id}` // record running project under this id
return (ProcessTable[id] = CommandRunner.run(
project_id,
command,
directory,
image,
timeout,
environment,
function(error, output) {
delete ProcessTable[id]
if (error != null) {
return callback(error)
}
const runs =
__guard__(
__guard__(output != null ? output.stderr : undefined, x1 =>
x1.match(/^Run number \d+ of .*latex/gm)
),
x => x.length
) || 0
const failed =
__guard__(output != null ? output.stdout : undefined, x2 =>
x2.match(/^Latexmk: Errors/m)
) != null
? 1
: 0
// counters from latexmk output
const stats = {}
stats['latexmk-errors'] = failed
stats['latex-runs'] = runs
stats['latex-runs-with-errors'] = failed ? runs : 0
stats[`latex-runs-${runs}`] = 1
stats[`latex-runs-with-errors-${runs}`] = failed ? 1 : 0
// timing information from /usr/bin/time
const timings = {}
const stderr = output != null ? output.stderr : undefined
timings['cpu-percent'] =
__guard__(
stderr != null
? stderr.match(/Percent of CPU this job got: (\d+)/m)
: undefined,
x3 => x3[1]
) || 0
timings['cpu-time'] =
__guard__(
stderr != null
? stderr.match(/User time.*: (\d+.\d+)/m)
: undefined,
x4 => x4[1]
) || 0
timings['sys-time'] =
__guard__(
stderr != null
? stderr.match(/System time.*: (\d+.\d+)/m)
: undefined,
x5 => x5[1]
) || 0
return callback(error, output, stats, timings)
}
))
},
killLatex(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
const id = `${project_id}`
logger.log({ id }, 'killing running compile')
if (ProcessTable[id] == null) {
logger.warn({ id }, 'no such project to kill')
return callback(null)
} else {
return CommandRunner.kill(ProcessTable[id], callback)
}
},
_latexmkBaseCommand(flags) {
let args = [
'latexmk',
'-cd',
'-f',
'-jobname=output',
'-auxdir=$COMPILE_DIR',
'-outdir=$COMPILE_DIR',
'-synctex=1',
'-interaction=batchmode'
]
if (flags) {
args = args.concat(flags)
}
return (
__guard__(
Settings != null ? Settings.clsi : undefined,
x => x.latexmkCommandPrefix
) || []
).concat(args)
},
_pdflatexCommand(mainFile, flags) {
return LatexRunner._latexmkBaseCommand(flags).concat([
'-pdf',
Path.join('$COMPILE_DIR', mainFile)
])
},
_latexCommand(mainFile, flags) {
return LatexRunner._latexmkBaseCommand(flags).concat([
'-pdfdvi',
Path.join('$COMPILE_DIR', mainFile)
])
},
_xelatexCommand(mainFile, flags) {
return LatexRunner._latexmkBaseCommand(flags).concat([
'-xelatex',
Path.join('$COMPILE_DIR', mainFile)
])
},
_lualatexCommand(mainFile, flags) {
return LatexRunner._latexmkBaseCommand(flags).concat([
'-lualatex',
Path.join('$COMPILE_DIR', mainFile)
])
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
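// Illustrative (not part of this module): with no flags and no
// latexmkCommandPrefix configured, _pdflatexCommand('main.tex') builds:
//
//   ['latexmk', '-cd', '-f', '-jobname=output', '-auxdir=$COMPILE_DIR',
//    '-outdir=$COMPILE_DIR', '-synctex=1', '-interaction=batchmode',
//    '-pdf', '$COMPILE_DIR/main.tex']
//
// The $COMPILE_DIR placeholder is substituted by the CommandRunner.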

View file

@ -0,0 +1,91 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let CommandRunner
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
logger.info('using standard command runner')
module.exports = CommandRunner = {
run(project_id, command, directory, image, timeout, environment, callback) {
let key, value
if (callback == null) {
callback = function(error) {}
}
command = Array.from(command).map(arg =>
arg.toString().replace('$COMPILE_DIR', directory)
)
logger.log({ project_id, command, directory }, 'running command')
logger.warn('timeouts and sandboxing are not enabled with CommandRunner')
// merge environment settings
const env = {}
for (key in process.env) {
value = process.env[key]
env[key] = value
}
for (key in environment) {
value = environment[key]
env[key] = value
}
    // run command as a detached process so it has its own process group (which can be killed if needed)
    const proc = spawn(command[0], command.slice(1), {
      cwd: directory,
      env,
      detached: true
    })
let stdout = ''
proc.stdout.on('data', data => (stdout += data))
proc.on('error', function(err) {
logger.err(
{ err, project_id, command, directory },
'error running command'
)
return callback(err)
})
proc.on('close', function(code, signal) {
let err
logger.info({ code, signal, project_id }, 'command exited')
if (signal === 'SIGTERM') {
// signal from kill method below
err = new Error('terminated')
err.terminated = true
return callback(err)
} else if (code === 1) {
// exit status from chktex
err = new Error('exited')
err.code = code
return callback(err)
} else {
return callback(null, { stdout: stdout })
}
})
    return proc.pid // return process id to allow job to be killed if necessary
  },
kill(pid, callback) {
if (callback == null) {
callback = function(error) {}
}
try {
process.kill(-pid) // kill all processes in group
} catch (err) {
return callback(err)
}
return callback()
}
}
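// Illustrative (not part of this module): because the child runs in its own
// process group, kill() can signal the whole group at once:
//
//   const pid = CommandRunner.run(project_id, command, dir, null, 0, {}, cb)
//   CommandRunner.kill(pid, err => {}) // process.kill(-pid) hits latexmk and its children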

View file

@ -0,0 +1,72 @@
/* eslint-disable
handle-callback-err,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let LockManager
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Lockfile = require('lockfile') // from https://github.com/npm/lockfile
const Errors = require('./Errors')
const fs = require('fs')
const Path = require('path')
module.exports = LockManager = {
  LOCK_TEST_INTERVAL: 1000, // 1s between each test of the lock
  MAX_LOCK_WAIT_TIME: 15000, // 15s maximum time to spend trying to get the lock
LOCK_STALE: 5 * 60 * 1000, // 5 mins time until lock auto expires
runWithLock(path, runner, callback) {
if (callback == null) {
callback = function(error) {}
}
const lockOpts = {
wait: this.MAX_LOCK_WAIT_TIME,
pollPeriod: this.LOCK_TEST_INTERVAL,
stale: this.LOCK_STALE
}
return Lockfile.lock(path, lockOpts, function(error) {
if ((error != null ? error.code : undefined) === 'EEXIST') {
return callback(new Errors.AlreadyCompilingError('compile in progress'))
} else if (error != null) {
return fs.lstat(path, (statLockErr, statLock) =>
fs.lstat(Path.dirname(path), (statDirErr, statDir) =>
fs.readdir(Path.dirname(path), function(readdirErr, readdirDir) {
logger.err(
{
error,
path,
statLock,
statLockErr,
statDir,
statDirErr,
readdirErr,
readdirDir
},
'unable to get lock'
)
return callback(error)
})
)
)
} else {
return runner((error1, ...args) =>
Lockfile.unlock(path, function(error2) {
error = error1 || error2
if (error != null) {
return callback(error)
}
return callback(null, ...Array.from(args))
})
)
}
})
}
}
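// Illustrative usage (hypothetical compile function, not part of this module):
//
//   LockManager.runWithLock('/compiles/<project>/.project-lock', releaseLock => {
//     compile(err => releaseLock(err)) // compile is an assumption
//   }, callback)
//
// Lockfile polls every LOCK_TEST_INTERVAL ms for up to MAX_LOCK_WAIT_TIME ms;
// if the lock is still held it reports EEXIST, which is surfaced as an
// AlreadyCompilingError.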

View file

@ -0,0 +1,3 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
module.exports = require('metrics-sharelatex')

View file

@ -0,0 +1,399 @@
/* eslint-disable
handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS104: Avoid inline assignments
* DS204: Change includes calls to have a more natural evaluation order
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let OutputCacheManager
const async = require('async')
const fs = require('fs')
const fse = require('fs-extra')
const Path = require('path')
const logger = require('logger-sharelatex')
const _ = require('underscore')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')
const OutputFileOptimiser = require('./OutputFileOptimiser')
module.exports = OutputCacheManager = {
CACHE_SUBDIR: '.cache/clsi',
ARCHIVE_SUBDIR: '.archive/clsi',
  // build id is HEXDATE-HEXRANDOM from Date.now() and random bytes
  // for backwards compatibility, make the random bytes part optional
BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/,
CACHE_LIMIT: 2, // maximum number of cache directories
CACHE_AGE: 60 * 60 * 1000, // up to one hour old
path(buildId, file) {
// used by static server, given build id return '.cache/clsi/buildId'
if (buildId.match(OutputCacheManager.BUILD_REGEX)) {
return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file)
} else {
// for invalid build id, return top level
return file
}
},
generateBuildId(callback) {
// generate a secure build id from Date.now() and 8 random bytes in hex
if (callback == null) {
callback = function(error, buildId) {}
}
return crypto.randomBytes(8, function(err, buf) {
if (err != null) {
return callback(err)
}
const random = buf.toString('hex')
const date = Date.now().toString(16)
return callback(err, `${date}-${random}`)
})
},
saveOutputFiles(outputFiles, compileDir, callback) {
if (callback == null) {
callback = function(error) {}
}
return OutputCacheManager.generateBuildId(function(err, buildId) {
if (err != null) {
return callback(err)
}
return OutputCacheManager.saveOutputFilesInBuildDir(
outputFiles,
compileDir,
buildId,
callback
)
})
},
saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback) {
// make a compileDir/CACHE_SUBDIR/build_id directory and
// copy all the output files into it
if (callback == null) {
callback = function(error) {}
}
const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
// Put the files into a new cache subdirectory
const cacheDir = Path.join(
compileDir,
OutputCacheManager.CACHE_SUBDIR,
buildId
)
// Is it a per-user compile? check if compile directory is PROJECTID-USERID
const perUser = Path.basename(compileDir).match(
/^[0-9a-f]{24}-[0-9a-f]{24}$/
)
// Archive logs in background
if (
(Settings.clsi != null ? Settings.clsi.archive_logs : undefined) ||
(Settings.clsi != null ? Settings.clsi.strace : undefined)
) {
OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(
err
) {
if (err != null) {
          return logger.warn({ err }, 'error archiving log files')
}
})
}
// make the new cache directory
return fse.ensureDir(cacheDir, function(err) {
if (err != null) {
logger.error(
{ err, directory: cacheDir },
'error creating cache directory'
)
return callback(err, outputFiles)
} else {
// copy all the output files into the new cache directory
const results = []
return async.mapSeries(
outputFiles,
function(file, cb) {
// don't send dot files as output, express doesn't serve them
if (OutputCacheManager._fileIsHidden(file.path)) {
logger.debug(
{ compileDir, path: file.path },
'ignoring dotfile in output'
)
return cb()
}
// copy other files into cache directory if valid
const newFile = _.clone(file)
const [src, dst] = Array.from([
Path.join(compileDir, file.path),
Path.join(cacheDir, file.path)
])
return OutputCacheManager._checkFileIsSafe(src, function(
err,
isSafe
) {
if (err != null) {
return cb(err)
}
if (!isSafe) {
return cb()
}
return OutputCacheManager._checkIfShouldCopy(src, function(
err,
shouldCopy
) {
if (err != null) {
return cb(err)
}
if (!shouldCopy) {
return cb()
}
return OutputCacheManager._copyFile(src, dst, function(err) {
if (err != null) {
return cb(err)
}
newFile.build = buildId // attach a build id if we cached the file
results.push(newFile)
return cb()
})
})
})
},
function(err) {
if (err != null) {
// pass back the original files if we encountered *any* error
callback(err, outputFiles)
// clean up the directory we just created
return fse.remove(cacheDir, function(err) {
if (err != null) {
return logger.error(
{ err, dir: cacheDir },
'error removing cache dir after failure'
)
}
})
} else {
// pass back the list of new files in the cache
callback(err, results)
// let file expiry run in the background, expire all previous files if per-user
return OutputCacheManager.expireOutputFiles(cacheRoot, {
keep: buildId,
limit: perUser ? 1 : null
})
}
}
)
}
})
},
archiveLogs(outputFiles, compileDir, buildId, callback) {
if (callback == null) {
callback = function(error) {}
}
const archiveDir = Path.join(
compileDir,
OutputCacheManager.ARCHIVE_SUBDIR,
buildId
)
logger.log({ dir: archiveDir }, 'archiving log files for project')
return fse.ensureDir(archiveDir, function(err) {
if (err != null) {
return callback(err)
}
return async.mapSeries(
outputFiles,
function(file, cb) {
const [src, dst] = Array.from([
Path.join(compileDir, file.path),
Path.join(archiveDir, file.path)
])
return OutputCacheManager._checkFileIsSafe(src, function(
err,
isSafe
) {
if (err != null) {
return cb(err)
}
if (!isSafe) {
return cb()
}
return OutputCacheManager._checkIfShouldArchive(src, function(
err,
shouldArchive
) {
if (err != null) {
return cb(err)
}
if (!shouldArchive) {
return cb()
}
return OutputCacheManager._copyFile(src, dst, cb)
})
})
},
callback
)
})
},
expireOutputFiles(cacheRoot, options, callback) {
// look in compileDir for build dirs and delete if > N or age of mod time > T
if (callback == null) {
callback = function(error) {}
}
return fs.readdir(cacheRoot, function(err, results) {
if (err != null) {
        if (err.code === 'ENOENT') {
          return callback(null) // cache directory doesn't exist yet
        }
logger.error({ err, project_id: cacheRoot }, 'error clearing cache')
return callback(err)
}
const dirs = results.sort().reverse()
const currentTime = Date.now()
const isExpired = function(dir, index) {
if ((options != null ? options.keep : undefined) === dir) {
return false
}
// remove any directories over the requested (non-null) limit
if (
(options != null ? options.limit : undefined) != null &&
index > options.limit
) {
return true
}
// remove any directories over the hard limit
if (index > OutputCacheManager.CACHE_LIMIT) {
return true
}
// we can get the build time from the first part of the directory name DDDD-RRRR
// DDDD is date and RRRR is random bytes
const dirTime = parseInt(
__guard__(dir.split('-'), x => x[0]),
16
)
const age = currentTime - dirTime
return age > OutputCacheManager.CACHE_AGE
}
const toRemove = _.filter(dirs, isExpired)
const removeDir = (dir, cb) =>
fse.remove(Path.join(cacheRoot, dir), function(err, result) {
logger.log({ cache: cacheRoot, dir }, 'removed expired cache dir')
if (err != null) {
logger.error({ err, dir }, 'cache remove error')
}
return cb(err, result)
})
return async.eachSeries(
toRemove,
(dir, cb) => removeDir(dir, cb),
callback
)
})
},
_fileIsHidden(path) {
return (path != null ? path.match(/^\.|\/\./) : undefined) != null
},
_checkFileIsSafe(src, callback) {
// check if we have a valid file to copy into the cache
if (callback == null) {
callback = function(error, isSafe) {}
}
return fs.stat(src, function(err, stats) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
logger.warn(
{ err, file: src },
'file has disappeared before copying to build cache'
)
return callback(err, false)
} else if (err != null) {
// some other problem reading the file
logger.error({ err, file: src }, 'stat error for file in cache')
return callback(err, false)
} else if (!stats.isFile()) {
// other filetype - reject it
logger.warn(
{ src, stat: stats },
'nonfile output - refusing to copy to cache'
)
return callback(null, false)
} else {
// it's a plain file, ok to copy
return callback(null, true)
}
})
},
_copyFile(src, dst, callback) {
// copy output file into the cache
return fse.copy(src, dst, function(err) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
logger.warn(
{ err, file: src },
'file has disappeared when copying to build cache'
)
return callback(err, false)
} else if (err != null) {
logger.error({ err, src, dst }, 'copy error for file in cache')
return callback(err)
} else {
if (
Settings.clsi != null ? Settings.clsi.optimiseInDocker : undefined
) {
// don't run any optimisations on the pdf when they are done
// in the docker container
return callback()
} else {
// call the optimiser for the file too
return OutputFileOptimiser.optimiseFile(src, dst, callback)
}
}
})
},
_checkIfShouldCopy(src, callback) {
if (callback == null) {
callback = function(err, shouldCopy) {}
}
return callback(null, !Path.basename(src).match(/^strace/))
},
  _checkIfShouldArchive(src, callback) {
    if (callback == null) {
      callback = function(err, shouldArchive) {}
    }
    if (Path.basename(src).match(/^strace/)) {
      return callback(null, true)
    }
    if (
      (Settings.clsi != null ? Settings.clsi.archive_logs : undefined) &&
      ['output.log', 'output.blg'].includes(Path.basename(src))
    ) {
return callback(null, true)
}
return callback(null, false)
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

View file

@ -0,0 +1,115 @@
/* eslint-disable
handle-callback-err,
no-return-assign,
no-unused-vars,
no-useless-escape,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let OutputFileFinder
const async = require('async')
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
module.exports = OutputFileFinder = {
findOutputFiles(resources, directory, callback) {
if (callback == null) {
callback = function(error, outputFiles, allFiles) {}
}
const incomingResources = {}
for (const resource of Array.from(resources)) {
incomingResources[resource.path] = true
}
return OutputFileFinder._getAllFiles(directory, function(error, allFiles) {
if (allFiles == null) {
allFiles = []
}
if (error != null) {
logger.err({ err: error }, 'error finding all output files')
return callback(error)
}
const outputFiles = []
for (const file of Array.from(allFiles)) {
if (!incomingResources[file]) {
outputFiles.push({
path: file,
type: __guard__(file.match(/\.([^\.]+)$/), x => x[1])
})
}
}
return callback(null, outputFiles, allFiles)
})
},
_getAllFiles(directory, _callback) {
if (_callback == null) {
_callback = function(error, fileList) {}
}
const callback = function(error, fileList) {
_callback(error, fileList)
return (_callback = function() {})
}
// don't include clsi-specific files/directories in the output list
const EXCLUDE_DIRS = [
'-name',
'.cache',
'-o',
'-name',
'.archive',
'-o',
'-name',
'.project-*'
]
const args = [
directory,
'(',
...Array.from(EXCLUDE_DIRS),
')',
'-prune',
'-o',
'-type',
'f',
'-print'
]
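    // the args above assemble a find invocation equivalent to (sketch):
    //   find <directory> ( -name .cache -o -name .archive -o -name .project-* ) -prune -o -type f -print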
logger.log({ args }, 'running find command')
const proc = spawn('find', args)
let stdout = ''
proc.stdout.on('data', chunk => (stdout += chunk.toString()))
proc.on('error', callback)
return proc.on('close', function(code) {
if (code !== 0) {
logger.warn(
{ directory, code },
"find returned error, directory likely doesn't exist"
)
return callback(null, [])
}
let fileList = stdout.trim().split('\n')
      fileList = fileList.map(function(file) {
        // strip the leading directory
        return Path.relative(directory, file)
      })
return callback(null, fileList)
})
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

View file

@ -0,0 +1,104 @@
/* eslint-disable
handle-callback-err,
no-return-assign,
no-undef,
no-unused-vars,
node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let OutputFileOptimiser
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const _ = require('underscore')
module.exports = OutputFileOptimiser = {
optimiseFile(src, dst, callback) {
// check output file (src) and see if we can optimise it, storing
// the result in the build directory (dst)
if (callback == null) {
callback = function(error) {}
}
if (src.match(/\/output\.pdf$/)) {
return OutputFileOptimiser.checkIfPDFIsOptimised(src, function(
err,
isOptimised
) {
if (err != null || isOptimised) {
return callback(null)
}
return OutputFileOptimiser.optimisePDF(src, dst, callback)
})
} else {
return callback(null)
}
},
checkIfPDFIsOptimised(file, callback) {
const SIZE = 16 * 1024 // check the header of the pdf
    const result = Buffer.alloc(SIZE) // zero-filled, prevents leakage of uninitialised memory
return fs.open(file, 'r', function(err, fd) {
if (err != null) {
return callback(err)
}
return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) =>
fs.close(fd, function(errClose) {
if (errRead != null) {
return callback(errRead)
}
          if (errClose != null) {
            return callback(errClose)
          }
const isOptimised =
buffer.toString('ascii').indexOf('/Linearized 1') >= 0
return callback(null, isOptimised)
})
)
})
},
optimisePDF(src, dst, callback) {
if (callback == null) {
callback = function(error) {}
}
const tmpOutput = dst + '.opt'
const args = ['--linearize', src, tmpOutput]
logger.log({ args }, 'running qpdf command')
const timer = new Metrics.Timer('qpdf')
const proc = spawn('qpdf', args)
let stdout = ''
proc.stdout.on('data', chunk => (stdout += chunk.toString()))
callback = _.once(callback) // avoid double call back for error and close event
proc.on('error', function(err) {
logger.warn({ err, args }, 'qpdf failed')
return callback(null)
}) // ignore the error
return proc.on('close', function(code) {
timer.done()
if (code !== 0) {
logger.warn({ code, args }, 'qpdf returned error')
return callback(null) // ignore the error
}
return fs.rename(tmpOutput, dst, function(err) {
if (err != null) {
logger.warn(
{ tmpOutput, dst },
'failed to rename output of qpdf command'
)
}
return callback(null)
})
})
  }
}

View file

@ -0,0 +1,163 @@
/* eslint-disable
camelcase,
handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ProjectPersistenceManager
const UrlCache = require('./UrlCache')
const CompileManager = require('./CompileManager')
const db = require('./db')
const dbQueue = require('./DbQueue')
const async = require('async')
const logger = require('logger-sharelatex')
const oneDay = 24 * 60 * 60 * 1000
const Settings = require('settings-sharelatex')
module.exports = ProjectPersistenceManager = {
EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5,
markProjectAsJustAccessed(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
const job = cb =>
db.Project.findOrCreate({ where: { project_id } })
.spread((project, created) =>
project
.update({ lastAccessed: new Date() })
.then(() => cb())
.error(cb)
)
.error(cb)
return dbQueue.queue.push(job, callback)
},
clearExpiredProjects(callback) {
if (callback == null) {
callback = function(error) {}
}
return ProjectPersistenceManager._findExpiredProjectIds(function(
error,
project_ids
) {
if (error != null) {
return callback(error)
}
logger.log({ project_ids }, 'clearing expired projects')
const jobs = Array.from(project_ids || []).map(project_id =>
(project_id => callback =>
ProjectPersistenceManager.clearProjectFromCache(project_id, function(
err
) {
if (err != null) {
logger.error({ err, project_id }, 'error clearing project')
}
return callback()
}))(project_id)
)
return async.series(jobs, function(error) {
if (error != null) {
return callback(error)
}
return CompileManager.clearExpiredProjects(
ProjectPersistenceManager.EXPIRY_TIMEOUT,
error => callback()
)
})
})
}, // ignore any errors from deleting directories
clearProject(project_id, user_id, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id, user_id }, 'clearing project for user')
return CompileManager.clearProject(project_id, user_id, function(error) {
if (error != null) {
return callback(error)
}
return ProjectPersistenceManager.clearProjectFromCache(
project_id,
function(error) {
if (error != null) {
return callback(error)
}
return callback()
}
)
})
},
clearProjectFromCache(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id }, 'clearing project from cache')
return UrlCache.clearProject(project_id, function(error) {
if (error != null) {
logger.err({ error, project_id }, 'error clearing project from cache')
return callback(error)
}
return ProjectPersistenceManager._clearProjectFromDatabase(
project_id,
function(error) {
if (error != null) {
logger.err(
{ error, project_id },
'error clearing project from database'
)
}
return callback(error)
}
)
})
},
_clearProjectFromDatabase(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id }, 'clearing project from database')
const job = cb =>
db.Project.destroy({ where: { project_id } })
.then(() => cb())
.error(cb)
return dbQueue.queue.push(job, callback)
},
_findExpiredProjectIds(callback) {
if (callback == null) {
callback = function(error, project_ids) {}
}
const job = function(cb) {
const keepProjectsFrom = new Date(
Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT
)
const q = {}
q[db.op.lt] = keepProjectsFrom
return db.Project.findAll({ where: { lastAccessed: q } })
.then(projects =>
cb(
null,
projects.map(project => project.project_id)
)
)
.error(cb)
}
return dbQueue.queue.push(job, callback)
}
}
logger.log(
{ EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT },
  'project asset expiry timeout'
)

View file

@ -0,0 +1,217 @@
/* eslint-disable
handle-callback-err,
no-control-regex,
no-throw-literal,
no-unused-vars,
no-useless-escape,
standard/no-callback-literal,
valid-typeof,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let RequestParser
const settings = require('settings-sharelatex')
module.exports = RequestParser = {
VALID_COMPILERS: ['pdflatex', 'latex', 'xelatex', 'lualatex'],
MAX_TIMEOUT: 600,
parse(body, callback) {
let resource
if (callback == null) {
callback = function(error, data) {}
}
const response = {}
if (body.compile == null) {
return callback('top level object should have a compile attribute')
}
const { compile } = body
if (!compile.options) {
compile.options = {}
}
try {
response.compiler = this._parseAttribute(
'compiler',
compile.options.compiler,
{
validValues: this.VALID_COMPILERS,
default: 'pdflatex',
type: 'string'
}
)
response.timeout = this._parseAttribute(
'timeout',
compile.options.timeout,
{
default: RequestParser.MAX_TIMEOUT,
type: 'number'
}
)
response.imageName = this._parseAttribute(
'imageName',
compile.options.imageName,
{ type: 'string' }
)
response.draft = this._parseAttribute('draft', compile.options.draft, {
default: false,
type: 'boolean'
})
response.check = this._parseAttribute('check', compile.options.check, {
type: 'string'
})
response.flags = this._parseAttribute('flags', compile.options.flags, {
default: [],
type: 'object'
})
// The syncType specifies whether the request contains all
// resources (full) or only those resources to be updated
// in-place (incremental).
response.syncType = this._parseAttribute(
'syncType',
compile.options.syncType,
{
validValues: ['full', 'incremental'],
type: 'string'
}
)
// The syncState is an identifier passed in with the request
// which has the property that it changes when any resource is
// added, deleted, moved or renamed.
//
// on syncType full the syncState identifier is passed in and
// stored
//
// on syncType incremental the syncState identifier must match
// the stored value
response.syncState = this._parseAttribute(
'syncState',
compile.options.syncState,
{ type: 'string' }
)
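      // a minimal (hypothetical) request body illustrating the sync options:
      //   { compile: { options: { syncType: 'incremental', syncState: 'a1b2c3' },
      //                resources: [{ path: 'main.tex', content: '...' }] } }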
if (response.timeout > RequestParser.MAX_TIMEOUT) {
response.timeout = RequestParser.MAX_TIMEOUT
}
response.timeout = response.timeout * 1000 // milliseconds
response.resources = (() => {
const result = []
for (resource of Array.from(compile.resources || [])) {
result.push(this._parseResource(resource))
}
return result
})()
const rootResourcePath = this._parseAttribute(
'rootResourcePath',
compile.rootResourcePath,
{
default: 'main.tex',
type: 'string'
}
)
const originalRootResourcePath = rootResourcePath
const sanitizedRootResourcePath = RequestParser._sanitizePath(
rootResourcePath
)
response.rootResourcePath = RequestParser._checkPath(
sanitizedRootResourcePath
)
for (resource of Array.from(response.resources)) {
if (resource.path === originalRootResourcePath) {
resource.path = sanitizedRootResourcePath
}
}
    } catch (error) {
      return callback(error)
    }
return callback(null, response)
},
_parseResource(resource) {
let modified
if (resource.path == null || typeof resource.path !== 'string') {
throw 'all resources should have a path attribute'
}
if (resource.modified != null) {
modified = new Date(resource.modified)
if (isNaN(modified.getTime())) {
throw `resource modified date could not be understood: ${resource.modified}`
}
}
if (resource.url == null && resource.content == null) {
throw 'all resources should have either a url or content attribute'
}
if (resource.content != null && typeof resource.content !== 'string') {
throw 'content attribute should be a string'
}
if (resource.url != null && typeof resource.url !== 'string') {
throw 'url attribute should be a string'
}
return {
path: resource.path,
modified,
url: resource.url,
content: resource.content
}
},
_parseAttribute(name, attribute, options) {
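    // usage sketch: this._parseAttribute('draft', true, { default: false, type: 'boolean' })
    // returns true; a value of the wrong type throws a descriptive message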
if (attribute != null) {
if (options.validValues != null) {
if (options.validValues.indexOf(attribute) === -1) {
throw `${name} attribute should be one of: ${options.validValues.join(
', '
)}`
}
}
if (options.type != null) {
if (typeof attribute !== options.type) {
throw `${name} attribute should be a ${options.type}`
}
}
} else {
if (options.default != null) {
return options.default
}
}
return attribute
},
_sanitizePath(path) {
// See http://php.net/manual/en/function.escapeshellcmd.php
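    // e.g. a hypothetical 'figs/plot(1).tex' becomes 'figs/plot1.tex'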
return path.replace(
/[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g,
''
)
},
_checkPath(path) {
// check that the request does not use a relative path
for (const dir of Array.from(path.split('/'))) {
if (dir === '..') {
throw 'relative path in root resource'
}
}
return path
}
}

View file

@ -0,0 +1,154 @@
/* eslint-disable
handle-callback-err,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS201: Simplify complex destructure assignments
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ResourceStateManager
const Path = require('path')
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const Errors = require('./Errors')
const SafeReader = require('./SafeReader')
module.exports = ResourceStateManager = {
// The sync state is an identifier which must match for an
// incremental update to be allowed.
//
// The initial value is passed in and stored on a full
// compile, along with the list of resources..
//
// Subsequent incremental compiles must come with the same value - if
// not they will be rejected with a 409 Conflict response. The
// previous list of resources is returned.
//
// An incremental compile can only update existing files with new
// content. The sync state identifier must change if any docs or
// files are moved, added, deleted or renamed.
SYNC_STATE_FILE: '.project-sync-state',
SYNC_STATE_MAX_SIZE: 128 * 1024,
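  // Example (hypothetical) contents of .project-sync-state:
  //   main.tex
  //   chapters/intro.tex
  //   stateHash:82a5...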
saveProjectState(state, resources, basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
if (state == null) {
// remove the file if no state passed in
logger.log({ state, basePath }, 'clearing sync state')
return fs.unlink(stateFile, function(err) {
if (err != null && err.code !== 'ENOENT') {
return callback(err)
} else {
return callback()
}
})
} else {
logger.log({ state, basePath }, 'writing sync state')
const resourceList = Array.from(resources).map(resource => resource.path)
return fs.writeFile(
stateFile,
[...Array.from(resourceList), `stateHash:${state}`].join('\n'),
callback
)
}
},
checkProjectStateMatches(state, basePath, callback) {
if (callback == null) {
callback = function(error, resources) {}
}
const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
const size = this.SYNC_STATE_MAX_SIZE
return SafeReader.readFile(stateFile, size, 'utf8', function(
err,
result,
bytesRead
) {
if (err != null) {
return callback(err)
}
if (bytesRead === size) {
logger.error(
{ file: stateFile, size, bytesRead },
'project state file truncated'
)
}
const array =
__guard__(result != null ? result.toString() : undefined, x =>
x.split('\n')
) || []
const adjustedLength = Math.max(array.length, 1)
const resourceList = array.slice(0, adjustedLength - 1)
const oldState = array[adjustedLength - 1]
const newState = `stateHash:${state}`
logger.log(
{ state, oldState, basePath, stateMatches: newState === oldState },
'checking sync state'
)
if (newState !== oldState) {
return callback(
new Errors.FilesOutOfSyncError('invalid state for incremental update')
)
} else {
const resources = Array.from(resourceList).map(path => ({ path }))
return callback(null, resources)
}
})
},
checkResourceFiles(resources, allFiles, basePath, callback) {
// check the paths are all relative to current directory
let file
if (callback == null) {
callback = function(error) {}
}
for (file of Array.from(resources || [])) {
for (const dir of Array.from(
__guard__(file != null ? file.path : undefined, x => x.split('/'))
)) {
if (dir === '..') {
return callback(new Error('relative path in resource file list'))
}
}
}
// check if any of the input files are not present in list of files
const seenFile = {}
for (file of Array.from(allFiles)) {
seenFile[file] = true
}
const missingFiles = Array.from(resources)
.filter(resource => !seenFile[resource.path])
.map(resource => resource.path)
if ((missingFiles != null ? missingFiles.length : undefined) > 0) {
logger.err(
{ missingFiles, basePath, allFiles, resources },
'missing input files for project'
)
return callback(
new Errors.FilesOutOfSyncError(
'resource files missing in incremental update'
)
)
} else {
return callback()
}
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

View file

@ -0,0 +1,352 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
no-unused-vars,
no-useless-escape,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ResourceWriter
const UrlCache = require('./UrlCache')
const Path = require('path')
const fs = require('fs')
const async = require('async')
const OutputFileFinder = require('./OutputFileFinder')
const ResourceStateManager = require('./ResourceStateManager')
const Metrics = require('./Metrics')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const parallelFileDownloads = settings.parallelFileDownloads || 1
module.exports = ResourceWriter = {
syncResourcesToDisk(request, basePath, callback) {
if (callback == null) {
callback = function(error, resourceList) {}
}
if (request.syncType === 'incremental') {
logger.log(
{ project_id: request.project_id, user_id: request.user_id },
'incremental sync'
)
return ResourceStateManager.checkProjectStateMatches(
request.syncState,
basePath,
function(error, resourceList) {
if (error != null) {
return callback(error)
}
return ResourceWriter._removeExtraneousFiles(
resourceList,
basePath,
function(error, outputFiles, allFiles) {
if (error != null) {
return callback(error)
}
return ResourceStateManager.checkResourceFiles(
resourceList,
allFiles,
basePath,
function(error) {
if (error != null) {
return callback(error)
}
return ResourceWriter.saveIncrementalResourcesToDisk(
request.project_id,
request.resources,
basePath,
function(error) {
if (error != null) {
return callback(error)
}
return callback(null, resourceList)
}
)
}
)
}
)
}
)
} else {
logger.log(
{ project_id: request.project_id, user_id: request.user_id },
'full sync'
)
return this.saveAllResourcesToDisk(
request.project_id,
request.resources,
basePath,
function(error) {
if (error != null) {
return callback(error)
}
return ResourceStateManager.saveProjectState(
request.syncState,
request.resources,
basePath,
function(error) {
if (error != null) {
return callback(error)
}
return callback(null, request.resources)
}
)
}
)
}
},
saveIncrementalResourcesToDisk(project_id, resources, basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
return this._createDirectory(basePath, error => {
if (error != null) {
return callback(error)
}
const jobs = Array.from(resources).map(resource =>
(resource => {
return callback =>
this._writeResourceToDisk(project_id, resource, basePath, callback)
})(resource)
)
return async.parallelLimit(jobs, parallelFileDownloads, callback)
})
},
saveAllResourcesToDisk(project_id, resources, basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
return this._createDirectory(basePath, error => {
if (error != null) {
return callback(error)
}
return this._removeExtraneousFiles(resources, basePath, error => {
if (error != null) {
return callback(error)
}
const jobs = Array.from(resources).map(resource =>
(resource => {
return callback =>
this._writeResourceToDisk(
project_id,
resource,
basePath,
callback
)
})(resource)
)
return async.parallelLimit(jobs, parallelFileDownloads, callback)
})
})
},
_createDirectory(basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
return fs.mkdir(basePath, function(err) {
if (err != null) {
if (err.code === 'EEXIST') {
return callback()
} else {
logger.log({ err, dir: basePath }, 'error creating directory')
return callback(err)
}
} else {
return callback()
}
})
},
_removeExtraneousFiles(resources, basePath, _callback) {
if (_callback == null) {
_callback = function(error, outputFiles, allFiles) {}
}
const timer = new Metrics.Timer('unlink-output-files')
const callback = function(error, ...result) {
timer.done()
return _callback(error, ...Array.from(result))
}
return OutputFileFinder.findOutputFiles(resources, basePath, function(
error,
outputFiles,
allFiles
) {
if (error != null) {
return callback(error)
}
const jobs = []
for (const file of Array.from(outputFiles || [])) {
;(function(file) {
const { path } = file
let should_delete = true
if (
path.match(/^output\./) ||
path.match(/\.aux$/) ||
path.match(/^cache\//)
) {
// knitr cache
should_delete = false
}
if (path.match(/^output-.*/)) {
// Tikz cached figures (default case)
should_delete = false
}
if (path.match(/\.(pdf|dpth|md5)$/)) {
// Tikz cached figures (by extension)
should_delete = false
}
if (
path.match(/\.(pygtex|pygstyle)$/) ||
path.match(/(^|\/)_minted-[^\/]+\//)
) {
// minted files/directory
should_delete = false
}
if (
path.match(/\.md\.tex$/) ||
path.match(/(^|\/)_markdown_[^\/]+\//)
) {
// markdown files/directory
should_delete = false
}
if (path.match(/-eps-converted-to\.pdf$/)) {
// Epstopdf generated files
should_delete = false
}
if (
path === 'output.pdf' ||
path === 'output.dvi' ||
path === 'output.log' ||
path === 'output.xdv'
) {
should_delete = true
}
if (path === 'output.tex') {
// created by TikzManager if present in output files
should_delete = true
}
if (should_delete) {
return jobs.push(callback =>
ResourceWriter._deleteFileIfNotDirectory(
Path.join(basePath, path),
callback
)
)
}
})(file)
}
return async.series(jobs, function(error) {
if (error != null) {
return callback(error)
}
return callback(null, outputFiles, allFiles)
})
})
},
_deleteFileIfNotDirectory(path, callback) {
if (callback == null) {
callback = function(error) {}
}
return fs.stat(path, function(error, stat) {
if (error != null && error.code === 'ENOENT') {
return callback()
} else if (error != null) {
logger.err(
{ err: error, path },
'error stating file in deleteFileIfNotDirectory'
)
return callback(error)
} else if (stat.isFile()) {
return fs.unlink(path, function(error) {
if (error != null) {
logger.err(
{ err: error, path },
'error removing file in deleteFileIfNotDirectory'
)
return callback(error)
} else {
return callback()
}
})
} else {
return callback()
}
})
},
_writeResourceToDisk(project_id, resource, basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
return ResourceWriter.checkPath(basePath, resource.path, function(
error,
path
) {
if (error != null) {
return callback(error)
}
return fs.mkdir(Path.dirname(path), { recursive: true }, function(error) {
if (error != null) {
return callback(error)
}
// TODO: Don't overwrite file if it hasn't been modified
if (resource.url != null) {
return UrlCache.downloadUrlToFile(
project_id,
resource.url,
path,
resource.modified,
function(err) {
if (err != null) {
logger.err(
{
err,
project_id,
path,
resource_url: resource.url,
modified: resource.modified
},
'error downloading file for resources'
)
Metrics.inc('download-failed')
}
return callback()
}
) // try and continue compiling even if http resource can not be downloaded at this time
} else {
          // write the resource content directly to disk
          return fs.writeFile(path, resource.content, callback)
}
})
})
},
checkPath(basePath, resourcePath, callback) {
const path = Path.normalize(Path.join(basePath, resourcePath))
if (path.slice(0, basePath.length + 1) !== basePath + '/') {
return callback(new Error('resource path is outside root directory'))
} else {
return callback(null, path)
}
}
}

View file

@ -0,0 +1,60 @@
/* eslint-disable
handle-callback-err,
no-unused-vars,
node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let SafeReader
const fs = require('fs')
const logger = require('logger-sharelatex')
module.exports = SafeReader = {
// safely read up to size bytes from a file and return result as a
// string
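  // usage sketch (hypothetical file and size):
  //   SafeReader.readFile('/compile/xyz/.project-sync-state', 128 * 1024, 'utf8',
  //     function(err, result, bytesRead) { ... })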
readFile(file, size, encoding, callback) {
if (callback == null) {
callback = function(error, result) {}
}
return fs.open(file, 'r', function(err, fd) {
if (err != null && err.code === 'ENOENT') {
return callback()
}
if (err != null) {
return callback(err)
}
// safely return always closing the file
const callbackWithClose = (err, ...result) =>
fs.close(fd, function(err1) {
if (err != null) {
return callback(err)
}
if (err1 != null) {
return callback(err1)
}
return callback(null, ...Array.from(result))
})
      const buff = Buffer.alloc(size) // zero-filled buffer
return fs.read(fd, buff, 0, buff.length, 0, function(
err,
bytesRead,
buffer
) {
if (err != null) {
return callbackWithClose(err)
}
const result = buffer.toString(encoding, 0, bytesRead)
return callbackWithClose(null, result, bytesRead)
})
})
}
}

View file

@ -0,0 +1,94 @@
/* eslint-disable
camelcase,
no-cond-assign,
no-unused-vars,
node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ForbidSymlinks
const Path = require('path')
const fs = require('fs')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const url = require('url')
module.exports = ForbidSymlinks = function(staticFn, root, options) {
const expressStatic = staticFn(root, options)
const basePath = Path.resolve(root)
return function(req, res, next) {
let file, project_id, result
const path = __guard__(url.parse(req.url), x => x.pathname)
// check that the path is of the form /project_id_or_name/path/to/file.log
if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) {
project_id = result[1]
file = result[2]
} else {
logger.warn({ path }, 'unrecognized file request')
return res.sendStatus(404)
}
// check that the file does not use a relative path
for (const dir of Array.from(file.split('/'))) {
if (dir === '..') {
logger.warn({ path }, 'attempt to use a relative path')
return res.sendStatus(404)
}
}
// check that the requested path is normalized
const requestedFsPath = `${basePath}/${project_id}/${file}`
if (requestedFsPath !== Path.normalize(requestedFsPath)) {
logger.error(
{ path: requestedFsPath },
'requestedFsPath is not normalized'
)
return res.sendStatus(404)
}
// check that the requested path is not a symlink
return fs.realpath(requestedFsPath, function(err, realFsPath) {
if (err != null) {
if (err.code === 'ENOENT') {
return res.sendStatus(404)
} else {
logger.error(
{
err,
requestedFsPath,
realFsPath,
path: req.params[0],
project_id: req.params.project_id
},
'error checking file access'
)
return res.sendStatus(500)
}
} else if (requestedFsPath !== realFsPath) {
logger.warn(
{
requestedFsPath,
realFsPath,
path: req.params[0],
project_id: req.params.project_id
},
'trying to access a different file (symlink), aborting'
)
return res.sendStatus(404)
} else {
return expressStatic(req, res, next)
}
})
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

View file

@ -0,0 +1,94 @@
/* eslint-disable
handle-callback-err,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let TikzManager
const fs = require('fs')
const Path = require('path')
const ResourceWriter = require('./ResourceWriter')
const SafeReader = require('./SafeReader')
const logger = require('logger-sharelatex')
// for \tikzexternalize or pstool to work the main file needs to match the
// jobname. Since we set the -jobname to output, we have to create a
// copy of the main file as 'output.tex'.
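// e.g. a hypothetical main.tex containing '\tikzexternalize' or loading
// '{pstool}' triggers the copy of the main file to output.tex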
module.exports = TikzManager = {
checkMainFile(compileDir, mainFile, resources, callback) {
// if there's already an output.tex file, we don't want to touch it
if (callback == null) {
callback = function(error, needsMainFile) {}
}
for (const resource of Array.from(resources)) {
if (resource.path === 'output.tex') {
logger.log({ compileDir, mainFile }, 'output.tex already in resources')
return callback(null, false)
}
}
// if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
return ResourceWriter.checkPath(compileDir, mainFile, function(
error,
path
) {
if (error != null) {
return callback(error)
}
return SafeReader.readFile(path, 65536, 'utf8', function(error, content) {
if (error != null) {
return callback(error)
}
const usesTikzExternalize =
(content != null
? content.indexOf('\\tikzexternalize')
: undefined) >= 0
const usesPsTool =
(content != null ? content.indexOf('{pstool}') : undefined) >= 0
logger.log(
{ compileDir, mainFile, usesTikzExternalize, usesPsTool },
'checked for packages needing main file as output.tex'
)
const needsMainFile = usesTikzExternalize || usesPsTool
return callback(null, needsMainFile)
})
})
},
injectOutputFile(compileDir, mainFile, callback) {
if (callback == null) {
callback = function(error) {}
}
return ResourceWriter.checkPath(compileDir, mainFile, function(
error,
path
) {
if (error != null) {
return callback(error)
}
return fs.readFile(path, 'utf8', function(error, content) {
if (error != null) {
return callback(error)
}
logger.log(
{ compileDir, mainFile },
'copied file to output.tex as project uses packages which require it'
)
// use wx flag to ensure that output file does not already exist
return fs.writeFile(
Path.join(compileDir, 'output.tex'),
content,
{ flag: 'wx' },
callback
)
})
})
}
}

View file

@ -0,0 +1,278 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let UrlCache
const db = require('./db')
const dbQueue = require('./DbQueue')
const UrlFetcher = require('./UrlFetcher')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')
const fs = require('fs')
const logger = require('logger-sharelatex')
const async = require('async')
module.exports = UrlCache = {
downloadUrlToFile(project_id, url, destPath, lastModified, callback) {
if (callback == null) {
callback = function(error) {}
}
return UrlCache._ensureUrlIsInCache(
project_id,
url,
lastModified,
(error, pathToCachedUrl) => {
if (error != null) {
return callback(error)
}
return UrlCache._copyFile(pathToCachedUrl, destPath, function(error) {
if (error != null) {
return UrlCache._clearUrlDetails(project_id, url, () =>
callback(error)
)
} else {
return callback(error)
}
})
}
)
},
clearProject(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
return UrlCache._findAllUrlsInProject(project_id, function(error, urls) {
logger.log(
{ project_id, url_count: urls.length },
'clearing project URLs'
)
if (error != null) {
return callback(error)
}
const jobs = Array.from(urls || []).map(url =>
(url => callback =>
UrlCache._clearUrlFromCache(project_id, url, function(error) {
if (error != null) {
logger.error(
{ err: error, project_id, url },
'error clearing project URL'
)
}
return callback()
}))(url)
)
return async.series(jobs, callback)
})
},
_ensureUrlIsInCache(project_id, url, lastModified, callback) {
if (callback == null) {
callback = function(error, pathOnDisk) {}
}
if (lastModified != null) {
// MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds.
// So round down to seconds
lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
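      // e.g. 2020-04-23T13:56:33.789Z is stored as 2020-04-23T13:56:33.000Z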
}
return UrlCache._doesUrlNeedDownloading(
project_id,
url,
lastModified,
(error, needsDownloading) => {
if (error != null) {
return callback(error)
}
if (needsDownloading) {
logger.log({ url, lastModified }, 'downloading URL')
return UrlFetcher.pipeUrlToFile(
url,
UrlCache._cacheFilePathForUrl(project_id, url),
error => {
if (error != null) {
return callback(error)
}
return UrlCache._updateOrCreateUrlDetails(
project_id,
url,
lastModified,
error => {
if (error != null) {
return callback(error)
}
return callback(
null,
UrlCache._cacheFilePathForUrl(project_id, url)
)
}
)
}
)
} else {
logger.log({ url, lastModified }, 'URL is up to date in cache')
return callback(null, UrlCache._cacheFilePathForUrl(project_id, url))
}
}
)
},
_doesUrlNeedDownloading(project_id, url, lastModified, callback) {
if (callback == null) {
callback = function(error, needsDownloading) {}
}
if (lastModified == null) {
return callback(null, true)
}
return UrlCache._findUrlDetails(project_id, url, function(
error,
urlDetails
) {
if (error != null) {
return callback(error)
}
if (
urlDetails == null ||
urlDetails.lastModified == null ||
urlDetails.lastModified.getTime() < lastModified.getTime()
) {
return callback(null, true)
} else {
return callback(null, false)
}
})
},
_cacheFileNameForUrl(project_id, url) {
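    // e.g. a hypothetical project '5e9f1c2d3b4a' and url 'http://filestore/x'
    // give '5e9f1c2d3b4a:' + md5('http://filestore/x') as the cache file name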
return (
project_id +
':' +
crypto
.createHash('md5')
.update(url)
.digest('hex')
)
},
_cacheFilePathForUrl(project_id, url) {
return `${Settings.path.clsiCacheDir}/${UrlCache._cacheFileNameForUrl(
project_id,
url
)}`
},
_copyFile(from, to, _callback) {
if (_callback == null) {
_callback = function(error) {}
}
const callbackOnce = function(error) {
if (error != null) {
logger.error({ err: error, from, to }, 'error copying file from cache')
}
_callback(error)
return (_callback = function() {})
}
const writeStream = fs.createWriteStream(to)
const readStream = fs.createReadStream(from)
writeStream.on('error', callbackOnce)
readStream.on('error', callbackOnce)
writeStream.on('close', callbackOnce)
return writeStream.on('open', () => readStream.pipe(writeStream))
},
_clearUrlFromCache(project_id, url, callback) {
if (callback == null) {
callback = function(error) {}
}
return UrlCache._clearUrlDetails(project_id, url, function(error) {
if (error != null) {
return callback(error)
}
return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) {
if (error != null) {
return callback(error)
}
return callback(null)
})
})
},
_deleteUrlCacheFromDisk(project_id, url, callback) {
if (callback == null) {
callback = function(error) {}
}
return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function(
error
) {
if (error != null && error.code !== 'ENOENT') {
// no error if the file isn't present
return callback(error)
} else {
return callback()
}
})
},
_findUrlDetails(project_id, url, callback) {
if (callback == null) {
callback = function(error, urlDetails) {}
}
const job = cb =>
db.UrlCache.findOne({ where: { url, project_id } })
.then(urlDetails => cb(null, urlDetails))
.error(cb)
return dbQueue.queue.push(job, callback)
},
_updateOrCreateUrlDetails(project_id, url, lastModified, callback) {
if (callback == null) {
callback = function(error) {}
}
const job = cb =>
db.UrlCache.findOrCreate({ where: { url, project_id } })
.spread((urlDetails, created) =>
urlDetails
.update({ lastModified })
.then(() => cb())
.error(cb)
)
.error(cb)
return dbQueue.queue.push(job, callback)
},
_clearUrlDetails(project_id, url, callback) {
if (callback == null) {
callback = function(error) {}
}
const job = cb =>
db.UrlCache.destroy({ where: { url, project_id } })
.then(() => cb(null))
.error(cb)
return dbQueue.queue.push(job, callback)
},
_findAllUrlsInProject(project_id, callback) {
if (callback == null) {
callback = function(error, urls) {}
}
const job = cb =>
db.UrlCache.findAll({ where: { project_id } })
.then(urlEntries =>
cb(
null,
urlEntries.map(entry => entry.url)
)
)
.error(cb)
return dbQueue.queue.push(job, callback)
}
}

View file

@ -0,0 +1,120 @@
/* eslint-disable
handle-callback-err,
no-return-assign,
no-unused-vars,
node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let UrlFetcher
const request = require('request').defaults({ jar: false })
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const URL = require('url')
const oneMinute = 60 * 1000
module.exports = UrlFetcher = {
pipeUrlToFile(url, filePath, _callback) {
if (_callback == null) {
_callback = function(error) {}
}
const callbackOnce = function(error) {
if (timeoutHandler != null) {
clearTimeout(timeoutHandler)
}
_callback(error)
return (_callback = function() {})
}
if (settings.filestoreDomainOveride != null) {
const p = URL.parse(url).path
url = `${settings.filestoreDomainOveride}${p}`
}
var timeoutHandler = setTimeout(
function() {
timeoutHandler = null
logger.error({ url, filePath }, 'Timed out downloading file to cache')
return callbackOnce(
new Error(`Timed out downloading file to cache ${url}`)
)
},
// FIXME: maybe need to close fileStream here
3 * oneMinute
)
logger.log({ url, filePath }, 'started downloading url to cache')
const urlStream = request.get({ url, timeout: oneMinute })
urlStream.pause() // stop data flowing until we are ready
// attach handlers before setting up pipes
urlStream.on('error', function(error) {
logger.error({ err: error, url, filePath }, 'error downloading url')
return callbackOnce(
error || new Error(`Something went wrong downloading the URL ${url}`)
)
})
urlStream.on('end', () =>
logger.log({ url, filePath }, 'finished downloading file into cache')
)
return urlStream.on('response', function(res) {
if (res.statusCode >= 200 && res.statusCode < 300) {
const fileStream = fs.createWriteStream(filePath)
// attach handlers before setting up pipes
fileStream.on('error', function(error) {
logger.error(
{ err: error, url, filePath },
'error writing file into cache'
)
return fs.unlink(filePath, function(err) {
if (err != null) {
logger.err({ err, filePath }, 'error deleting file from cache')
}
return callbackOnce(error)
})
})
fileStream.on('finish', function() {
logger.log({ url, filePath }, 'finished writing file into cache')
return callbackOnce()
})
fileStream.on('pipe', () =>
logger.log({ url, filePath }, 'piping into filestream')
)
urlStream.pipe(fileStream)
return urlStream.resume() // now we are ready to handle the data
} else {
logger.error(
{ statusCode: res.statusCode, url, filePath },
'unexpected status code downloading url to cache'
)
// https://nodejs.org/api/http.html#http_class_http_clientrequest
// If you add a 'response' event handler, then you must consume
// the data from the response object, either by calling
// response.read() whenever there is a 'readable' event, or by
// adding a 'data' handler, or by calling the .resume()
// method. Until the data is consumed, the 'end' event will not
// fire. Also, until the data is read it will consume memory
// that can eventually lead to a 'process out of memory' error.
urlStream.resume() // discard the data
return callbackOnce(
new Error(
`URL returned non-success status code: ${res.statusCode} ${url}`
)
)
}
})
}
}

View file

@ -0,0 +1,67 @@
/* eslint-disable
no-console,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Sequelize = require('sequelize')
const Settings = require('settings-sharelatex')
const _ = require('underscore')
const logger = require('logger-sharelatex')
const options = _.extend({ logging: false }, Settings.mysql.clsi)
logger.log({ dbPath: Settings.mysql.clsi.storage }, 'connecting to db')
const sequelize = new Sequelize(
Settings.mysql.clsi.database,
Settings.mysql.clsi.username,
Settings.mysql.clsi.password,
options
)
if (Settings.mysql.clsi.dialect === 'sqlite') {
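  // WAL mode plus relaxed synchronous/isolation settings trade durability for
  // speed, which seems acceptable for this rebuildable local cache database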
logger.log('running PRAGMA journal_mode=WAL;')
sequelize.query('PRAGMA journal_mode=WAL;')
sequelize.query('PRAGMA synchronous=OFF;')
sequelize.query('PRAGMA read_uncommitted = true;')
}
module.exports = {
UrlCache: sequelize.define(
'UrlCache',
{
url: Sequelize.STRING,
project_id: Sequelize.STRING,
lastModified: Sequelize.DATE
},
{
indexes: [{ fields: ['url', 'project_id'] }, { fields: ['project_id'] }]
}
),
Project: sequelize.define(
'Project',
{
project_id: { type: Sequelize.STRING, primaryKey: true },
lastAccessed: Sequelize.DATE
},
{
indexes: [{ fields: ['lastAccessed'] }]
}
),
op: Sequelize.Op,
sync() {
logger.log({ dbPath: Settings.mysql.clsi.storage }, 'syncing db schema')
return sequelize
.sync()
.then(() => logger.log('db sync complete'))
.catch(err => console.log(err, 'error syncing'))
}
}

View file

@ -1,21 +0,0 @@
#!/bin/sh
METADATA=http://metadata.google.internal./computeMetadata/v1
SVC_ACCT=$METADATA/instance/service-accounts/default
PROJECT_URL=$METADATA/project/project-id
ACCESS_TOKEN=$(curl -s -H 'Metadata-Flavor: Google' $SVC_ACCT/token | cut -d'"' -f 4)
if [ -z "$ACCESS_TOKEN" ]; then
echo "No acccess token to download texlive-full images from google container, continuing without downloading. This is likely not a google cloud enviroment."
exit 0
fi
PROJECT=$(curl -s -H 'Metadata-Flavor: Google' $PROJECT_URL)
if [ -z "$PROJECT" ]; then
echo "No project name to download texlive-full images from google container, continuing without downloading. This is likely not a google cloud enviroment."
exit 0
fi
docker login -u '_token' -p $ACCESS_TOKEN https://gcr.io
docker pull --all-tags gcr.io/$PROJECT/texlive-full
cp /app/bin/synctex /app/bin/synctex-mount/synctex
echo "Finished downloading texlive-full images"

View file

@ -1,10 +1,11 @@
clsi clsi
--language=coffeescript
--node-version=10.15.0
--acceptance-creds=None --acceptance-creds=None
--dependencies=mongo,redis --data-dirs=cache,compiles,db
--dependencies=
--docker-repos=gcr.io/overleaf-ops --docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=TEXLIVE_IMAGE --env-pass-through=TEXLIVE_IMAGE
--build-target=docker --language=es
--script-version=1.1.24 --node-version=10.19.0
--public-repo=True --public-repo=True
--script-version=2.1.0

View file

@ -1,72 +0,0 @@
Path = require "path"
module.exports =
# Options are passed to Sequelize.
# See http://sequelizejs.com/documentation#usage-options for details
mysql:
clsi:
database: "clsi"
username: "clsi"
dialect: "sqlite"
storage: process.env["SQLITE_PATH"] or Path.resolve(__dirname + "/../db.sqlite")
pool:
max: 1
min: 1
retry:
max: 10
compileSizeLimit: process.env["COMPILE_SIZE_LIMIT"] or "7mb"
path:
compilesDir: Path.resolve(__dirname + "/../compiles")
clsiCacheDir: Path.resolve(__dirname + "/../cache")
synctexBaseDir: (project_id) -> Path.join(@compilesDir, project_id)
internal:
clsi:
port: 3013
host: process.env["LISTEN_ADDRESS"] or "localhost"
load_balancer_agent:
report_load:true
load_port: 3048
local_port: 3049
apis:
clsi:
url: "http://#{process.env['CLSI_HOST'] or 'localhost'}:3013"
smokeTest: process.env["SMOKE_TEST"] or false
project_cache_length_ms: 1000 * 60 * 60 * 24
parallelFileDownloads: process.env["FILESTORE_PARALLEL_FILE_DOWNLOADS"] or 1
parallelSqlQueryLimit: process.env["FILESTORE_PARALLEL_SQL_QUERY_LIMIT"] or 1
filestoreDomainOveride: process.env["FILESTORE_DOMAIN_OVERRIDE"]
texliveImageNameOveride: process.env["TEX_LIVE_IMAGE_NAME_OVERRIDE"]
sentry:
dsn: process.env['SENTRY_DSN']
if process.env["DOCKER_RUNNER"]
module.exports.clsi =
dockerRunner: process.env["DOCKER_RUNNER"] == "true"
docker:
runtime: process.env["DOCKER_RUNTIME"]
image: process.env["TEXLIVE_IMAGE"] or "quay.io/sharelatex/texlive-full:2017.1"
env:
HOME: "/tmp"
socketPath: "/var/run/docker.sock"
user: process.env["TEXLIVE_IMAGE_USER"] or "tex"
expireProjectAfterIdleMs: 24 * 60 * 60 * 1000
checkProjectsIntervalMs: 10 * 60 * 1000
try
seccomp_profile_path = Path.resolve(__dirname + "/../seccomp/clsi-profile.json")
module.exports.clsi.docker.seccomp_profile = JSON.stringify(JSON.parse(require("fs").readFileSync(seccomp_profile_path)))
catch error
console.log error, "could not load seccom profile from #{seccomp_profile_path}"
module.exports.path.synctexBaseDir = -> "/compile"
module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"]
module.exports.path.synctexBinHostPath = process.env["SYNCTEX_BIN_HOST_PATH"]

View file

@ -0,0 +1,102 @@
const Path = require('path')
module.exports = {
// Options are passed to Sequelize.
// See http://sequelizejs.com/documentation#usage-options for details
mysql: {
clsi: {
database: 'clsi',
username: 'clsi',
dialect: 'sqlite',
storage:
process.env.SQLITE_PATH || Path.resolve(__dirname + '/../db/db.sqlite'),
pool: {
max: 1,
min: 1
},
retry: {
max: 10
}
}
},
compileSizeLimit: process.env.COMPILE_SIZE_LIMIT || '7mb',
processLifespanLimitMs:
parseInt(process.env.PROCESS_LIFE_SPAN_LIMIT_MS) || 60 * 60 * 24 * 1000 * 2,
path: {
compilesDir: Path.resolve(__dirname + '/../compiles'),
clsiCacheDir: Path.resolve(__dirname + '/../cache'),
synctexBaseDir(project_id) {
return Path.join(this.compilesDir, project_id)
}
},
internal: {
clsi: {
port: 3013,
host: process.env.LISTEN_ADDRESS || 'localhost'
},
load_balancer_agent: {
report_load: true,
load_port: 3048,
local_port: 3049
}
},
apis: {
clsi: {
url: `http://${process.env.CLSI_HOST || 'localhost'}:3013`
}
},
smokeTest: process.env.SMOKE_TEST || false,
project_cache_length_ms: 1000 * 60 * 60 * 24,
parallelFileDownloads: process.env.FILESTORE_PARALLEL_FILE_DOWNLOADS || 1,
parallelSqlQueryLimit: process.env.FILESTORE_PARALLEL_SQL_QUERY_LIMIT || 1,
filestoreDomainOveride: process.env.FILESTORE_DOMAIN_OVERRIDE,
texliveImageNameOveride: process.env.TEX_LIVE_IMAGE_NAME_OVERRIDE,
sentry: {
dsn: process.env.SENTRY_DSN
}
}
if (process.env.DOCKER_RUNNER) {
let seccomp_profile_path
module.exports.clsi = {
dockerRunner: process.env.DOCKER_RUNNER === 'true',
docker: {
runtime: process.env.DOCKER_RUNTIME,
image:
process.env.TEXLIVE_IMAGE || 'quay.io/sharelatex/texlive-full:2017.1',
env: {
HOME: '/tmp'
},
socketPath: '/var/run/docker.sock',
user: process.env.TEXLIVE_IMAGE_USER || 'tex'
},
expireProjectAfterIdleMs: 24 * 60 * 60 * 1000,
checkProjectsIntervalMs: 10 * 60 * 1000
}
try {
seccomp_profile_path = Path.resolve(
__dirname + '/../seccomp/clsi-profile.json'
)
module.exports.clsi.docker.seccomp_profile = JSON.stringify(
JSON.parse(require('fs').readFileSync(seccomp_profile_path))
)
} catch (error) {
console.log(
error,
      `could not load seccomp profile from ${seccomp_profile_path}`
)
}
module.exports.path.synctexBaseDir = () => '/compile'
module.exports.path.sandboxedCompilesHostDir = process.env.COMPILES_HOST_DIR
module.exports.path.synctexBinHostPath = process.env.SYNCTEX_BIN_HOST_PATH
}

2
services/clsi/db/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
*
!.gitignore

View file

@ -1,4 +1,4 @@
version: "2" version: "2.3"
services: services:
dev: dev:

View file

@ -1,9 +1,8 @@
# This file was auto-generated, do not edit it directly. # This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from # Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment # https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.24
version: "2" version: "2.3"
services: services:
test_unit: test_unit:
@ -27,13 +26,9 @@ services:
MOCHA_GREP: ${MOCHA_GREP} MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test NODE_ENV: test
TEXLIVE_IMAGE: TEXLIVE_IMAGE:
depends_on:
- mongo
- redis
command: npm run test:acceptance:_run command: npm run test:acceptance:_run
tar: tar:
build: . build: .
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
@ -41,9 +36,3 @@ services:
- ./:/tmp/build/ - ./:/tmp/build/
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root user: root
redis:
image: redis
mongo:
image: mongo:3.4

View file

@ -1,13 +1,14 @@
# This file was auto-generated, do not edit it directly. # This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from # Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment # https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.24
version: "2" version: "2.3"
services: services:
test_unit: test_unit:
build: . build:
context: .
target: base
volumes: volumes:
- .:/app - .:/app
working_dir: /app working_dir: /app
@ -17,7 +18,9 @@ services:
command: npm run test:unit command: npm run test:unit
test_acceptance: test_acceptance:
build: . build:
context: .
target: base
volumes: volumes:
- .:/app - .:/app
working_dir: /app working_dir: /app
@ -32,25 +35,5 @@ services:
MOCHA_GREP: ${MOCHA_GREP} MOCHA_GREP: ${MOCHA_GREP}
LOG_LEVEL: ERROR LOG_LEVEL: ERROR
NODE_ENV: test NODE_ENV: test
depends_on:
- mongo
- redis
command: npm run test:acceptance command: npm run test:acceptance
tar:
build: .
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
volumes:
- ./:/tmp/build/
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root
redis:
image: redis
mongo:
image: mongo:3.4

View file

@ -1,25 +1,18 @@
#!/bin/sh #!/bin/sh
echo "Changing permissions of /var/run/docker.sock for sibling containers" docker --version >&2
ls -al /var/run/docker.sock
docker --version
cat /etc/passwd
# add the node user to the docker group on the host
DOCKER_GROUP=$(stat -c '%g' /var/run/docker.sock) DOCKER_GROUP=$(stat -c '%g' /var/run/docker.sock)
groupadd --non-unique --gid ${DOCKER_GROUP} dockeronhost groupadd --non-unique --gid ${DOCKER_GROUP} dockeronhost
usermod -aG dockeronhost node usermod -aG dockeronhost node
mkdir -p /app/cache # compatibility: initial volume setup
chown -R node:node /app/cache chown node:node /app/cache
chown node:node /app/compiles
chown node:node /app/db
mkdir -p /app/compiles # make synctex available for remount in compiles
chown -R node:node /app/compiles cp /app/bin/synctex /app/bin/synctex-mount/synctex
chown -R node:node /app/bin/synctex
mkdir -p /app/test/acceptance/fixtures/tmp/
chown -R node:node /app
chown -R node:node /app/bin
./bin/install_texlive_gce.sh
exec runuser -u node -- "$@" exec runuser -u node -- "$@"

View file

@ -10,10 +10,9 @@
}, },
"watch": [ "watch": [
"app/coffee/", "app/js/",
"app.coffee", "app.js",
"config/" "config/"
], ],
"ext": "coffee" "ext": "js"
} }

File diff suppressed because it is too large

7179
services/clsi/package-lock.json generated Normal file

File diff suppressed because it is too large

View file

@ -7,48 +7,62 @@
"url": "https://github.com/sharelatex/clsi-sharelatex.git" "url": "https://github.com/sharelatex/clsi-sharelatex.git"
}, },
"scripts": { "scripts": {
"compile:app": "([ -e app/coffee ] && coffee -m $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee -m $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", "start": "node $NODE_APP_OPTIONS app.js",
"start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
"test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js", "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
"test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP", "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
"test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js", "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
"test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP",
"compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee",
"compile:acceptance_tests": "[ ! -e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee",
"compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests",
"nodemon": "nodemon --config nodemon.json", "nodemon": "nodemon --config nodemon.json",
"compile:smoke_tests": "[ ! -e test/smoke/coffee ] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee" "lint": "node_modules/.bin/eslint .",
"format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different",
"format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write"
}, },
"author": "James Allen <james@sharelatex.com>", "author": "James Allen <james@sharelatex.com>",
"dependencies": { "dependencies": {
"async": "0.2.9", "async": "3.2.0",
"body-parser": "^1.2.0", "body-parser": "^1.19.0",
"dockerode": "^2.5.3", "dockerode": "^3.1.0",
"express": "^4.2.0", "express": "^4.17.1",
"fs-extra": "^0.16.3", "fs-extra": "^8.1.0",
"heapdump": "^0.3.5", "heapdump": "^0.3.15",
"lockfile": "^1.0.3", "lockfile": "^1.0.4",
"logger-sharelatex": "^1.9.0", "logger-sharelatex": "^1.9.1",
"lynx": "0.0.11", "lynx": "0.2.0",
"metrics-sharelatex": "^2.3.0", "metrics-sharelatex": "^2.6.0",
"mkdirp": "0.3.5", "mysql": "^2.18.1",
"mysql": "2.6.2", "request": "^2.88.2",
"request": "^2.21.0", "sequelize": "^5.21.5",
"sequelize": "^4.38.0",
"settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0",
"smoke-test-sharelatex": "git+https://github.com/sharelatex/smoke-test-sharelatex.git#v0.2.0", "smoke-test-sharelatex": "git+https://github.com/sharelatex/smoke-test-sharelatex.git#v0.2.0",
"sqlite3": "^4.0.6", "sqlite3": "^4.1.1",
"underscore": "^1.8.2", "underscore": "^1.9.2",
"v8-profiler-node8": "^6.0.1", "v8-profiler-node8": "^6.1.1",
"wrench": "~1.5.4" "wrench": "~1.5.9"
}, },
"devDependencies": { "devDependencies": {
"bunyan": "^0.22.1", "babel-eslint": "^10.1.0",
"chai": "~1.8.1", "bunyan": "^1.8.12",
"coffeescript": "1.6.0", "chai": "~4.2.0",
"mocha": "^4.0.1", "eslint": "^6.8.0",
"sandboxed-module": "~0.3.0", "eslint-config-prettier": "^6.10.0",
"sinon": "~1.7.3", "eslint-config-standard": "^14.1.0",
"timekeeper": "0.0.4" "eslint-config-standard-jsx": "^8.1.0",
"eslint-config-standard-react": "^9.2.0",
"eslint-plugin-chai-expect": "^2.1.0",
"eslint-plugin-chai-friendly": "^0.5.0",
"eslint-plugin-import": "^2.20.1",
"eslint-plugin-jsx-a11y": "^6.2.3",
"eslint-plugin-mocha": "^6.3.0",
"eslint-plugin-node": "^11.0.0",
"eslint-plugin-prettier": "^3.1.2",
"eslint-plugin-promise": "^4.2.1",
"eslint-plugin-react": "^7.19.0",
"eslint-plugin-standard": "^4.0.1",
"mocha": "^7.1.0",
"prettier": "^1.19.1",
"prettier-eslint-cli": "^5.0.0",
"sandboxed-module": "^2.0.3",
"sinon": "~9.0.1",
"timekeeper": "2.2.0"
} }
} }

@ -1,48 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
ClsiApp = require "./helpers/ClsiApp"
describe "Broken LaTeX file", ->
before (done)->
@broken_request =
resources: [
path: "main.tex"
content: '''
\\documentclass{articl % :(
\\begin{documen % :(
Broken
\\end{documen % :(
'''
]
@correct_request =
resources: [
path: "main.tex"
content: '''
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}
'''
]
ClsiApp.ensureRunning done
describe "on first run", ->
before (done) ->
@project_id = Client.randomId()
Client.compile @project_id, @broken_request, (@error, @res, @body) => done()
it "should return a failure status", ->
@body.compile.status.should.equal "failure"
describe "on second run", ->
before (done) ->
@project_id = Client.randomId()
Client.compile @project_id, @correct_request, () =>
Client.compile @project_id, @broken_request, (@error, @res, @body) =>
done()
it "should return a failure status", ->
@body.compile.status.should.equal "failure"

@ -1,36 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
ClsiApp = require "./helpers/ClsiApp"
describe "Deleting Old Files", ->
before (done)->
@request =
resources: [
path: "main.tex"
content: '''
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}
'''
]
ClsiApp.ensureRunning done
describe "on first run", ->
before (done) ->
@project_id = Client.randomId()
Client.compile @project_id, @request, (@error, @res, @body) => done()
it "should return a success status", ->
@body.compile.status.should.equal "success"
describe "after file has been deleted", ->
before (done) ->
@request.resources = []
Client.compile @project_id, @request, (@error, @res, @body) =>
done()
it "should return a failure status", ->
@body.compile.status.should.equal "failure"

@ -1,129 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
fs = require "fs"
ChildProcess = require "child_process"
ClsiApp = require "./helpers/ClsiApp"
logger = require("logger-sharelatex")
Path = require("path")
fixturePath = (path) -> Path.normalize(__dirname + "/../fixtures/" + path)
process = require "process"
console.log process.pid, process.ppid, process.getuid(),process.getgroups(), "PID"
try
console.log "creating tmp directory", fixturePath("tmp")
fs.mkdirSync(fixturePath("tmp"))
catch err
console.log err, fixturePath("tmp"), "unable to create fixture tmp path"
MOCHA_LATEX_TIMEOUT = 60 * 1000
convertToPng = (pdfPath, pngPath, callback = (error) ->) ->
command = "convert #{fixturePath(pdfPath)} #{fixturePath(pngPath)}"
console.log "COMMAND"
console.log command
convert = ChildProcess.exec command
stdout = ""
convert.stdout.on "data", (chunk) -> console.log "STDOUT", chunk.toString()
convert.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString()
convert.on "exit", () ->
callback()
compare = (originalPath, generatedPath, callback = (error, same) ->) ->
diff_file = "#{fixturePath(generatedPath)}-diff.png"
proc = ChildProcess.exec "compare -metric mae #{fixturePath(originalPath)} #{fixturePath(generatedPath)} #{diff_file}"
stderr = ""
proc.stderr.on "data", (chunk) -> stderr += chunk
proc.on "exit", () ->
if stderr.trim() == "0 (0)"
# remove output diff if test matches expected image
fs.unlink diff_file, (err) ->
if err
throw err
callback null, true
else
console.log "compare result", stderr
callback null, false
checkPdfInfo = (pdfPath, callback = (error, output) ->) ->
proc = ChildProcess.exec "pdfinfo #{fixturePath(pdfPath)}"
stdout = ""
proc.stdout.on "data", (chunk) -> stdout += chunk
proc.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString()
proc.on "exit", () ->
if stdout.match(/Optimized:\s+yes/)
callback null, true
else
callback null, false
compareMultiplePages = (project_id, callback = (error) ->) ->
compareNext = (page_no, callback) ->
path = "tmp/#{project_id}-source-#{page_no}.png"
fs.stat fixturePath(path), (error, stat) ->
if error?
callback()
else
compare "tmp/#{project_id}-source-#{page_no}.png", "tmp/#{project_id}-generated-#{page_no}.png", (error, same) =>
throw error if error?
same.should.equal true
compareNext page_no + 1, callback
compareNext 0, callback
comparePdf = (project_id, example_dir, callback = (error) ->) ->
console.log "CONVERT"
console.log "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png"
convertToPng "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png", (error) =>
throw error if error?
convertToPng "examples/#{example_dir}/output.pdf", "tmp/#{project_id}-source.png", (error) =>
throw error if error?
fs.stat fixturePath("tmp/#{project_id}-source-0.png"), (error, stat) =>
if error?
compare "tmp/#{project_id}-source.png", "tmp/#{project_id}-generated.png", (error, same) =>
throw error if error?
same.should.equal true
callback()
else
compareMultiplePages project_id, (error) ->
throw error if error?
callback()
downloadAndComparePdf = (project_id, example_dir, url, callback = (error) ->) ->
writeStream = fs.createWriteStream(fixturePath("tmp/#{project_id}.pdf"))
request.get(url).pipe(writeStream)
console.log("writing file out", fixturePath("tmp/#{project_id}.pdf"))
writeStream.on "close", () =>
checkPdfInfo "tmp/#{project_id}.pdf", (error, optimised) =>
throw error if error?
optimised.should.equal true
comparePdf project_id, example_dir, callback
Client.runServer(4242, fixturePath("examples"))
describe "Example Documents", ->
before (done) ->
ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on "exit", () ->
ClsiApp.ensureRunning done
for example_dir in fs.readdirSync fixturePath("examples")
do (example_dir) ->
describe example_dir, ->
before ->
@project_id = Client.randomId() + "_" + example_dir
it "should generate the correct pdf", (done) ->
this.timeout(MOCHA_LATEX_TIMEOUT)
Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
if error || body?.compile?.status is "failure"
console.log "DEBUG: error", error, "body", JSON.stringify(body)
pdf = Client.getOutputFile body, "pdf"
downloadAndComparePdf(@project_id, example_dir, pdf.url, done)
it "should generate the correct pdf on the second run as well", (done) ->
this.timeout(MOCHA_LATEX_TIMEOUT)
Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
if error || body?.compile?.status is "failure"
console.log "DEBUG: error", error, "body", JSON.stringify(body)
pdf = Client.getOutputFile body, "pdf"
downloadAndComparePdf(@project_id, example_dir, pdf.url, done)

@ -1,41 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
ClsiApp = require "./helpers/ClsiApp"
describe "Simple LaTeX file", ->
before (done) ->
@project_id = Client.randomId()
@request =
resources: [
path: "main.tex"
content: '''
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}
'''
]
ClsiApp.ensureRunning =>
Client.compile @project_id, @request, (@error, @res, @body) => done()
it "should return the PDF", ->
pdf = Client.getOutputFile(@body, "pdf")
pdf.type.should.equal "pdf"
it "should return the log", ->
log = Client.getOutputFile(@body, "log")
log.type.should.equal "log"
it "should provide the pdf for download", (done) ->
pdf = Client.getOutputFile(@body, "pdf")
request.get pdf.url, (error, res, body) ->
res.statusCode.should.equal 200
done()
it "should provide the log for download", (done) ->
log = Client.getOutputFile(@body, "pdf")
request.get log.url, (error, res, body) ->
res.statusCode.should.equal 200
done()

@ -1,41 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
expect = require("chai").expect
ClsiApp = require "./helpers/ClsiApp"
crypto = require("crypto")
describe "Syncing", ->
before (done) ->
content = '''
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}
'''
@request =
resources: [
path: "main.tex"
content: content
]
@project_id = Client.randomId()
ClsiApp.ensureRunning =>
Client.compile @project_id, @request, (@error, @res, @body) => done()
describe "from code to pdf", ->
it "should return the correct location", (done) ->
Client.syncFromCode @project_id, "main.tex", 3, 5, (error, pdfPositions) ->
throw error if error?
expect(pdfPositions).to.deep.equal(
pdf: [ { page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 } ]
)
done()
describe "from pdf to code", ->
it "should return the correct location", (done) ->
Client.syncFromPdf @project_id, 1, 100, 200, (error, codePositions) =>
throw error if error?
expect(codePositions).to.deep.equal(
code: [ { file: 'main.tex', line: 3, column: -1 } ]
)
done()

@ -1,34 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
ClsiApp = require "./helpers/ClsiApp"
describe "Timed out compile", ->
before (done) ->
@request =
options:
timeout: 10 #seconds
resources: [
path: "main.tex"
content: '''
\\documentclass{article}
\\begin{document}
\\def\\x{Hello!\\par\\x}
\\x
\\end{document}
'''
]
@project_id = Client.randomId()
ClsiApp.ensureRunning =>
Client.compile @project_id, @request, (@error, @res, @body) => done()
it "should return a timeout error", ->
@body.compile.error.should.equal "container timed out"
it "should return a timedout status", ->
@body.compile.status.should.equal "timedout"
it "should return the log output file name", ->
outputFilePaths = @body.compile.outputFiles.map((x) => x.path)
outputFilePaths.should.include('output.log')

@ -1,222 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
sinon = require "sinon"
ClsiApp = require "./helpers/ClsiApp"
host = "localhost"
Server =
run: () ->
express = require "express"
app = express()
staticServer = express.static __dirname + "/../fixtures/"
app.get "/:random_id/*", (req, res, next) =>
@getFile(req.url)
req.url = "/" + req.params[0]
staticServer(req, res, next)
app.listen 31415, host
getFile: () ->
randomId: () ->
Math.random().toString(16).slice(2)
Server.run()
describe "Url Caching", ->
describe "Downloading an image for the first time", ->
before (done) ->
@project_id = Client.randomId()
@file = "#{Server.randomId()}/lion.png"
@request =
resources: [{
path: "main.tex"
content: '''
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}
'''
}, {
path: "lion.png"
url: "http://#{host}:31415/#{@file}"
}]
sinon.spy Server, "getFile"
ClsiApp.ensureRunning =>
Client.compile @project_id, @request, (@error, @res, @body) => done()
afterEach ->
Server.getFile.restore()
it "should download the image", ->
Server.getFile
.calledWith("/" + @file)
.should.equal true
describe "When an image is in the cache and the last modified date is unchanged", ->
before (done) ->
@project_id = Client.randomId()
@file = "#{Server.randomId()}/lion.png"
@request =
resources: [{
path: "main.tex"
content: '''
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}
'''
}, @image_resource = {
path: "lion.png"
url: "http://#{host}:31415/#{@file}"
modified: Date.now()
}]
Client.compile @project_id, @request, (@error, @res, @body) =>
sinon.spy Server, "getFile"
Client.compile @project_id, @request, (@error, @res, @body) =>
done()
after ->
Server.getFile.restore()
it "should not download the image again", ->
Server.getFile.called.should.equal false
describe "When an image is in the cache and the last modified date is advanced", ->
before (done) ->
@project_id = Client.randomId()
@file = "#{Server.randomId()}/lion.png"
@request =
resources: [{
path: "main.tex"
content: '''
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}
'''
}, @image_resource = {
path: "lion.png"
url: "http://#{host}:31415/#{@file}"
modified: @last_modified = Date.now()
}]
Client.compile @project_id, @request, (@error, @res, @body) =>
sinon.spy Server, "getFile"
@image_resource.modified = new Date(@last_modified + 3000)
Client.compile @project_id, @request, (@error, @res, @body) =>
done()
afterEach ->
Server.getFile.restore()
it "should download the image again", ->
Server.getFile.called.should.equal true
describe "When an image is in the cache and the last modified date is further in the past", ->
before (done) ->
@project_id = Client.randomId()
@file = "#{Server.randomId()}/lion.png"
@request =
resources: [{
path: "main.tex"
content: '''
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}
'''
}, @image_resource = {
path: "lion.png"
url: "http://#{host}:31415/#{@file}"
modified: @last_modified = Date.now()
}]
Client.compile @project_id, @request, (@error, @res, @body) =>
sinon.spy Server, "getFile"
@image_resource.modified = new Date(@last_modified - 3000)
Client.compile @project_id, @request, (@error, @res, @body) =>
done()
afterEach ->
Server.getFile.restore()
it "should not download the image again", ->
Server.getFile.called.should.equal false
describe "When an image is in the cache and the last modified date is not specified", ->
before (done) ->
@project_id = Client.randomId()
@file = "#{Server.randomId()}/lion.png"
@request =
resources: [{
path: "main.tex"
content: '''
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}
'''
}, @image_resource = {
path: "lion.png"
url: "http://#{host}:31415/#{@file}"
modified: @last_modified = Date.now()
}]
Client.compile @project_id, @request, (@error, @res, @body) =>
sinon.spy Server, "getFile"
delete @image_resource.modified
Client.compile @project_id, @request, (@error, @res, @body) =>
done()
afterEach ->
Server.getFile.restore()
it "should download the image again", ->
Server.getFile.called.should.equal true
describe "After clearing the cache", ->
before (done) ->
@project_id = Client.randomId()
@file = "#{Server.randomId()}/lion.png"
@request =
resources: [{
path: "main.tex"
content: '''
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}
'''
}, @image_resource = {
path: "lion.png"
url: "http://#{host}:31415/#{@file}"
modified: @last_modified = Date.now()
}]
Client.compile @project_id, @request, (error) =>
throw error if error?
Client.clearCache @project_id, (error, res, body) =>
throw error if error?
sinon.spy Server, "getFile"
Client.compile @project_id, @request, (@error, @res, @body) =>
done()
afterEach ->
Server.getFile.restore()
it "should download the image again", ->
Server.getFile.called.should.equal true

@ -1,38 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
expect = require("chai").expect
path = require("path")
fs = require("fs")
ClsiApp = require "./helpers/ClsiApp"
describe "Syncing", ->
before (done) ->
@request =
resources: [
path: "main.tex"
content: fs.readFileSync(path.join(__dirname,"../fixtures/naugty_strings.txt"),"utf-8")
]
@project_id = Client.randomId()
ClsiApp.ensureRunning =>
Client.compile @project_id, @request, (@error, @res, @body) => done()
describe "wordcount file", ->
it "should return wordcount info", (done) ->
Client.wordcount @project_id, "main.tex", (error, result) ->
throw error if error?
expect(result).to.deep.equal(
texcount: {
encode: "utf8"
textWords: 2281
headWords: 2
outside: 0
headers: 2
elements: 0
mathInline: 6
mathDisplay: 0
errors: 0
messages: ""
}
)
done()

@ -1,105 +0,0 @@
request = require "request"
fs = require "fs"
Settings = require "settings-sharelatex"
host = "localhost"
module.exports = Client =
host: Settings.apis.clsi.url
randomId: () ->
Math.random().toString(16).slice(2)
compile: (project_id, data, callback = (error, res, body) ->) ->
request.post {
url: "#{@host}/project/#{project_id}/compile"
json:
compile: data
}, callback
clearCache: (project_id, callback = (error, res, body) ->) ->
request.del "#{@host}/project/#{project_id}", callback
getOutputFile: (response, type) ->
for file in response.compile.outputFiles
if file.type == type and file.url.match("output.#{type}")
return file
return null
runServer: (port, directory) ->
express = require("express")
app = express()
app.use express.static(directory)
console.log("starting test server on", port, host)
app.listen(port, host).on "error", (error) ->
console.error "error starting server:", error.message
process.exit(1)
syncFromCode: (project_id, file, line, column, callback = (error, pdfPositions) ->) ->
request.get {
url: "#{@host}/project/#{project_id}/sync/code"
qs: {
file: file
line: line
column: column
}
}, (error, response, body) ->
return callback(error) if error?
callback null, JSON.parse(body)
syncFromPdf: (project_id, page, h, v, callback = (error, pdfPositions) ->) ->
request.get {
url: "#{@host}/project/#{project_id}/sync/pdf"
qs: {
page: page,
h: h, v: v
}
}, (error, response, body) ->
return callback(error) if error?
callback null, JSON.parse(body)
compileDirectory: (project_id, baseDirectory, directory, serverPort, callback = (error, res, body) ->) ->
resources = []
entities = fs.readdirSync("#{baseDirectory}/#{directory}")
rootResourcePath = "main.tex"
while (entities.length > 0)
entity = entities.pop()
stat = fs.statSync("#{baseDirectory}/#{directory}/#{entity}")
if stat.isDirectory()
entities = entities.concat fs.readdirSync("#{baseDirectory}/#{directory}/#{entity}").map (subEntity) ->
if subEntity == "main.tex"
rootResourcePath = "#{entity}/#{subEntity}"
return "#{entity}/#{subEntity}"
else if stat.isFile() and entity != "output.pdf"
extension = entity.split(".").pop()
if ["tex", "bib", "cls", "sty", "pdf_tex", "Rtex", "ist", "md", "Rmd"].indexOf(extension) > -1
resources.push
path: entity
content: fs.readFileSync("#{baseDirectory}/#{directory}/#{entity}").toString()
else if ["eps", "ttf", "png", "jpg", "pdf", "jpeg"].indexOf(extension) > -1
resources.push
path: entity
url: "http://#{host}:#{serverPort}/#{directory}/#{entity}"
modified: stat.mtime
fs.readFile "#{baseDirectory}/#{directory}/options.json", (error, body) =>
req =
resources: resources
rootResourcePath: rootResourcePath
if !error?
body = JSON.parse body
req.options = body
@compile project_id, req, callback
wordcount: (project_id, file, callback = (error, pdfPositions) ->) ->
request.get {
url: "#{@host}/project/#{project_id}/wordcount"
qs: {
file: file
}
}, (error, response, body) ->
return callback(error) if error?
callback null, JSON.parse(body)

@ -1,24 +0,0 @@
app = require('../../../../app')
require("logger-sharelatex").logger.level("info")
logger = require("logger-sharelatex")
Settings = require("settings-sharelatex")
module.exports =
running: false
initing: false
callbacks: []
ensureRunning: (callback = (error) ->) ->
if @running
return callback()
else if @initing
@callbacks.push callback
else
@initing = true
@callbacks.push callback
app.listen Settings.internal?.clsi?.port, "localhost", (error) =>
throw error if error?
@running = true
logger.log("clsi running in dev mode")
for callback in @callbacks
callback()

@ -0,0 +1,88 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const ClsiApp = require('./helpers/ClsiApp')
describe('Broken LaTeX file', function() {
before(function(done) {
this.broken_request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{articl % :(
\\begin{documen % :(
Broken
\\end{documen % :(\
`
}
]
}
this.correct_request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
}
]
}
return ClsiApp.ensureRunning(done)
})
describe('on first run', function() {
before(function(done) {
this.project_id = Client.randomId()
return Client.compile(
this.project_id,
this.broken_request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
return it('should return a failure status', function() {
return this.body.compile.status.should.equal('failure')
})
})
return describe('on second run', function() {
before(function(done) {
this.project_id = Client.randomId()
return Client.compile(this.project_id, this.correct_request, () => {
return Client.compile(
this.project_id,
this.broken_request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
})
return it('should return a failure status', function() {
return this.body.compile.status.should.equal('failure')
})
})
})

@ -0,0 +1,73 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const ClsiApp = require('./helpers/ClsiApp')
describe('Deleting Old Files', function() {
before(function(done) {
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
}
]
}
return ClsiApp.ensureRunning(done)
})
return describe('on first run', function() {
before(function(done) {
this.project_id = Client.randomId()
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
it('should return a success status', function() {
return this.body.compile.status.should.equal('success')
})
return describe('after file has been deleted', function() {
before(function(done) {
this.request.resources = []
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
return it('should return a failure status', function() {
return this.body.compile.status.should.equal('failure')
})
})
})
})

@ -0,0 +1,285 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-path-concat,
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const fs = require('fs')
const fsExtra = require('fs-extra')
const ChildProcess = require('child_process')
const ClsiApp = require('./helpers/ClsiApp')
const logger = require('logger-sharelatex')
const Path = require('path')
const fixturePath = path => {
if (path.slice(0, 3) === 'tmp') {
return '/tmp/clsi_acceptance_tests' + path.slice(3)
}
return Path.normalize(__dirname + '/../fixtures/' + path)
}
const process = require('process')
console.log(
process.pid,
process.ppid,
process.getuid(),
process.getgroups(),
'PID'
)
const MOCHA_LATEX_TIMEOUT = 60 * 1000
const convertToPng = function(pdfPath, pngPath, callback) {
if (callback == null) {
callback = function(error) {}
}
const command = `convert ${fixturePath(pdfPath)} ${fixturePath(pngPath)}`
console.log('COMMAND')
console.log(command)
const convert = ChildProcess.exec(command)
const stdout = ''
convert.stdout.on('data', chunk => console.log('STDOUT', chunk.toString()))
convert.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
return convert.on('exit', () => callback())
}
const compare = function(originalPath, generatedPath, callback) {
if (callback == null) {
callback = function(error, same) {}
}
const diff_file = `${fixturePath(generatedPath)}-diff.png`
const proc = ChildProcess.exec(
`compare -metric mae ${fixturePath(originalPath)} ${fixturePath(
generatedPath
)} ${diff_file}`
)
let stderr = ''
proc.stderr.on('data', chunk => (stderr += chunk))
return proc.on('exit', () => {
if (stderr.trim() === '0 (0)') {
// remove output diff if test matches expected image
fs.unlink(diff_file, err => {
if (err) {
throw err
}
})
return callback(null, true)
} else {
console.log('compare result', stderr)
return callback(null, false)
}
})
}
const checkPdfInfo = function(pdfPath, callback) {
if (callback == null) {
callback = function(error, output) {}
}
const proc = ChildProcess.exec(`pdfinfo ${fixturePath(pdfPath)}`)
let stdout = ''
proc.stdout.on('data', chunk => (stdout += chunk))
proc.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
return proc.on('exit', () => {
if (stdout.match(/Optimized:\s+yes/)) {
return callback(null, true)
} else {
return callback(null, false)
}
})
}
const compareMultiplePages = function(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
var compareNext = function(page_no, callback) {
const path = `tmp/${project_id}-source-${page_no}.png`
return fs.stat(fixturePath(path), (error, stat) => {
if (error != null) {
return callback()
} else {
return compare(
`tmp/${project_id}-source-${page_no}.png`,
`tmp/${project_id}-generated-${page_no}.png`,
(error, same) => {
if (error != null) {
throw error
}
same.should.equal(true)
return compareNext(page_no + 1, callback)
}
)
}
})
}
return compareNext(0, callback)
}
const comparePdf = function(project_id, example_dir, callback) {
if (callback == null) {
callback = function(error) {}
}
console.log('CONVERT')
console.log(`tmp/${project_id}.pdf`, `tmp/${project_id}-generated.png`)
return convertToPng(
`tmp/${project_id}.pdf`,
`tmp/${project_id}-generated.png`,
error => {
if (error != null) {
throw error
}
return convertToPng(
`examples/${example_dir}/output.pdf`,
`tmp/${project_id}-source.png`,
error => {
if (error != null) {
throw error
}
return fs.stat(
fixturePath(`tmp/${project_id}-source-0.png`),
(error, stat) => {
if (error != null) {
return compare(
`tmp/${project_id}-source.png`,
`tmp/${project_id}-generated.png`,
(error, same) => {
if (error != null) {
throw error
}
same.should.equal(true)
return callback()
}
)
} else {
return compareMultiplePages(project_id, error => {
if (error != null) {
throw error
}
return callback()
})
}
}
)
}
)
}
)
}
const downloadAndComparePdf = function(project_id, example_dir, url, callback) {
if (callback == null) {
callback = function(error) {}
}
const writeStream = fs.createWriteStream(fixturePath(`tmp/${project_id}.pdf`))
request.get(url).pipe(writeStream)
console.log('writing file out', fixturePath(`tmp/${project_id}.pdf`))
return writeStream.on('close', () => {
return checkPdfInfo(`tmp/${project_id}.pdf`, (error, optimised) => {
if (error != null) {
throw error
}
optimised.should.equal(true)
return comparePdf(project_id, example_dir, callback)
})
})
}
Client.runServer(4242, fixturePath('examples'))
describe('Example Documents', function() {
before(function(done) {
ClsiApp.ensureRunning(done)
})
before(function(done) {
fsExtra.remove(fixturePath('tmp'), done)
})
before(function(done) {
fs.mkdir(fixturePath('tmp'), done)
})
after(function(done) {
fsExtra.remove(fixturePath('tmp'), done)
})
return Array.from(fs.readdirSync(fixturePath('examples'))).map(example_dir =>
(example_dir =>
describe(example_dir, function() {
before(function() {
return (this.project_id = Client.randomId() + '_' + example_dir)
})
it('should generate the correct pdf', function(done) {
this.timeout(MOCHA_LATEX_TIMEOUT)
return Client.compileDirectory(
this.project_id,
fixturePath('examples'),
example_dir,
4242,
(error, res, body) => {
if (
error ||
__guard__(
body != null ? body.compile : undefined,
x => x.status
) === 'failure'
) {
console.log('DEBUG: error', error, 'body', JSON.stringify(body))
}
const pdf = Client.getOutputFile(body, 'pdf')
return downloadAndComparePdf(
this.project_id,
example_dir,
pdf.url,
done
)
}
)
})
return it('should generate the correct pdf on the second run as well', function(done) {
this.timeout(MOCHA_LATEX_TIMEOUT)
return Client.compileDirectory(
this.project_id,
fixturePath('examples'),
example_dir,
4242,
(error, res, body) => {
if (
error ||
__guard__(
body != null ? body.compile : undefined,
x => x.status
) === 'failure'
) {
console.log('DEBUG: error', error, 'body', JSON.stringify(body))
}
const pdf = Client.getOutputFile(body, 'pdf')
return downloadAndComparePdf(
this.project_id,
example_dir,
pdf.url,
done
)
}
)
})
}))(example_dir)
)
})
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
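Here __guard__ is bulk-decaffeinate's stand-in for the CoffeeScript soak body?.compile?.status. A hypothetical hand-rolled equivalent of the status check above, for reference only:

const status = body && body.compile ? body.compile.status : undefined
if (error || status === 'failure') {
  console.log('DEBUG: error', error, 'body', JSON.stringify(body))
}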

@ -0,0 +1,71 @@
/* eslint-disable
handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const ClsiApp = require('./helpers/ClsiApp')
describe('Simple LaTeX file', function() {
before(function(done) {
this.project_id = Client.randomId()
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
}
]
}
return ClsiApp.ensureRunning(() => {
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
})
it('should return the PDF', function() {
const pdf = Client.getOutputFile(this.body, 'pdf')
return pdf.type.should.equal('pdf')
})
it('should return the log', function() {
const log = Client.getOutputFile(this.body, 'log')
return log.type.should.equal('log')
})
it('should provide the pdf for download', function(done) {
const pdf = Client.getOutputFile(this.body, 'pdf')
return request.get(pdf.url, (error, res, body) => {
res.statusCode.should.equal(200)
return done()
})
})
return it('should provide the log for download', function(done) {
const log = Client.getOutputFile(this.body, 'log')
return request.get(log.url, (error, res, body) => {
res.statusCode.should.equal(200)
return done()
})
})
})

View file

@ -0,0 +1,91 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const { expect } = require('chai')
const ClsiApp = require('./helpers/ClsiApp')
const crypto = require('crypto')
describe('Syncing', function() {
before(function(done) {
const content = `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
this.request = {
resources: [
{
path: 'main.tex',
content
}
]
}
this.project_id = Client.randomId()
return ClsiApp.ensureRunning(() => {
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
})
describe('from code to pdf', function() {
return it('should return the correct location', function(done) {
return Client.syncFromCode(
this.project_id,
'main.tex',
3,
5,
(error, pdfPositions) => {
if (error != null) {
throw error
}
expect(pdfPositions).to.deep.equal({
pdf: [
{ page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 }
]
})
return done()
}
)
})
})
return describe('from pdf to code', function() {
return it('should return the correct location', function(done) {
return Client.syncFromPdf(
this.project_id,
1,
100,
200,
(error, codePositions) => {
if (error != null) {
throw error
}
expect(codePositions).to.deep.equal({
code: [{ file: 'main.tex', line: 3, column: -1 }]
})
return done()
}
)
})
})
})

@ -0,0 +1,62 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const ClsiApp = require('./helpers/ClsiApp')
describe('Timed out compile', function() {
before(function(done) {
this.request = {
options: {
timeout: 10 // seconds
},
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\begin{document}
\\def\\x{Hello!\\par\\x}
\\x
\\end{document}\
`
}
]
}
this.project_id = Client.randomId()
return ClsiApp.ensureRunning(() => {
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
})
it('should return a timeout error', function() {
return this.body.compile.error.should.equal('container timed out')
})
it('should return a timedout status', function() {
return this.body.compile.status.should.equal('timedout')
})
return it('should return the log output file name', function() {
const outputFilePaths = this.body.compile.outputFiles.map(x => x.path)
return outputFilePaths.should.include('output.log')
})
})

@ -0,0 +1,373 @@
/* eslint-disable
no-path-concat,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const sinon = require('sinon')
const ClsiApp = require('./helpers/ClsiApp')
const host = 'localhost'
const Server = {
run() {
const express = require('express')
const app = express()
const staticServer = express.static(__dirname + '/../fixtures/')
app.get('/:random_id/*', (req, res, next) => {
this.getFile(req.url)
req.url = `/${req.params[0]}`
return staticServer(req, res, next)
})
return app.listen(31415, host)
},
getFile() {},
randomId() {
return Math.random()
.toString(16)
.slice(2)
}
}
Server.run()
describe('Url Caching', function() {
describe('Downloading an image for the first time', function() {
before(function(done) {
this.project_id = Client.randomId()
this.file = `${Server.randomId()}/lion.png`
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
},
{
path: 'lion.png',
url: `http://${host}:31415/${this.file}`
}
]
}
sinon.spy(Server, 'getFile')
return ClsiApp.ensureRunning(() => {
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
})
afterEach(function() {
return Server.getFile.restore()
})
return it('should download the image', function() {
return Server.getFile.calledWith(`/${this.file}`).should.equal(true)
})
})
describe('When an image is in the cache and the last modified date is unchanged', function() {
before(function(done) {
this.project_id = Client.randomId()
this.file = `${Server.randomId()}/lion.png`
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
},
(this.image_resource = {
path: 'lion.png',
url: `http://${host}:31415/${this.file}`,
modified: Date.now()
})
]
}
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
sinon.spy(Server, 'getFile')
return Client.compile(
this.project_id,
this.request,
(error1, res1, body1) => {
this.error = error1
this.res = res1
this.body = body1
return done()
}
)
}
)
})
after(function() {
return Server.getFile.restore()
})
return it('should not download the image again', function() {
return Server.getFile.called.should.equal(false)
})
})
describe('When an image is in the cache and the last modified date is advanced', function() {
before(function(done) {
this.project_id = Client.randomId()
this.file = `${Server.randomId()}/lion.png`
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
},
(this.image_resource = {
path: 'lion.png',
url: `http://${host}:31415/${this.file}`,
modified: (this.last_modified = Date.now())
})
]
}
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
sinon.spy(Server, 'getFile')
this.image_resource.modified = new Date(this.last_modified + 3000)
return Client.compile(
this.project_id,
this.request,
(error1, res1, body1) => {
this.error = error1
this.res = res1
this.body = body1
return done()
}
)
}
)
})
afterEach(function() {
return Server.getFile.restore()
})
return it('should download the image again', function() {
return Server.getFile.called.should.equal(true)
})
})
describe('When an image is in the cache and the last modified date is further in the past', function() {
before(function(done) {
this.project_id = Client.randomId()
this.file = `${Server.randomId()}/lion.png`
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
},
(this.image_resource = {
path: 'lion.png',
url: `http://${host}:31415/${this.file}`,
modified: (this.last_modified = Date.now())
})
]
}
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
sinon.spy(Server, 'getFile')
this.image_resource.modified = new Date(this.last_modified - 3000)
return Client.compile(
this.project_id,
this.request,
(error1, res1, body1) => {
this.error = error1
this.res = res1
this.body = body1
return done()
}
)
}
)
})
afterEach(function() {
return Server.getFile.restore()
})
return it('should not download the image again', function() {
return Server.getFile.called.should.equal(false)
})
})
describe('When an image is in the cache and the last modified date is not specified', function() {
before(function(done) {
this.project_id = Client.randomId()
this.file = `${Server.randomId()}/lion.png`
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
},
(this.image_resource = {
path: 'lion.png',
url: `http://${host}:31415/${this.file}`,
modified: (this.last_modified = Date.now())
})
]
}
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
sinon.spy(Server, 'getFile')
delete this.image_resource.modified
return Client.compile(
this.project_id,
this.request,
(error1, res1, body1) => {
this.error = error1
this.res = res1
this.body = body1
return done()
}
)
}
)
})
afterEach(function() {
return Server.getFile.restore()
})
return it('should download the image again', function() {
return Server.getFile.called.should.equal(true)
})
})
return describe('After clearing the cache', function() {
before(function(done) {
this.project_id = Client.randomId()
this.file = `${Server.randomId()}/lion.png`
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
},
(this.image_resource = {
path: 'lion.png',
url: `http://${host}:31415/${this.file}`,
modified: (this.last_modified = Date.now())
})
]
}
return Client.compile(this.project_id, this.request, error => {
if (error != null) {
throw error
}
return Client.clearCache(this.project_id, (error, res, body) => {
if (error != null) {
throw error
}
sinon.spy(Server, 'getFile')
return Client.compile(
this.project_id,
this.request,
(error1, res1, body1) => {
this.error = error1
this.res = res1
this.body = body1
return done()
}
)
})
})
})
afterEach(function() {
return Server.getFile.restore()
})
return it('should download the image again', function() {
return Server.getFile.called.should.equal(true)
})
})
})

@ -0,0 +1,72 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const { expect } = require('chai')
const path = require('path')
const fs = require('fs')
const ClsiApp = require('./helpers/ClsiApp')
describe('Syncing', function() {
before(function(done) {
this.request = {
resources: [
{
path: 'main.tex',
content: fs.readFileSync(
path.join(__dirname, '../fixtures/naugty_strings.txt'),
'utf-8'
)
}
]
}
this.project_id = Client.randomId()
return ClsiApp.ensureRunning(() => {
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
})
return describe('wordcount file', function() {
return it('should return wordcount info', function(done) {
return Client.wordcount(this.project_id, 'main.tex', (error, result) => {
if (error != null) {
throw error
}
expect(result).to.deep.equal({
texcount: {
encode: 'utf8',
textWords: 2281,
headWords: 2,
outside: 0,
headers: 2,
elements: 0,
mathInline: 6,
mathDisplay: 0,
errors: 0,
messages: ''
}
})
return done()
})
})
})
})

View file

@ -0,0 +1,208 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let Client
const request = require('request')
const fs = require('fs')
const Settings = require('settings-sharelatex')
const host = 'localhost'
module.exports = Client = {
host: Settings.apis.clsi.url,
randomId() {
return Math.random()
.toString(16)
.slice(2)
},
compile(project_id, data, callback) {
if (callback == null) {
callback = function(error, res, body) {}
}
return request.post(
{
url: `${this.host}/project/${project_id}/compile`,
json: {
compile: data
}
},
callback
)
},
clearCache(project_id, callback) {
if (callback == null) {
callback = function(error, res, body) {}
}
return request.del(`${this.host}/project/${project_id}`, callback)
},
getOutputFile(response, type) {
for (const file of Array.from(response.compile.outputFiles)) {
if (file.type === type && file.url.match(`output.${type}`)) {
return file
}
}
return null
},
runServer(port, directory) {
const express = require('express')
const app = express()
app.use(express.static(directory))
console.log('starting test server on', port, host)
return app.listen(port, host).on('error', error => {
console.error('error starting server:', error.message)
return process.exit(1)
})
},
syncFromCode(project_id, file, line, column, callback) {
if (callback == null) {
callback = function(error, pdfPositions) {}
}
return request.get(
{
url: `${this.host}/project/${project_id}/sync/code`,
qs: {
file,
line,
column
}
},
(error, response, body) => {
if (error != null) {
return callback(error)
}
return callback(null, JSON.parse(body))
}
)
},
syncFromPdf(project_id, page, h, v, callback) {
if (callback == null) {
callback = function(error, pdfPositions) {}
}
return request.get(
{
url: `${this.host}/project/${project_id}/sync/pdf`,
qs: {
page,
h,
v
}
},
(error, response, body) => {
if (error != null) {
return callback(error)
}
return callback(null, JSON.parse(body))
}
)
},
compileDirectory(project_id, baseDirectory, directory, serverPort, callback) {
if (callback == null) {
callback = function(error, res, body) {}
}
const resources = []
let entities = fs.readdirSync(`${baseDirectory}/${directory}`)
let rootResourcePath = 'main.tex'
while (entities.length > 0) {
var entity = entities.pop()
const stat = fs.statSync(`${baseDirectory}/${directory}/${entity}`)
if (stat.isDirectory()) {
entities = entities.concat(
fs
.readdirSync(`${baseDirectory}/${directory}/${entity}`)
.map(subEntity => {
if (subEntity === 'main.tex') {
rootResourcePath = `${entity}/${subEntity}`
}
return `${entity}/${subEntity}`
})
)
} else if (stat.isFile() && entity !== 'output.pdf') {
const extension = entity.split('.').pop()
if (
[
'tex',
'bib',
'cls',
'sty',
'pdf_tex',
'Rtex',
'ist',
'md',
'Rmd'
].indexOf(extension) > -1
) {
resources.push({
path: entity,
content: fs
.readFileSync(`${baseDirectory}/${directory}/${entity}`)
.toString()
})
} else if (
['eps', 'ttf', 'png', 'jpg', 'pdf', 'jpeg'].indexOf(extension) > -1
) {
resources.push({
path: entity,
url: `http://${host}:${serverPort}/${directory}/${entity}`,
modified: stat.mtime
})
}
}
}
return fs.readFile(
`${baseDirectory}/${directory}/options.json`,
(error, body) => {
const req = {
resources,
rootResourcePath
}
if (error == null) {
body = JSON.parse(body)
req.options = body
}
return this.compile(project_id, req, callback)
}
)
},
wordcount(project_id, file, callback) {
if (callback == null) {
callback = function(error, pdfPositions) {}
}
return request.get(
{
url: `${this.host}/project/${project_id}/wordcount`,
qs: {
file
}
},
(error, response, body) => {
if (error != null) {
return callback(error)
}
return callback(null, JSON.parse(body))
}
)
}
}

@ -0,0 +1,64 @@
/* eslint-disable
handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const app = require('../../../../app')
require('logger-sharelatex').logger.level('info')
const logger = require('logger-sharelatex')
const Settings = require('settings-sharelatex')
module.exports = {
running: false,
initing: false,
callbacks: [],
ensureRunning(callback) {
if (callback == null) {
callback = function(error) {}
}
if (this.running) {
return callback()
} else if (this.initing) {
return this.callbacks.push(callback)
} else {
this.initing = true
this.callbacks.push(callback)
return app.listen(
__guard__(
Settings.internal != null ? Settings.internal.clsi : undefined,
x => x.port
),
'localhost',
error => {
if (error != null) {
throw error
}
this.running = true
logger.log('clsi running in dev mode')
return (() => {
const result = []
for (callback of Array.from(this.callbacks)) {
result.push(callback())
}
return result
})()
}
)
}
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

@ -1,71 +0,0 @@
request = require "request"
Settings = require "settings-sharelatex"
async = require("async")
fs = require("fs")
_ = require("underscore")
concurentCompiles = 5
totalCompiles = 50
buildUrl = (path) -> "http://#{Settings.internal.clsi.host}:#{Settings.internal.clsi.port}/#{path}"
mainTexContent = fs.readFileSync("./bulk.tex", "utf-8")
compileTimes = []
failedCount = 0
getAverageCompileTime = ->
totalTime = _.reduce compileTimes, (sum, time)->
sum + time
, 0
return totalTime / compileTimes.length
makeRequest = (compileNumber, callback)->
bulkBodyCount = 7
bodyContent = ""
while --bulkBodyCount
bodyContent = bodyContent+=mainTexContent
startTime = new Date()
request.post {
url: buildUrl("project/loadcompile-#{compileNumber}/compile")
json:
compile:
resources: [
path: "main.tex"
content: """
\\documentclass{article}
\\begin{document}
#{bodyContent}
\\end{document}
"""
]
}, (err, response, body)->
if response.statusCode != 200
failedCount++
return callback("compile #{compileNumber} failed")
if err?
failedCount++
return callback("failed")
totalTime = new Date() - startTime
console.log totalTime+"ms"
compileTimes.push(totalTime)
callback(err)
jobs = _.map [1..totalCompiles], (i)->
return (cb)->
makeRequest(i, cb)
startTime = new Date()
async.parallelLimit jobs, concurentCompiles, (err)->
if err?
console.error err
console.log("total time taken = #{(new Date() - startTime)/1000}s")
console.log("total compiles = #{totalCompiles}")
console.log("concurent compiles = #{concurentCompiles}")
console.log("average time = #{getAverageCompileTime()/1000}s")
console.log("max time = #{_.max(compileTimes)/1000}s")
console.log("min time = #{_.min(compileTimes)/1000}s")
console.log("total failures = #{failedCount}")

@ -0,0 +1,103 @@
/* eslint-disable
standard/no-callback-literal,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const request = require('request')
const Settings = require('settings-sharelatex')
const async = require('async')
const fs = require('fs')
const _ = require('underscore')
const concurentCompiles = 5
const totalCompiles = 50
const buildUrl = path =>
`http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
const mainTexContent = fs.readFileSync('./bulk.tex', 'utf-8')
const compileTimes = []
let failedCount = 0
const getAverageCompileTime = function() {
const totalTime = _.reduce(compileTimes, (sum, time) => sum + time, 0)
return totalTime / compileTimes.length
}
const makeRequest = function(compileNumber, callback) {
let bulkBodyCount = 7
let bodyContent = ''
while (--bulkBodyCount) {
bodyContent = bodyContent += mainTexContent
}
const startTime = new Date()
return request.post(
{
url: buildUrl(`project/loadcompile-${compileNumber}/compile`),
json: {
compile: {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\begin{document}
${bodyContent}
\\end{document}\
`
}
]
}
}
},
(err, response, body) => {
if (response.statusCode !== 200) {
failedCount++
return callback(`compile ${compileNumber} failed`)
}
if (err != null) {
failedCount++
return callback('failed')
}
const totalTime = new Date() - startTime
console.log(totalTime + 'ms')
compileTimes.push(totalTime)
return callback(err)
}
)
}
const jobs = _.map(__range__(1, totalCompiles, true), i => cb =>
makeRequest(i, cb)
)
const startTime = new Date()
async.parallelLimit(jobs, concurentCompiles, err => {
if (err != null) {
console.error(err)
}
console.log(`total time taken = ${(new Date() - startTime) / 1000}s`)
console.log(`total compiles = ${totalCompiles}`)
console.log(`concurent compiles = ${concurentCompiles}`)
console.log(`average time = ${getAverageCompileTime() / 1000}s`)
console.log(`max time = ${_.max(compileTimes) / 1000}s`)
console.log(`min time = ${_.min(compileTimes) / 1000}s`)
return console.log(`total failures = ${failedCount}`)
})
function __range__(left, right, inclusive) {
const range = []
const ascending = left < right
const end = !inclusive ? right : ascending ? right + 1 : right - 1
for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) {
range.push(i)
}
return range
}
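__range__ is the decaffeinated form of the CoffeeScript range literal [1..totalCompiles]. Since underscore is already loaded, a hypothetical simplification could build the same job list with _.range (whose stop argument is exclusive, hence totalCompiles + 1):

// same fifty jobs, without the generated helper
const jobs = _.map(_.range(1, totalCompiles + 1), i => cb => makeRequest(i, cb))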

@ -1,64 +0,0 @@
chai = require("chai")
chai.should() unless Object.prototype.should?
expect = chai.expect
request = require "request"
Settings = require "settings-sharelatex"
buildUrl = (path) -> "http://#{Settings.internal.clsi.host}:#{Settings.internal.clsi.port}/#{path}"
url = buildUrl("project/smoketest-#{process.pid}/compile")
describe "Running a compile", ->
before (done) ->
request.post {
url: url
json:
compile:
resources: [
path: "main.tex"
content: """
% Membrane-like surface
% Author: Yotam Avital
\\documentclass{article}
\\usepackage{tikz}
\\usetikzlibrary{calc,fadings,decorations.pathreplacing}
\\begin{document}
\\begin{tikzpicture}
\\def\\nuPi{3.1459265}
\\foreach \\i in {5,4,...,2}{% This one doesn't matter
\\foreach \\j in {3,2,...,0}{% This will crate a membrane
% with the front lipids visible
% top layer
\\pgfmathsetmacro{\\dx}{rand*0.1}% A random variance in the x coordinate
\\pgfmathsetmacro{\\dy}{rand*0.1}% A random variance in the y coordinate,
% gives a hight fill to the lipid
\\pgfmathsetmacro{\\rot}{rand*0.1}% A random variance in the
% molecule orientation
\\shade[ball color=red] ({\\i+\\dx+\\rot},{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)}) circle(0.45);
\\shade[ball color=gray] (\\i+\\dx,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-0.9}) circle(0.45);
\\shade[ball color=gray] (\\i+\\dx-\\rot,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-1.8}) circle(0.45);
% bottom layer
\\pgfmathsetmacro{\\dx}{rand*0.1}
\\pgfmathsetmacro{\\dy}{rand*0.1}
\\pgfmathsetmacro{\\rot}{rand*0.1}
\\shade[ball color=gray] (\\i+\\dx+\\rot,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-2.8}) circle(0.45);
\\shade[ball color=gray] (\\i+\\dx,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-3.7}) circle(0.45);
\\shade[ball color=red] (\\i+\\dx-\\rot,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-4.6}) circle(0.45);
}
}
\\end{tikzpicture}
\\end{document}
"""
]
}, (@error, @response, @body) =>
done()
it "should return the pdf", ->
for file in @body.compile.outputFiles
return if file.type == "pdf"
throw new Error("no pdf returned")
it "should return the log", ->
for file in @body.compile.outputFiles
return if file.type == "log"
throw new Error("no log returned")

Some files were not shown because too many files have changed in this diff.