Merge pull request #911 from overleaf/jpa-decaff-and-config-cleanup

[misc] decaff and config cleanup
This commit is contained in:
Jakob Ackermann 2021-07-12 13:35:14 +02:00 committed by GitHub
commit 153f0f5063
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
26 changed files with 3384 additions and 1363 deletions

17
.eslintrc Normal file
View file

@ -0,0 +1,17 @@
{
"extends": [
"eslint:recommended",
"standard",
"prettier"
],
"parserOptions": {
"ecmaVersion": 2018
},
"env": {
"node": true
},
"rules": {
// Do not allow importing of implicit dependencies.
"import/no-extraneous-dependencies": "error"
}
}

8
.prettierrc Normal file
View file

@ -0,0 +1,8 @@
{
"arrowParens": "avoid",
"semi": false,
"singleQuote": true,
"trailingComma": "es5",
"tabWidth": 2,
"useTabs": false
}

View file

@ -5,7 +5,7 @@
ARG SHARELATEX_BASE_TAG=sharelatex/sharelatex-base:latest ARG SHARELATEX_BASE_TAG=sharelatex/sharelatex-base:latest
FROM $SHARELATEX_BASE_TAG FROM $SHARELATEX_BASE_TAG
ENV SHARELATEX_CONFIG /etc/sharelatex/settings.coffee ENV SHARELATEX_CONFIG /etc/sharelatex/settings.js
# Add required source files # Add required source files
@ -13,7 +13,7 @@ ENV SHARELATEX_CONFIG /etc/sharelatex/settings.coffee
ADD ${baseDir}/bin /var/www/sharelatex/bin ADD ${baseDir}/bin /var/www/sharelatex/bin
ADD ${baseDir}/doc /var/www/sharelatex/doc ADD ${baseDir}/doc /var/www/sharelatex/doc
ADD ${baseDir}/tasks /var/www/sharelatex/tasks ADD ${baseDir}/tasks /var/www/sharelatex/tasks
ADD ${baseDir}/Gruntfile.coffee /var/www/sharelatex/Gruntfile.coffee ADD ${baseDir}/Gruntfile.js /var/www/sharelatex/Gruntfile.js
ADD ${baseDir}/package.json /var/www/sharelatex/package.json ADD ${baseDir}/package.json /var/www/sharelatex/package.json
ADD ${baseDir}/package-lock.json /var/www/sharelatex/package-lock.json ADD ${baseDir}/package-lock.json /var/www/sharelatex/package-lock.json
ADD ${baseDir}/services.js /var/www/sharelatex/config/services.js ADD ${baseDir}/services.js /var/www/sharelatex/config/services.js
@ -85,7 +85,7 @@ COPY ${baseDir}/init_scripts/ /etc/my_init.d/
# Copy app settings files # Copy app settings files
# ----------------------- # -----------------------
COPY ${baseDir}/settings.coffee /etc/sharelatex/settings.coffee COPY ${baseDir}/settings.js /etc/sharelatex/settings.js
# Set Environment Variables # Set Environment Variables
# -------------------------------- # --------------------------------

View file

@ -1,216 +0,0 @@
coffee = require("coffee-script")
fs = require "fs"
spawn = require("child_process").spawn
exec = require("child_process").exec
rimraf = require "rimraf"
Path = require "path"
semver = require "semver"
knox = require "knox"
crypto = require "crypto"
async = require "async"
settings = require("settings-sharelatex")
_ = require("underscore")
SERVICES = require("./config/services")
# Grunt entry point for the ShareLaTeX meta-repository.  Registers
# install/update/run/check tasks for every service listed in
# ./config/services (SERVICES).
module.exports = (grunt) ->
  # Third-party grunt plugins plus the local ./tasks directory.
  grunt.loadNpmTasks 'grunt-bunyan'
  grunt.loadNpmTasks 'grunt-execute'
  grunt.loadNpmTasks 'grunt-available-tasks'
  grunt.loadNpmTasks 'grunt-concurrent'
  grunt.loadNpmTasks "grunt-contrib-coffee"
  grunt.loadNpmTasks "grunt-shell"
  grunt.task.loadTasks "./tasks"

  # One grunt-execute target per service, running its app.js.
  execute = {}
  for service in SERVICES
    execute[service.name] =
      src: "#{service.name}/app.js"

  grunt.initConfig
    execute: execute
    # `concurrent:all` runs every service's `run:<name>` task in parallel.
    concurrent:
      all:
        tasks: ("run:#{service.name}" for service in SERVICES)
        options:
          # Allow all services to run at once.
          limit: SERVICES.length
          logConcurrentOutput: true
    # Task groups shown by the `help` task (grunt-available-tasks).
    availabletasks:
      tasks:
        options:
          filter: 'exclude',
          tasks: [
            'concurrent'
            'execute'
            'bunyan'
            'availabletasks'
          ]
          groups:
            "Run tasks": [
              "run"
              "run:all"
              "default"
            ].concat ("run:#{service.name}" for service in SERVICES)
            "Misc": [
              "help"
            ]
            "Install tasks": ("install:#{service.name}" for service in SERVICES).concat(["install:all", "install"])
            "Update tasks": ("update:#{service.name}" for service in SERVICES).concat(["update:all", "update"])
            "Checks": ["check", "check:redis", "check:latexmk", "check:s3", "check:make", "check:mongo"]

  # One `install:<name>` task per service; `do` captures the loop variable
  # so each task closes over its own service.
  for service in SERVICES
    do (service) ->
      grunt.registerTask "install:#{service.name}", "Download and set up the #{service.name} service", () ->
        done = @async()
        Helpers.installService(service, done)

  # `install:all` = every install:<name> followed by the postinstall notes.
  grunt.registerTask 'install:all', "Download and set up all ShareLaTeX services",
    [].concat(
      ("install:#{service.name}" for service in SERVICES)
    ).concat(['postinstall'])
  grunt.registerTask 'install', 'install:all'

  grunt.registerTask 'postinstall', 'Explain postinstall steps', () ->
    Helpers.postinstallMessage @async()

  # `update:all` checks for make first, then updates each checkout.
  grunt.registerTask 'update:all', "Checkout and update all ShareLaTeX services",
    ["check:make"].concat(
      ("update:#{service.name}" for service in SERVICES)
    )
  grunt.registerTask 'update', 'update:all'

  grunt.registerTask 'run', "Run all of the sharelatex processes", ['concurrent:all']
  grunt.registerTask 'run:all', 'run'
  grunt.registerTask 'help', 'Display this help list', 'availabletasks'
  grunt.registerTask 'default', 'run'

  # Environment sanity checks.
  grunt.registerTask "check:redis", "Check that redis is installed and running", () ->
    Helpers.checkRedisConnect @async()
  grunt.registerTask "check:mongo", "Check that mongo is installed", () ->
    Helpers.checkMongoConnect @async()
  grunt.registerTask "check", "Check that you have the required dependencies installed", ["check:redis", "check:mongo", "check:make"]
  grunt.registerTask "check:make", "Check that make is installed", () ->
    Helpers.checkMake @async()

  # Implementation helpers used by the task callbacks above.  Defined after
  # the registrations; the callbacks only dereference Helpers at run time.
  Helpers =
    # Clone (and check out) a service repo, then invoke callback(error?).
    installService: (service, callback = (error) ->) ->
      console.log "Installing #{service.name}"
      Helpers.cloneGitRepo service, (error) ->
        if error?
          callback(error)
        else
          callback()

    # `git clone` the service into ./<name> unless that directory already
    # exists, then check out the pinned version.
    cloneGitRepo: (service, callback = (error) ->) ->
      repo_src = service.repo
      dir = service.name
      if !fs.existsSync(dir)
        proc = spawn "git", [
          "clone",
          repo_src,
          dir
        ], stdio: "inherit"
        # NOTE(review): the child's exit code is ignored — a failed clone
        # still proceeds to checkoutVersion.
        proc.on "close", () ->
          Helpers.checkoutVersion service, callback
      else
        console.log "#{dir} already installed, skipping."
        callback()

    # `git checkout <service.version>` inside the service's directory.
    checkoutVersion: (service, callback = (error) ->) ->
      dir = service.name
      grunt.log.write "checking out #{service.name} #{service.version}"
      proc = spawn "git", ["checkout", service.version], stdio: "inherit", cwd: dir
      proc.on "close", () ->
        callback()

    # Print the "what to do next" message shown after install:all.
    postinstallMessage: (callback = (error) ->) ->
      grunt.log.write """
        Services cloned:
        #{service.name for service in SERVICES}
        To install services run:
        $ source bin/install-services
        This will install the required node versions and run `npm install` for each service.
        See https://github.com/sharelatex/sharelatex/pull/549 for more info.
      """
      callback()

    # Verify `make` exists by running `make --version`.
    checkMake: (callback = (error) ->) ->
      grunt.log.write "Checking make is installed... "
      exec "make --version", (error, stdout, stderr) ->
        if error? and error.message.match("not found")
          grunt.log.error "FAIL."
          grunt.log.errorlns """
            Either make is not installed or is not in your path.
            On Ubuntu you can install make with:
            sudo apt-get install build-essential
          """
          return callback(error)
        else if error?
          return callback(error)
        else
          grunt.log.write "OK."
          return callback()

    # Ping mongo at settings.mongo.url.  On connection error this logs and
    # throws, aborting the grunt run.
    checkMongoConnect: (callback = (error) ->) ->
      grunt.log.write "Checking can connect to mongo"
      mongojs = require("mongojs")
      db = mongojs(settings.mongo.url, ["tags"])
      db.runCommand { ping: 1 }, (err, res) ->
        if !err and res.ok
          grunt.log.write "OK."
        return callback()
      db.on 'error', (err)->
        err = "Can not connect to mongodb"
        grunt.log.error "FAIL."
        grunt.log.errorlns """
          !!!!!!!!!!!!!! MONGO ERROR !!!!!!!!!!!!!!
          ShareLaTeX can not talk to the mongodb instance
          Check the mongodb instance is running and accessible on env var SHARELATEX_MONGO_URL
          !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        """
        throw new Error("Can not connect to Mongodb")
        # Unreachable after the throw (kept from the original).
        return callback(err)

    # Ping redis at settings.redis.web; throws on failure, aborting grunt.
    checkRedisConnect: (callback = (error) ->) ->
      grunt.log.write "Checking can connect to redis\n"
      rclient = require("redis").createClient(settings.redis.web)
      rclient.ping (err, res) ->
        if !err?
          grunt.log.write "OK."
        else
          throw new Error("Can not connect to redis")
        return callback()
      errorHandler = _.once (err)->
        err = "Can not connect to redis"
        grunt.log.error "FAIL."
        grunt.log.errorlns """
          !!!!!!!!!!!!!! REDIS ERROR !!!!!!!!!!!!!!
          ShareLaTeX can not talk to the redis instance
          Check the redis instance is running and accessible on env var SHARELATEX_REDIS_HOST
          !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        """
        throw new Error("Can not connect to redis")
        # Unreachable after the throw (kept from the original).
        return callback(err)
      rclient.on 'error', errorHandler

355
Gruntfile.js Normal file
View file

@ -0,0 +1,355 @@
/* eslint-disable
camelcase,
no-return-assign,
no-unreachable,
no-unused-vars,
node/handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__, or convert again using --optional-chaining
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const fs = require('fs')
const { spawn } = require('child_process')
const { exec } = require('child_process')
const rimraf = require('rimraf')
const Path = require('path')
const semver = require('semver')
const knox = require('knox')
const crypto = require('crypto')
const async = require('async')
const settings = require('@overleaf/settings')
const _ = require('underscore')
const SERVICES = require('./config/services')
// Grunt entry point for the ShareLaTeX meta-repository.  Registers
// install/update/run/check tasks for every service listed in
// ./config/services (SERVICES), plus sanity checks for make, mongo and
// redis.  NOTE: bulk-decaffeinate output — `service` is deliberately a
// single function-scoped binding shared by all the loops below.
module.exports = function (grunt) {
  let Helpers
  let service
  // Third-party grunt plugins plus the local ./tasks directory.
  grunt.loadNpmTasks('grunt-bunyan')
  grunt.loadNpmTasks('grunt-execute')
  grunt.loadNpmTasks('grunt-available-tasks')
  grunt.loadNpmTasks('grunt-concurrent')
  grunt.loadNpmTasks('grunt-shell')
  grunt.task.loadTasks('./tasks')
  // Build one grunt-execute target per service, running its app.js.
  const execute = {}
  for (service of Array.from(SERVICES)) {
    execute[service.name] = { src: `${service.name}/app.js` }
  }
  grunt.initConfig({
    execute,
    // `concurrent:all` runs every service's `run:<name>` task in parallel.
    concurrent: {
      all: {
        tasks: (() => {
          const result = []
          for (service of Array.from(SERVICES)) {
            result.push(`run:${service.name}`)
          }
          return result
        })(),
        options: {
          // Allow all services to run at once.
          limit: SERVICES.length,
          logConcurrentOutput: true,
        },
      },
    },
    // Task groups shown by the `help` task (grunt-available-tasks).
    availabletasks: {
      tasks: {
        options: {
          filter: 'exclude',
          tasks: ['concurrent', 'execute', 'bunyan', 'availabletasks'],
          groups: {
            'Run tasks': ['run', 'run:all', 'default'].concat(
              (() => {
                const result1 = []
                for (service of Array.from(SERVICES)) {
                  result1.push(`run:${service.name}`)
                }
                return result1
              })()
            ),
            Misc: ['help'],
            'Install tasks': (() => {
              const result2 = []
              for (service of Array.from(SERVICES)) {
                result2.push(`install:${service.name}`)
              }
              return result2
            })().concat(['install:all', 'install']),
            'Update tasks': (() => {
              const result3 = []
              for (service of Array.from(SERVICES)) {
                result3.push(`update:${service.name}`)
              }
              return result3
            })().concat(['update:all', 'update']),
            Checks: [
              'check',
              'check:redis',
              'check:latexmk',
              'check:s3',
              'check:make',
              'check:mongo',
            ],
          },
        },
      },
    },
  })
  // One `install:<name>` task per service.  The IIFE captures the current
  // `service` value — without it every task callback would see the loop's
  // final value of the shared `service` binding.
  for (service of Array.from(SERVICES)) {
    ;(service =>
      grunt.registerTask(
        `install:${service.name}`,
        `Download and set up the ${service.name} service`,
        function () {
          const done = this.async()
          return Helpers.installService(service, done)
        }
      ))(service)
  }
  // `install:all` = every install:<name> followed by the postinstall notes.
  grunt.registerTask(
    'install:all',
    'Download and set up all ShareLaTeX services',
    []
      .concat(
        (() => {
          const result4 = []
          for (service of Array.from(SERVICES)) {
            result4.push(`install:${service.name}`)
          }
          return result4
        })()
      )
      .concat(['postinstall'])
  )
  grunt.registerTask('install', 'install:all')
  grunt.registerTask('postinstall', 'Explain postinstall steps', function () {
    return Helpers.postinstallMessage(this.async())
  })
  // `update:all` checks for make first, then updates each checkout.
  grunt.registerTask(
    'update:all',
    'Checkout and update all ShareLaTeX services',
    ['check:make'].concat(
      (() => {
        const result5 = []
        for (service of Array.from(SERVICES)) {
          result5.push(`update:${service.name}`)
        }
        return result5
      })()
    )
  )
  grunt.registerTask('update', 'update:all')
  grunt.registerTask('run', 'Run all of the sharelatex processes', [
    'concurrent:all',
  ])
  grunt.registerTask('run:all', 'run')
  grunt.registerTask('help', 'Display this help list', 'availabletasks')
  grunt.registerTask('default', 'run')
  // Environment sanity checks.
  grunt.registerTask(
    'check:redis',
    'Check that redis is installed and running',
    function () {
      return Helpers.checkRedisConnect(this.async())
    }
  )
  grunt.registerTask(
    'check:mongo',
    'Check that mongo is installed',
    function () {
      return Helpers.checkMongoConnect(this.async())
    }
  )
  grunt.registerTask(
    'check',
    'Check that you have the required dependencies installed',
    ['check:redis', 'check:mongo', 'check:make']
  )
  grunt.registerTask('check:make', 'Check that make is installed', function () {
    return Helpers.checkMake(this.async())
  })
  // Implementation helpers used by the task callbacks above.  Assigned last
  // (the decaffeinated layout); the callbacks only dereference Helpers at
  // run time, after this assignment has happened.
  return (Helpers = {
    // Clone (and check out) a service repo, then invoke callback(error?).
    installService(service, callback) {
      if (callback == null) {
        callback = function (error) {}
      }
      console.log(`Installing ${service.name}`)
      return Helpers.cloneGitRepo(service, function (error) {
        if (error != null) {
          return callback(error)
        } else {
          return callback()
        }
      })
    },
    // `git clone` the service into ./<name> unless that directory already
    // exists, then check out the pinned version.
    cloneGitRepo(service, callback) {
      if (callback == null) {
        callback = function (error) {}
      }
      const repo_src = service.repo
      const dir = service.name
      if (!fs.existsSync(dir)) {
        const proc = spawn('git', ['clone', repo_src, dir], {
          stdio: 'inherit',
        })
        // NOTE(review): the child's exit code is ignored — a failed clone
        // still proceeds to checkoutVersion.
        return proc.on('close', () =>
          Helpers.checkoutVersion(service, callback)
        )
      } else {
        console.log(`${dir} already installed, skipping.`)
        return callback()
      }
    },
    // `git checkout <service.version>` inside the service's directory.
    checkoutVersion(service, callback) {
      if (callback == null) {
        callback = function (error) {}
      }
      const dir = service.name
      grunt.log.write(`checking out ${service.name} ${service.version}`)
      const proc = spawn('git', ['checkout', service.version], {
        stdio: 'inherit',
        cwd: dir,
      })
      return proc.on('close', () => callback())
    },
    // Print the "what to do next" message shown after install:all.
    postinstallMessage(callback) {
      if (callback == null) {
        callback = function (error) {}
      }
      grunt.log.write(`\
Services cloned:
${(() => {
  const result6 = []
  for (service of Array.from(SERVICES)) {
    result6.push(service.name)
  }
  return result6
})()}
To install services run:
$ source bin/install-services
This will install the required node versions and run \`npm install\` for each service.
See https://github.com/sharelatex/sharelatex/pull/549 for more info.\
`)
      return callback()
    },
    // Verify `make` exists by running `make --version`.
    checkMake(callback) {
      if (callback == null) {
        callback = function (error) {}
      }
      grunt.log.write('Checking make is installed... ')
      return exec('make --version', function (error, stdout, stderr) {
        if (error != null && error.message.match('not found')) {
          grunt.log.error('FAIL.')
          grunt.log.errorlns(`\
Either make is not installed or is not in your path.
On Ubuntu you can install make with:
sudo apt-get install build-essential
\
`)
          return callback(error)
        } else if (error != null) {
          return callback(error)
        } else {
          grunt.log.write('OK.')
          return callback()
        }
      })
    },
    // Ping mongo at settings.mongo.url.  On connection error this logs and
    // throws, aborting the grunt run.
    checkMongoConnect(callback) {
      if (callback == null) {
        callback = function (error) {}
      }
      grunt.log.write('Checking can connect to mongo')
      const mongojs = require('mongojs')
      const db = mongojs(settings.mongo.url, ['tags'])
      db.runCommand({ ping: 1 }, function (err, res) {
        if (!err && res.ok) {
          grunt.log.write('OK.')
        }
        return callback()
      })
      return db.on('error', function (err) {
        err = 'Can not connect to mongodb'
        grunt.log.error('FAIL.')
        grunt.log.errorlns(`\
!!!!!!!!!!!!!! MONGO ERROR !!!!!!!!!!!!!!
ShareLaTeX can not talk to the mongodb instance
Check the mongodb instance is running and accessible on env var SHARELATEX_MONGO_URL
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\
`)
        throw new Error('Can not connect to Mongodb')
        // Unreachable after the throw (kept from the decaffeinated source).
        return callback(err)
      })
    },
    // Ping redis at settings.redis.web; throws on failure, aborting grunt.
    checkRedisConnect(callback) {
      if (callback == null) {
        callback = function (error) {}
      }
      grunt.log.write('Checking can connect to redis\n')
      const rclient = require('redis').createClient(settings.redis.web)
      rclient.ping(function (err, res) {
        if (err == null) {
          grunt.log.write('OK.')
        } else {
          throw new Error('Can not connect to redis')
        }
        return callback()
      })
      const errorHandler = _.once(function (err) {
        err = 'Can not connect to redis'
        grunt.log.error('FAIL.')
        grunt.log.errorlns(`\
!!!!!!!!!!!!!! REDIS ERROR !!!!!!!!!!!!!!
ShareLaTeX can not talk to the redis instance
Check the redis instance is running and accessible on env var SHARELATEX_REDIS_HOST
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\
`)
        throw new Error('Can not connect to redis')
        // Unreachable after the throw (kept from the decaffeinated source).
        return callback(err)
      })
      return rclient.on('error', errorHandler)
    },
  })
}
// Decaffeinate helper: apply `transform` to `value` only when the value is
// present (neither null nor undefined); otherwise yield undefined.
function __guard__(value, transform) {
  if (value === undefined || value === null) {
    return undefined
  }
  return transform(value)
}

View file

@ -2,19 +2,18 @@
set -e set -e
grep 'name:' config/services.js | \ node ./config/services.js | \
sed 's/.*name: "\(.*\)",/\1/' | \
while read service while read service
do do
pushd $service pushd $service
echo "Compiling Service $service" echo "Compiling Service $service"
case $service in case $service in
web) web)
npm run webpack:production npm run webpack:production
;; ;;
*) *)
echo "$service doesn't require a compilation" echo "$service doesn't require a compilation"
;; ;;
esac esac
popd popd
done done

View file

@ -2,8 +2,7 @@
set -e set -e
grep 'name:' config/services.js | \ node ./config/services.js | \
sed 's/.*name: "\(.*\)",/\1/' | \
while read service while read service
do do
pushd $service pushd $service

2441
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -2,10 +2,16 @@
"name": "sharelatex", "name": "sharelatex",
"version": "0.0.1", "version": "0.0.1",
"description": "An online collaborative LaTeX editor", "description": "An online collaborative LaTeX editor",
"scripts": {
"lint": "eslint --max-warnings 0 --format unix .",
"lint:fix": "eslint --fix .",
"format": "prettier --list-different $PWD/'**/*.js'",
"format:fix": "prettier --write $PWD/'**/*.js'"
},
"dependencies": { "dependencies": {
"@overleaf/settings": "^2.1.1",
"async": "^0.9.0", "async": "^0.9.0",
"bson": "^1.0.4", "bson": "^1.0.4",
"coffee-script": "^1.11.1",
"east": "0.5.7", "east": "0.5.7",
"east-mongo": "0.3.3", "east-mongo": "0.3.3",
"grunt-shell": "^1.1.1", "grunt-shell": "^1.1.1",
@ -15,17 +21,26 @@
"mongojs": "2.4.0", "mongojs": "2.4.0",
"redis": "^2.6.2", "redis": "^2.6.2",
"rimraf": "~2.2.6", "rimraf": "~2.2.6",
"settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git",
"underscore": "^1.7.0" "underscore": "^1.7.0"
}, },
"devDependencies": { "devDependencies": {
"grunt": "~0.4.2", "grunt": "~0.4.2",
"bunyan": "~0.22.1", "bunyan": "~0.22.1",
"eslint": "^7.21.0",
"eslint-config-prettier": "^8.1.0",
"eslint-config-standard": "^16.0.2",
"eslint-plugin-chai-expect": "^2.2.0",
"eslint-plugin-chai-friendly": "^0.6.0",
"eslint-plugin-import": "^2.22.1",
"eslint-plugin-mocha": "^8.0.0",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^4.2.1",
"eslint-plugin-standard": "^5.0.0",
"grunt-bunyan": "~0.5.0", "grunt-bunyan": "~0.5.0",
"grunt-execute": "~0.1.5", "grunt-execute": "~0.1.5",
"grunt-available-tasks": "~0.4.1", "grunt-available-tasks": "~0.4.1",
"grunt-concurrent": "~0.4.3", "grunt-concurrent": "~0.4.3",
"grunt-contrib-coffee": "~0.10.1", "prettier": "^2.2.1",
"semver": "~2.2.1", "semver": "~2.2.1",
"knox": "~0.8.9" "knox": "~0.8.9"
} }

View file

@ -1,5 +1,4 @@
#!/bin/bash #!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS="" NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then if [ "$DEBUG_NODE" == "true" ]; then

View file

@ -1,5 +1,4 @@
#!/bin/bash #!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS="" NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then if [ "$DEBUG_NODE" == "true" ]; then

View file

@ -1,5 +1,4 @@
#!/bin/bash #!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS="" NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then if [ "$DEBUG_NODE" == "true" ]; then

View file

@ -1,5 +1,4 @@
#!/bin/bash #!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS="" NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then if [ "$DEBUG_NODE" == "true" ]; then

View file

@ -1,5 +1,4 @@
#!/bin/bash #!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS="" NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then if [ "$DEBUG_NODE" == "true" ]; then

View file

@ -1,3 +1,2 @@
#!/bin/bash #!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
exec /sbin/setuser www-data /usr/bin/node /var/www/sharelatex/filestore/app.js >> /var/log/sharelatex/filestore.log 2>&1 exec /sbin/setuser www-data /usr/bin/node /var/www/sharelatex/filestore/app.js >> /var/log/sharelatex/filestore.log 2>&1

View file

@ -1,5 +1,4 @@
#!/bin/bash #!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS="" NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then if [ "$DEBUG_NODE" == "true" ]; then

View file

@ -1,3 +1,2 @@
#!/bin/bash #!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
exec /sbin/setuser www-data /usr/bin/node /var/www/sharelatex/real-time/app.js >> /var/log/sharelatex/real-time.log 2>&1 exec /sbin/setuser www-data /usr/bin/node /var/www/sharelatex/real-time/app.js >> /var/log/sharelatex/real-time.log 2>&1

View file

@ -1,5 +1,4 @@
#!/bin/bash #!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS="" NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then if [ "$DEBUG_NODE" == "true" ]; then

View file

@ -1,5 +1,4 @@
#!/bin/bash #!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS="" NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then if [ "$DEBUG_NODE" == "true" ]; then

View file

@ -1,5 +1,4 @@
#!/bin/bash #!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
NODE_PARAMS="" NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then if [ "$DEBUG_NODE" == "true" ]; then

View file

@ -1,47 +1,63 @@
module.exports = module.exports = [
{
name: 'web',
repo: 'https://github.com/sharelatex/web-sharelatex.git',
version: 'master',
},
{
name: 'real-time',
repo: 'https://github.com/sharelatex/real-time-sharelatex.git',
version: 'master',
},
{
name: 'document-updater',
repo: 'https://github.com/sharelatex/document-updater-sharelatex.git',
version: 'master',
},
{
name: 'clsi',
repo: 'https://github.com/sharelatex/clsi-sharelatex.git',
version: 'master',
},
{
name: 'filestore',
repo: 'https://github.com/sharelatex/filestore-sharelatex.git',
version: 'master',
},
{
name: 'track-changes',
repo: 'https://github.com/sharelatex/track-changes-sharelatex.git',
version: 'master',
},
{
name: 'docstore',
repo: 'https://github.com/sharelatex/docstore-sharelatex.git',
version: 'master',
},
{
name: 'chat',
repo: 'https://github.com/sharelatex/chat-sharelatex.git',
version: 'master',
},
{
name: 'spelling',
repo: 'https://github.com/sharelatex/spelling-sharelatex.git',
version: 'master',
},
{
name: 'contacts',
repo: 'https://github.com/sharelatex/contacts-sharelatex.git',
version: 'master',
},
{
name: 'notifications',
repo: 'https://github.com/sharelatex/notifications-sharelatex.git',
version: 'master',
},
]
[{ if (require.main === module) {
name: "web", for (const service of module.exports) {
repo: "https://github.com/sharelatex/web-sharelatex.git", console.log(service.name)
version: "master" }
}, { }
name: "real-time",
repo: "https://github.com/sharelatex/real-time-sharelatex.git",
version: "master"
}, {
name: "document-updater",
repo: "https://github.com/sharelatex/document-updater-sharelatex.git",
version: "master"
}, {
name: "clsi",
repo: "https://github.com/sharelatex/clsi-sharelatex.git",
version: "master"
}, {
name: "filestore",
repo: "https://github.com/sharelatex/filestore-sharelatex.git",
version: "master"
}, {
name: "track-changes",
repo: "https://github.com/sharelatex/track-changes-sharelatex.git",
version: "master"
}, {
name: "docstore",
repo: "https://github.com/sharelatex/docstore-sharelatex.git",
version: "master"
}, {
name: "chat",
repo: "https://github.com/sharelatex/chat-sharelatex.git",
version: "master"
}, {
name: "spelling",
repo: "https://github.com/sharelatex/spelling-sharelatex.git",
version: "master"
}, {
name: "contacts",
repo: "https://github.com/sharelatex/contacts-sharelatex.git",
version: "master"
}, {
name: "notifications",
repo: "https://github.com/sharelatex/notifications-sharelatex.git",
version: "master"
}]

View file

@ -1,576 +0,0 @@
Path = require('path')
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUser = "sharelatex"
httpAuthPass = process.env["WEB_API_PASSWORD"]
httpAuthUsers = {}
httpAuthUsers[httpAuthUser] = httpAuthPass
parse = (option)->
if option?
try
opt = JSON.parse(option)
return opt
catch err
throw new Error("problem parsing #{option}, invalid JSON")
parseIntOrFail = (value)->
parsedValue = parseInt(value, 10)
if isNaN(parsedValue)
throw new Error("'#{value}' is an invalid integer")
return parsedValue
DATA_DIR = '/var/lib/sharelatex/data'
TMP_DIR = '/var/lib/sharelatex/tmp'
settings =
clsi:
optimiseInDocker: process.env['OPTIMISE_PDF'] == 'true'
brandPrefix: ""
allowAnonymousReadAndWriteSharing:
process.env['SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING'] == 'true'
# Databases
# ---------
# ShareLaTeX's main persistent data store is MongoDB (http://www.mongodb.org/)
# Documentation about the URL connection string format can be found at:
#
# http://docs.mongodb.org/manual/reference/connection-string/
#
# The following works out of the box with Mongo's default settings:
mongo:
url : process.env["SHARELATEX_MONGO_URL"] or 'mongodb://dockerhost/sharelatex'
# Redis is used in ShareLaTeX for high volume queries, like real-time
# editing, and session management.
#
# The following config will work with Redis's default settings:
redis:
web: redisConfig =
host: process.env["SHARELATEX_REDIS_HOST"] or "dockerhost"
port: process.env["SHARELATEX_REDIS_PORT"] or "6379"
password: process.env["SHARELATEX_REDIS_PASS"] or undefined
key_schema:
# document-updater
blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
docLines: ({doc_id}) -> "doclines:#{doc_id}"
docOps: ({doc_id}) -> "DocOps:#{doc_id}"
docVersion: ({doc_id}) -> "DocVersion:#{doc_id}"
docHash: ({doc_id}) -> "DocHash:#{doc_id}"
projectKey: ({doc_id}) -> "ProjectId:#{doc_id}"
docsInProject: ({project_id}) -> "DocsIn:#{project_id}"
ranges: ({doc_id}) -> "Ranges:#{doc_id}"
# document-updater:realtime
pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}"
# document-updater:history
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
# document-updater:lock
blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
# track-changes:lock
historyLock: ({doc_id}) -> "HistoryLock:#{doc_id}"
historyIndexLock: ({project_id}) -> "HistoryIndexLock:#{project_id}"
# track-changes:history
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
# realtime
clientsInProject: ({project_id}) -> "clients_in_project:#{project_id}"
connectedUser: ({project_id, client_id})-> "connected_user:#{project_id}:#{client_id}"
fairy: redisConfig
# track-changes and document-updater
realtime: redisConfig
documentupdater: redisConfig
lock: redisConfig
history: redisConfig
websessions: redisConfig
api: redisConfig
pubsub: redisConfig
project_history: redisConfig
# The compile server (the clsi) uses a SQL database to cache files and
# meta-data. sqlite is the default, and the load is low enough that this will
# be fine in production (we use sqlite at sharelatex.com).
#
# If you want to configure a different database, see the Sequelize documentation
# for available options:
#
# https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage
#
mysql:
clsi:
database: "clsi"
username: "clsi"
password: ""
dialect: "sqlite"
storage: Path.join(DATA_DIR, "db.sqlite")
# File storage
# ------------
# ShareLaTeX can store binary files like images either locally or in Amazon
# S3. The default is locally:
filestore:
backend: "fs"
stores:
user_files: Path.join(DATA_DIR, "user_files")
template_files: Path.join(DATA_DIR, "template_files")
# To use Amazon S3 as a storage backend, comment out the above config, and
# uncomment the following, filling in your key, secret, and bucket name:
#
# filestore:
# backend: "s3"
# stores:
# user_files: "BUCKET_NAME"
# s3:
# key: "AWS_KEY"
# secret: "AWS_SECRET"
#
trackchanges:
continueOnError: true
# Local disk caching
# ------------------
path:
# If we ever need to write something to disk (e.g. incoming requests
# that need processing but may be too big for memory), then write
# them to disk here:
dumpFolder: Path.join(TMP_DIR, "dumpFolder")
# Where to write uploads before they are processed
uploadFolder: Path.join(TMP_DIR, "uploads")
# Where to write the project to disk before running LaTeX on it
compilesDir: Path.join(DATA_DIR, "compiles")
# Where to cache downloaded URLs for the CLSI
clsiCacheDir: Path.join(DATA_DIR, "cache")
# Where to write the output files to disk after running LaTeX
outputDir: Path.join(DATA_DIR, "output")
# Server Config
# -------------
# Where your instance of ShareLaTeX can be found publicly. This is used
# when emails are sent out and in generated links:
siteUrl: siteUrl = process.env["SHARELATEX_SITE_URL"] or 'http://localhost'
# The name this is used to describe your ShareLaTeX Installation
appName: process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX (Community Edition)"
restrictInvitesToExistingAccounts: process.env["SHARELATEX_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS"] == 'true'
nav:
title: process.env["SHARELATEX_NAV_TITLE"] or process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX Community Edition"
# The email address which users will be directed to as the main point of
# contact for this installation of ShareLaTeX.
adminEmail: process.env["SHARELATEX_ADMIN_EMAIL"] or "placeholder@example.com"
# If provided, a sessionSecret is used to sign cookies so that they cannot be
# spoofed. This is recommended.
security:
sessionSecret: process.env["SHARELATEX_SESSION_SECRET"] or process.env["CRYPTO_RANDOM"]
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUsers: httpAuthUsers
# Should javascript assets be served minified or not.
useMinifiedJs: true
# Should static assets be sent with a header to tell the browser to cache
# them. This should be false in development where changes are being made,
# but should be set to true in production.
cacheStaticAssets: true
# If you are running ShareLaTeX over https, set this to true to send the
# cookie with a secure flag (recommended).
secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]?
# If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
# then set this to true to allow it to correctly detect the forwarded IP
# address and http/https protocol information.
behindProxy: process.env["SHARELATEX_BEHIND_PROXY"] or false
i18n:
subdomainLang:
www: {lngCode:process.env["SHARELATEX_SITE_LANGUAGE"] or "en", url: siteUrl}
defaultLng: process.env["SHARELATEX_SITE_LANGUAGE"] or "en"
currentImageName: process.env["TEX_LIVE_DOCKER_IMAGE"]
apis:
web:
url: "http://localhost:3000"
user: httpAuthUser
pass: httpAuthPass
project_history:
enabled: false
references:{}
notifications:undefined
defaultFeatures:
collaborators: -1
dropbox: true
versioning: true
compileTimeout: parseIntOrFail(process.env["COMPILE_TIMEOUT"] or 180)
compileGroup: "standard"
trackChanges: true
templates: true
references: true
## OPTIONAL CONFIGURABLE SETTINGS
if process.env["SHARELATEX_LEFT_FOOTER"]?
try
settings.nav.left_footer = JSON.parse(process.env["SHARELATEX_LEFT_FOOTER"])
catch e
console.error("could not parse SHARELATEX_LEFT_FOOTER, not valid JSON")
if process.env["SHARELATEX_RIGHT_FOOTER"]?
settings.nav.right_footer = process.env["SHARELATEX_RIGHT_FOOTER"]
try
settings.nav.right_footer = JSON.parse(process.env["SHARELATEX_RIGHT_FOOTER"])
catch e
console.error("could not parse SHARELATEX_RIGHT_FOOTER, not valid JSON")
if process.env["SHARELATEX_HEADER_IMAGE_URL"]?
settings.nav.custom_logo = process.env["SHARELATEX_HEADER_IMAGE_URL"]
if process.env["SHARELATEX_HEADER_NAV_LINKS"]?
console.error """
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# WARNING: SHARELATEX_HEADER_NAV_LINKS is no longer supported
# See https://github.com/sharelatex/sharelatex/wiki/Configuring-Headers,-Footers-&-Logo
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
"""
if process.env["SHARELATEX_HEADER_EXTRAS"]?
try
settings.nav.header_extras = JSON.parse(process.env["SHARELATEX_HEADER_EXTRAS"])
catch e
console.error("could not parse SHARELATEX_HEADER_EXTRAS, not valid JSON")
# Sending Email
# -------------
#
# You must configure a mail server to be able to send invite emails from
# ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer
# documentation for available options:
#
# http://www.nodemailer.com/docs/transports
if process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]?
settings.email =
fromAddress: process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]
replyTo: process.env["SHARELATEX_EMAIL_REPLY_TO"] or ""
driver: process.env["SHARELATEX_EMAIL_DRIVER"]
parameters:
#AWS Creds
AWSAccessKeyID: process.env["SHARELATEX_EMAIL_AWS_SES_ACCESS_KEY_ID"]
AWSSecretKey: process.env["SHARELATEX_EMAIL_AWS_SES_SECRET_KEY"]
#SMTP Creds
host: process.env["SHARELATEX_EMAIL_SMTP_HOST"]
port: process.env["SHARELATEX_EMAIL_SMTP_PORT"],
secure: parse(process.env["SHARELATEX_EMAIL_SMTP_SECURE"])
ignoreTLS: parse(process.env["SHARELATEX_EMAIL_SMTP_IGNORE_TLS"])
name: process.env["SHARELATEX_EMAIL_SMTP_NAME"]
logger: process.env["SHARELATEX_EMAIL_SMTP_LOGGER"] == 'true'
textEncoding: process.env["SHARELATEX_EMAIL_TEXT_ENCODING"]
template:
customFooter: process.env["SHARELATEX_CUSTOM_EMAIL_FOOTER"]
if process.env["SHARELATEX_EMAIL_AWS_SES_REGION"]?
settings.email.parameters.region = process.env["SHARELATEX_EMAIL_AWS_SES_REGION"]
if process.env["SHARELATEX_EMAIL_SMTP_USER"]? or process.env["SHARELATEX_EMAIL_SMTP_PASS"]?
settings.email.parameters.auth =
user: process.env["SHARELATEX_EMAIL_SMTP_USER"]
pass: process.env["SHARELATEX_EMAIL_SMTP_PASS"]
if process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"]?
settings.email.parameters.tls =
rejectUnauthorized: parse(process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"])
# i18n
if process.env["SHARELATEX_LANG_DOMAIN_MAPPING"]?
settings.i18n.subdomainLang = parse(process.env["SHARELATEX_LANG_DOMAIN_MAPPING"])
# Password Settings
# -----------
# These restrict the passwords users can use when registering
# opts are from http://antelle.github.io/passfield
if process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"]
settings.passwordStrengthOptions =
pattern: process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or "aA$3"
length: {min:process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or 8, max: process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"] or 150}
#######################
# ShareLaTeX Server Pro
#######################
if parse(process.env["SHARELATEX_IS_SERVER_PRO"]) == true
settings.bypassPercentageRollouts = true
settings.apis.references =
url: "http://localhost:3040"
# LDAP - SERVER PRO ONLY
# ----------
if process.env["SHARELATEX_LDAP_HOST"]
console.error """
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# WARNING: The LDAP configuration format has changed in version 0.5.1
# See https://github.com/sharelatex/sharelatex/wiki/Server-Pro:-LDAP-Config
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
"""
if process.env["SHARELATEX_LDAP_URL"]
settings.externalAuth = true
settings.ldap =
emailAtt: process.env["SHARELATEX_LDAP_EMAIL_ATT"]
nameAtt: process.env["SHARELATEX_LDAP_NAME_ATT"]
lastNameAtt: process.env["SHARELATEX_LDAP_LAST_NAME_ATT"]
updateUserDetailsOnLogin: process.env["SHARELATEX_LDAP_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true'
placeholder: process.env["SHARELATEX_LDAP_PLACEHOLDER"]
server:
url: process.env["SHARELATEX_LDAP_URL"]
bindDn: process.env["SHARELATEX_LDAP_BIND_DN"]
bindCredentials: process.env["SHARELATEX_LDAP_BIND_CREDENTIALS"]
bindProperty: process.env["SHARELATEX_LDAP_BIND_PROPERTY"]
searchBase: process.env["SHARELATEX_LDAP_SEARCH_BASE"]
searchScope: process.env["SHARELATEX_LDAP_SEARCH_SCOPE"]
searchFilter: process.env["SHARELATEX_LDAP_SEARCH_FILTER"]
searchAttributes: (
if _ldap_search_attribs = process.env["SHARELATEX_LDAP_SEARCH_ATTRIBUTES"]
try
JSON.parse(_ldap_search_attribs)
catch e
console.error "could not parse SHARELATEX_LDAP_SEARCH_ATTRIBUTES"
else
undefined
)
groupDnProperty: process.env["SHARELATEX_LDAP_GROUP_DN_PROPERTY"]
groupSearchBase: process.env["SHARELATEX_LDAP_GROUP_SEARCH_BASE"]
groupSearchScope: process.env["SHARELATEX_LDAP_GROUP_SEARCH_SCOPE"]
groupSearchFilter: process.env["SHARELATEX_LDAP_GROUP_SEARCH_FILTER"]
groupSearchAttributes: (
if _ldap_group_search_attribs = process.env["SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"]
try
JSON.parse(_ldap_group_search_attribs)
catch e
console.error "could not parse SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"
else
undefined
)
cache: process.env["SHARELATEX_LDAP_CACHE"] == 'true'
timeout: (
if _ldap_timeout = process.env["SHARELATEX_LDAP_TIMEOUT"]
try
parseIntOrFail(_ldap_timeout)
catch e
console.error "Cannot parse SHARELATEX_LDAP_TIMEOUT"
else
undefined
)
connectTimeout: (
if _ldap_connect_timeout = process.env["SHARELATEX_LDAP_CONNECT_TIMEOUT"]
try
parseIntOrFail(_ldap_connect_timeout)
catch e
console.error "Cannot parse SHARELATEX_LDAP_CONNECT_TIMEOUT"
else
undefined
)
if process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"]
try
ca = JSON.parse(process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"])
catch e
console.error "could not parse SHARELATEX_LDAP_TLS_OPTS_CA_PATH, invalid JSON"
if typeof(ca) == 'string'
ca_paths = [ca]
else if typeof(ca) == 'object' && ca?.length?
ca_paths = ca
else
console.error "problem parsing SHARELATEX_LDAP_TLS_OPTS_CA_PATH"
settings.ldap.server.tlsOptions =
rejectUnauthorized: process.env["SHARELATEX_LDAP_TLS_OPTS_REJECT_UNAUTH"] == "true"
ca:ca_paths # e.g.'/etc/ldap/ca_certs.pem'
if process.env["SHARELATEX_SAML_ENTRYPOINT"]
# NOTE: see https://github.com/node-saml/passport-saml/blob/master/README.md for docs of `server` options
settings.externalAuth = true
settings.saml =
updateUserDetailsOnLogin: process.env["SHARELATEX_SAML_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true'
identityServiceName: process.env["SHARELATEX_SAML_IDENTITY_SERVICE_NAME"]
emailField: process.env["SHARELATEX_SAML_EMAIL_FIELD"] || process.env["SHARELATEX_SAML_EMAIL_FIELD_NAME"]
firstNameField: process.env["SHARELATEX_SAML_FIRST_NAME_FIELD"]
lastNameField: process.env["SHARELATEX_SAML_LAST_NAME_FIELD"]
server:
# strings
entryPoint: process.env["SHARELATEX_SAML_ENTRYPOINT"]
callbackUrl: process.env["SHARELATEX_SAML_CALLBACK_URL"]
issuer: process.env["SHARELATEX_SAML_ISSUER"]
decryptionPvk: process.env["SHARELATEX_SAML_DECRYPTION_PVK"]
decryptionCert: process.env["SHARELATEX_SAML_DECRYPTION_CERT"]
signatureAlgorithm: process.env["SHARELATEX_SAML_SIGNATURE_ALGORITHM"]
identifierFormat: process.env["SHARELATEX_SAML_IDENTIFIER_FORMAT"]
attributeConsumingServiceIndex: process.env["SHARELATEX_SAML_ATTRIBUTE_CONSUMING_SERVICE_INDEX"]
authnContext: process.env["SHARELATEX_SAML_AUTHN_CONTEXT"]
authnRequestBinding: process.env["SHARELATEX_SAML_AUTHN_REQUEST_BINDING"]
validateInResponseTo: process.env["SHARELATEX_SAML_VALIDATE_IN_RESPONSE_TO"]
cacheProvider: process.env["SHARELATEX_SAML_CACHE_PROVIDER"]
logoutUrl: process.env["SHARELATEX_SAML_LOGOUT_URL"]
logoutCallbackUrl: process.env["SHARELATEX_SAML_LOGOUT_CALLBACK_URL"]
disableRequestedAuthnContext: process.env["SHARELATEX_SAML_DISABLE_REQUESTED_AUTHN_CONTEXT"] == 'true'
forceAuthn: process.env["SHARELATEX_SAML_FORCE_AUTHN"] == 'true'
skipRequestCompression: process.env["SHARELATEX_SAML_SKIP_REQUEST_COMPRESSION"] == 'true'
acceptedClockSkewMs: (
if _saml_skew = process.env["SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"]
try
parseIntOrFail(_saml_skew)
catch e
console.error "Cannot parse SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"
else
undefined
)
requestIdExpirationPeriodMs: (
if _saml_expiration = process.env["SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"]
try
parseIntOrFail(_saml_expiration)
catch e
console.error "Cannot parse SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"
else
undefined
)
additionalParams: (
if _saml_additionalParams = process.env["SHARELATEX_SAML_ADDITIONAL_PARAMS"]
try
JSON.parse(_saml_additionalParams)
catch e
console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_PARAMS"
else
undefined
)
additionalAuthorizeParams: (
if _saml_additionalAuthorizeParams = process.env["SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"]
try
JSON.parse(_saml_additionalAuthorizeParams )
catch e
console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"
else
undefined
)
additionalLogoutParams: (
if _saml_additionalLogoutParams = process.env["SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"]
try
JSON.parse(_saml_additionalLogoutParams )
catch e
console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"
else
undefined
)
# SHARELATEX_SAML_CERT cannot be empty
# https://github.com/node-saml/passport-saml/commit/f6b1c885c0717f1083c664345556b535f217c102
if process.env["SHARELATEX_SAML_CERT"]
settings.saml.server.cert = process.env["SHARELATEX_SAML_CERT"]
settings.saml.server.privateCert = process.env["SHARELATEX_SAML_PRIVATE_CERT"]
# Compiler
# --------
if process.env["SANDBOXED_COMPILES"] == "true"
settings.clsi =
dockerRunner: true
docker:
image: process.env["TEX_LIVE_DOCKER_IMAGE"]
env:
HOME: "/tmp"
PATH: process.env["COMPILER_PATH"] or "/usr/local/texlive/2015/bin/x86_64-linux:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
user: "www-data"
if !settings.path?
settings.path = {}
settings.path.synctexBaseDir = () -> "/compile"
if process.env['SANDBOXED_COMPILES_SIBLING_CONTAINERS'] == 'true'
console.log("Using sibling containers for sandboxed compiles")
if process.env['SANDBOXED_COMPILES_HOST_DIR']
settings.path.sandboxedCompilesHostDir = process.env['SANDBOXED_COMPILES_HOST_DIR']
else
console.error('Sibling containers, but SANDBOXED_COMPILES_HOST_DIR not set')
# Templates
# ---------
if process.env["SHARELATEX_TEMPLATES_USER_ID"]
settings.templates =
mountPointUrl: "/templates"
user_id: process.env["SHARELATEX_TEMPLATES_USER_ID"]
settings.templateLinks = parse(process.env["SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS"])
# /Learn
# -------
if process.env["SHARELATEX_PROXY_LEARN"]?
settings.proxyLearn = parse(process.env["SHARELATEX_PROXY_LEARN"])
# /References
# -----------
if process.env["SHARELATEX_ELASTICSEARCH_URL"]?
settings.references.elasticsearch =
host: process.env["SHARELATEX_ELASTICSEARCH_URL"]
# TeX Live Images
# -----------
if process.env["ALL_TEX_LIVE_DOCKER_IMAGES"]?
allTexLiveDockerImages = process.env["ALL_TEX_LIVE_DOCKER_IMAGES"].split(',')
if process.env["ALL_TEX_LIVE_DOCKER_IMAGE_NAMES"]?
allTexLiveDockerImageNames = process.env["ALL_TEX_LIVE_DOCKER_IMAGE_NAMES"].split(',')
if allTexLiveDockerImages?
settings.allowedImageNames = []
for fullImageName, index in allTexLiveDockerImages
imageName = Path.basename(fullImageName)
imageDesc = if allTexLiveDockerImageNames? then allTexLiveDockerImageNames[index] else imageName
settings.allowedImageNames.push({ imageName, imageDesc })
# With lots of incoming and outgoing HTTP connections to different services,
# sometimes long running, it is a good idea to increase the default number
# of sockets that Node will hold open.
http = require('http')
http.globalAgent.maxSockets = 300
https = require('https')
https.globalAgent.maxSockets = 300
module.exports = settings

778
settings.js Normal file
View file

@ -0,0 +1,778 @@
/* eslint-disable
camelcase,
no-cond-assign,
no-dupe-keys,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
// Hoisted bindings that are assigned further down the file. `e` is kept
// only because several legacy catch blocks may still write to it.
let allTexLiveDockerImageNames, allTexLiveDockerImages, redisConfig, siteUrl
let e
const Path = require('path')

// These credentials are used for authenticating api requests
// between services that may need to go over public channels
const httpAuthUser = 'sharelatex'
const httpAuthPass = process.env.WEB_API_PASSWORD
const httpAuthUsers = { [httpAuthUser]: httpAuthPass }
// Parse an environment variable's value as JSON.
// Returns undefined when the value is unset (null/undefined); throws when it
// is set but not valid JSON, so configuration mistakes fail loudly at boot.
const parse = function (option) {
  if (option == null) {
    return undefined
  }
  try {
    return JSON.parse(option)
  } catch (err) {
    throw new Error(`problem parsing ${option}, invalid JSON`)
  }
}
// Parse `value` as a base-10 integer. Throws (rather than silently yielding
// NaN) when the value is not numeric, so bad config aborts startup.
const parseIntOrFail = function (value) {
  const parsed = parseInt(value, 10)
  if (Number.isNaN(parsed)) {
    throw new Error(`'${value}' is an invalid integer`)
  }
  return parsed
}

// Fixed locations for persistent data and scratch space inside the container.
const DATA_DIR = '/var/lib/sharelatex/data'
const TMP_DIR = '/var/lib/sharelatex/tmp'
const settings = {
  clsi: {
    optimiseInDocker: process.env.OPTIMISE_PDF === 'true',
  },

  brandPrefix: '',

  allowAnonymousReadAndWriteSharing:
    process.env.SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING === 'true',

  // Databases
  // ---------

  // ShareLaTeX's main persistent data store is MongoDB (http://www.mongodb.org/)
  // Documentation about the URL connection string format can be found at:
  //
  //    http://docs.mongodb.org/manual/reference/connection-string/
  //
  // The following works out of the box with Mongo's default settings:
  mongo: {
    url: process.env.SHARELATEX_MONGO_URL || 'mongodb://dockerhost/sharelatex',
  },

  // Redis is used in ShareLaTeX for high volume queries, like real-time
  // editing, and session management.
  //
  // The following config will work with Redis's default settings:
  redis: {
    web: (redisConfig = {
      host: process.env.SHARELATEX_REDIS_HOST || 'dockerhost',
      port: process.env.SHARELATEX_REDIS_PORT || '6379',
      password: process.env.SHARELATEX_REDIS_PASS || undefined,
      key_schema: {
        // document-updater. NOTE: the decaffeinated file declared
        // `blockingKey`, `uncompressedHistoryOps` and `docsWithHistoryOps`
        // twice each (once per consuming service) with identical bodies;
        // the duplicates have been removed — each key serves all of its
        // consumers (document-updater, document-updater:lock,
        // track-changes:history).
        blockingKey({ doc_id }) {
          return `Blocking:${doc_id}`
        },
        docLines({ doc_id }) {
          return `doclines:${doc_id}`
        },
        docOps({ doc_id }) {
          return `DocOps:${doc_id}`
        },
        docVersion({ doc_id }) {
          return `DocVersion:${doc_id}`
        },
        docHash({ doc_id }) {
          return `DocHash:${doc_id}`
        },
        projectKey({ doc_id }) {
          return `ProjectId:${doc_id}`
        },
        docsInProject({ project_id }) {
          return `DocsIn:${project_id}`
        },
        ranges({ doc_id }) {
          return `Ranges:${doc_id}`
        },
        // document-updater:realtime
        pendingUpdates({ doc_id }) {
          return `PendingUpdates:${doc_id}`
        },
        // document-updater:history and track-changes:history
        uncompressedHistoryOps({ doc_id }) {
          return `UncompressedHistoryOps:${doc_id}`
        },
        docsWithHistoryOps({ project_id }) {
          return `DocsWithHistoryOps:${project_id}`
        },
        // track-changes:lock
        historyLock({ doc_id }) {
          return `HistoryLock:${doc_id}`
        },
        historyIndexLock({ project_id }) {
          return `HistoryIndexLock:${project_id}`
        },
        // realtime
        clientsInProject({ project_id }) {
          return `clients_in_project:${project_id}`
        },
        connectedUser({ project_id, client_id }) {
          return `connected_user:${project_id}:${client_id}`
        },
      },
    }),
    fairy: redisConfig,

    // track-changes and document-updater
    realtime: redisConfig,
    documentupdater: redisConfig,
    lock: redisConfig,
    history: redisConfig,
    websessions: redisConfig,
    api: redisConfig,
    pubsub: redisConfig,
    project_history: redisConfig,
  },

  // The compile server (the clsi) uses a SQL database to cache files and
  // meta-data. sqlite is the default, and the load is low enough that this will
  // be fine in production (we use sqlite at sharelatex.com).
  //
  // If you want to configure a different database, see the Sequelize documentation
  // for available options:
  //
  //     https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage
  //
  mysql: {
    clsi: {
      database: 'clsi',
      username: 'clsi',
      password: '',
      dialect: 'sqlite',
      storage: Path.join(DATA_DIR, 'db.sqlite'),
    },
  },

  // File storage
  // ------------

  // ShareLaTeX can store binary files like images either locally or in Amazon
  // S3. The default is locally:
  filestore: {
    backend: 'fs',
    stores: {
      user_files: Path.join(DATA_DIR, 'user_files'),
      template_files: Path.join(DATA_DIR, 'template_files'),
    },
  },

  // To use Amazon S3 as a storage backend, comment out the above config, and
  // uncomment the following, filling in your key, secret, and bucket name:
  //
  // filestore:
  //     backend: "s3"
  //     stores:
  //         user_files: "BUCKET_NAME"
  //     s3:
  //         key: "AWS_KEY"
  //         secret: "AWS_SECRET"
  //
  trackchanges: {
    continueOnError: true,
  },

  // Local disk caching
  // ------------------
  path: {
    // If we ever need to write something to disk (e.g. incoming requests
    // that need processing but may be too big for memory), then write
    // them to disk here:
    dumpFolder: Path.join(TMP_DIR, 'dumpFolder'),
    // Where to write uploads before they are processed
    uploadFolder: Path.join(TMP_DIR, 'uploads'),
    // Where to write the project to disk before running LaTeX on it
    compilesDir: Path.join(DATA_DIR, 'compiles'),
    // Where to cache downloaded URLs for the CLSI
    clsiCacheDir: Path.join(DATA_DIR, 'cache'),
    // Where to write the output files to disk after running LaTeX
    outputDir: Path.join(DATA_DIR, 'output'),
  },

  // Server Config
  // -------------

  // Where your instance of ShareLaTeX can be found publicly. This is used
  // when emails are sent out and in generated links:
  siteUrl: (siteUrl = process.env.SHARELATEX_SITE_URL || 'http://localhost'),

  // The name this is used to describe your ShareLaTeX Installation
  appName: process.env.SHARELATEX_APP_NAME || 'ShareLaTeX (Community Edition)',

  restrictInvitesToExistingAccounts:
    process.env.SHARELATEX_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS === 'true',

  nav: {
    title:
      process.env.SHARELATEX_NAV_TITLE ||
      process.env.SHARELATEX_APP_NAME ||
      'ShareLaTeX Community Edition',
  },

  // The email address which users will be directed to as the main point of
  // contact for this installation of ShareLaTeX.
  adminEmail: process.env.SHARELATEX_ADMIN_EMAIL || 'placeholder@example.com',

  // If provided, a sessionSecret is used to sign cookies so that they cannot be
  // spoofed. This is recommended.
  security: {
    sessionSecret:
      process.env.SHARELATEX_SESSION_SECRET || process.env.CRYPTO_RANDOM,
  },

  // These credentials are used for authenticating api requests
  // between services that may need to go over public channels
  httpAuthUsers,

  // Should javascript assets be served minified or not.
  useMinifiedJs: true,

  // Should static assets be sent with a header to tell the browser to cache
  // them. This should be false in development where changes are being made,
  // but should be set to true in production.
  cacheStaticAssets: true,

  // If you are running ShareLaTeX over https, set this to true to send the
  // cookie with a secure flag (recommended).
  secureCookie: process.env.SHARELATEX_SECURE_COOKIE != null,

  // If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
  // then set this to true to allow it to correctly detect the forwarded IP
  // address and http/https protocol information.
  behindProxy: process.env.SHARELATEX_BEHIND_PROXY || false,

  i18n: {
    subdomainLang: {
      www: {
        lngCode: process.env.SHARELATEX_SITE_LANGUAGE || 'en',
        url: siteUrl,
      },
    },
    defaultLng: process.env.SHARELATEX_SITE_LANGUAGE || 'en',
  },

  currentImageName: process.env.TEX_LIVE_DOCKER_IMAGE,

  apis: {
    web: {
      url: 'http://localhost:3000',
      user: httpAuthUser,
      pass: httpAuthPass,
    },
    project_history: {
      enabled: false,
    },
  },
  references: {},
  notifications: undefined,

  defaultFeatures: {
    collaborators: -1,
    dropbox: true,
    versioning: true,
    compileTimeout: parseIntOrFail(process.env.COMPILE_TIMEOUT || 180),
    compileGroup: 'standard',
    trackChanges: true,
    templates: true,
    references: true,
  },
}
// # OPTIONAL CONFIGURABLE SETTINGS
// (The dead `e = errorN` assignments left behind by decaffeinate have been
// removed — nothing ever read `e`.)

// Footer links, supplied as JSON.
if (process.env.SHARELATEX_LEFT_FOOTER != null) {
  try {
    settings.nav.left_footer = JSON.parse(process.env.SHARELATEX_LEFT_FOOTER)
  } catch (err) {
    console.error('could not parse SHARELATEX_LEFT_FOOTER, not valid JSON')
  }
}

if (process.env.SHARELATEX_RIGHT_FOOTER != null) {
  // Keep the raw string as a fallback when the value is not valid JSON.
  settings.nav.right_footer = process.env.SHARELATEX_RIGHT_FOOTER
  try {
    settings.nav.right_footer = JSON.parse(process.env.SHARELATEX_RIGHT_FOOTER)
  } catch (err) {
    console.error('could not parse SHARELATEX_RIGHT_FOOTER, not valid JSON')
  }
}

if (process.env.SHARELATEX_HEADER_IMAGE_URL != null) {
  settings.nav.custom_logo = process.env.SHARELATEX_HEADER_IMAGE_URL
}

if (process.env.SHARELATEX_HEADER_NAV_LINKS != null) {
  console.error(`\
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# WARNING: SHARELATEX_HEADER_NAV_LINKS is no longer supported
# See https://github.com/sharelatex/sharelatex/wiki/Configuring-Headers,-Footers-&-Logo
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\
`)
}

if (process.env.SHARELATEX_HEADER_EXTRAS != null) {
  try {
    settings.nav.header_extras = JSON.parse(
      process.env.SHARELATEX_HEADER_EXTRAS
    )
  } catch (err) {
    console.error('could not parse SHARELATEX_HEADER_EXTRAS, not valid JSON')
  }
}
// Sending Email
// -------------
//
// You must configure a mail server to be able to send invite emails from
// ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer
// documentation for available options:
//
//     http://www.nodemailer.com/docs/transports
if (process.env.SHARELATEX_EMAIL_FROM_ADDRESS != null) {
  const env = process.env
  settings.email = {
    fromAddress: env.SHARELATEX_EMAIL_FROM_ADDRESS,
    replyTo: env.SHARELATEX_EMAIL_REPLY_TO || '',
    driver: env.SHARELATEX_EMAIL_DRIVER,
    parameters: {
      // AWS Creds
      AWSAccessKeyID: env.SHARELATEX_EMAIL_AWS_SES_ACCESS_KEY_ID,
      AWSSecretKey: env.SHARELATEX_EMAIL_AWS_SES_SECRET_KEY,
      // SMTP Creds
      host: env.SHARELATEX_EMAIL_SMTP_HOST,
      port: env.SHARELATEX_EMAIL_SMTP_PORT,
      secure: parse(env.SHARELATEX_EMAIL_SMTP_SECURE),
      ignoreTLS: parse(env.SHARELATEX_EMAIL_SMTP_IGNORE_TLS),
      name: env.SHARELATEX_EMAIL_SMTP_NAME,
      logger: env.SHARELATEX_EMAIL_SMTP_LOGGER === 'true',
    },
    // NOTE(review): textEncoding lives on settings.email, not on
    // settings.email.parameters — kept as-is to preserve behaviour;
    // confirm against the consumer before moving it.
    textEncoding: env.SHARELATEX_EMAIL_TEXT_ENCODING,
    template: {
      customFooter: env.SHARELATEX_CUSTOM_EMAIL_FOOTER,
    },
  }

  if (env.SHARELATEX_EMAIL_AWS_SES_REGION != null) {
    settings.email.parameters.region = env.SHARELATEX_EMAIL_AWS_SES_REGION
  }

  if (
    env.SHARELATEX_EMAIL_SMTP_USER != null ||
    env.SHARELATEX_EMAIL_SMTP_PASS != null
  ) {
    settings.email.parameters.auth = {
      user: env.SHARELATEX_EMAIL_SMTP_USER,
      pass: env.SHARELATEX_EMAIL_SMTP_PASS,
    }
  }

  if (env.SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH != null) {
    settings.email.parameters.tls = {
      rejectUnauthorized: parse(env.SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH),
    }
  }
}

// i18n
if (process.env.SHARELATEX_LANG_DOMAIN_MAPPING != null) {
  settings.i18n.subdomainLang = parse(
    process.env.SHARELATEX_LANG_DOMAIN_MAPPING
  )
}

// Password Settings
// -----------
// These restrict the passwords users can use when registering
// opts are from http://antelle.github.io/passfield
const passwordPattern = process.env.SHARELATEX_PASSWORD_VALIDATION_PATTERN
const passwordMinLength = process.env.SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH
const passwordMaxLength = process.env.SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH
if (passwordPattern || passwordMinLength || passwordMaxLength) {
  settings.passwordStrengthOptions = {
    pattern: passwordPattern || 'aA$3',
    length: {
      min: passwordMinLength || 8,
      max: passwordMaxLength || 150,
    },
  }
}
// ######################
// ShareLaTeX Server Pro
// ######################

// The Server Pro flag enables the references service and disables
// percentage-based feature rollouts.
if (parse(process.env.SHARELATEX_IS_SERVER_PRO) === true) {
  settings.apis.references = {
    url: 'http://localhost:3040',
  }
  settings.bypassPercentageRollouts = true
}

// LDAP - SERVER PRO ONLY
// ----------
// SHARELATEX_LDAP_HOST belongs to the pre-0.5.1 configuration format; warn
// loudly instead of silently ignoring it.
if (process.env.SHARELATEX_LDAP_HOST) {
  console.error(`\
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# WARNING: The LDAP configuration format has changed in version 0.5.1
# See https://github.com/sharelatex/sharelatex/wiki/Server-Pro:-LDAP-Config
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\
`)
}
// LDAP (Server Pro only). All options are read from the environment.
// The decaffeinated original used assignment-in-condition plus IIFE-ternary
// expressions (the `no-cond-assign` / `no-unused-vars` eslint disables);
// those are replaced by two small helpers with identical semantics: an
// unset or empty variable yields undefined, and a malformed value is
// reported on stderr and also yields undefined.
if (process.env.SHARELATEX_LDAP_URL) {
  // Optional JSON-valued env var; log and return undefined on bad JSON.
  const ldapJsonEnv = function (name) {
    const raw = process.env[name]
    if (!raw) {
      return undefined
    }
    try {
      return JSON.parse(raw)
    } catch (err) {
      console.error(`could not parse ${name}`)
      return undefined
    }
  }
  // Optional integer-valued env var; log and return undefined on bad input.
  const ldapIntEnv = function (name) {
    const raw = process.env[name]
    if (!raw) {
      return undefined
    }
    try {
      return parseIntOrFail(raw)
    } catch (err) {
      console.error(`Cannot parse ${name}`)
      return undefined
    }
  }

  settings.externalAuth = true
  settings.ldap = {
    emailAtt: process.env.SHARELATEX_LDAP_EMAIL_ATT,
    nameAtt: process.env.SHARELATEX_LDAP_NAME_ATT,
    lastNameAtt: process.env.SHARELATEX_LDAP_LAST_NAME_ATT,
    updateUserDetailsOnLogin:
      process.env.SHARELATEX_LDAP_UPDATE_USER_DETAILS_ON_LOGIN === 'true',
    placeholder: process.env.SHARELATEX_LDAP_PLACEHOLDER,
    server: {
      url: process.env.SHARELATEX_LDAP_URL,
      bindDn: process.env.SHARELATEX_LDAP_BIND_DN,
      bindCredentials: process.env.SHARELATEX_LDAP_BIND_CREDENTIALS,
      bindProperty: process.env.SHARELATEX_LDAP_BIND_PROPERTY,
      searchBase: process.env.SHARELATEX_LDAP_SEARCH_BASE,
      searchScope: process.env.SHARELATEX_LDAP_SEARCH_SCOPE,
      searchFilter: process.env.SHARELATEX_LDAP_SEARCH_FILTER,
      searchAttributes: ldapJsonEnv('SHARELATEX_LDAP_SEARCH_ATTRIBUTES'),
      groupDnProperty: process.env.SHARELATEX_LDAP_GROUP_DN_PROPERTY,
      groupSearchBase: process.env.SHARELATEX_LDAP_GROUP_SEARCH_BASE,
      groupSearchScope: process.env.SHARELATEX_LDAP_GROUP_SEARCH_SCOPE,
      groupSearchFilter: process.env.SHARELATEX_LDAP_GROUP_SEARCH_FILTER,
      groupSearchAttributes: ldapJsonEnv(
        'SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES'
      ),
      cache: process.env.SHARELATEX_LDAP_CACHE === 'true',
      timeout: ldapIntEnv('SHARELATEX_LDAP_TIMEOUT'),
      connectTimeout: ldapIntEnv('SHARELATEX_LDAP_CONNECT_TIMEOUT'),
    },
  }

  // Optional TLS options. The CA value may be a single path string or a
  // JSON array of paths.
  if (process.env.SHARELATEX_LDAP_TLS_OPTS_CA_PATH) {
    let ca, caPaths
    try {
      ca = JSON.parse(process.env.SHARELATEX_LDAP_TLS_OPTS_CA_PATH)
    } catch (err) {
      console.error(
        'could not parse SHARELATEX_LDAP_TLS_OPTS_CA_PATH, invalid JSON'
      )
    }
    if (typeof ca === 'string') {
      caPaths = [ca]
    } else if (typeof ca === 'object' && ca !== null && ca.length != null) {
      caPaths = ca
    } else {
      console.error('problem parsing SHARELATEX_LDAP_TLS_OPTS_CA_PATH')
    }
    settings.ldap.server.tlsOptions = {
      rejectUnauthorized:
        process.env.SHARELATEX_LDAP_TLS_OPTS_REJECT_UNAUTH === 'true',
      ca: caPaths, // e.g.'/etc/ldap/ca_certs.pem'
    }
  }
}
// SAML (Server Pro only).
// NOTE: see https://github.com/node-saml/passport-saml/blob/master/README.md for docs of `server` options
// The decaffeinated assignment-in-condition + IIFE pattern is replaced by a
// single helper with identical semantics: unset/empty env vars yield
// undefined, malformed values are reported on stderr and yield undefined.
if (process.env.SHARELATEX_SAML_ENTRYPOINT) {
  // Parse an optional env var with `parser` (JSON.parse or parseIntOrFail);
  // log and return undefined when parsing fails.
  const samlEnv = function (name, parser) {
    const raw = process.env[name]
    if (!raw) {
      return undefined
    }
    try {
      return parser(raw)
    } catch (err) {
      console.error(`Cannot parse ${name}`)
      return undefined
    }
  }

  settings.externalAuth = true
  settings.saml = {
    updateUserDetailsOnLogin:
      process.env.SHARELATEX_SAML_UPDATE_USER_DETAILS_ON_LOGIN === 'true',
    identityServiceName: process.env.SHARELATEX_SAML_IDENTITY_SERVICE_NAME,
    emailField:
      process.env.SHARELATEX_SAML_EMAIL_FIELD ||
      process.env.SHARELATEX_SAML_EMAIL_FIELD_NAME,
    firstNameField: process.env.SHARELATEX_SAML_FIRST_NAME_FIELD,
    lastNameField: process.env.SHARELATEX_SAML_LAST_NAME_FIELD,
    server: {
      // strings
      entryPoint: process.env.SHARELATEX_SAML_ENTRYPOINT,
      callbackUrl: process.env.SHARELATEX_SAML_CALLBACK_URL,
      issuer: process.env.SHARELATEX_SAML_ISSUER,
      decryptionPvk: process.env.SHARELATEX_SAML_DECRYPTION_PVK,
      decryptionCert: process.env.SHARELATEX_SAML_DECRYPTION_CERT,
      signatureAlgorithm: process.env.SHARELATEX_SAML_SIGNATURE_ALGORITHM,
      identifierFormat: process.env.SHARELATEX_SAML_IDENTIFIER_FORMAT,
      attributeConsumingServiceIndex:
        process.env.SHARELATEX_SAML_ATTRIBUTE_CONSUMING_SERVICE_INDEX,
      authnContext: process.env.SHARELATEX_SAML_AUTHN_CONTEXT,
      authnRequestBinding: process.env.SHARELATEX_SAML_AUTHN_REQUEST_BINDING,
      validateInResponseTo: process.env.SHARELATEX_SAML_VALIDATE_IN_RESPONSE_TO,
      cacheProvider: process.env.SHARELATEX_SAML_CACHE_PROVIDER,
      logoutUrl: process.env.SHARELATEX_SAML_LOGOUT_URL,
      logoutCallbackUrl: process.env.SHARELATEX_SAML_LOGOUT_CALLBACK_URL,
      // booleans
      disableRequestedAuthnContext:
        process.env.SHARELATEX_SAML_DISABLE_REQUESTED_AUTHN_CONTEXT === 'true',
      forceAuthn: process.env.SHARELATEX_SAML_FORCE_AUTHN === 'true',
      skipRequestCompression:
        process.env.SHARELATEX_SAML_SKIP_REQUEST_COMPRESSION === 'true',
      // integers
      acceptedClockSkewMs: samlEnv(
        'SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS',
        parseIntOrFail
      ),
      requestIdExpirationPeriodMs: samlEnv(
        'SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS',
        parseIntOrFail
      ),
      // JSON objects
      additionalParams: samlEnv(
        'SHARELATEX_SAML_ADDITIONAL_PARAMS',
        JSON.parse
      ),
      additionalAuthorizeParams: samlEnv(
        'SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS',
        JSON.parse
      ),
      additionalLogoutParams: samlEnv(
        'SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS',
        JSON.parse
      ),
    },
  }

  // SHARELATEX_SAML_CERT cannot be empty
  // https://github.com/node-saml/passport-saml/commit/f6b1c885c0717f1083c664345556b535f217c102
  if (process.env.SHARELATEX_SAML_CERT) {
    settings.saml.server.cert = process.env.SHARELATEX_SAML_CERT
    settings.saml.server.privateCert = process.env.SHARELATEX_SAML_PRIVATE_CERT
  }
}
// Compiler
// --------
// When SANDBOXED_COMPILES is enabled, each compile runs in its own TeX Live
// docker container instead of directly on the host.
if (process.env.SANDBOXED_COMPILES === 'true') {
  const defaultCompilerPath =
    '/usr/local/texlive/2015/bin/x86_64-linux:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
  settings.clsi = {
    dockerRunner: true,
    docker: {
      image: process.env.TEX_LIVE_DOCKER_IMAGE,
      env: {
        HOME: '/tmp',
        PATH: process.env.COMPILER_PATH || defaultCompilerPath,
      },
      user: 'www-data',
    },
  }

  if (settings.path == null) {
    settings.path = {}
  }
  settings.path.synctexBaseDir = function () {
    return '/compile'
  }

  if (process.env.SANDBOXED_COMPILES_SIBLING_CONTAINERS === 'true') {
    console.log('Using sibling containers for sandboxed compiles')
    const hostDir = process.env.SANDBOXED_COMPILES_HOST_DIR
    if (hostDir) {
      settings.path.sandboxedCompilesHostDir = hostDir
    } else {
      console.error(
        'Sibling containers, but SANDBOXED_COMPILES_HOST_DIR not set'
      )
    }
  }
}
// Templates
// ---------
if (process.env.SHARELATEX_TEMPLATES_USER_ID) {
  // Projects published as templates are owned by this dedicated user.
  settings.templates = {
    mountPointUrl: '/templates',
    user_id: process.env.SHARELATEX_TEMPLATES_USER_ID,
  }
  const templateLinksJson = process.env.SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS
  settings.templateLinks = parse(templateLinksJson)
}
// /Learn
// -------
const proxyLearnJson = process.env.SHARELATEX_PROXY_LEARN
if (proxyLearnJson != null) {
  settings.proxyLearn = parse(proxyLearnJson)
}
// /References
// -----------
const elasticsearchUrl = process.env.SHARELATEX_ELASTICSEARCH_URL
if (elasticsearchUrl != null) {
  settings.references.elasticsearch = { host: elasticsearchUrl }
}
// TeX Live Images
// -----------
if (process.env.ALL_TEX_LIVE_DOCKER_IMAGES != null) {
  allTexLiveDockerImages = process.env.ALL_TEX_LIVE_DOCKER_IMAGES.split(',')
}
if (process.env.ALL_TEX_LIVE_DOCKER_IMAGE_NAMES != null) {
  allTexLiveDockerImageNames =
    process.env.ALL_TEX_LIVE_DOCKER_IMAGE_NAMES.split(',')
}
if (allTexLiveDockerImages != null) {
  // Each allowed image is exposed under its basename; the optional names list
  // supplies a human-readable description, falling back to the basename.
  settings.allowedImageNames = allTexLiveDockerImages.map(
    (fullImageName, index) => {
      const imageName = Path.basename(fullImageName)
      const imageDesc =
        allTexLiveDockerImageNames != null
          ? allTexLiveDockerImageNames[index]
          : imageName
      return { imageName, imageDesc }
    }
  )
}
// With lots of incoming and outgoing HTTP connections to different services,
// sometimes long running, it is a good idea to increase the default number
// of sockets that Node will hold open.
const http = require('http')
http.globalAgent.maxSockets = 300
const https = require('https')
https.globalAgent.maxSockets = 300
// Hand the fully-assembled configuration object to @overleaf/settings consumers.
module.exports = settings

View file

@ -1,60 +0,0 @@
# Admin/maintenance Grunt tasks: create an admin user, delete a user.
# NOTE(review): legacy CoffeeScript version, replaced by Gruntfile.js in this commit.
module.exports = (grunt) ->
  # Create (or promote) a user by email and print a one-time password-set URL.
  grunt.registerTask 'user:create-admin', "Create a user with the given email address and make them an admin. Update in place if the user already exists. Usage: grunt user:create-admin --email joe@example.com", () ->
    done = @async()
    email = grunt.option("email")
    if !email?
      console.error "Usage: grunt user:create-admin --email=joe@example.com"
      process.exit(1)
    # Required lazily so loading the Gruntfile does not pull in the web stack.
    settings = require "settings-sharelatex"
    mongodb = require "../web/app/src/infrastructure/mongodb"
    UserRegistrationHandler = require "../web/app/src/Features/User/UserRegistrationHandler"
    OneTimeTokenHandler = require "../web/app/src/Features/Security/OneTimeTokenHandler"
    mongodb.waitForDb().then () ->
      # Register with a random throwaway password; the user sets their own
      # password via the one-time token below.
      UserRegistrationHandler.registerNewUser {
        email: email
        password: require("crypto").randomBytes(32).toString("hex")
      }, (error, user) ->
        # An already-registered account is fine: we still promote it to admin.
        if error? and error?.message != "EmailAlreadyRegistered"
          throw error
        user.isAdmin = true
        user.save (error) ->
          throw error if error?
          ONE_WEEK = 7 * 24 * 60 * 60 # seconds
          OneTimeTokenHandler.getNewToken "password", { expiresIn: ONE_WEEK, email:user.email, user_id: user._id.toString() }, (err, token)->
            # NOTE(review): `next` is undefined in this scope, so a token error
            # crashes with a ReferenceError instead of being reported — bug.
            return next(err) if err?
            console.log ""
            console.log """
            Successfully created #{email} as an admin user.
            Please visit the following URL to set a password for #{email} and log in:
            #{settings.siteUrl}/user/password/set?passwordResetToken=#{token}
            """
            done()
  # Delete a user (and all their data) by email address.
  grunt.registerTask 'user:delete', "deletes a user and all their data, Usage: grunt user:delete --email joe@example.com", () ->
    done = @async()
    email = grunt.option("email")
    if !email?
      console.error "Usage: grunt user:delete --email=joe@example.com"
      process.exit(1)
    settings = require "settings-sharelatex"
    mongodb = require "../web/app/src/infrastructure/mongodb"
    UserGetter = require "../web/app/src/Features/User/UserGetter"
    UserDeleter = require "../web/app/src/Features/User/UserDeleter"
    mongodb.waitForDb().then () ->
      UserGetter.getUser email:email, (error, user) ->
        if error?
          throw error
        # Missing user is not an error: it may already have been deleted.
        if !user?
          console.log("user #{email} not in database, potentially already deleted")
          return done()
        UserDeleter.deleteUser user._id, (err)->
          if err?
            throw err
          done()

View file

@ -0,0 +1,116 @@
/* eslint-disable
no-undef,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
module.exports = function (grunt) {
grunt.registerTask(
'user:create-admin',
'Create a user with the given email address and make them an admin. Update in place if the user already exists. Usage: grunt user:create-admin --email joe@example.com',
function () {
const done = this.async()
const email = grunt.option('email')
if (email == null) {
console.error('Usage: grunt user:create-admin --email=joe@example.com')
process.exit(1)
}
const settings = require('@overleaf/settings')
const mongodb = require('../web/app/src/infrastructure/mongodb')
const UserRegistrationHandler = require('../web/app/src/Features/User/UserRegistrationHandler')
const OneTimeTokenHandler = require('../web/app/src/Features/Security/OneTimeTokenHandler')
return mongodb.waitForDb().then(() =>
UserRegistrationHandler.registerNewUser(
{
email,
password: require('crypto').randomBytes(32).toString('hex'),
},
function (error, user) {
if (
error != null &&
(error != null ? error.message : undefined) !==
'EmailAlreadyRegistered'
) {
throw error
}
user.isAdmin = true
return user.save(function (error) {
if (error != null) {
throw error
}
const ONE_WEEK = 7 * 24 * 60 * 60 // seconds
return OneTimeTokenHandler.getNewToken(
'password',
{
expiresIn: ONE_WEEK,
email: user.email,
user_id: user._id.toString(),
},
function (err, token) {
if (err != null) {
return next(err)
}
console.log('')
console.log(`\
Successfully created ${email} as an admin user.
Please visit the following URL to set a password for ${email} and log in:
${settings.siteUrl}/user/password/set?passwordResetToken=${token}
\
`)
return done()
}
)
})
}
)
)
}
)
return grunt.registerTask(
'user:delete',
'deletes a user and all their data, Usage: grunt user:delete --email joe@example.com',
function () {
const done = this.async()
const email = grunt.option('email')
if (email == null) {
console.error('Usage: grunt user:delete --email=joe@example.com')
process.exit(1)
}
const settings = require('@overleaf/settings')
const mongodb = require('../web/app/src/infrastructure/mongodb')
const UserGetter = require('../web/app/src/Features/User/UserGetter')
const UserDeleter = require('../web/app/src/Features/User/UserDeleter')
return mongodb.waitForDb().then(() =>
UserGetter.getUser({ email }, function (error, user) {
if (error != null) {
throw error
}
if (user == null) {
console.log(
`user ${email} not in database, potentially already deleted`
)
return done()
}
return UserDeleter.deleteUser(user._id, function (err) {
if (err != null) {
throw err
}
return done()
})
})
)
}
)
}

View file

@ -1,24 +0,0 @@
# require("coffee-script")
# fs = require("fs")
# _ = require("underscore")
# if not process.argv[2]
# console.log "Usage: coffee project_size.coffee user_files_path"
# else
# dirPath = process.argv[2]
# if not fs.lstatSync(dirPath).isDirectory()
# console.log dirPath + " directory does not exist"
# else
# fs.readdir dirPath, (err, files)->
# projects = []
# files.forEach (file)->
# project_id = file.split("_")[0]
# if !projects[project_id]
# projects[project_id] = 0
# projects[project_id] += fs.lstatSync(dirPath+"/"+file).size
# ids = _.keys projects
# console.log "project \t size"
# ids.forEach (id)->
# console.log id + "\t" + projects[id]