From efd16d99d6136c1d06164b5897d73e07006103f8 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 7 Jul 2021 11:39:42 +0000 Subject: [PATCH] decaffeinate: Convert coffee files to JS --- server-ce/Gruntfile.js | 417 ++++++---- server-ce/settings.js | 992 +++++++++++++---------- server-ce/tasks/CreateAndDestroyUsers.js | 125 +-- server-ce/tasks/ProjectSize.js | 42 +- 4 files changed, 899 insertions(+), 677 deletions(-) diff --git a/server-ce/Gruntfile.js b/server-ce/Gruntfile.js index 9375096d62..a1173ed864 100644 --- a/server-ce/Gruntfile.js +++ b/server-ce/Gruntfile.js @@ -1,216 +1,311 @@ -coffee = require("coffee-script") -fs = require "fs" -spawn = require("child_process").spawn -exec = require("child_process").exec -rimraf = require "rimraf" -Path = require "path" -semver = require "semver" -knox = require "knox" -crypto = require "crypto" -async = require "async" -settings = require("settings-sharelatex") -_ = require("underscore") +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__, or convert again using --optional-chaining + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const coffee = require("coffee-script"); +const fs = require("fs"); +const { + spawn +} = require("child_process"); +const { + exec +} = require("child_process"); +const rimraf = require("rimraf"); +const Path = require("path"); +const semver = require("semver"); +const knox = require("knox"); +const crypto = require("crypto"); +const async = require("async"); +const settings = require("settings-sharelatex"); +const _ = require("underscore"); -SERVICES = require("./config/services") +const SERVICES = require("./config/services"); -module.exports = (grunt) -> - grunt.loadNpmTasks 'grunt-bunyan' - grunt.loadNpmTasks 'grunt-execute' - grunt.loadNpmTasks 'grunt-available-tasks' - grunt.loadNpmTasks 'grunt-concurrent' - grunt.loadNpmTasks "grunt-contrib-coffee" - grunt.loadNpmTasks "grunt-shell" +module.exports = function(grunt) { + let Helpers; + let service; + grunt.loadNpmTasks('grunt-bunyan'); + grunt.loadNpmTasks('grunt-execute'); + grunt.loadNpmTasks('grunt-available-tasks'); + grunt.loadNpmTasks('grunt-concurrent'); + grunt.loadNpmTasks("grunt-contrib-coffee"); + grunt.loadNpmTasks("grunt-shell"); - grunt.task.loadTasks "./tasks" + grunt.task.loadTasks("./tasks"); - execute = {} - for service in SERVICES + const execute = {}; + for (service of Array.from(SERVICES)) { execute[service.name] = - src: "#{service.name}/app.js" + {src: `${service.name}/app.js`}; + } - grunt.initConfig - execute: execute + grunt.initConfig({ + execute, - concurrent: - all: - tasks: ("run:#{service.name}" for service in SERVICES) - options: - limit: SERVICES.length + concurrent: { + all: { + tasks: (((() => { + const result = []; + for (service of Array.from(SERVICES)) { result.push(`run:${service.name}`); + } + return result; + })())), + options: { + limit: SERVICES.length, logConcurrentOutput: true - availabletasks: - tasks: - options: + availabletasks: { + tasks: { + options: { filter: 'exclude', tasks: [ - 'concurrent' - 'execute' - 'bunyan' + 'concurrent', + 'execute', + 'bunyan', 'availabletasks' - ] - groups: + ], + groups: { "Run tasks": [ - "run" - "run:all" + "run", + "run:all", "default" - ].concat 
("run:#{service.name}" for service in SERVICES) + ].concat(((() => { + const result1 = []; + for (service of Array.from(SERVICES)) { result1.push(`run:${service.name}`); + } + return result1; + })())), "Misc": [ "help" - ] - "Install tasks": ("install:#{service.name}" for service in SERVICES).concat(["install:all", "install"]) - "Update tasks": ("update:#{service.name}" for service in SERVICES).concat(["update:all", "update"]) + ], + "Install tasks": ((() => { + const result2 = []; + for (service of Array.from(SERVICES)) { result2.push(`install:${service.name}`); + } + return result2; + })()).concat(["install:all", "install"]), + "Update tasks": ((() => { + const result3 = []; + for (service of Array.from(SERVICES)) { result3.push(`update:${service.name}`); + } + return result3; + })()).concat(["update:all", "update"]), "Checks": ["check", "check:redis", "check:latexmk", "check:s3", "check:make", "check:mongo"] + } + } + } + }}); - for service in SERVICES - do (service) -> - grunt.registerTask "install:#{service.name}", "Download and set up the #{service.name} service", () -> - done = @async() - Helpers.installService(service, done) + for (service of Array.from(SERVICES)) { + ((service => grunt.registerTask(`install:${service.name}`, `Download and set up the ${service.name} service`, function() { + const done = this.async(); + return Helpers.installService(service, done); + })))(service); + } - grunt.registerTask 'install:all', "Download and set up all ShareLaTeX services", + grunt.registerTask('install:all', "Download and set up all ShareLaTeX services", [].concat( - ("install:#{service.name}" for service in SERVICES) + ((() => { + const result4 = []; + for (service of Array.from(SERVICES)) { result4.push(`install:${service.name}`); + } + return result4; + })()) ).concat(['postinstall']) + ); - grunt.registerTask 'install', 'install:all' - grunt.registerTask 'postinstall', 'Explain postinstall steps', () -> - Helpers.postinstallMessage @async() + grunt.registerTask('install', 'install:all'); + grunt.registerTask('postinstall', 'Explain postinstall steps', function() { + return Helpers.postinstallMessage(this.async()); + }); - grunt.registerTask 'update:all', "Checkout and update all ShareLaTeX services", + grunt.registerTask('update:all', "Checkout and update all ShareLaTeX services", ["check:make"].concat( - ("update:#{service.name}" for service in SERVICES) + ((() => { + const result5 = []; + for (service of Array.from(SERVICES)) { result5.push(`update:${service.name}`); + } + return result5; + })()) ) - grunt.registerTask 'update', 'update:all' - grunt.registerTask 'run', "Run all of the sharelatex processes", ['concurrent:all'] - grunt.registerTask 'run:all', 'run' + ); + grunt.registerTask('update', 'update:all'); + grunt.registerTask('run', "Run all of the sharelatex processes", ['concurrent:all']); + grunt.registerTask('run:all', 'run'); - grunt.registerTask 'help', 'Display this help list', 'availabletasks' - grunt.registerTask 'default', 'run' + grunt.registerTask('help', 'Display this help list', 'availabletasks'); + grunt.registerTask('default', 'run'); - grunt.registerTask "check:redis", "Check that redis is installed and running", () -> - Helpers.checkRedisConnect @async() + grunt.registerTask("check:redis", "Check that redis is installed and running", function() { + return Helpers.checkRedisConnect(this.async()); + }); - grunt.registerTask "check:mongo", "Check that mongo is installed", () -> - Helpers.checkMongoConnect @async() + grunt.registerTask("check:mongo", "Check 
that mongo is installed", function() { + return Helpers.checkMongoConnect(this.async()); + }); - grunt.registerTask "check", "Check that you have the required dependencies installed", ["check:redis", "check:mongo", "check:make"] + grunt.registerTask("check", "Check that you have the required dependencies installed", ["check:redis", "check:mongo", "check:make"]); - grunt.registerTask "check:make", "Check that make is installed", () -> - Helpers.checkMake @async() + grunt.registerTask("check:make", "Check that make is installed", function() { + return Helpers.checkMake(this.async()); + }); - Helpers = - installService: (service, callback = (error) ->) -> - console.log "Installing #{service.name}" - Helpers.cloneGitRepo service, (error) -> - if error? - callback(error) - else - callback() + return Helpers = { + installService(service, callback) { + if (callback == null) { callback = function(error) {}; } + console.log(`Installing ${service.name}`); + return Helpers.cloneGitRepo(service, function(error) { + if (error != null) { + return callback(error); + } else { + return callback(); + } + }); + }, - cloneGitRepo: (service, callback = (error) ->) -> - repo_src = service.repo - dir = service.name - if !fs.existsSync(dir) - proc = spawn "git", [ + cloneGitRepo(service, callback) { + if (callback == null) { callback = function(error) {}; } + const repo_src = service.repo; + const dir = service.name; + if (!fs.existsSync(dir)) { + const proc = spawn("git", [ "clone", repo_src, dir - ], stdio: "inherit" - proc.on "close", () -> - Helpers.checkoutVersion service, callback - else - console.log "#{dir} already installed, skipping." - callback() + ], {stdio: "inherit"}); + return proc.on("close", () => Helpers.checkoutVersion(service, callback)); + } else { + console.log(`${dir} already installed, skipping.`); + return callback(); + } + }, - checkoutVersion: (service, callback = (error) ->) -> - dir = service.name - grunt.log.write "checking out #{service.name} #{service.version}" - proc = spawn "git", ["checkout", service.version], stdio: "inherit", cwd: dir - proc.on "close", () -> - callback() + checkoutVersion(service, callback) { + if (callback == null) { callback = function(error) {}; } + const dir = service.name; + grunt.log.write(`checking out ${service.name} ${service.version}`); + const proc = spawn("git", ["checkout", service.version], {stdio: "inherit", cwd: dir}); + return proc.on("close", () => callback()); + }, - postinstallMessage: (callback = (error) ->) -> - grunt.log.write """ - Services cloned: - #{service.name for service in SERVICES} - To install services run: - $ source bin/install-services - This will install the required node versions and run `npm install` for each service. - See https://github.com/sharelatex/sharelatex/pull/549 for more info. - """ - callback() + postinstallMessage(callback) { + if (callback == null) { callback = function(error) {}; } + grunt.log.write(`\ +Services cloned: + ${(() => { + const result6 = []; + for (service of Array.from(SERVICES)) { result6.push(service.name); + } + return result6; + })()} +To install services run: + $ source bin/install-services +This will install the required node versions and run \`npm install\` for each service. +See https://github.com/sharelatex/sharelatex/pull/549 for more info.\ +` + ); + return callback(); + }, - checkMake: (callback = (error) ->) -> - grunt.log.write "Checking make is installed... " - exec "make --version", (error, stdout, stderr) -> - if error? 
and error.message.match("not found") - grunt.log.error "FAIL." - grunt.log.errorlns """ - Either make is not installed or is not in your path. + checkMake(callback) { + if (callback == null) { callback = function(error) {}; } + grunt.log.write("Checking make is installed... "); + return exec("make --version", function(error, stdout, stderr) { + if ((error != null) && error.message.match("not found")) { + grunt.log.error("FAIL."); + grunt.log.errorlns(`\ +Either make is not installed or is not in your path. - On Ubuntu you can install make with: +On Ubuntu you can install make with: - sudo apt-get install build-essential + sudo apt-get install build-essential +\ +` + ); + return callback(error); + } else if (error != null) { + return callback(error); + } else { + grunt.log.write("OK."); + return callback(); + } + }); + }, + checkMongoConnect(callback) { + if (callback == null) { callback = function(error) {}; } + grunt.log.write("Checking can connect to mongo"); + const mongojs = require("mongojs"); + const db = mongojs(settings.mongo.url, ["tags"]); + db.runCommand({ ping: 1 }, function(err, res) { + if (!err && res.ok) { + grunt.log.write("OK."); + } + return callback(); + }); + return db.on('error', function(err){ + err = "Can not connect to mongodb"; + grunt.log.error("FAIL."); + grunt.log.errorlns(`\ +!!!!!!!!!!!!!! MONGO ERROR !!!!!!!!!!!!!! - """ - return callback(error) - else if error? - return callback(error) - else - grunt.log.write "OK." - return callback() - checkMongoConnect: (callback = (error) ->) -> - grunt.log.write "Checking can connect to mongo" - mongojs = require("mongojs") - db = mongojs(settings.mongo.url, ["tags"]) - db.runCommand { ping: 1 }, (err, res) -> - if !err and res.ok - grunt.log.write "OK." - return callback() - db.on 'error', (err)-> - err = "Can not connect to mongodb" - grunt.log.error "FAIL." - grunt.log.errorlns """ - !!!!!!!!!!!!!! MONGO ERROR !!!!!!!!!!!!!! +ShareLaTeX can not talk to the mongodb instance - ShareLaTeX can not talk to the mongodb instance +Check the mongodb instance is running and accessible on env var SHARELATEX_MONGO_URL - Check the mongodb instance is running and accessible on env var SHARELATEX_MONGO_URL - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - """ - throw new Error("Can not connect to Mongodb") - return callback(err) +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\ +` + ); + throw new Error("Can not connect to Mongodb"); + return callback(err); + }); + }, - checkRedisConnect: (callback = (error) ->) -> - grunt.log.write "Checking can connect to redis\n" - rclient = require("redis").createClient(settings.redis.web) + checkRedisConnect(callback) { + if (callback == null) { callback = function(error) {}; } + grunt.log.write("Checking can connect to redis\n"); + const rclient = require("redis").createClient(settings.redis.web); - rclient.ping (err, res) -> - if !err? - grunt.log.write "OK." - else - throw new Error("Can not connect to redis") - return callback() - errorHandler = _.once (err)-> - err = "Can not connect to redis" - grunt.log.error "FAIL." - grunt.log.errorlns """ - !!!!!!!!!!!!!! REDIS ERROR !!!!!!!!!!!!!! + rclient.ping(function(err, res) { + if ((err == null)) { + grunt.log.write("OK."); + } else { + throw new Error("Can not connect to redis"); + } + return callback(); + }); + const errorHandler = _.once(function(err){ + err = "Can not connect to redis"; + grunt.log.error("FAIL."); + grunt.log.errorlns(`\ +!!!!!!!!!!!!!! REDIS ERROR !!!!!!!!!!!!!! 
- ShareLaTeX can not talk to the redis instance +ShareLaTeX can not talk to the redis instance - Check the redis instance is running and accessible on env var SHARELATEX_REDIS_HOST +Check the redis instance is running and accessible on env var SHARELATEX_REDIS_HOST - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - """ - throw new Error("Can not connect to redis") - return callback(err) - rclient.on 'error', errorHandler +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\ +` + ); + throw new Error("Can not connect to redis"); + return callback(err); + }); + return rclient.on('error', errorHandler); + } + }; +}; + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} diff --git a/server-ce/settings.js b/server-ce/settings.js index 684bf18d53..1d2fc7a018 100644 --- a/server-ce/settings.js +++ b/server-ce/settings.js @@ -1,576 +1,684 @@ -Path = require('path') +/* + * decaffeinate suggestions: + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let allTexLiveDockerImageNames, allTexLiveDockerImages, redisConfig, siteUrl; +let e; +const Path = require('path'); -# These credentials are used for authenticating api requests -# between services that may need to go over public channels -httpAuthUser = "sharelatex" -httpAuthPass = process.env["WEB_API_PASSWORD"] -httpAuthUsers = {} -httpAuthUsers[httpAuthUser] = httpAuthPass +// These credentials are used for authenticating api requests +// between services that may need to go over public channels +const httpAuthUser = "sharelatex"; +const httpAuthPass = process.env["WEB_API_PASSWORD"]; +const httpAuthUsers = {}; +httpAuthUsers[httpAuthUser] = httpAuthPass; -parse = (option)-> - if option? 
- try - opt = JSON.parse(option) - return opt - catch err - throw new Error("problem parsing #{option}, invalid JSON") +const parse = function(option){ + if (option != null) { + try { + const opt = JSON.parse(option); + return opt; + } catch (err) { + throw new Error(`problem parsing ${option}, invalid JSON`); + } + } +}; -parseIntOrFail = (value)-> - parsedValue = parseInt(value, 10) - if isNaN(parsedValue) - throw new Error("'#{value}' is an invalid integer") - return parsedValue +const parseIntOrFail = function(value){ + const parsedValue = parseInt(value, 10); + if (isNaN(parsedValue)) { + throw new Error(`'${value}' is an invalid integer`); + } + return parsedValue; +}; -DATA_DIR = '/var/lib/sharelatex/data' -TMP_DIR = '/var/lib/sharelatex/tmp' +const DATA_DIR = '/var/lib/sharelatex/data'; +const TMP_DIR = '/var/lib/sharelatex/tmp'; -settings = +const settings = { - clsi: - optimiseInDocker: process.env['OPTIMISE_PDF'] == 'true' + clsi: { + optimiseInDocker: process.env['OPTIMISE_PDF'] === 'true' + }, - brandPrefix: "" + brandPrefix: "", allowAnonymousReadAndWriteSharing: - process.env['SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING'] == 'true' + process.env['SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING'] === 'true', - # Databases - # --------- + // Databases + // --------- - # ShareLaTeX's main persistent data store is MongoDB (http://www.mongodb.org/) - # Documentation about the URL connection string format can be found at: - # - # http://docs.mongodb.org/manual/reference/connection-string/ - # - # The following works out of the box with Mongo's default settings: - mongo: - url : process.env["SHARELATEX_MONGO_URL"] or 'mongodb://dockerhost/sharelatex' + // ShareLaTeX's main persistent data store is MongoDB (http://www.mongodb.org/) + // Documentation about the URL connection string format can be found at: + // + // http://docs.mongodb.org/manual/reference/connection-string/ + // + // The following works out of the box with Mongo's default settings: + mongo: { + url : process.env["SHARELATEX_MONGO_URL"] || 'mongodb://dockerhost/sharelatex' + }, - # Redis is used in ShareLaTeX for high volume queries, like real-time - # editing, and session management. 
- # - # The following config will work with Redis's default settings: - redis: - web: redisConfig = - host: process.env["SHARELATEX_REDIS_HOST"] or "dockerhost" - port: process.env["SHARELATEX_REDIS_PORT"] or "6379" - password: process.env["SHARELATEX_REDIS_PASS"] or undefined - key_schema: - # document-updater - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - docLines: ({doc_id}) -> "doclines:#{doc_id}" - docOps: ({doc_id}) -> "DocOps:#{doc_id}" - docVersion: ({doc_id}) -> "DocVersion:#{doc_id}" - docHash: ({doc_id}) -> "DocHash:#{doc_id}" - projectKey: ({doc_id}) -> "ProjectId:#{doc_id}" - docsInProject: ({project_id}) -> "DocsIn:#{project_id}" - ranges: ({doc_id}) -> "Ranges:#{doc_id}" - # document-updater:realtime - pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}" - # document-updater:history - uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" - docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" - # document-updater:lock - blockingKey: ({doc_id}) -> "Blocking:#{doc_id}" - # track-changes:lock - historyLock: ({doc_id}) -> "HistoryLock:#{doc_id}" - historyIndexLock: ({project_id}) -> "HistoryIndexLock:#{project_id}" - # track-changes:history - uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" - docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" - # realtime - clientsInProject: ({project_id}) -> "clients_in_project:#{project_id}" - connectedUser: ({project_id, client_id})-> "connected_user:#{project_id}:#{client_id}" - fairy: redisConfig - # track-changes and document-updater - realtime: redisConfig - documentupdater: redisConfig - lock: redisConfig - history: redisConfig - websessions: redisConfig - api: redisConfig - pubsub: redisConfig + // Redis is used in ShareLaTeX for high volume queries, like real-time + // editing, and session management. 
+ // + // The following config will work with Redis's default settings: + redis: { + web: (redisConfig = { + host: process.env["SHARELATEX_REDIS_HOST"] || "dockerhost", + port: process.env["SHARELATEX_REDIS_PORT"] || "6379", + password: process.env["SHARELATEX_REDIS_PASS"] || undefined, + key_schema: { + // document-updater + blockingKey({doc_id}) { return `Blocking:${doc_id}`; }, + docLines({doc_id}) { return `doclines:${doc_id}`; }, + docOps({doc_id}) { return `DocOps:${doc_id}`; }, + docVersion({doc_id}) { return `DocVersion:${doc_id}`; }, + docHash({doc_id}) { return `DocHash:${doc_id}`; }, + projectKey({doc_id}) { return `ProjectId:${doc_id}`; }, + docsInProject({project_id}) { return `DocsIn:${project_id}`; }, + ranges({doc_id}) { return `Ranges:${doc_id}`; }, + // document-updater:realtime + pendingUpdates({doc_id}) { return `PendingUpdates:${doc_id}`; }, + // document-updater:history + uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:${doc_id}`; }, + docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:${project_id}`; }, + // document-updater:lock + blockingKey({doc_id}) { return `Blocking:${doc_id}`; }, + // track-changes:lock + historyLock({doc_id}) { return `HistoryLock:${doc_id}`; }, + historyIndexLock({project_id}) { return `HistoryIndexLock:${project_id}`; }, + // track-changes:history + uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:${doc_id}`; }, + docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:${project_id}`; }, + // realtime + clientsInProject({project_id}) { return `clients_in_project:${project_id}`; }, + connectedUser({project_id, client_id}){ return `connected_user:${project_id}:${client_id}`; } + } + }), + fairy: redisConfig, + // track-changes and document-updater + realtime: redisConfig, + documentupdater: redisConfig, + lock: redisConfig, + history: redisConfig, + websessions: redisConfig, + api: redisConfig, + pubsub: redisConfig, project_history: redisConfig + }, - # The compile server (the clsi) uses a SQL database to cache files and - # meta-data. sqlite is the default, and the load is low enough that this will - # be fine in production (we use sqlite at sharelatex.com). - # - # If you want to configure a different database, see the Sequelize documentation - # for available options: - # - # https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage - # - mysql: - clsi: - database: "clsi" - username: "clsi" - password: "" - dialect: "sqlite" + // The compile server (the clsi) uses a SQL database to cache files and + // meta-data. sqlite is the default, and the load is low enough that this will + // be fine in production (we use sqlite at sharelatex.com). + // + // If you want to configure a different database, see the Sequelize documentation + // for available options: + // + // https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage + // + mysql: { + clsi: { + database: "clsi", + username: "clsi", + password: "", + dialect: "sqlite", storage: Path.join(DATA_DIR, "db.sqlite") + } + }, - # File storage - # ------------ + // File storage + // ------------ - # ShareLaTeX can store binary files like images either locally or in Amazon - # S3. The default is locally: - filestore: - backend: "fs" - stores: - user_files: Path.join(DATA_DIR, "user_files") + // ShareLaTeX can store binary files like images either locally or in Amazon + // S3. 
The default is locally: + filestore: { + backend: "fs", + stores: { + user_files: Path.join(DATA_DIR, "user_files"), template_files: Path.join(DATA_DIR, "template_files") + } + }, - # To use Amazon S3 as a storage backend, comment out the above config, and - # uncomment the following, filling in your key, secret, and bucket name: - # - # filestore: - # backend: "s3" - # stores: - # user_files: "BUCKET_NAME" - # s3: - # key: "AWS_KEY" - # secret: "AWS_SECRET" - # + // To use Amazon S3 as a storage backend, comment out the above config, and + // uncomment the following, filling in your key, secret, and bucket name: + // + // filestore: + // backend: "s3" + // stores: + // user_files: "BUCKET_NAME" + // s3: + // key: "AWS_KEY" + // secret: "AWS_SECRET" + // - trackchanges: + trackchanges: { continueOnError: true + }, - # Local disk caching - # ------------------ - path: - # If we ever need to write something to disk (e.g. incoming requests - # that need processing but may be too big for memory), then write - # them to disk here: - dumpFolder: Path.join(TMP_DIR, "dumpFolder") - # Where to write uploads before they are processed - uploadFolder: Path.join(TMP_DIR, "uploads") - # Where to write the project to disk before running LaTeX on it - compilesDir: Path.join(DATA_DIR, "compiles") - # Where to cache downloaded URLs for the CLSI - clsiCacheDir: Path.join(DATA_DIR, "cache") - # Where to write the output files to disk after running LaTeX + // Local disk caching + // ------------------ + path: { + // If we ever need to write something to disk (e.g. incoming requests + // that need processing but may be too big for memory), then write + // them to disk here: + dumpFolder: Path.join(TMP_DIR, "dumpFolder"), + // Where to write uploads before they are processed + uploadFolder: Path.join(TMP_DIR, "uploads"), + // Where to write the project to disk before running LaTeX on it + compilesDir: Path.join(DATA_DIR, "compiles"), + // Where to cache downloaded URLs for the CLSI + clsiCacheDir: Path.join(DATA_DIR, "cache"), + // Where to write the output files to disk after running LaTeX outputDir: Path.join(DATA_DIR, "output") + }, - # Server Config - # ------------- + // Server Config + // ------------- - # Where your instance of ShareLaTeX can be found publicly. This is used - # when emails are sent out and in generated links: - siteUrl: siteUrl = process.env["SHARELATEX_SITE_URL"] or 'http://localhost' + // Where your instance of ShareLaTeX can be found publicly. This is used + // when emails are sent out and in generated links: + siteUrl: (siteUrl = process.env["SHARELATEX_SITE_URL"] || 'http://localhost'), - # The name this is used to describe your ShareLaTeX Installation - appName: process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX (Community Edition)" + // The name this is used to describe your ShareLaTeX Installation + appName: process.env["SHARELATEX_APP_NAME"] || "ShareLaTeX (Community Edition)", - restrictInvitesToExistingAccounts: process.env["SHARELATEX_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS"] == 'true' + restrictInvitesToExistingAccounts: process.env["SHARELATEX_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS"] === 'true', - nav: - title: process.env["SHARELATEX_NAV_TITLE"] or process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX Community Edition" + nav: { + title: process.env["SHARELATEX_NAV_TITLE"] || process.env["SHARELATEX_APP_NAME"] || "ShareLaTeX Community Edition" + }, - # The email address which users will be directed to as the main point of - # contact for this installation of ShareLaTeX. 
- adminEmail: process.env["SHARELATEX_ADMIN_EMAIL"] or "placeholder@example.com" + // The email address which users will be directed to as the main point of + // contact for this installation of ShareLaTeX. + adminEmail: process.env["SHARELATEX_ADMIN_EMAIL"] || "placeholder@example.com", - # If provided, a sessionSecret is used to sign cookies so that they cannot be - # spoofed. This is recommended. - security: - sessionSecret: process.env["SHARELATEX_SESSION_SECRET"] or process.env["CRYPTO_RANDOM"] + // If provided, a sessionSecret is used to sign cookies so that they cannot be + // spoofed. This is recommended. + security: { + sessionSecret: process.env["SHARELATEX_SESSION_SECRET"] || process.env["CRYPTO_RANDOM"] + }, - # These credentials are used for authenticating api requests - # between services that may need to go over public channels - httpAuthUsers: httpAuthUsers + // These credentials are used for authenticating api requests + // between services that may need to go over public channels + httpAuthUsers, - # Should javascript assets be served minified or not. - useMinifiedJs: true + // Should javascript assets be served minified or not. + useMinifiedJs: true, - # Should static assets be sent with a header to tell the browser to cache - # them. This should be false in development where changes are being made, - # but should be set to true in production. - cacheStaticAssets: true + // Should static assets be sent with a header to tell the browser to cache + // them. This should be false in development where changes are being made, + // but should be set to true in production. + cacheStaticAssets: true, - # If you are running ShareLaTeX over https, set this to true to send the - # cookie with a secure flag (recommended). - secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]? + // If you are running ShareLaTeX over https, set this to true to send the + // cookie with a secure flag (recommended). + secureCookie: (process.env["SHARELATEX_SECURE_COOKIE"] != null), - # If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc) - # then set this to true to allow it to correctly detect the forwarded IP - # address and http/https protocol information. + // If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc) + // then set this to true to allow it to correctly detect the forwarded IP + // address and http/https protocol information. 
- behindProxy: process.env["SHARELATEX_BEHIND_PROXY"] or false + behindProxy: process.env["SHARELATEX_BEHIND_PROXY"] || false, - i18n: - subdomainLang: - www: {lngCode:process.env["SHARELATEX_SITE_LANGUAGE"] or "en", url: siteUrl} - defaultLng: process.env["SHARELATEX_SITE_LANGUAGE"] or "en" + i18n: { + subdomainLang: { + www: {lngCode:process.env["SHARELATEX_SITE_LANGUAGE"] || "en", url: siteUrl} + }, + defaultLng: process.env["SHARELATEX_SITE_LANGUAGE"] || "en" + }, - currentImageName: process.env["TEX_LIVE_DOCKER_IMAGE"] + currentImageName: process.env["TEX_LIVE_DOCKER_IMAGE"], - apis: - web: - url: "http://localhost:3000" - user: httpAuthUser + apis: { + web: { + url: "http://localhost:3000", + user: httpAuthUser, pass: httpAuthPass - project_history: + }, + project_history: { enabled: false - references:{} - notifications:undefined + } + }, + references:{}, + notifications:undefined, - defaultFeatures: - collaborators: -1 - dropbox: true - versioning: true - compileTimeout: parseIntOrFail(process.env["COMPILE_TIMEOUT"] or 180) - compileGroup: "standard" - trackChanges: true - templates: true + defaultFeatures: { + collaborators: -1, + dropbox: true, + versioning: true, + compileTimeout: parseIntOrFail(process.env["COMPILE_TIMEOUT"] || 180), + compileGroup: "standard", + trackChanges: true, + templates: true, references: true + } +}; -## OPTIONAL CONFIGURABLE SETTINGS +//# OPTIONAL CONFIGURABLE SETTINGS -if process.env["SHARELATEX_LEFT_FOOTER"]? - try - settings.nav.left_footer = JSON.parse(process.env["SHARELATEX_LEFT_FOOTER"]) - catch e - console.error("could not parse SHARELATEX_LEFT_FOOTER, not valid JSON") +if (process.env["SHARELATEX_LEFT_FOOTER"] != null) { + try { + settings.nav.left_footer = JSON.parse(process.env["SHARELATEX_LEFT_FOOTER"]); + } catch (error) { + e = error; + console.error("could not parse SHARELATEX_LEFT_FOOTER, not valid JSON"); + } +} -if process.env["SHARELATEX_RIGHT_FOOTER"]? - settings.nav.right_footer = process.env["SHARELATEX_RIGHT_FOOTER"] - try - settings.nav.right_footer = JSON.parse(process.env["SHARELATEX_RIGHT_FOOTER"]) - catch e - console.error("could not parse SHARELATEX_RIGHT_FOOTER, not valid JSON") +if (process.env["SHARELATEX_RIGHT_FOOTER"] != null) { + settings.nav.right_footer = process.env["SHARELATEX_RIGHT_FOOTER"]; + try { + settings.nav.right_footer = JSON.parse(process.env["SHARELATEX_RIGHT_FOOTER"]); + } catch (error1) { + e = error1; + console.error("could not parse SHARELATEX_RIGHT_FOOTER, not valid JSON"); + } +} -if process.env["SHARELATEX_HEADER_IMAGE_URL"]? - settings.nav.custom_logo = process.env["SHARELATEX_HEADER_IMAGE_URL"] +if (process.env["SHARELATEX_HEADER_IMAGE_URL"] != null) { + settings.nav.custom_logo = process.env["SHARELATEX_HEADER_IMAGE_URL"]; +} -if process.env["SHARELATEX_HEADER_NAV_LINKS"]? - console.error """ +if (process.env["SHARELATEX_HEADER_NAV_LINKS"] != null) { + console.error(`\ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # WARNING: SHARELATEX_HEADER_NAV_LINKS is no longer supported # See https://github.com/sharelatex/sharelatex/wiki/Configuring-Headers,-Footers-&-Logo # -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -""" +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\ +` + ); +} -if process.env["SHARELATEX_HEADER_EXTRAS"]? 
- try - settings.nav.header_extras = JSON.parse(process.env["SHARELATEX_HEADER_EXTRAS"]) - catch e - console.error("could not parse SHARELATEX_HEADER_EXTRAS, not valid JSON") +if (process.env["SHARELATEX_HEADER_EXTRAS"] != null) { + try { + settings.nav.header_extras = JSON.parse(process.env["SHARELATEX_HEADER_EXTRAS"]); + } catch (error2) { + e = error2; + console.error("could not parse SHARELATEX_HEADER_EXTRAS, not valid JSON"); + } +} -# Sending Email -# ------------- -# -# You must configure a mail server to be able to send invite emails from -# ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer -# documentation for available options: -# -# http://www.nodemailer.com/docs/transports +// Sending Email +// ------------- +// +// You must configure a mail server to be able to send invite emails from +// ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer +// documentation for available options: +// +// http://www.nodemailer.com/docs/transports -if process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]? +if (process.env["SHARELATEX_EMAIL_FROM_ADDRESS"] != null) { - settings.email = - fromAddress: process.env["SHARELATEX_EMAIL_FROM_ADDRESS"] - replyTo: process.env["SHARELATEX_EMAIL_REPLY_TO"] or "" - driver: process.env["SHARELATEX_EMAIL_DRIVER"] - parameters: - #AWS Creds - AWSAccessKeyID: process.env["SHARELATEX_EMAIL_AWS_SES_ACCESS_KEY_ID"] - AWSSecretKey: process.env["SHARELATEX_EMAIL_AWS_SES_SECRET_KEY"] + settings.email = { + fromAddress: process.env["SHARELATEX_EMAIL_FROM_ADDRESS"], + replyTo: process.env["SHARELATEX_EMAIL_REPLY_TO"] || "", + driver: process.env["SHARELATEX_EMAIL_DRIVER"], + parameters: { + //AWS Creds + AWSAccessKeyID: process.env["SHARELATEX_EMAIL_AWS_SES_ACCESS_KEY_ID"], + AWSSecretKey: process.env["SHARELATEX_EMAIL_AWS_SES_SECRET_KEY"], - #SMTP Creds - host: process.env["SHARELATEX_EMAIL_SMTP_HOST"] + //SMTP Creds + host: process.env["SHARELATEX_EMAIL_SMTP_HOST"], port: process.env["SHARELATEX_EMAIL_SMTP_PORT"], - secure: parse(process.env["SHARELATEX_EMAIL_SMTP_SECURE"]) - ignoreTLS: parse(process.env["SHARELATEX_EMAIL_SMTP_IGNORE_TLS"]) - name: process.env["SHARELATEX_EMAIL_SMTP_NAME"] - logger: process.env["SHARELATEX_EMAIL_SMTP_LOGGER"] == 'true' + secure: parse(process.env["SHARELATEX_EMAIL_SMTP_SECURE"]), + ignoreTLS: parse(process.env["SHARELATEX_EMAIL_SMTP_IGNORE_TLS"]), + name: process.env["SHARELATEX_EMAIL_SMTP_NAME"], + logger: process.env["SHARELATEX_EMAIL_SMTP_LOGGER"] === 'true' + }, - textEncoding: process.env["SHARELATEX_EMAIL_TEXT_ENCODING"] - template: + textEncoding: process.env["SHARELATEX_EMAIL_TEXT_ENCODING"], + template: { customFooter: process.env["SHARELATEX_CUSTOM_EMAIL_FOOTER"] + } + }; - if process.env["SHARELATEX_EMAIL_AWS_SES_REGION"]? - settings.email.parameters.region = process.env["SHARELATEX_EMAIL_AWS_SES_REGION"] + if (process.env["SHARELATEX_EMAIL_AWS_SES_REGION"] != null) { + settings.email.parameters.region = process.env["SHARELATEX_EMAIL_AWS_SES_REGION"]; + } - if process.env["SHARELATEX_EMAIL_SMTP_USER"]? or process.env["SHARELATEX_EMAIL_SMTP_PASS"]? - settings.email.parameters.auth = - user: process.env["SHARELATEX_EMAIL_SMTP_USER"] + if ((process.env["SHARELATEX_EMAIL_SMTP_USER"] != null) || (process.env["SHARELATEX_EMAIL_SMTP_PASS"] != null)) { + settings.email.parameters.auth = { + user: process.env["SHARELATEX_EMAIL_SMTP_USER"], pass: process.env["SHARELATEX_EMAIL_SMTP_PASS"] + }; + } - if process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"]? 
+ if (process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"] != null) { settings.email.parameters.tls = - rejectUnauthorized: parse(process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"]) + {rejectUnauthorized: parse(process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"])}; + } +} -# i18n -if process.env["SHARELATEX_LANG_DOMAIN_MAPPING"]? +// i18n +if (process.env["SHARELATEX_LANG_DOMAIN_MAPPING"] != null) { - settings.i18n.subdomainLang = parse(process.env["SHARELATEX_LANG_DOMAIN_MAPPING"]) + settings.i18n.subdomainLang = parse(process.env["SHARELATEX_LANG_DOMAIN_MAPPING"]); +} -# Password Settings -# ----------- -# These restrict the passwords users can use when registering -# opts are from http://antelle.github.io/passfield -if process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"] +// Password Settings +// ----------- +// These restrict the passwords users can use when registering +// opts are from http://antelle.github.io/passfield +if (process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] || process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] || process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"]) { - settings.passwordStrengthOptions = - pattern: process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or "aA$3" - length: {min:process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or 8, max: process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"] or 150} + settings.passwordStrengthOptions = { + pattern: process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] || "aA$3", + length: {min:process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] || 8, max: process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"] || 150} + }; +} -####################### -# ShareLaTeX Server Pro -####################### +//###################### +// ShareLaTeX Server Pro +//###################### -if parse(process.env["SHARELATEX_IS_SERVER_PRO"]) == true - settings.bypassPercentageRollouts = true +if (parse(process.env["SHARELATEX_IS_SERVER_PRO"]) === true) { + settings.bypassPercentageRollouts = true; settings.apis.references = - url: "http://localhost:3040" + {url: "http://localhost:3040"}; +} -# LDAP - SERVER PRO ONLY -# ---------- +// LDAP - SERVER PRO ONLY +// ---------- -if process.env["SHARELATEX_LDAP_HOST"] - console.error """ +if (process.env["SHARELATEX_LDAP_HOST"]) { + console.error(`\ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # WARNING: The LDAP configuration format has changed in version 0.5.1 # See https://github.com/sharelatex/sharelatex/wiki/Server-Pro:-LDAP-Config # -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -""" +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\ +` + ); +} -if process.env["SHARELATEX_LDAP_URL"] - settings.externalAuth = true - settings.ldap = - emailAtt: process.env["SHARELATEX_LDAP_EMAIL_ATT"] - nameAtt: process.env["SHARELATEX_LDAP_NAME_ATT"] - lastNameAtt: process.env["SHARELATEX_LDAP_LAST_NAME_ATT"] - updateUserDetailsOnLogin: process.env["SHARELATEX_LDAP_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true' - placeholder: process.env["SHARELATEX_LDAP_PLACEHOLDER"] - server: - url: process.env["SHARELATEX_LDAP_URL"] - bindDn: process.env["SHARELATEX_LDAP_BIND_DN"] - bindCredentials: process.env["SHARELATEX_LDAP_BIND_CREDENTIALS"] - bindProperty: process.env["SHARELATEX_LDAP_BIND_PROPERTY"] - searchBase: process.env["SHARELATEX_LDAP_SEARCH_BASE"] 
- searchScope: process.env["SHARELATEX_LDAP_SEARCH_SCOPE"] - searchFilter: process.env["SHARELATEX_LDAP_SEARCH_FILTER"] +if (process.env["SHARELATEX_LDAP_URL"]) { + let _ldap_connect_timeout, _ldap_group_search_attribs, _ldap_search_attribs, _ldap_timeout; + settings.externalAuth = true; + settings.ldap = { + emailAtt: process.env["SHARELATEX_LDAP_EMAIL_ATT"], + nameAtt: process.env["SHARELATEX_LDAP_NAME_ATT"], + lastNameAtt: process.env["SHARELATEX_LDAP_LAST_NAME_ATT"], + updateUserDetailsOnLogin: process.env["SHARELATEX_LDAP_UPDATE_USER_DETAILS_ON_LOGIN"] === 'true', + placeholder: process.env["SHARELATEX_LDAP_PLACEHOLDER"], + server: { + url: process.env["SHARELATEX_LDAP_URL"], + bindDn: process.env["SHARELATEX_LDAP_BIND_DN"], + bindCredentials: process.env["SHARELATEX_LDAP_BIND_CREDENTIALS"], + bindProperty: process.env["SHARELATEX_LDAP_BIND_PROPERTY"], + searchBase: process.env["SHARELATEX_LDAP_SEARCH_BASE"], + searchScope: process.env["SHARELATEX_LDAP_SEARCH_SCOPE"], + searchFilter: process.env["SHARELATEX_LDAP_SEARCH_FILTER"], searchAttributes: ( - if _ldap_search_attribs = process.env["SHARELATEX_LDAP_SEARCH_ATTRIBUTES"] - try - JSON.parse(_ldap_search_attribs) - catch e - console.error "could not parse SHARELATEX_LDAP_SEARCH_ATTRIBUTES" - else + (_ldap_search_attribs = process.env["SHARELATEX_LDAP_SEARCH_ATTRIBUTES"]) ? + (() => { try { + return JSON.parse(_ldap_search_attribs); + } catch (error3) { + e = error3; + return console.error("could not parse SHARELATEX_LDAP_SEARCH_ATTRIBUTES"); + } })() + : undefined - ) - groupDnProperty: process.env["SHARELATEX_LDAP_GROUP_DN_PROPERTY"] - groupSearchBase: process.env["SHARELATEX_LDAP_GROUP_SEARCH_BASE"] - groupSearchScope: process.env["SHARELATEX_LDAP_GROUP_SEARCH_SCOPE"] - groupSearchFilter: process.env["SHARELATEX_LDAP_GROUP_SEARCH_FILTER"] + ), + groupDnProperty: process.env["SHARELATEX_LDAP_GROUP_DN_PROPERTY"], + groupSearchBase: process.env["SHARELATEX_LDAP_GROUP_SEARCH_BASE"], + groupSearchScope: process.env["SHARELATEX_LDAP_GROUP_SEARCH_SCOPE"], + groupSearchFilter: process.env["SHARELATEX_LDAP_GROUP_SEARCH_FILTER"], groupSearchAttributes: ( - if _ldap_group_search_attribs = process.env["SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"] - try - JSON.parse(_ldap_group_search_attribs) - catch e - console.error "could not parse SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES" - else + (_ldap_group_search_attribs = process.env["SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"]) ? + (() => { try { + return JSON.parse(_ldap_group_search_attribs); + } catch (error4) { + e = error4; + return console.error("could not parse SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"); + } })() + : undefined - ) - cache: process.env["SHARELATEX_LDAP_CACHE"] == 'true' + ), + cache: process.env["SHARELATEX_LDAP_CACHE"] === 'true', timeout: ( - if _ldap_timeout = process.env["SHARELATEX_LDAP_TIMEOUT"] - try - parseIntOrFail(_ldap_timeout) - catch e - console.error "Cannot parse SHARELATEX_LDAP_TIMEOUT" - else + (_ldap_timeout = process.env["SHARELATEX_LDAP_TIMEOUT"]) ? + (() => { try { + return parseIntOrFail(_ldap_timeout); + } catch (error5) { + e = error5; + return console.error("Cannot parse SHARELATEX_LDAP_TIMEOUT"); + } })() + : undefined - ) + ), connectTimeout: ( - if _ldap_connect_timeout = process.env["SHARELATEX_LDAP_CONNECT_TIMEOUT"] - try - parseIntOrFail(_ldap_connect_timeout) - catch e - console.error "Cannot parse SHARELATEX_LDAP_CONNECT_TIMEOUT" - else + (_ldap_connect_timeout = process.env["SHARELATEX_LDAP_CONNECT_TIMEOUT"]) ? 
+ (() => { try { + return parseIntOrFail(_ldap_connect_timeout); + } catch (error6) { + e = error6; + return console.error("Cannot parse SHARELATEX_LDAP_CONNECT_TIMEOUT"); + } })() + : undefined ) + } + }; - if process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"] - try - ca = JSON.parse(process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"]) - catch e - console.error "could not parse SHARELATEX_LDAP_TLS_OPTS_CA_PATH, invalid JSON" + if (process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"]) { + let ca, ca_paths; + try { + ca = JSON.parse(process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"]); + } catch (error7) { + e = error7; + console.error("could not parse SHARELATEX_LDAP_TLS_OPTS_CA_PATH, invalid JSON"); + } - if typeof(ca) == 'string' - ca_paths = [ca] - else if typeof(ca) == 'object' && ca?.length? - ca_paths = ca - else - console.error "problem parsing SHARELATEX_LDAP_TLS_OPTS_CA_PATH" + if (typeof(ca) === 'string') { + ca_paths = [ca]; + } else if ((typeof(ca) === 'object') && ((ca != null ? ca.length : undefined) != null)) { + ca_paths = ca; + } else { + console.error("problem parsing SHARELATEX_LDAP_TLS_OPTS_CA_PATH"); + } - settings.ldap.server.tlsOptions = - rejectUnauthorized: process.env["SHARELATEX_LDAP_TLS_OPTS_REJECT_UNAUTH"] == "true" - ca:ca_paths # e.g.'/etc/ldap/ca_certs.pem' + settings.ldap.server.tlsOptions = { + rejectUnauthorized: process.env["SHARELATEX_LDAP_TLS_OPTS_REJECT_UNAUTH"] === "true", + ca:ca_paths // e.g.'/etc/ldap/ca_certs.pem' + }; + } +} -if process.env["SHARELATEX_SAML_ENTRYPOINT"] - # NOTE: see https://github.com/node-saml/passport-saml/blob/master/README.md for docs of `server` options - settings.externalAuth = true - settings.saml = - updateUserDetailsOnLogin: process.env["SHARELATEX_SAML_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true' - identityServiceName: process.env["SHARELATEX_SAML_IDENTITY_SERVICE_NAME"] - emailField: process.env["SHARELATEX_SAML_EMAIL_FIELD"] || process.env["SHARELATEX_SAML_EMAIL_FIELD_NAME"] - firstNameField: process.env["SHARELATEX_SAML_FIRST_NAME_FIELD"] - lastNameField: process.env["SHARELATEX_SAML_LAST_NAME_FIELD"] - server: - # strings - entryPoint: process.env["SHARELATEX_SAML_ENTRYPOINT"] - callbackUrl: process.env["SHARELATEX_SAML_CALLBACK_URL"] - issuer: process.env["SHARELATEX_SAML_ISSUER"] - decryptionPvk: process.env["SHARELATEX_SAML_DECRYPTION_PVK"] - decryptionCert: process.env["SHARELATEX_SAML_DECRYPTION_CERT"] - signatureAlgorithm: process.env["SHARELATEX_SAML_SIGNATURE_ALGORITHM"] - identifierFormat: process.env["SHARELATEX_SAML_IDENTIFIER_FORMAT"] - attributeConsumingServiceIndex: process.env["SHARELATEX_SAML_ATTRIBUTE_CONSUMING_SERVICE_INDEX"] - authnContext: process.env["SHARELATEX_SAML_AUTHN_CONTEXT"] - authnRequestBinding: process.env["SHARELATEX_SAML_AUTHN_REQUEST_BINDING"] - validateInResponseTo: process.env["SHARELATEX_SAML_VALIDATE_IN_RESPONSE_TO"] - cacheProvider: process.env["SHARELATEX_SAML_CACHE_PROVIDER"] - logoutUrl: process.env["SHARELATEX_SAML_LOGOUT_URL"] - logoutCallbackUrl: process.env["SHARELATEX_SAML_LOGOUT_CALLBACK_URL"] - disableRequestedAuthnContext: process.env["SHARELATEX_SAML_DISABLE_REQUESTED_AUTHN_CONTEXT"] == 'true' - forceAuthn: process.env["SHARELATEX_SAML_FORCE_AUTHN"] == 'true' - skipRequestCompression: process.env["SHARELATEX_SAML_SKIP_REQUEST_COMPRESSION"] == 'true' +if (process.env["SHARELATEX_SAML_ENTRYPOINT"]) { + // NOTE: see https://github.com/node-saml/passport-saml/blob/master/README.md for docs of `server` options + let _saml_additionalAuthorizeParams, _saml_additionalLogoutParams, 
_saml_additionalParams, _saml_expiration, _saml_skew; + settings.externalAuth = true; + settings.saml = { + updateUserDetailsOnLogin: process.env["SHARELATEX_SAML_UPDATE_USER_DETAILS_ON_LOGIN"] === 'true', + identityServiceName: process.env["SHARELATEX_SAML_IDENTITY_SERVICE_NAME"], + emailField: process.env["SHARELATEX_SAML_EMAIL_FIELD"] || process.env["SHARELATEX_SAML_EMAIL_FIELD_NAME"], + firstNameField: process.env["SHARELATEX_SAML_FIRST_NAME_FIELD"], + lastNameField: process.env["SHARELATEX_SAML_LAST_NAME_FIELD"], + server: { + // strings + entryPoint: process.env["SHARELATEX_SAML_ENTRYPOINT"], + callbackUrl: process.env["SHARELATEX_SAML_CALLBACK_URL"], + issuer: process.env["SHARELATEX_SAML_ISSUER"], + decryptionPvk: process.env["SHARELATEX_SAML_DECRYPTION_PVK"], + decryptionCert: process.env["SHARELATEX_SAML_DECRYPTION_CERT"], + signatureAlgorithm: process.env["SHARELATEX_SAML_SIGNATURE_ALGORITHM"], + identifierFormat: process.env["SHARELATEX_SAML_IDENTIFIER_FORMAT"], + attributeConsumingServiceIndex: process.env["SHARELATEX_SAML_ATTRIBUTE_CONSUMING_SERVICE_INDEX"], + authnContext: process.env["SHARELATEX_SAML_AUTHN_CONTEXT"], + authnRequestBinding: process.env["SHARELATEX_SAML_AUTHN_REQUEST_BINDING"], + validateInResponseTo: process.env["SHARELATEX_SAML_VALIDATE_IN_RESPONSE_TO"], + cacheProvider: process.env["SHARELATEX_SAML_CACHE_PROVIDER"], + logoutUrl: process.env["SHARELATEX_SAML_LOGOUT_URL"], + logoutCallbackUrl: process.env["SHARELATEX_SAML_LOGOUT_CALLBACK_URL"], + disableRequestedAuthnContext: process.env["SHARELATEX_SAML_DISABLE_REQUESTED_AUTHN_CONTEXT"] === 'true', + forceAuthn: process.env["SHARELATEX_SAML_FORCE_AUTHN"] === 'true', + skipRequestCompression: process.env["SHARELATEX_SAML_SKIP_REQUEST_COMPRESSION"] === 'true', acceptedClockSkewMs: ( - if _saml_skew = process.env["SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"] - try - parseIntOrFail(_saml_skew) - catch e - console.error "Cannot parse SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS" - else + (_saml_skew = process.env["SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"]) ? + (() => { try { + return parseIntOrFail(_saml_skew); + } catch (error8) { + e = error8; + return console.error("Cannot parse SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"); + } })() + : undefined - ) + ), requestIdExpirationPeriodMs: ( - if _saml_expiration = process.env["SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"] - try - parseIntOrFail(_saml_expiration) - catch e - console.error "Cannot parse SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS" - else + (_saml_expiration = process.env["SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"]) ? + (() => { try { + return parseIntOrFail(_saml_expiration); + } catch (error9) { + e = error9; + return console.error("Cannot parse SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"); + } })() + : undefined - ) + ), additionalParams: ( - if _saml_additionalParams = process.env["SHARELATEX_SAML_ADDITIONAL_PARAMS"] - try - JSON.parse(_saml_additionalParams) - catch e - console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_PARAMS" - else + (_saml_additionalParams = process.env["SHARELATEX_SAML_ADDITIONAL_PARAMS"]) ? 
+ (() => { try { + return JSON.parse(_saml_additionalParams); + } catch (error10) { + e = error10; + return console.error("Cannot parse SHARELATEX_SAML_ADDITIONAL_PARAMS"); + } })() + : undefined - ) + ), additionalAuthorizeParams: ( - if _saml_additionalAuthorizeParams = process.env["SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"] - try - JSON.parse(_saml_additionalAuthorizeParams ) - catch e - console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS" - else + (_saml_additionalAuthorizeParams = process.env["SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"]) ? + (() => { try { + return JSON.parse(_saml_additionalAuthorizeParams ); + } catch (error11) { + e = error11; + return console.error("Cannot parse SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"); + } })() + : undefined - ) + ), additionalLogoutParams: ( - if _saml_additionalLogoutParams = process.env["SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"] - try - JSON.parse(_saml_additionalLogoutParams ) - catch e - console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS" - else + (_saml_additionalLogoutParams = process.env["SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"]) ? + (() => { try { + return JSON.parse(_saml_additionalLogoutParams ); + } catch (error12) { + e = error12; + return console.error("Cannot parse SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"); + } })() + : undefined ) + } + }; - # SHARELATEX_SAML_CERT cannot be empty - # https://github.com/node-saml/passport-saml/commit/f6b1c885c0717f1083c664345556b535f217c102 - if process.env["SHARELATEX_SAML_CERT"] - settings.saml.server.cert = process.env["SHARELATEX_SAML_CERT"] - settings.saml.server.privateCert = process.env["SHARELATEX_SAML_PRIVATE_CERT"] + // SHARELATEX_SAML_CERT cannot be empty + // https://github.com/node-saml/passport-saml/commit/f6b1c885c0717f1083c664345556b535f217c102 + if (process.env["SHARELATEX_SAML_CERT"]) { + settings.saml.server.cert = process.env["SHARELATEX_SAML_CERT"]; + settings.saml.server.privateCert = process.env["SHARELATEX_SAML_PRIVATE_CERT"]; + } +} -# Compiler -# -------- -if process.env["SANDBOXED_COMPILES"] == "true" - settings.clsi = - dockerRunner: true - docker: - image: process.env["TEX_LIVE_DOCKER_IMAGE"] - env: - HOME: "/tmp" - PATH: process.env["COMPILER_PATH"] or "/usr/local/texlive/2015/bin/x86_64-linux:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" +// Compiler +// -------- +if (process.env["SANDBOXED_COMPILES"] === "true") { + settings.clsi = { + dockerRunner: true, + docker: { + image: process.env["TEX_LIVE_DOCKER_IMAGE"], + env: { + HOME: "/tmp", + PATH: process.env["COMPILER_PATH"] || "/usr/local/texlive/2015/bin/x86_64-linux:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" + }, user: "www-data" + } + }; - if !settings.path? 
- settings.path = {} - settings.path.synctexBaseDir = () -> "/compile" - if process.env['SANDBOXED_COMPILES_SIBLING_CONTAINERS'] == 'true' - console.log("Using sibling containers for sandboxed compiles") - if process.env['SANDBOXED_COMPILES_HOST_DIR'] - settings.path.sandboxedCompilesHostDir = process.env['SANDBOXED_COMPILES_HOST_DIR'] - else - console.error('Sibling containers, but SANDBOXED_COMPILES_HOST_DIR not set') + if ((settings.path == null)) { + settings.path = {}; + } + settings.path.synctexBaseDir = () => "/compile"; + if (process.env['SANDBOXED_COMPILES_SIBLING_CONTAINERS'] === 'true') { + console.log("Using sibling containers for sandboxed compiles"); + if (process.env['SANDBOXED_COMPILES_HOST_DIR']) { + settings.path.sandboxedCompilesHostDir = process.env['SANDBOXED_COMPILES_HOST_DIR']; + } else { + console.error('Sibling containers, but SANDBOXED_COMPILES_HOST_DIR not set'); + } + } +} -# Templates -# --------- -if process.env["SHARELATEX_TEMPLATES_USER_ID"] - settings.templates = - mountPointUrl: "/templates" +// Templates +// --------- +if (process.env["SHARELATEX_TEMPLATES_USER_ID"]) { + settings.templates = { + mountPointUrl: "/templates", user_id: process.env["SHARELATEX_TEMPLATES_USER_ID"] + }; - settings.templateLinks = parse(process.env["SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS"]) + settings.templateLinks = parse(process.env["SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS"]); +} -# /Learn -# ------- -if process.env["SHARELATEX_PROXY_LEARN"]? - settings.proxyLearn = parse(process.env["SHARELATEX_PROXY_LEARN"]) +// /Learn +// ------- +if (process.env["SHARELATEX_PROXY_LEARN"] != null) { + settings.proxyLearn = parse(process.env["SHARELATEX_PROXY_LEARN"]); +} -# /References -# ----------- -if process.env["SHARELATEX_ELASTICSEARCH_URL"]? +// /References +// ----------- +if (process.env["SHARELATEX_ELASTICSEARCH_URL"] != null) { settings.references.elasticsearch = - host: process.env["SHARELATEX_ELASTICSEARCH_URL"] + {host: process.env["SHARELATEX_ELASTICSEARCH_URL"]}; +} -# TeX Live Images -# ----------- -if process.env["ALL_TEX_LIVE_DOCKER_IMAGES"]? - allTexLiveDockerImages = process.env["ALL_TEX_LIVE_DOCKER_IMAGES"].split(',') -if process.env["ALL_TEX_LIVE_DOCKER_IMAGE_NAMES"]? - allTexLiveDockerImageNames = process.env["ALL_TEX_LIVE_DOCKER_IMAGE_NAMES"].split(',') -if allTexLiveDockerImages? - settings.allowedImageNames = [] - for fullImageName, index in allTexLiveDockerImages - imageName = Path.basename(fullImageName) - imageDesc = if allTexLiveDockerImageNames? then allTexLiveDockerImageNames[index] else imageName - settings.allowedImageNames.push({ imageName, imageDesc }) +// TeX Live Images +// ----------- +if (process.env["ALL_TEX_LIVE_DOCKER_IMAGES"] != null) { + allTexLiveDockerImages = process.env["ALL_TEX_LIVE_DOCKER_IMAGES"].split(','); +} +if (process.env["ALL_TEX_LIVE_DOCKER_IMAGE_NAMES"] != null) { + allTexLiveDockerImageNames = process.env["ALL_TEX_LIVE_DOCKER_IMAGE_NAMES"].split(','); +} +if (allTexLiveDockerImages != null) { + settings.allowedImageNames = []; + for (let index = 0; index < allTexLiveDockerImages.length; index++) { + const fullImageName = allTexLiveDockerImages[index]; + const imageName = Path.basename(fullImageName); + const imageDesc = (allTexLiveDockerImageNames != null) ? 
allTexLiveDockerImageNames[index] : imageName; + settings.allowedImageNames.push({ imageName, imageDesc }); + } +} -# With lots of incoming and outgoing HTTP connections to different services, -# sometimes long running, it is a good idea to increase the default number -# of sockets that Node will hold open. -http = require('http') -http.globalAgent.maxSockets = 300 -https = require('https') -https.globalAgent.maxSockets = 300 +// With lots of incoming and outgoing HTTP connections to different services, +// sometimes long running, it is a good idea to increase the default number +// of sockets that Node will hold open. +const http = require('http'); +http.globalAgent.maxSockets = 300; +const https = require('https'); +https.globalAgent.maxSockets = 300; -module.exports = settings +module.exports = settings; diff --git a/server-ce/tasks/CreateAndDestroyUsers.js b/server-ce/tasks/CreateAndDestroyUsers.js index 79da259c67..cff38d35be 100644 --- a/server-ce/tasks/CreateAndDestroyUsers.js +++ b/server-ce/tasks/CreateAndDestroyUsers.js @@ -1,60 +1,79 @@ +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ -module.exports = (grunt) -> +module.exports = function(grunt) { - grunt.registerTask 'user:create-admin', "Create a user with the given email address and make them an admin. Update in place if the user already exists. Usage: grunt user:create-admin --email joe@example.com", () -> - done = @async() - email = grunt.option("email") - if !email? - console.error "Usage: grunt user:create-admin --email=joe@example.com" - process.exit(1) + grunt.registerTask('user:create-admin', "Create a user with the given email address and make them an admin. Update in place if the user already exists. Usage: grunt user:create-admin --email joe@example.com", function() { + const done = this.async(); + const email = grunt.option("email"); + if ((email == null)) { + console.error("Usage: grunt user:create-admin --email=joe@example.com"); + process.exit(1); + } - settings = require "settings-sharelatex" - mongodb = require "../web/app/src/infrastructure/mongodb" - UserRegistrationHandler = require "../web/app/src/Features/User/UserRegistrationHandler" - OneTimeTokenHandler = require "../web/app/src/Features/Security/OneTimeTokenHandler" - mongodb.waitForDb().then () -> - UserRegistrationHandler.registerNewUser { - email: email - password: require("crypto").randomBytes(32).toString("hex") - }, (error, user) -> - if error? and error?.message != "EmailAlreadyRegistered" - throw error - user.isAdmin = true - user.save (error) -> - throw error if error? - ONE_WEEK = 7 * 24 * 60 * 60 # seconds - OneTimeTokenHandler.getNewToken "password", { expiresIn: ONE_WEEK, email:user.email, user_id: user._id.toString() }, (err, token)-> - return next(err) if err? + const settings = require("settings-sharelatex"); + const mongodb = require("../web/app/src/infrastructure/mongodb"); + const UserRegistrationHandler = require("../web/app/src/Features/User/UserRegistrationHandler"); + const OneTimeTokenHandler = require("../web/app/src/Features/Security/OneTimeTokenHandler"); + return mongodb.waitForDb().then(() => UserRegistrationHandler.registerNewUser({ + email, + password: require("crypto").randomBytes(32).toString("hex") + }, function(error, user) { + if ((error != null) && ((error != null ? 
error.message : undefined) !== "EmailAlreadyRegistered")) { + throw error; + } + user.isAdmin = true; + return user.save(function(error) { + if (error != null) { throw error; } + const ONE_WEEK = 7 * 24 * 60 * 60; // seconds + return OneTimeTokenHandler.getNewToken("password", { expiresIn: ONE_WEEK, email:user.email, user_id: user._id.toString() }, function(err, token){ + if (err != null) { return next(err); } - console.log "" - console.log """ - Successfully created #{email} as an admin user. + console.log(""); + console.log(`\ +Successfully created ${email} as an admin user. - Please visit the following URL to set a password for #{email} and log in: +Please visit the following URL to set a password for ${email} and log in: - #{settings.siteUrl}/user/password/set?passwordResetToken=#{token} +${settings.siteUrl}/user/password/set?passwordResetToken=${token} +\ +` + ); + return done(); + }); + }); + })); + }); - """ - done() - - grunt.registerTask 'user:delete', "deletes a user and all their data, Usage: grunt user:delete --email joe@example.com", () -> - done = @async() - email = grunt.option("email") - if !email? - console.error "Usage: grunt user:delete --email=joe@example.com" - process.exit(1) - settings = require "settings-sharelatex" - mongodb = require "../web/app/src/infrastructure/mongodb" - UserGetter = require "../web/app/src/Features/User/UserGetter" - UserDeleter = require "../web/app/src/Features/User/UserDeleter" - mongodb.waitForDb().then () -> - UserGetter.getUser email:email, (error, user) -> - if error? - throw error - if !user? - console.log("user #{email} not in database, potentially already deleted") - return done() - UserDeleter.deleteUser user._id, (err)-> - if err? - throw err - done() + return grunt.registerTask('user:delete', "deletes a user and all their data, Usage: grunt user:delete --email joe@example.com", function() { + const done = this.async(); + const email = grunt.option("email"); + if ((email == null)) { + console.error("Usage: grunt user:delete --email=joe@example.com"); + process.exit(1); + } + const settings = require("settings-sharelatex"); + const mongodb = require("../web/app/src/infrastructure/mongodb"); + const UserGetter = require("../web/app/src/Features/User/UserGetter"); + const UserDeleter = require("../web/app/src/Features/User/UserDeleter"); + return mongodb.waitForDb().then(() => UserGetter.getUser({email}, function(error, user) { + if (error != null) { + throw error; + } + if ((user == null)) { + console.log(`user ${email} not in database, potentially already deleted`); + return done(); + } + return UserDeleter.deleteUser(user._id, function(err){ + if (err != null) { + throw err; + } + return done(); + }); + })); + }); +}; diff --git a/server-ce/tasks/ProjectSize.js b/server-ce/tasks/ProjectSize.js index 1d02d33d26..77565724cd 100644 --- a/server-ce/tasks/ProjectSize.js +++ b/server-ce/tasks/ProjectSize.js @@ -1,24 +1,24 @@ -# require("coffee-script") +// require("coffee-script") -# fs = require("fs") -# _ = require("underscore") +// fs = require("fs") +// _ = require("underscore") -# if not process.argv[2] -# console.log "Usage: coffee project_size.coffee user_files_path" -# else -# dirPath = process.argv[2] -# if not fs.lstatSync(dirPath).isDirectory() -# console.log dirPath + " directory not exist" -# else -# fs.readdir dirPath, (err, files)-> -# projects = [] -# files.forEach (file)-> -# project_id = file.split("_")[0] -# if !projects[project_id] -# projects[project_id] = 0 -# projects[project_id] += 
fs.lstatSync(dirPath+"/"+file).size +// if not process.argv[2] +// console.log "Usage: coffee project_size.coffee user_files_path" +// else +// dirPath = process.argv[2] +// if not fs.lstatSync(dirPath).isDirectory() +// console.log dirPath + " directory not exist" +// else +// fs.readdir dirPath, (err, files)-> +// projects = [] +// files.forEach (file)-> +// project_id = file.split("_")[0] +// if !projects[project_id] +// projects[project_id] = 0 +// projects[project_id] += fs.lstatSync(dirPath+"/"+file).size -# ids = _.keys projects -# console.log "project \t size" -# ids.forEach (id)-> -# console.log id + "\t" + projects[id] +// ids = _.keys projects +// console.log "project \t size" +// ids.forEach (id)-> +// console.log id + "\t" + projects[id]
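
Note on the generated Gruntfile.js: the decaffeinate banner at the top of the file (DS101, DS205) flags exactly the patterns visible in the hunks above -- `Array.from(SERVICES)` loops wrapped in IIFEs with `result`, `result1`, ... accumulators. A minimal sketch of the suggested cleanup, using only names that appear in the patch (`SERVICES` from ./config/services) and assuming no behavioural change is wanted:

    // DS101 + DS205: each comprehension-style IIFE reduces to Array.prototype.map.
    const runTasks = SERVICES.map(service => `run:${service.name}`);
    const installTasks = SERVICES
      .map(service => `install:${service.name}`)
      .concat(["install:all", "install"]);
    const updateTasks = SERVICES
      .map(service => `update:${service.name}`)
      .concat(["update:all", "update"]);

This also drops the shared mutable `service` binding, which the generated `for...of` loops hoist to function scope with `let service;`.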
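Likewise for settings.js and the DS207 suggestion: the emitted null checks all follow the `(x != null ? x.prop : undefined)` shape, so on Node 14+ they reduce to optional chaining (the same rewrite retires the unused `__guard__` helper appended to Gruntfile.js). A sketch against the SHARELATEX_LDAP_TLS_OPTS_CA_PATH block from the patch, assuming `ca` and `ca_paths` in scope as there:

    // As emitted by decaffeinate:
    if ((typeof(ca) === 'object') && ((ca != null ? ca.length : undefined) != null)) {
      ca_paths = ca;
    }

    // Equivalent with optional chaining (DS103 / DS207):
    if (typeof ca === 'object' && ca?.length != null) {
      ca_paths = ca;
    }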