diff --git a/services/document-updater/app.js b/services/document-updater/app.js
index 3ca7ef2b6f..602f9be9d0 100644
--- a/services/document-updater/app.js
+++ b/services/document-updater/app.js
@@ -1,164 +1,196 @@
-Metrics = require "metrics-sharelatex"
-Metrics.initialize("doc-updater")
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const Metrics = require("metrics-sharelatex");
+Metrics.initialize("doc-updater");
 
-express = require('express')
-http = require("http")
-Settings = require('settings-sharelatex')
-logger = require('logger-sharelatex')
-logger.initialize("document-updater")
+const express = require('express');
+const http = require("http");
+const Settings = require('settings-sharelatex');
+const logger = require('logger-sharelatex');
+logger.initialize("document-updater");
 
-logger.logger.addSerializers(require("./app/js/LoggerSerializers"))
+logger.logger.addSerializers(require("./app/js/LoggerSerializers"));
 
-if Settings.sentry?.dsn?
-  logger.initializeErrorReporting(Settings.sentry.dsn)
+if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
+  logger.initializeErrorReporting(Settings.sentry.dsn);
+}
 
-RedisManager = require('./app/js/RedisManager')
-DispatchManager = require('./app/js/DispatchManager')
-DeleteQueueManager = require('./app/js/DeleteQueueManager')
-Errors = require "./app/js/Errors"
-HttpController = require "./app/js/HttpController"
-mongojs = require "./app/js/mongojs"
-async = require "async"
+const RedisManager = require('./app/js/RedisManager');
+const DispatchManager = require('./app/js/DispatchManager');
+const DeleteQueueManager = require('./app/js/DeleteQueueManager');
+const Errors = require("./app/js/Errors");
+const HttpController = require("./app/js/HttpController");
+const mongojs = require("./app/js/mongojs");
+const async = require("async");
 
-Path = require "path"
-bodyParser = require "body-parser"
+const Path = require("path");
+const bodyParser = require("body-parser");
 
-Metrics.mongodb.monitor(Path.resolve(__dirname + "/node_modules/mongojs/node_modules/mongodb"), logger)
-Metrics.event_loop.monitor(logger, 100)
+Metrics.mongodb.monitor(Path.resolve(__dirname + "/node_modules/mongojs/node_modules/mongodb"), logger);
+Metrics.event_loop.monitor(logger, 100);
 
-app = express()
+const app = express();
 app.use(Metrics.http.monitor(logger));
-app.use bodyParser.json({limit: (Settings.max_doc_length + 64 * 1024)})
-Metrics.injectMetricsRoute(app)
+app.use(bodyParser.json({limit: (Settings.max_doc_length + (64 * 1024))}));
+Metrics.injectMetricsRoute(app);
 
-DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10)
+DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10);
 
-app.param 'project_id', (req, res, next, project_id) ->
-  if project_id?.match /^[0-9a-f]{24}$/
-    next()
-  else
-    next new Error("invalid project id")
+app.param('project_id', function(req, res, next, project_id) {
+  if ((project_id != null ? project_id.match(/^[0-9a-f]{24}$/) : undefined)) {
+    return next();
+  } else {
+    return next(new Error("invalid project id"));
+  }
+});
 
-app.param 'doc_id', (req, res, next, doc_id) ->
-  if doc_id?.match /^[0-9a-f]{24}$/
-    next()
-  else
-    next new Error("invalid doc id")
+app.param('doc_id', function(req, res, next, doc_id) {
+  if ((doc_id != null ? doc_id.match(/^[0-9a-f]{24}$/) : undefined)) {
+    return next();
+  } else {
+    return next(new Error("invalid doc id"));
+  }
+});
 
-app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc
-# temporarily keep the GET method for backwards compatibility
-app.get '/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld
-# will migrate to the POST method of get_and_flush_if_old instead
-app.post '/project/:project_id/get_and_flush_if_old', HttpController.getProjectDocsAndFlushIfOld
-app.post '/project/:project_id/clearState', HttpController.clearProjectState
-app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc
-app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded
-app.delete '/project/:project_id/doc/:doc_id', HttpController.deleteDoc
-app.delete '/project/:project_id', HttpController.deleteProject
-app.delete '/project', HttpController.deleteMultipleProjects
-app.post '/project/:project_id', HttpController.updateProject
-app.post '/project/:project_id/history/resync', HttpController.resyncProjectHistory
-app.post '/project/:project_id/flush', HttpController.flushProject
-app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges
-app.post '/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges
-app.delete '/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment
+app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc);
+// temporarily keep the GET method for backwards compatibility
+app.get('/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld);
+// will migrate to the POST method of get_and_flush_if_old instead
+app.post('/project/:project_id/get_and_flush_if_old', HttpController.getProjectDocsAndFlushIfOld);
+app.post('/project/:project_id/clearState', HttpController.clearProjectState);
+app.post('/project/:project_id/doc/:doc_id', HttpController.setDoc);
+app.post('/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded);
+app.delete('/project/:project_id/doc/:doc_id', HttpController.deleteDoc);
+app.delete('/project/:project_id', HttpController.deleteProject);
+app.delete('/project', HttpController.deleteMultipleProjects);
+app.post('/project/:project_id', HttpController.updateProject);
+app.post('/project/:project_id/history/resync', HttpController.resyncProjectHistory);
+app.post('/project/:project_id/flush', HttpController.flushProject);
+app.post('/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges);
+app.post('/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges);
+app.delete('/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment);
 
-app.get '/flush_all_projects', HttpController.flushAllProjects
-app.get '/flush_queued_projects', HttpController.flushQueuedProjects
+app.get('/flush_all_projects', HttpController.flushAllProjects);
+app.get('/flush_queued_projects', HttpController.flushQueuedProjects);
 
-app.get '/total', (req, res)->
-  timer = new Metrics.Timer("http.allDocList")
-  RedisManager.getCountOfDocsInMemory (err, count)->
-    timer.done()
-    res.send {total:count}
+app.get('/total', function(req, res){
+  const timer = new Metrics.Timer("http.allDocList");
+  return RedisManager.getCountOfDocsInMemory(function(err, count){
+    timer.done();
+    return res.send({total:count});});
+});
 
-app.get '/status', (req, res)->
-  if Settings.shuttingDown
-    res.sendStatus 503 # Service unavailable
-  else
-    res.send('document updater is alive')
+app.get('/status', function(req, res){
+  if (Settings.shuttingDown) {
+    return res.sendStatus(503); // Service unavailable
+  } else {
+    return res.send('document updater is alive');
+  }
+});
 
-pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub)
-app.get "/health_check/redis", (req, res, next) ->
-  pubsubClient.healthCheck (error) ->
-    if error?
-      logger.err {err: error}, "failed redis health check"
-      res.sendStatus 500
-    else
-      res.sendStatus 200
+const pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub);
+app.get("/health_check/redis", (req, res, next) => pubsubClient.healthCheck(function(error) {
+  if (error != null) {
+    logger.err({err: error}, "failed redis health check");
+    return res.sendStatus(500);
+  } else {
+    return res.sendStatus(200);
+  }
+}));
 
-docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
-app.get "/health_check/redis_cluster", (req, res, next) ->
-  docUpdaterRedisClient.healthCheck (error) ->
-    if error?
-      logger.err {err: error}, "failed redis cluster health check"
-      res.sendStatus 500
-    else
-      res.sendStatus 200
+const docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater);
+app.get("/health_check/redis_cluster", (req, res, next) => docUpdaterRedisClient.healthCheck(function(error) {
+  if (error != null) {
+    logger.err({err: error}, "failed redis cluster health check");
+    return res.sendStatus(500);
+  } else {
+    return res.sendStatus(200);
+  }
+}));
 
-app.get "/health_check", (req, res, next) ->
-  async.series [
-    (cb) ->
-      pubsubClient.healthCheck (error) ->
-        if error?
-          logger.err {err: error}, "failed redis health check"
-        cb(error)
-    (cb) ->
-      docUpdaterRedisClient.healthCheck (error) ->
-        if error?
-          logger.err {err: error}, "failed redis cluster health check"
        cb(error)
-    (cb) ->
-      mongojs.healthCheck (error) ->
-        if error?
-          logger.err {err: error}, "failed mongo health check"
-        cb(error)
-  ] , (error) ->
-    if error?
-      res.sendStatus 500
-    else
-      res.sendStatus 200
+app.get("/health_check", (req, res, next) => async.series([
+  cb => pubsubClient.healthCheck(function(error) {
+    if (error != null) {
+      logger.err({err: error}, "failed redis health check");
+    }
+    return cb(error);
+  }),
+  cb => docUpdaterRedisClient.healthCheck(function(error) {
+    if (error != null) {
+      logger.err({err: error}, "failed redis cluster health check");
+    }
+    return cb(error);
+  }),
+  cb => mongojs.healthCheck(function(error) {
+    if (error != null) {
+      logger.err({err: error}, "failed mongo health check");
+    }
+    return cb(error);
+  })
+] , function(error) {
+  if (error != null) {
+    return res.sendStatus(500);
+  } else {
+    return res.sendStatus(200);
+  }
+}));
 
-app.use (error, req, res, next) ->
-  if error instanceof Errors.NotFoundError
-    res.sendStatus 404
-  else if error instanceof Errors.OpRangeNotAvailableError
-    res.sendStatus 422 # Unprocessable Entity
-  else if error.statusCode is 413
-    res.status(413).send("request entity too large")
-  else
-    logger.error err: error, req: req, "request errored"
-    res.status(500).send("Oops, something went wrong")
+app.use(function(error, req, res, next) {
+  if (error instanceof Errors.NotFoundError) {
+    return res.sendStatus(404);
+  } else if (error instanceof Errors.OpRangeNotAvailableError) {
+    return res.sendStatus(422); // Unprocessable Entity
+  } else if (error.statusCode === 413) {
+    return res.status(413).send("request entity too large");
+  } else {
+    logger.error({err: error, req}, "request errored");
+    return res.status(500).send("Oops, something went wrong");
+  }
+});
 
-shutdownCleanly = (signal) ->
-  return () ->
-    logger.log signal: signal, "received interrupt, cleaning up"
-    Settings.shuttingDown = true
-    setTimeout () ->
-      logger.log signal: signal, "shutting down"
-      process.exit()
-    , 10000
+const shutdownCleanly = signal => (function() {
+  logger.log({signal}, "received interrupt, cleaning up");
+  Settings.shuttingDown = true;
+  return setTimeout(function() {
+    logger.log({signal}, "shutting down");
+    return process.exit();
+  }
+  , 10000);
+});
 
-watchForEvent = (eventName)->
-  docUpdaterRedisClient.on eventName, (e)->
-    console.log "redis event: #{eventName} #{e}"
+const watchForEvent = eventName => docUpdaterRedisClient.on(eventName, e => console.log(`redis event: ${eventName} ${e}`));
 
-events = ["connect", "ready", "error", "close", "reconnecting", "end"]
-for eventName in events
-  watchForEvent(eventName)
+const events = ["connect", "ready", "error", "close", "reconnecting", "end"];
+for (let eventName of Array.from(events)) {
+  watchForEvent(eventName);
+}
 
-port = Settings.internal?.documentupdater?.port or Settings.apis?.documentupdater?.port or 3003
-host = Settings.internal.documentupdater.host or "localhost"
-if !module.parent # Called directly
-  app.listen port, host, ->
-    logger.info "Document-updater starting up, listening on #{host}:#{port}"
-    if Settings.continuousBackgroundFlush
-      logger.info "Starting continuous background flush"
-      DeleteQueueManager.startBackgroundFlush()
+const port = __guard__(Settings.internal != null ? Settings.internal.documentupdater : undefined, x => x.port) || __guard__(Settings.apis != null ? Settings.apis.documentupdater : undefined, x1 => x1.port) || 3003;
+const host = Settings.internal.documentupdater.host || "localhost";
+if (!module.parent) { // Called directly
+  app.listen(port, host, function() {
+    logger.info(`Document-updater starting up, listening on ${host}:${port}`);
+    if (Settings.continuousBackgroundFlush) {
+      logger.info("Starting continuous background flush");
+      return DeleteQueueManager.startBackgroundFlush();
+    }
+  });
+}
 
-module.exports = app
+module.exports = app;
 
-for signal in ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT']
-  process.on signal, shutdownCleanly(signal)
+for (let signal of ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT']) {
+  process.on(signal, shutdownCleanly(signal));
+}
+
+function __guard__(value, transform) {
+  return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
+}
\ No newline at end of file
diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js
index 0ced9eeedd..190fbff56f 100755
--- a/services/document-updater/config/settings.defaults.js
+++ b/services/document-updater/config/settings.defaults.js
@@ -1,97 +1,115 @@
-Path = require('path')
-http = require('http')
-http.globalAgent.maxSockets = 300
+const Path = require('path');
+const http = require('http');
+http.globalAgent.maxSockets = 300;
 
-module.exports =
-  internal:
-    documentupdater:
-      host: process.env["LISTEN_ADDRESS"] or "localhost"
+module.exports = {
+  internal: {
+    documentupdater: {
+      host: process.env["LISTEN_ADDRESS"] || "localhost",
       port: 3003
+    }
+  },
 
-  apis:
-    web:
-      url: "http://#{process.env['WEB_API_HOST'] or process.env['WEB_HOST'] or "localhost"}:#{process.env['WEB_API_PORT'] or process.env['WEB_PORT'] or 3000}"
-      user: process.env['WEB_API_USER'] or "sharelatex"
-      pass: process.env['WEB_API_PASSWORD'] or "password"
-    trackchanges:
-      url: "http://#{process.env["TRACK_CHANGES_HOST"] or "localhost"}:3015"
-    project_history:
-      enabled: true
-      url: "http://#{process.env["PROJECT_HISTORY_HOST"] or "localhost"}:3054"
+  apis: {
+    web: {
+      url: `http://${process.env['WEB_API_HOST'] || process.env['WEB_HOST'] || "localhost"}:${process.env['WEB_API_PORT'] || process.env['WEB_PORT'] || 3000}`,
+      user: process.env['WEB_API_USER'] || "sharelatex",
+      pass: process.env['WEB_API_PASSWORD'] || "password"
+    },
+    trackchanges: {
+      url: `http://${process.env["TRACK_CHANGES_HOST"] || "localhost"}:3015`
+    },
+    project_history: {
+      enabled: true,
+      url: `http://${process.env["PROJECT_HISTORY_HOST"] || "localhost"}:3054`
+    }
+  },
 
-  redis:
+  redis: {
 
-    pubsub:
-      host: process.env['PUBSUB_REDIS_HOST'] or process.env['REDIS_HOST'] or "localhost"
-      port: process.env['PUBSUB_REDIS_PORT'] or process.env['REDIS_PORT'] or "6379"
-      password: process.env["PUBSUB_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or ""
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20")
+    pubsub: {
+      host: process.env['PUBSUB_REDIS_HOST'] || process.env['REDIS_HOST'] || "localhost",
+      port: process.env['PUBSUB_REDIS_PORT'] || process.env['REDIS_PORT'] || "6379",
+      password: process.env["PUBSUB_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
+      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20")
+    },
 
-    history:
-      port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379"
-      host: process.env["HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost"
password: process.env["HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") - key_schema: - uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}" - docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}" + history: { + port: process.env["HISTORY_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379", + host: process.env["HISTORY_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost", + password: process.env["HISTORY_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "", + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"), + key_schema: { + uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:{${doc_id}}`; }, + docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:{${project_id}}`; } + } + }, - project_history: - port: process.env["NEW_HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" - host: process.env["NEW_HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" - password: process.env["NEW_HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") - key_schema: - projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}" - projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}" + project_history: { + port: process.env["NEW_HISTORY_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379", + host: process.env["NEW_HISTORY_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost", + password: process.env["NEW_HISTORY_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "", + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"), + key_schema: { + projectHistoryOps({project_id}) { return `ProjectHistory:Ops:{${project_id}}`; }, + projectHistoryFirstOpTimestamp({project_id}) { return `ProjectHistory:FirstOpTimestamp:{${project_id}}`; } + } + }, - lock: - port: process.env["LOCK_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" - host: process.env["LOCK_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" - password: process.env["LOCK_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") - key_schema: - blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" + lock: { + port: process.env["LOCK_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379", + host: process.env["LOCK_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost", + password: process.env["LOCK_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "", + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"), + key_schema: { + blockingKey({doc_id}) { return `Blocking:{${doc_id}}`; } + } + }, - documentupdater: - port: process.env["DOC_UPDATER_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379" - host: process.env["DOC_UPDATER_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost" - password: process.env["DOC_UPDATER_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or "" - maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20") - key_schema: - blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}" - docLines: ({doc_id}) -> "doclines:{#{doc_id}}" - docOps: ({doc_id}) -> "DocOps:{#{doc_id}}" - docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}" - docHash: ({doc_id}) -> "DocHash:{#{doc_id}}" - projectKey: 
({doc_id}) -> "ProjectId:{#{doc_id}}" - docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}" - ranges: ({doc_id}) -> "Ranges:{#{doc_id}}" - unflushedTime: ({doc_id}) -> "UnflushedTime:{#{doc_id}}" - pathname: ({doc_id}) -> "Pathname:{#{doc_id}}" - projectHistoryId: ({doc_id}) -> "ProjectHistoryId:{#{doc_id}}" - projectHistoryType: ({doc_id}) -> "ProjectHistoryType:{#{doc_id}}" - projectState: ({project_id}) -> "ProjectState:{#{project_id}}" - pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" - lastUpdatedBy: ({doc_id}) -> "lastUpdatedBy:{#{doc_id}}" - lastUpdatedAt: ({doc_id}) -> "lastUpdatedAt:{#{doc_id}}" - pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}" - flushAndDeleteQueue: () -> "DocUpdaterFlushAndDeleteQueue" + documentupdater: { + port: process.env["DOC_UPDATER_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379", + host: process.env["DOC_UPDATER_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost", + password: process.env["DOC_UPDATER_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "", + maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"), + key_schema: { + blockingKey({doc_id}) { return `Blocking:{${doc_id}}`; }, + docLines({doc_id}) { return `doclines:{${doc_id}}`; }, + docOps({doc_id}) { return `DocOps:{${doc_id}}`; }, + docVersion({doc_id}) { return `DocVersion:{${doc_id}}`; }, + docHash({doc_id}) { return `DocHash:{${doc_id}}`; }, + projectKey({doc_id}) { return `ProjectId:{${doc_id}}`; }, + docsInProject({project_id}) { return `DocsIn:{${project_id}}`; }, + ranges({doc_id}) { return `Ranges:{${doc_id}}`; }, + unflushedTime({doc_id}) { return `UnflushedTime:{${doc_id}}`; }, + pathname({doc_id}) { return `Pathname:{${doc_id}}`; }, + projectHistoryId({doc_id}) { return `ProjectHistoryId:{${doc_id}}`; }, + projectHistoryType({doc_id}) { return `ProjectHistoryType:{${doc_id}}`; }, + projectState({project_id}) { return `ProjectState:{${project_id}}`; }, + pendingUpdates({doc_id}) { return `PendingUpdates:{${doc_id}}`; }, + lastUpdatedBy({doc_id}) { return `lastUpdatedBy:{${doc_id}}`; }, + lastUpdatedAt({doc_id}) { return `lastUpdatedAt:{${doc_id}}`; }, + pendingUpdates({doc_id}) { return `PendingUpdates:{${doc_id}}`; }, + flushAndDeleteQueue() { return "DocUpdaterFlushAndDeleteQueue"; } + } + } + }, - max_doc_length: 2 * 1024 * 1024 # 2mb + max_doc_length: 2 * 1024 * 1024, // 2mb - dispatcherCount: process.env["DISPATCHER_COUNT"] + dispatcherCount: process.env["DISPATCHER_COUNT"], - mongo: - url : process.env['MONGO_CONNECTION_STRING'] || "mongodb://#{process.env['MONGO_HOST'] or '127.0.0.1'}/sharelatex" + mongo: { + url : process.env['MONGO_CONNECTION_STRING'] || `mongodb://${process.env['MONGO_HOST'] || '127.0.0.1'}/sharelatex` + }, - sentry: + sentry: { dsn: process.env.SENTRY_DSN + }, - publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] or false + publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] || false, - continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] or false + continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] || false, - smoothingOffset: process.env['SMOOTHING_OFFSET'] or 1000 # milliseconds + smoothingOffset: process.env['SMOOTHING_OFFSET'] || 1000, // milliseconds - disableDoubleFlush: process.env['DISABLE_DOUBLE_FLUSH'] or false # don't flush track-changes for projects using project-history + disableDoubleFlush: process.env['DISABLE_DOUBLE_FLUSH'] || false // don't flush track-changes for projects using 
+  disableDoubleFlush: process.env['DISABLE_DOUBLE_FLUSH'] || false // don't flush track-changes for projects using project-history
+};
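
Note on the decaffeinate suggestions in the app.js header: DS101, DS103 and DS207 flag machine-generated patterns that have direct modern-JavaScript equivalents. The sketch below is a hypothetical follow-up cleanup, not part of this patch, and assumes Node 14+ (optional chaining); all names (Settings, logger, events, watchForEvent) are the ones already defined in app.js above.

    // Hypothetical cleanup sketch (not part of the diff): modern equivalents
    // of the decaffeinate-generated guards, assuming Node 14+.

    // DS207: the null-check chain around the Sentry DSN collapses to
    // optional chaining with identical semantics.
    if (Settings.sentry?.dsn != null) {
      logger.initializeErrorReporting(Settings.sentry.dsn);
    }

    // DS103: the two __guard__ wrappers in the port lookup likewise
    // reduce to optional chaining.
    const port =
      Settings.internal?.documentupdater?.port ||
      Settings.apis?.documentupdater?.port ||
      3003;

    // DS101: Array.from over a plain array literal is redundant;
    // iterate the array directly.
    for (const eventName of events) {
      watchForEvent(eventName);
    }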
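
The braces in the key_schema values (e.g. `doclines:{doc_id}`) are Redis Cluster hash tags: only the text between the first `{` and `}` is hashed when choosing a slot, so every key derived from the same doc_id or project_id maps to the same node and multi-key operations on them stay valid. A small illustration, loading the defaults file directly (illustration only; the service normally reads it via settings-sharelatex) with a hypothetical id:

    // Illustration only: all keys for one doc share the {doc_id} hash tag,
    // so they land in the same Redis Cluster slot.
    const Settings = require('./config/settings.defaults');
    const keys = Settings.redis.documentupdater.key_schema;
    const doc_id = '53c3e9f10030cafeb1e00001'; // hypothetical 24-char hex id
    console.log(keys.docLines({doc_id}));   // "doclines:{53c3e9f10030cafeb1e00001}"
    console.log(keys.docVersion({doc_id})); // "DocVersion:{53c3e9f10030cafeb1e00001}", same tag, same slot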