mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-21 20:47:08 -05:00
decaffeinate: convert individual files to js

parent 86c6c38a6e
commit 877bae34b3

2 changed files with 267 additions and 216 deletions
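decaffeinate performs a mechanical CoffeeScript-to-JavaScript translation: the existential operator compiles to explicit null checks rather than modern optional chaining, which is why the converted code below is full of `x != null ? x.y : undefined` expressions and a generated `__guard__` helper. A minimal sketch of the translation pattern, taken from the `Settings.sentry` check in this diff:

// CoffeeScript source: if Settings.sentry?.dsn?
// decaffeinate output (ES5-compatible, no optional chaining):
if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
  logger.initializeErrorReporting(Settings.sentry.dsn);
}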
@@ -1,164 +1,196 @@
-Metrics = require "metrics-sharelatex"
-Metrics.initialize("doc-updater")
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const Metrics = require("metrics-sharelatex");
+Metrics.initialize("doc-updater");

-express = require('express')
+const express = require('express');
-http = require("http")
+const http = require("http");
-Settings = require('settings-sharelatex')
+const Settings = require('settings-sharelatex');
-logger = require('logger-sharelatex')
+const logger = require('logger-sharelatex');
-logger.initialize("document-updater")
+logger.initialize("document-updater");

-logger.logger.addSerializers(require("./app/js/LoggerSerializers"))
+logger.logger.addSerializers(require("./app/js/LoggerSerializers"));

-if Settings.sentry?.dsn?
-  logger.initializeErrorReporting(Settings.sentry.dsn)
+if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
+  logger.initializeErrorReporting(Settings.sentry.dsn);
+}

-RedisManager = require('./app/js/RedisManager')
+const RedisManager = require('./app/js/RedisManager');
-DispatchManager = require('./app/js/DispatchManager')
+const DispatchManager = require('./app/js/DispatchManager');
-DeleteQueueManager = require('./app/js/DeleteQueueManager')
+const DeleteQueueManager = require('./app/js/DeleteQueueManager');
-Errors = require "./app/js/Errors"
+const Errors = require("./app/js/Errors");
-HttpController = require "./app/js/HttpController"
+const HttpController = require("./app/js/HttpController");
-mongojs = require "./app/js/mongojs"
+const mongojs = require("./app/js/mongojs");
-async = require "async"
+const async = require("async");

-Path = require "path"
+const Path = require("path");
-bodyParser = require "body-parser"
+const bodyParser = require("body-parser");

-Metrics.mongodb.monitor(Path.resolve(__dirname + "/node_modules/mongojs/node_modules/mongodb"), logger)
+Metrics.mongodb.monitor(Path.resolve(__dirname + "/node_modules/mongojs/node_modules/mongodb"), logger);
-Metrics.event_loop.monitor(logger, 100)
+Metrics.event_loop.monitor(logger, 100);

-app = express()
+const app = express();
 app.use(Metrics.http.monitor(logger));
-app.use bodyParser.json({limit: (Settings.max_doc_length + 64 * 1024)})
+app.use(bodyParser.json({limit: (Settings.max_doc_length + (64 * 1024))}));
-Metrics.injectMetricsRoute(app)
+Metrics.injectMetricsRoute(app);

-DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10)
+DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10);

-app.param 'project_id', (req, res, next, project_id) ->
-  if project_id?.match /^[0-9a-f]{24}$/
-    next()
-  else
-    next new Error("invalid project id")
+app.param('project_id', function(req, res, next, project_id) {
+  if ((project_id != null ? project_id.match(/^[0-9a-f]{24}$/) : undefined)) {
+    return next();
+  } else {
+    return next(new Error("invalid project id"));
+  }
+});

-app.param 'doc_id', (req, res, next, doc_id) ->
-  if doc_id?.match /^[0-9a-f]{24}$/
-    next()
-  else
-    next new Error("invalid doc id")
+app.param('doc_id', function(req, res, next, doc_id) {
+  if ((doc_id != null ? doc_id.match(/^[0-9a-f]{24}$/) : undefined)) {
+    return next();
+  } else {
+    return next(new Error("invalid doc id"));
+  }
+});

-app.get '/project/:project_id/doc/:doc_id', HttpController.getDoc
+app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc);
-# temporarily keep the GET method for backwards compatibility
+// temporarily keep the GET method for backwards compatibility
-app.get '/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld
+app.get('/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld);
-# will migrate to the POST method of get_and_flush_if_old instead
+// will migrate to the POST method of get_and_flush_if_old instead
-app.post '/project/:project_id/get_and_flush_if_old', HttpController.getProjectDocsAndFlushIfOld
+app.post('/project/:project_id/get_and_flush_if_old', HttpController.getProjectDocsAndFlushIfOld);
-app.post '/project/:project_id/clearState', HttpController.clearProjectState
+app.post('/project/:project_id/clearState', HttpController.clearProjectState);
-app.post '/project/:project_id/doc/:doc_id', HttpController.setDoc
+app.post('/project/:project_id/doc/:doc_id', HttpController.setDoc);
-app.post '/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded
+app.post('/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded);
-app.delete '/project/:project_id/doc/:doc_id', HttpController.deleteDoc
+app.delete('/project/:project_id/doc/:doc_id', HttpController.deleteDoc);
-app.delete '/project/:project_id', HttpController.deleteProject
+app.delete('/project/:project_id', HttpController.deleteProject);
-app.delete '/project', HttpController.deleteMultipleProjects
+app.delete('/project', HttpController.deleteMultipleProjects);
-app.post '/project/:project_id', HttpController.updateProject
+app.post('/project/:project_id', HttpController.updateProject);
-app.post '/project/:project_id/history/resync', HttpController.resyncProjectHistory
+app.post('/project/:project_id/history/resync', HttpController.resyncProjectHistory);
-app.post '/project/:project_id/flush', HttpController.flushProject
+app.post('/project/:project_id/flush', HttpController.flushProject);
-app.post '/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges
+app.post('/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges);
-app.post '/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges
+app.post('/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges);
-app.delete '/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment
+app.delete('/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment);

-app.get '/flush_all_projects', HttpController.flushAllProjects
+app.get('/flush_all_projects', HttpController.flushAllProjects);
-app.get '/flush_queued_projects', HttpController.flushQueuedProjects
+app.get('/flush_queued_projects', HttpController.flushQueuedProjects);

-app.get '/total', (req, res)->
-  timer = new Metrics.Timer("http.allDocList")
-  RedisManager.getCountOfDocsInMemory (err, count)->
-    timer.done()
-    res.send {total:count}
+app.get('/total', function(req, res){
+  const timer = new Metrics.Timer("http.allDocList");
+  return RedisManager.getCountOfDocsInMemory(function(err, count){
+    timer.done();
+    return res.send({total:count});});
+});

-app.get '/status', (req, res)->
-  if Settings.shuttingDown
-    res.sendStatus 503 # Service unavailable
-  else
-    res.send('document updater is alive')
+app.get('/status', function(req, res){
+  if (Settings.shuttingDown) {
+    return res.sendStatus(503); // Service unavailable
+  } else {
+    return res.send('document updater is alive');
+  }
+});

-pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub)
+const pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub);
-app.get "/health_check/redis", (req, res, next) ->
-  pubsubClient.healthCheck (error) ->
-    if error?
-      logger.err {err: error}, "failed redis health check"
-      res.sendStatus 500
-    else
-      res.sendStatus 200
+app.get("/health_check/redis", (req, res, next) => pubsubClient.healthCheck(function(error) {
+  if (error != null) {
+    logger.err({err: error}, "failed redis health check");
+    return res.sendStatus(500);
+  } else {
+    return res.sendStatus(200);
+  }
+}));

-docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater)
+const docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater);
-app.get "/health_check/redis_cluster", (req, res, next) ->
-  docUpdaterRedisClient.healthCheck (error) ->
-    if error?
-      logger.err {err: error}, "failed redis cluster health check"
-      res.sendStatus 500
-    else
-      res.sendStatus 200
+app.get("/health_check/redis_cluster", (req, res, next) => docUpdaterRedisClient.healthCheck(function(error) {
+  if (error != null) {
+    logger.err({err: error}, "failed redis cluster health check");
+    return res.sendStatus(500);
+  } else {
+    return res.sendStatus(200);
+  }
+}));

-app.get "/health_check", (req, res, next) ->
-  async.series [
-    (cb) ->
-      pubsubClient.healthCheck (error) ->
-        if error?
-          logger.err {err: error}, "failed redis health check"
-        cb(error)
-    (cb) ->
-      docUpdaterRedisClient.healthCheck (error) ->
-        if error?
-          logger.err {err: error}, "failed redis cluster health check"
-        cb(error)
-    (cb) ->
-      mongojs.healthCheck (error) ->
-        if error?
-          logger.err {err: error}, "failed mongo health check"
-        cb(error)
-  ] , (error) ->
-    if error?
-      res.sendStatus 500
-    else
-      res.sendStatus 200
+app.get("/health_check", (req, res, next) => async.series([
+  cb => pubsubClient.healthCheck(function(error) {
+    if (error != null) {
+      logger.err({err: error}, "failed redis health check");
+    }
+    return cb(error);
+  }),
+  cb => docUpdaterRedisClient.healthCheck(function(error) {
+    if (error != null) {
+      logger.err({err: error}, "failed redis cluster health check");
+    }
+    return cb(error);
+  }),
+  cb => mongojs.healthCheck(function(error) {
+    if (error != null) {
+      logger.err({err: error}, "failed mongo health check");
+    }
+    return cb(error);
+  })
+] , function(error) {
+  if (error != null) {
+    return res.sendStatus(500);
+  } else {
+    return res.sendStatus(200);
+  }
+}));

-app.use (error, req, res, next) ->
-  if error instanceof Errors.NotFoundError
-    res.sendStatus 404
-  else if error instanceof Errors.OpRangeNotAvailableError
-    res.sendStatus 422 # Unprocessable Entity
-  else if error.statusCode is 413
-    res.status(413).send("request entity too large")
-  else
-    logger.error err: error, req: req, "request errored"
-    res.status(500).send("Oops, something went wrong")
+app.use(function(error, req, res, next) {
+  if (error instanceof Errors.NotFoundError) {
+    return res.sendStatus(404);
+  } else if (error instanceof Errors.OpRangeNotAvailableError) {
+    return res.sendStatus(422); // Unprocessable Entity
+  } else if (error.statusCode === 413) {
+    return res.status(413).send("request entity too large");
+  } else {
+    logger.error({err: error, req}, "request errored");
+    return res.status(500).send("Oops, something went wrong");
+  }
+});

-shutdownCleanly = (signal) ->
-  return () ->
-    logger.log signal: signal, "received interrupt, cleaning up"
-    Settings.shuttingDown = true
-    setTimeout () ->
-      logger.log signal: signal, "shutting down"
-      process.exit()
-    , 10000
+const shutdownCleanly = signal => (function() {
+  logger.log({signal}, "received interrupt, cleaning up");
+  Settings.shuttingDown = true;
+  return setTimeout(function() {
+    logger.log({signal}, "shutting down");
+    return process.exit();
+  }
+  , 10000);
+});

-watchForEvent = (eventName)->
-  docUpdaterRedisClient.on eventName, (e)->
-    console.log "redis event: #{eventName} #{e}"
+const watchForEvent = eventName => docUpdaterRedisClient.on(eventName, e => console.log(`redis event: ${eventName} ${e}`));

-events = ["connect", "ready", "error", "close", "reconnecting", "end"]
+const events = ["connect", "ready", "error", "close", "reconnecting", "end"];
-for eventName in events
-  watchForEvent(eventName)
+for (let eventName of Array.from(events)) {
+  watchForEvent(eventName);
+}

-port = Settings.internal?.documentupdater?.port or Settings.apis?.documentupdater?.port or 3003
+const port = __guard__(Settings.internal != null ? Settings.internal.documentupdater : undefined, x => x.port) || __guard__(Settings.apis != null ? Settings.apis.documentupdater : undefined, x1 => x1.port) || 3003;
-host = Settings.internal.documentupdater.host or "localhost"
+const host = Settings.internal.documentupdater.host || "localhost";
-if !module.parent # Called directly
-  app.listen port, host, ->
-    logger.info "Document-updater starting up, listening on #{host}:#{port}"
-    if Settings.continuousBackgroundFlush
-      logger.info "Starting continuous background flush"
-      DeleteQueueManager.startBackgroundFlush()
+if (!module.parent) { // Called directly
+  app.listen(port, host, function() {
+    logger.info(`Document-updater starting up, listening on ${host}:${port}`);
+    if (Settings.continuousBackgroundFlush) {
+      logger.info("Starting continuous background flush");
+      return DeleteQueueManager.startBackgroundFlush();
+    }
+  });
+}

-module.exports = app
+module.exports = app;

-for signal in ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT']
-  process.on signal, shutdownCleanly(signal)
+for (let signal of ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT']) {
+  process.on(signal, shutdownCleanly(signal));
+}

+function __guard__(value, transform) {
+  return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
+}
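The decaffeinate suggestion header left in the converted file (DS101, DS103, DS207) points at follow-up cleanups a later commit could apply by hand. A hedged sketch of what those might look like here, assuming a Node version with optional-chaining support (not guaranteed for this service at the time of the commit):

// DS103/DS207: optional chaining restores the readability of the CoffeeScript original
const port = Settings.internal?.documentupdater?.port || Settings.apis?.documentupdater?.port || 3003;

// DS101: `events` is a plain array, so the Array.from wrapper is redundant
for (const eventName of events) {
  watchForEvent(eventName);
}

// DS207: the sentry guard collapses to a single optional chain
if (Settings.sentry?.dsn != null) {
  logger.initializeErrorReporting(Settings.sentry.dsn);
}

With those applied, the generated `__guard__` helper at the bottom of the file would have no remaining callers and could be deleted.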
@@ -1,97 +1,116 @@
-Path = require('path')
+const Path = require('path');
-http = require('http')
+const http = require('http');
-http.globalAgent.maxSockets = 300
+http.globalAgent.maxSockets = 300;

-module.exports =
+module.exports = {
-  internal:
+  internal: {
-    documentupdater:
+    documentupdater: {
-      host: process.env["LISTEN_ADDRESS"] or "localhost"
+      host: process.env["LISTEN_ADDRESS"] || "localhost",
       port: 3003
+    }
+  },

-  apis:
+  apis: {
-    web:
+    web: {
-      url: "http://#{process.env['WEB_API_HOST'] or process.env['WEB_HOST'] or "localhost"}:#{process.env['WEB_API_PORT'] or process.env['WEB_PORT'] or 3000}"
+      url: `http://${process.env['WEB_API_HOST'] || process.env['WEB_HOST'] || "localhost"}:${process.env['WEB_API_PORT'] || process.env['WEB_PORT'] || 3000}`,
-      user: process.env['WEB_API_USER'] or "sharelatex"
+      user: process.env['WEB_API_USER'] || "sharelatex",
-      pass: process.env['WEB_API_PASSWORD'] or "password"
+      pass: process.env['WEB_API_PASSWORD'] || "password"
+    },
-    trackchanges:
+    trackchanges: {
-      url: "http://#{process.env["TRACK_CHANGES_HOST"] or "localhost"}:3015"
+      url: `http://${process.env["TRACK_CHANGES_HOST"] || "localhost"}:3015`
+    },
-    project_history:
+    project_history: {
-      enabled: true
+      enabled: true,
-      url: "http://#{process.env["PROJECT_HISTORY_HOST"] or "localhost"}:3054"
+      url: `http://${process.env["PROJECT_HISTORY_HOST"] || "localhost"}:3054`
+    }
+  },

-  redis:
+  redis: {
-    pubsub:
+    pubsub: {
-      host: process.env['PUBSUB_REDIS_HOST'] or process.env['REDIS_HOST'] or "localhost"
+      host: process.env['PUBSUB_REDIS_HOST'] || process.env['REDIS_HOST'] || "localhost",
-      port: process.env['PUBSUB_REDIS_PORT'] or process.env['REDIS_PORT'] or "6379"
+      port: process.env['PUBSUB_REDIS_PORT'] || process.env['REDIS_PORT'] || "6379",
-      password: process.env["PUBSUB_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or ""
+      password: process.env["PUBSUB_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20")
+      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20")
+    },

-    history:
+    history: {
-      port: process.env["HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379"
+      port: process.env["HISTORY_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379",
-      host: process.env["HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost"
+      host: process.env["HISTORY_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost",
-      password: process.env["HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or ""
+      password: process.env["HISTORY_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20")
+      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"),
-      key_schema:
+      key_schema: {
-        uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}"
+        uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:{${doc_id}}`; },
-        docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}"
+        docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:{${project_id}}`; }
+      }
+    },

-    project_history:
+    project_history: {
-      port: process.env["NEW_HISTORY_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379"
+      port: process.env["NEW_HISTORY_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379",
-      host: process.env["NEW_HISTORY_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost"
+      host: process.env["NEW_HISTORY_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost",
-      password: process.env["NEW_HISTORY_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or ""
+      password: process.env["NEW_HISTORY_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20")
+      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"),
-      key_schema:
+      key_schema: {
-        projectHistoryOps: ({project_id}) -> "ProjectHistory:Ops:{#{project_id}}"
+        projectHistoryOps({project_id}) { return `ProjectHistory:Ops:{${project_id}}`; },
-        projectHistoryFirstOpTimestamp: ({project_id}) -> "ProjectHistory:FirstOpTimestamp:{#{project_id}}"
+        projectHistoryFirstOpTimestamp({project_id}) { return `ProjectHistory:FirstOpTimestamp:{${project_id}}`; }
+      }
+    },

-    lock:
+    lock: {
-      port: process.env["LOCK_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379"
+      port: process.env["LOCK_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379",
-      host: process.env["LOCK_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost"
+      host: process.env["LOCK_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost",
-      password: process.env["LOCK_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or ""
+      password: process.env["LOCK_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20")
+      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"),
-      key_schema:
+      key_schema: {
-        blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}"
+        blockingKey({doc_id}) { return `Blocking:{${doc_id}}`; }
+      }
+    },

-    documentupdater:
+    documentupdater: {
-      port: process.env["DOC_UPDATER_REDIS_PORT"] or process.env["REDIS_PORT"] or "6379"
+      port: process.env["DOC_UPDATER_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379",
-      host: process.env["DOC_UPDATER_REDIS_HOST"] or process.env["REDIS_HOST"] or "localhost"
+      host: process.env["DOC_UPDATER_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost",
-      password: process.env["DOC_UPDATER_REDIS_PASSWORD"] or process.env["REDIS_PASSWORD"] or ""
+      password: process.env["DOC_UPDATER_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] or "20")
+      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"),
-      key_schema:
+      key_schema: {
-        blockingKey: ({doc_id}) -> "Blocking:{#{doc_id}}"
+        blockingKey({doc_id}) { return `Blocking:{${doc_id}}`; },
-        docLines: ({doc_id}) -> "doclines:{#{doc_id}}"
+        docLines({doc_id}) { return `doclines:{${doc_id}}`; },
-        docOps: ({doc_id}) -> "DocOps:{#{doc_id}}"
+        docOps({doc_id}) { return `DocOps:{${doc_id}}`; },
-        docVersion: ({doc_id}) -> "DocVersion:{#{doc_id}}"
+        docVersion({doc_id}) { return `DocVersion:{${doc_id}}`; },
-        docHash: ({doc_id}) -> "DocHash:{#{doc_id}}"
+        docHash({doc_id}) { return `DocHash:{${doc_id}}`; },
-        projectKey: ({doc_id}) -> "ProjectId:{#{doc_id}}"
+        projectKey({doc_id}) { return `ProjectId:{${doc_id}}`; },
-        docsInProject: ({project_id}) -> "DocsIn:{#{project_id}}"
+        docsInProject({project_id}) { return `DocsIn:{${project_id}}`; },
-        ranges: ({doc_id}) -> "Ranges:{#{doc_id}}"
+        ranges({doc_id}) { return `Ranges:{${doc_id}}`; },
-        unflushedTime: ({doc_id}) -> "UnflushedTime:{#{doc_id}}"
+        unflushedTime({doc_id}) { return `UnflushedTime:{${doc_id}}`; },
-        pathname: ({doc_id}) -> "Pathname:{#{doc_id}}"
+        pathname({doc_id}) { return `Pathname:{${doc_id}}`; },
-        projectHistoryId: ({doc_id}) -> "ProjectHistoryId:{#{doc_id}}"
+        projectHistoryId({doc_id}) { return `ProjectHistoryId:{${doc_id}}`; },
-        projectHistoryType: ({doc_id}) -> "ProjectHistoryType:{#{doc_id}}"
+        projectHistoryType({doc_id}) { return `ProjectHistoryType:{${doc_id}}`; },
-        projectState: ({project_id}) -> "ProjectState:{#{project_id}}"
+        projectState({project_id}) { return `ProjectState:{${project_id}}`; },
-        pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}"
+        pendingUpdates({doc_id}) { return `PendingUpdates:{${doc_id}}`; },
-        lastUpdatedBy: ({doc_id}) -> "lastUpdatedBy:{#{doc_id}}"
+        lastUpdatedBy({doc_id}) { return `lastUpdatedBy:{${doc_id}}`; },
-        lastUpdatedAt: ({doc_id}) -> "lastUpdatedAt:{#{doc_id}}"
+        lastUpdatedAt({doc_id}) { return `lastUpdatedAt:{${doc_id}}`; },
-        pendingUpdates: ({doc_id}) -> "PendingUpdates:{#{doc_id}}"
+        pendingUpdates({doc_id}) { return `PendingUpdates:{${doc_id}}`; },
-        flushAndDeleteQueue: () -> "DocUpdaterFlushAndDeleteQueue"
+        flushAndDeleteQueue() { return "DocUpdaterFlushAndDeleteQueue"; }
+      }
+    }
+  },

-  max_doc_length: 2 * 1024 * 1024 # 2mb
+  max_doc_length: 2 * 1024 * 1024, // 2mb

-  dispatcherCount: process.env["DISPATCHER_COUNT"]
+  dispatcherCount: process.env["DISPATCHER_COUNT"],

-  mongo:
+  mongo: {
-    url : process.env['MONGO_CONNECTION_STRING'] || "mongodb://#{process.env['MONGO_HOST'] or '127.0.0.1'}/sharelatex"
+    url : process.env['MONGO_CONNECTION_STRING'] || `mongodb://${process.env['MONGO_HOST'] || '127.0.0.1'}/sharelatex`
+  },

-  sentry:
+  sentry: {
     dsn: process.env.SENTRY_DSN
+  },

-  publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] or false
+  publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] || false,

-  continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] or false
+  continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] || false,

-  smoothingOffset: process.env['SMOOTHING_OFFSET'] or 1000 # milliseconds
+  smoothingOffset: process.env['SMOOTHING_OFFSET'] || 1000, // milliseconds

-  disableDoubleFlush: process.env['DISABLE_DOUBLE_FLUSH'] or false # don't flush track-changes for projects using project-history
+  disableDoubleFlush: process.env['DISABLE_DOUBLE_FLUSH'] || false // don't flush track-changes for projects using project-history
+};
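The `key_schema` functions build Redis keys with the doc or project id wrapped in braces, e.g. `doclines:{<doc_id>}`. In Redis Cluster the braced part acts as a hash tag: every key sharing the tag hashes to the same slot, so multi-key operations on a single doc stay valid. A minimal usage sketch (the `docId` value is illustrative only):

const keys = Settings.redis.documentupdater.key_schema;
const docId = "0123456789abcdef01234567"; // illustrative 24-char hex id

// Both keys carry the hash tag {docId}, so a cluster stores them in the same slot.
const doclinesKey = keys.docLines({doc_id: docId});  // "doclines:{0123456789abcdef01234567}"
const versionKey = keys.docVersion({doc_id: docId}); // "DocVersion:{0123456789abcdef01234567}"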