Mirror of https://github.com/overleaf/overleaf.git, synced 2025-03-06 02:43:58 +00:00
prettier: convert individual decaffeinated files to Prettier format
This commit is contained in: parent 877bae34b3, commit 29d5e8f8b9.
2 changed files with 409 additions and 248 deletions.
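Judging from the changed lines below (double quotes become single quotes, trailing semicolons disappear, object literals gain inner spacing, and long calls wrap at 80 columns), the conversion corresponds roughly to the following Prettier options. This is a sketch inferred from the diff, not the repository's actual configuration file:

    // .prettierrc.js — inferred from the diff; the repo's real config may differ
    module.exports = {
      semi: false, // no trailing semicolons in the converted lines
      singleQuote: true // 'single' quotes replace "double" quotes
    }

Bracket spacing ({ err: error }) and the 80-column print width are Prettier defaults, so they need no explicit option.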
@@ -6,191 +6,250 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
-const Metrics = require("metrics-sharelatex");
-Metrics.initialize("doc-updater");
+const Metrics = require('metrics-sharelatex')
+Metrics.initialize('doc-updater')

-const express = require('express');
-const http = require("http");
-const Settings = require('settings-sharelatex');
-const logger = require('logger-sharelatex');
-logger.initialize("document-updater");
+const express = require('express')
+const http = require('http')
+const Settings = require('settings-sharelatex')
+const logger = require('logger-sharelatex')
+logger.initialize('document-updater')

-logger.logger.addSerializers(require("./app/js/LoggerSerializers"));
+logger.logger.addSerializers(require('./app/js/LoggerSerializers'))

 if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
-  logger.initializeErrorReporting(Settings.sentry.dsn);
+  logger.initializeErrorReporting(Settings.sentry.dsn)
 }

-const RedisManager = require('./app/js/RedisManager');
-const DispatchManager = require('./app/js/DispatchManager');
-const DeleteQueueManager = require('./app/js/DeleteQueueManager');
-const Errors = require("./app/js/Errors");
-const HttpController = require("./app/js/HttpController");
-const mongojs = require("./app/js/mongojs");
-const async = require("async");
+const RedisManager = require('./app/js/RedisManager')
+const DispatchManager = require('./app/js/DispatchManager')
+const DeleteQueueManager = require('./app/js/DeleteQueueManager')
+const Errors = require('./app/js/Errors')
+const HttpController = require('./app/js/HttpController')
+const mongojs = require('./app/js/mongojs')
+const async = require('async')

const Path = require("path");
|
||||
const bodyParser = require("body-parser");
|
||||
const Path = require('path')
|
||||
const bodyParser = require('body-parser')
|
||||
|
||||
Metrics.mongodb.monitor(Path.resolve(__dirname + "/node_modules/mongojs/node_modules/mongodb"), logger);
|
||||
Metrics.event_loop.monitor(logger, 100);
|
||||
Metrics.mongodb.monitor(
|
||||
Path.resolve(__dirname + '/node_modules/mongojs/node_modules/mongodb'),
|
||||
logger
|
||||
)
|
||||
Metrics.event_loop.monitor(logger, 100)
|
||||
|
||||
const app = express();
|
||||
app.use(Metrics.http.monitor(logger));
|
||||
app.use(bodyParser.json({limit: (Settings.max_doc_length + (64 * 1024))}));
|
||||
Metrics.injectMetricsRoute(app);
|
||||
const app = express()
|
||||
app.use(Metrics.http.monitor(logger))
|
||||
app.use(bodyParser.json({ limit: Settings.max_doc_length + 64 * 1024 }))
|
||||
Metrics.injectMetricsRoute(app)
|
||||
|
||||
DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10);
|
||||
DispatchManager.createAndStartDispatchers(Settings.dispatcherCount || 10)
|
||||
|
||||
-app.param('project_id', function(req, res, next, project_id) {
-  if ((project_id != null ? project_id.match(/^[0-9a-f]{24}$/) : undefined)) {
-    return next();
-  } else {
-    return next(new Error("invalid project id"));
-  }
-});
+app.param('project_id', function (req, res, next, project_id) {
+  if (project_id != null ? project_id.match(/^[0-9a-f]{24}$/) : undefined) {
+    return next()
+  } else {
+    return next(new Error('invalid project id'))
+  }
+})

-app.param('doc_id', function(req, res, next, doc_id) {
-  if ((doc_id != null ? doc_id.match(/^[0-9a-f]{24}$/) : undefined)) {
-    return next();
-  } else {
-    return next(new Error("invalid doc id"));
-  }
-});
+app.param('doc_id', function (req, res, next, doc_id) {
+  if (doc_id != null ? doc_id.match(/^[0-9a-f]{24}$/) : undefined) {
+    return next()
+  } else {
+    return next(new Error('invalid doc id'))
+  }
+})

-app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc);
+app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc)
 // temporarily keep the GET method for backwards compatibility
-app.get('/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld);
+app.get('/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld)
 // will migrate to the POST method of get_and_flush_if_old instead
-app.post('/project/:project_id/get_and_flush_if_old', HttpController.getProjectDocsAndFlushIfOld);
-app.post('/project/:project_id/clearState', HttpController.clearProjectState);
-app.post('/project/:project_id/doc/:doc_id', HttpController.setDoc);
-app.post('/project/:project_id/doc/:doc_id/flush', HttpController.flushDocIfLoaded);
-app.delete('/project/:project_id/doc/:doc_id', HttpController.deleteDoc);
-app.delete('/project/:project_id', HttpController.deleteProject);
-app.delete('/project', HttpController.deleteMultipleProjects);
-app.post('/project/:project_id', HttpController.updateProject);
-app.post('/project/:project_id/history/resync', HttpController.resyncProjectHistory);
-app.post('/project/:project_id/flush', HttpController.flushProject);
-app.post('/project/:project_id/doc/:doc_id/change/:change_id/accept', HttpController.acceptChanges);
-app.post('/project/:project_id/doc/:doc_id/change/accept', HttpController.acceptChanges);
-app.delete('/project/:project_id/doc/:doc_id/comment/:comment_id', HttpController.deleteComment);
+app.post(
+  '/project/:project_id/get_and_flush_if_old',
+  HttpController.getProjectDocsAndFlushIfOld
+)
+app.post('/project/:project_id/clearState', HttpController.clearProjectState)
+app.post('/project/:project_id/doc/:doc_id', HttpController.setDoc)
+app.post(
+  '/project/:project_id/doc/:doc_id/flush',
+  HttpController.flushDocIfLoaded
+)
+app.delete('/project/:project_id/doc/:doc_id', HttpController.deleteDoc)
+app.delete('/project/:project_id', HttpController.deleteProject)
+app.delete('/project', HttpController.deleteMultipleProjects)
+app.post('/project/:project_id', HttpController.updateProject)
+app.post(
+  '/project/:project_id/history/resync',
+  HttpController.resyncProjectHistory
+)
+app.post('/project/:project_id/flush', HttpController.flushProject)
+app.post(
+  '/project/:project_id/doc/:doc_id/change/:change_id/accept',
+  HttpController.acceptChanges
+)
+app.post(
+  '/project/:project_id/doc/:doc_id/change/accept',
+  HttpController.acceptChanges
+)
+app.delete(
+  '/project/:project_id/doc/:doc_id/comment/:comment_id',
+  HttpController.deleteComment
+)

-app.get('/flush_all_projects', HttpController.flushAllProjects);
-app.get('/flush_queued_projects', HttpController.flushQueuedProjects);
+app.get('/flush_all_projects', HttpController.flushAllProjects)
+app.get('/flush_queued_projects', HttpController.flushQueuedProjects)

-app.get('/total', function(req, res){
-  const timer = new Metrics.Timer("http.allDocList");
-  return RedisManager.getCountOfDocsInMemory(function(err, count){
-    timer.done();
-    return res.send({total:count});});
-});
+app.get('/total', function (req, res) {
+  const timer = new Metrics.Timer('http.allDocList')
+  return RedisManager.getCountOfDocsInMemory(function (err, count) {
+    timer.done()
+    return res.send({ total: count })
+  })
+})

-app.get('/status', function(req, res){
-  if (Settings.shuttingDown) {
-    return res.sendStatus(503); // Service unavailable
-  } else {
-    return res.send('document updater is alive');
-  }
-});
+app.get('/status', function (req, res) {
+  if (Settings.shuttingDown) {
+    return res.sendStatus(503) // Service unavailable
+  } else {
+    return res.send('document updater is alive')
+  }
+})

const pubsubClient = require("redis-sharelatex").createClient(Settings.redis.pubsub);
|
||||
app.get("/health_check/redis", (req, res, next) => pubsubClient.healthCheck(function(error) {
|
||||
const pubsubClient = require('redis-sharelatex').createClient(
|
||||
Settings.redis.pubsub
|
||||
)
|
||||
app.get('/health_check/redis', (req, res, next) =>
|
||||
pubsubClient.healthCheck(function (error) {
|
||||
if (error != null) {
|
||||
logger.err({err: error}, "failed redis health check");
|
||||
return res.sendStatus(500);
|
||||
logger.err({ err: error }, 'failed redis health check')
|
||||
return res.sendStatus(500)
|
||||
} else {
|
||||
return res.sendStatus(200);
|
||||
return res.sendStatus(200)
|
||||
}
|
||||
}));
|
||||
})
|
||||
)
|
||||
|
||||
const docUpdaterRedisClient = require("redis-sharelatex").createClient(Settings.redis.documentupdater);
|
||||
app.get("/health_check/redis_cluster", (req, res, next) => docUpdaterRedisClient.healthCheck(function(error) {
|
||||
const docUpdaterRedisClient = require('redis-sharelatex').createClient(
|
||||
Settings.redis.documentupdater
|
||||
)
|
||||
app.get('/health_check/redis_cluster', (req, res, next) =>
|
||||
docUpdaterRedisClient.healthCheck(function (error) {
|
||||
if (error != null) {
|
||||
logger.err({err: error}, "failed redis cluster health check");
|
||||
return res.sendStatus(500);
|
||||
logger.err({ err: error }, 'failed redis cluster health check')
|
||||
return res.sendStatus(500)
|
||||
} else {
|
||||
return res.sendStatus(200);
|
||||
return res.sendStatus(200)
|
||||
}
|
||||
}));
|
||||
})
|
||||
)
|
||||
|
||||
app.get("/health_check", (req, res, next) => async.series([
|
||||
cb => pubsubClient.healthCheck(function(error) {
|
||||
if (error != null) {
|
||||
logger.err({err: error}, "failed redis health check");
|
||||
}
|
||||
return cb(error);
|
||||
}),
|
||||
cb => docUpdaterRedisClient.healthCheck(function(error) {
|
||||
if (error != null) {
|
||||
logger.err({err: error}, "failed redis cluster health check");
|
||||
}
|
||||
return cb(error);
|
||||
}),
|
||||
cb => mongojs.healthCheck(function(error) {
|
||||
if (error != null) {
|
||||
logger.err({err: error}, "failed mongo health check");
|
||||
}
|
||||
return cb(error);
|
||||
})
|
||||
] , function(error) {
|
||||
if (error != null) {
|
||||
return res.sendStatus(500);
|
||||
} else {
|
||||
return res.sendStatus(200);
|
||||
app.get('/health_check', (req, res, next) =>
|
||||
async.series(
|
||||
[
|
||||
(cb) =>
|
||||
pubsubClient.healthCheck(function (error) {
|
||||
if (error != null) {
|
||||
logger.err({ err: error }, 'failed redis health check')
|
||||
}
|
||||
return cb(error)
|
||||
}),
|
||||
(cb) =>
|
||||
docUpdaterRedisClient.healthCheck(function (error) {
|
||||
if (error != null) {
|
||||
logger.err({ err: error }, 'failed redis cluster health check')
|
||||
}
|
||||
return cb(error)
|
||||
}),
|
||||
(cb) =>
|
||||
mongojs.healthCheck(function (error) {
|
||||
if (error != null) {
|
||||
logger.err({ err: error }, 'failed mongo health check')
|
||||
}
|
||||
return cb(error)
|
||||
})
|
||||
],
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
return res.sendStatus(500)
|
||||
} else {
|
||||
return res.sendStatus(200)
|
||||
}
|
||||
}
|
||||
}));
|
||||
)
|
||||
)
|
||||
|
||||
-app.use(function(error, req, res, next) {
-  if (error instanceof Errors.NotFoundError) {
-    return res.sendStatus(404);
-  } else if (error instanceof Errors.OpRangeNotAvailableError) {
-    return res.sendStatus(422); // Unprocessable Entity
-  } else if (error.statusCode === 413) {
-    return res.status(413).send("request entity too large");
-  } else {
-    logger.error({err: error, req}, "request errored");
-    return res.status(500).send("Oops, something went wrong");
-  }
-});
+app.use(function (error, req, res, next) {
+  if (error instanceof Errors.NotFoundError) {
+    return res.sendStatus(404)
+  } else if (error instanceof Errors.OpRangeNotAvailableError) {
+    return res.sendStatus(422) // Unprocessable Entity
+  } else if (error.statusCode === 413) {
+    return res.status(413).send('request entity too large')
+  } else {
+    logger.error({ err: error, req }, 'request errored')
+    return res.status(500).send('Oops, something went wrong')
+  }
+})

-const shutdownCleanly = signal => (function() {
-  logger.log({signal}, "received interrupt, cleaning up");
-  Settings.shuttingDown = true;
-  return setTimeout(function() {
-    logger.log({signal}, "shutting down");
-    return process.exit();
-  }
-  , 10000);
-});
+const shutdownCleanly = (signal) =>
+  function () {
+    logger.log({ signal }, 'received interrupt, cleaning up')
+    Settings.shuttingDown = true
+    return setTimeout(function () {
+      logger.log({ signal }, 'shutting down')
+      return process.exit()
+    }, 10000)
+  }

-const watchForEvent = eventName => docUpdaterRedisClient.on(eventName, e => console.log(`redis event: ${eventName} ${e}`));
+const watchForEvent = (eventName) =>
+  docUpdaterRedisClient.on(eventName, (e) =>
+    console.log(`redis event: ${eventName} ${e}`)
+  )

const events = ["connect", "ready", "error", "close", "reconnecting", "end"];
|
||||
for (let eventName of Array.from(events)) {
|
||||
watchForEvent(eventName);
|
||||
const events = ['connect', 'ready', 'error', 'close', 'reconnecting', 'end']
|
||||
for (const eventName of Array.from(events)) {
|
||||
watchForEvent(eventName)
|
||||
}
|
||||
|
||||
|
||||
-const port = __guard__(Settings.internal != null ? Settings.internal.documentupdater : undefined, x => x.port) || __guard__(Settings.apis != null ? Settings.apis.documentupdater : undefined, x1 => x1.port) || 3003;
-const host = Settings.internal.documentupdater.host || "localhost";
-if (!module.parent) { // Called directly
-  app.listen(port, host, function() {
-    logger.info(`Document-updater starting up, listening on ${host}:${port}`);
-    if (Settings.continuousBackgroundFlush) {
-      logger.info("Starting continuous background flush");
-      return DeleteQueueManager.startBackgroundFlush();
-    }
-  });
+const port =
+  __guard__(
+    Settings.internal != null ? Settings.internal.documentupdater : undefined,
+    (x) => x.port
+  ) ||
+  __guard__(
+    Settings.apis != null ? Settings.apis.documentupdater : undefined,
+    (x1) => x1.port
+  ) ||
+  3003
+const host = Settings.internal.documentupdater.host || 'localhost'
+if (!module.parent) {
+  // Called directly
+  app.listen(port, host, function () {
+    logger.info(`Document-updater starting up, listening on ${host}:${port}`)
+    if (Settings.continuousBackgroundFlush) {
+      logger.info('Starting continuous background flush')
+      return DeleteQueueManager.startBackgroundFlush()
+    }
+  })
 }

-module.exports = app;
+module.exports = app

-for (let signal of ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGABRT']) {
-  process.on(signal, shutdownCleanly(signal));
+for (const signal of [
+  'SIGINT',
+  'SIGHUP',
+  'SIGQUIT',
+  'SIGUSR1',
+  'SIGUSR2',
+  'SIGTERM',
+  'SIGABRT'
+]) {
+  process.on(signal, shutdownCleanly(signal))
 }

 function __guard__(value, transform) {
-  return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
-}
+  return typeof value !== 'undefined' && value !== null
+    ? transform(value)
+    : undefined
+}

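A note on the __guard__ helper that closes this file: decaffeinate emits it as a stand-in for CoffeeScript's soak operator. On a Node version with optional chaining, the port lookup above could be written without the helper — a hypothetical simplification, not part of this commit:

    // Hypothetical equivalent of the two __guard__ calls above (not in this commit)
    const port =
      Settings.internal?.documentupdater?.port ||
      Settings.apis?.documentupdater?.port ||
      3003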
@@ -1,116 +1,218 @@
-const Path = require('path');
-const http = require('http');
-http.globalAgent.maxSockets = 300;
+const Path = require('path')
+const http = require('http')
+http.globalAgent.maxSockets = 300

 module.exports = {
-  internal: {
-    documentupdater: {
-      host: process.env["LISTEN_ADDRESS"] || "localhost",
-      port: 3003
-    }
-  },
+  internal: {
+    documentupdater: {
+      host: process.env.LISTEN_ADDRESS || 'localhost',
+      port: 3003
+    }
+  },

-  apis: {
-    web: {
-      url: `http://${process.env['WEB_API_HOST'] || process.env['WEB_HOST'] || "localhost"}:${process.env['WEB_API_PORT'] || process.env['WEB_PORT'] || 3000}`,
-      user: process.env['WEB_API_USER'] || "sharelatex",
-      pass: process.env['WEB_API_PASSWORD'] || "password"
-    },
-    trackchanges: {
-      url: `http://${process.env["TRACK_CHANGES_HOST"] || "localhost"}:3015`
-    },
-    project_history: {
-      enabled: true,
-      url: `http://${process.env["PROJECT_HISTORY_HOST"] || "localhost"}:3054`
-    }
-  },
+  apis: {
+    web: {
+      url: `http://${
+        process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
+      }:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
+      user: process.env.WEB_API_USER || 'sharelatex',
+      pass: process.env.WEB_API_PASSWORD || 'password'
+    },
+    trackchanges: {
+      url: `http://${process.env.TRACK_CHANGES_HOST || 'localhost'}:3015`
+    },
+    project_history: {
+      enabled: true,
+      url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054`
+    }
+  },

   redis: {
-    pubsub: {
-      host: process.env['PUBSUB_REDIS_HOST'] || process.env['REDIS_HOST'] || "localhost",
-      port: process.env['PUBSUB_REDIS_PORT'] || process.env['REDIS_PORT'] || "6379",
-      password: process.env["PUBSUB_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20")
-    },
+    pubsub: {
+      host:
+        process.env.PUBSUB_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        'localhost',
+      port:
+        process.env.PUBSUB_REDIS_PORT || process.env.REDIS_PORT || '6379',
+      password:
+        process.env.PUBSUB_REDIS_PASSWORD ||
+        process.env.REDIS_PASSWORD ||
+        '',
+      maxRetriesPerRequest: parseInt(
+        process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+      )
+    },
-    history: {
-      port: process.env["HISTORY_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379",
-      host: process.env["HISTORY_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost",
-      password: process.env["HISTORY_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"),
-      key_schema: {
-        uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:{${doc_id}}`; },
-        docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:{${project_id}}`; }
-      }
-    },
+    history: {
+      port:
+        process.env.HISTORY_REDIS_PORT ||
+        process.env.REDIS_PORT ||
+        '6379',
+      host:
+        process.env.HISTORY_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        'localhost',
+      password:
+        process.env.HISTORY_REDIS_PASSWORD ||
+        process.env.REDIS_PASSWORD ||
+        '',
+      maxRetriesPerRequest: parseInt(
+        process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+      ),
+      key_schema: {
+        uncompressedHistoryOps({ doc_id }) {
+          return `UncompressedHistoryOps:{${doc_id}}`
+        },
+        docsWithHistoryOps({ project_id }) {
+          return `DocsWithHistoryOps:{${project_id}}`
+        }
+      }
+    },
-    project_history: {
-      port: process.env["NEW_HISTORY_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379",
-      host: process.env["NEW_HISTORY_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost",
-      password: process.env["NEW_HISTORY_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"),
-      key_schema: {
-        projectHistoryOps({project_id}) { return `ProjectHistory:Ops:{${project_id}}`; },
-        projectHistoryFirstOpTimestamp({project_id}) { return `ProjectHistory:FirstOpTimestamp:{${project_id}}`; }
-      }
-    },
+    project_history: {
+      port:
+        process.env.NEW_HISTORY_REDIS_PORT ||
+        process.env.REDIS_PORT ||
+        '6379',
+      host:
+        process.env.NEW_HISTORY_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        'localhost',
+      password:
+        process.env.NEW_HISTORY_REDIS_PASSWORD ||
+        process.env.REDIS_PASSWORD ||
+        '',
+      maxRetriesPerRequest: parseInt(
+        process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+      ),
+      key_schema: {
+        projectHistoryOps({ project_id }) {
+          return `ProjectHistory:Ops:{${project_id}}`
+        },
+        projectHistoryFirstOpTimestamp({ project_id }) {
+          return `ProjectHistory:FirstOpTimestamp:{${project_id}}`
+        }
+      }
+    },
-    lock: {
-      port: process.env["LOCK_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379",
-      host: process.env["LOCK_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost",
-      password: process.env["LOCK_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"),
-      key_schema: {
-        blockingKey({doc_id}) { return `Blocking:{${doc_id}}`; }
-      }
-    },
+    lock: {
+      port:
+        process.env.LOCK_REDIS_PORT || process.env.REDIS_PORT || '6379',
+      host:
+        process.env.LOCK_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        'localhost',
+      password:
+        process.env.LOCK_REDIS_PASSWORD ||
+        process.env.REDIS_PASSWORD ||
+        '',
+      maxRetriesPerRequest: parseInt(
+        process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+      ),
+      key_schema: {
+        blockingKey({ doc_id }) {
+          return `Blocking:{${doc_id}}`
+        }
+      }
+    },
-    documentupdater: {
-      port: process.env["DOC_UPDATER_REDIS_PORT"] || process.env["REDIS_PORT"] || "6379",
-      host: process.env["DOC_UPDATER_REDIS_HOST"] || process.env["REDIS_HOST"] || "localhost",
-      password: process.env["DOC_UPDATER_REDIS_PASSWORD"] || process.env["REDIS_PASSWORD"] || "",
-      maxRetriesPerRequest: parseInt(process.env['REDIS_MAX_RETRIES_PER_REQUEST'] || "20"),
-      key_schema: {
-        blockingKey({doc_id}) { return `Blocking:{${doc_id}}`; },
-        docLines({doc_id}) { return `doclines:{${doc_id}}`; },
-        docOps({doc_id}) { return `DocOps:{${doc_id}}`; },
-        docVersion({doc_id}) { return `DocVersion:{${doc_id}}`; },
-        docHash({doc_id}) { return `DocHash:{${doc_id}}`; },
-        projectKey({doc_id}) { return `ProjectId:{${doc_id}}`; },
-        docsInProject({project_id}) { return `DocsIn:{${project_id}}`; },
-        ranges({doc_id}) { return `Ranges:{${doc_id}}`; },
-        unflushedTime({doc_id}) { return `UnflushedTime:{${doc_id}}`; },
-        pathname({doc_id}) { return `Pathname:{${doc_id}}`; },
-        projectHistoryId({doc_id}) { return `ProjectHistoryId:{${doc_id}}`; },
-        projectHistoryType({doc_id}) { return `ProjectHistoryType:{${doc_id}}`; },
-        projectState({project_id}) { return `ProjectState:{${project_id}}`; },
-        pendingUpdates({doc_id}) { return `PendingUpdates:{${doc_id}}`; },
-        lastUpdatedBy({doc_id}) { return `lastUpdatedBy:{${doc_id}}`; },
-        lastUpdatedAt({doc_id}) { return `lastUpdatedAt:{${doc_id}}`; },
-        pendingUpdates({doc_id}) { return `PendingUpdates:{${doc_id}}`; },
-        flushAndDeleteQueue() { return "DocUpdaterFlushAndDeleteQueue"; }
-      }
-    }
+    documentupdater: {
+      port:
+        process.env.DOC_UPDATER_REDIS_PORT ||
+        process.env.REDIS_PORT ||
+        '6379',
+      host:
+        process.env.DOC_UPDATER_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        'localhost',
+      password:
+        process.env.DOC_UPDATER_REDIS_PASSWORD ||
+        process.env.REDIS_PASSWORD ||
+        '',
+      maxRetriesPerRequest: parseInt(
+        process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+      ),
+      key_schema: {
+        blockingKey({ doc_id }) {
+          return `Blocking:{${doc_id}}`
+        },
+        docLines({ doc_id }) {
+          return `doclines:{${doc_id}}`
+        },
+        docOps({ doc_id }) {
+          return `DocOps:{${doc_id}}`
+        },
+        docVersion({ doc_id }) {
+          return `DocVersion:{${doc_id}}`
+        },
+        docHash({ doc_id }) {
+          return `DocHash:{${doc_id}}`
+        },
+        projectKey({ doc_id }) {
+          return `ProjectId:{${doc_id}}`
+        },
+        docsInProject({ project_id }) {
+          return `DocsIn:{${project_id}}`
+        },
+        ranges({ doc_id }) {
+          return `Ranges:{${doc_id}}`
+        },
+        unflushedTime({ doc_id }) {
+          return `UnflushedTime:{${doc_id}}`
+        },
+        pathname({ doc_id }) {
+          return `Pathname:{${doc_id}}`
+        },
+        projectHistoryId({ doc_id }) {
+          return `ProjectHistoryId:{${doc_id}}`
+        },
+        projectHistoryType({ doc_id }) {
+          return `ProjectHistoryType:{${doc_id}}`
+        },
+        projectState({ project_id }) {
+          return `ProjectState:{${project_id}}`
+        },
+        pendingUpdates({ doc_id }) {
+          return `PendingUpdates:{${doc_id}}`
+        },
+        lastUpdatedBy({ doc_id }) {
+          return `lastUpdatedBy:{${doc_id}}`
+        },
+        lastUpdatedAt({ doc_id }) {
+          return `lastUpdatedAt:{${doc_id}}`
+        },
+        pendingUpdates({ doc_id }) {
+          return `PendingUpdates:{${doc_id}}`
+        },
+        flushAndDeleteQueue() {
+          return 'DocUpdaterFlushAndDeleteQueue'
+        }
+      }
+    }
   },

   max_doc_length: 2 * 1024 * 1024, // 2mb

-  dispatcherCount: process.env["DISPATCHER_COUNT"],
+  dispatcherCount: process.env.DISPATCHER_COUNT,

-  mongo: {
-    url : process.env['MONGO_CONNECTION_STRING'] || `mongodb://${process.env['MONGO_HOST'] || '127.0.0.1'}/sharelatex`
-  },
+  mongo: {
+    url:
+      process.env.MONGO_CONNECTION_STRING ||
+      `mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`
+  },

   sentry: {
     dsn: process.env.SENTRY_DSN
   },

-  publishOnIndividualChannels: process.env['PUBLISH_ON_INDIVIDUAL_CHANNELS'] || false,
+  publishOnIndividualChannels:
+    process.env.PUBLISH_ON_INDIVIDUAL_CHANNELS || false,

-  continuousBackgroundFlush: process.env['CONTINUOUS_BACKGROUND_FLUSH'] || false,
+  continuousBackgroundFlush:
+    process.env.CONTINUOUS_BACKGROUND_FLUSH || false,

-  smoothingOffset: process.env['SMOOTHING_OFFSET'] || 1000, // milliseconds
+  smoothingOffset: process.env.SMOOTHING_OFFSET || 1000, // milliseconds

-  disableDoubleFlush: process.env['DISABLE_DOUBLE_FLUSH'] || false // don't flush track-changes for projects using project-history
-};
+  disableDoubleFlush: process.env.DISABLE_DOUBLE_FLUSH || false // don't flush track-changes for projects using project-history
+}
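Assuming the options sketched at the top of this page, the same output should be reproducible by running Prettier in write mode over the two changed files, e.g. `npx prettier --no-semi --single-quote --write <file>` (the actual file paths are not preserved in this view).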