Mirror of https://github.com/overleaf/overleaf.git (synced 2024-11-21 20:47:08 -05:00)
decaffeinate: convert individual files to js

commit d23250a4bb (parent 66ce5847a3)
2 changed files with 311 additions and 239 deletions
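Note on the conversion pattern: decaffeinate translates CoffeeScript's existential operator (`?.` and trailing `?`) into explicit null checks rather than any newer syntax, which is why the converted code below is full of `x != null ? x.y : undefined` expressions and a generated `__guard__` helper. A minimal stand-alone sketch of that pattern, using a hypothetical `settings` object rather than the real `Settings` module:

    // CoffeeScript source (roughly): if settings.sentry?.dsn? then report(settings.sentry.dsn)
    // decaffeinate's mechanical output, as seen throughout this diff:
    const settings = { sentry: { dsn: "https://example@sentry.invalid/1" } }; // hypothetical fixture
    if ((settings.sentry != null ? settings.sentry.dsn : undefined) != null) {
      console.log("would initialise error reporting with", settings.sentry.dsn);
    }
    // A hand-cleaned equivalent (the kind of rewrite DS207 suggests) could read:
    // if (settings.sentry && settings.sentry.dsn) { ... }

The DS102/DS103/DS207 comments that decaffeinate leaves at the top of each converted file are suggestions for this kind of manual cleanup; they do not change behaviour.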
@@ -1,244 +1,298 @@
-Metrics = require "metrics-sharelatex"
-Metrics.initialize("clsi")
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let tenMinutes;
+const Metrics = require("metrics-sharelatex");
+Metrics.initialize("clsi");

-CompileController = require "./app/js/CompileController"
-Settings = require "settings-sharelatex"
-logger = require "logger-sharelatex"
-logger.initialize("clsi")
+const CompileController = require("./app/js/CompileController");
+const Settings = require("settings-sharelatex");
+const logger = require("logger-sharelatex");
+logger.initialize("clsi");
-if Settings.sentry?.dsn?
-  logger.initializeErrorReporting(Settings.sentry.dsn)
+if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
+  logger.initializeErrorReporting(Settings.sentry.dsn);
+}

-smokeTest = require "smoke-test-sharelatex"
-ContentTypeMapper = require "./app/js/ContentTypeMapper"
-Errors = require './app/js/Errors'
+const smokeTest = require("smoke-test-sharelatex");
+const ContentTypeMapper = require("./app/js/ContentTypeMapper");
+const Errors = require('./app/js/Errors');

-Path = require "path"
-fs = require "fs"
+const Path = require("path");
+const fs = require("fs");

-Metrics.open_sockets.monitor(logger)
-Metrics.memory.monitor(logger)
+Metrics.open_sockets.monitor(logger);
+Metrics.memory.monitor(logger);

-ProjectPersistenceManager = require "./app/js/ProjectPersistenceManager"
-OutputCacheManager = require "./app/js/OutputCacheManager"
+const ProjectPersistenceManager = require("./app/js/ProjectPersistenceManager");
+const OutputCacheManager = require("./app/js/OutputCacheManager");

-require("./app/js/db").sync()
+require("./app/js/db").sync();

-express = require "express"
-bodyParser = require "body-parser"
-app = express()
+const express = require("express");
+const bodyParser = require("body-parser");
+const app = express();

-Metrics.injectMetricsRoute(app)
-app.use Metrics.http.monitor(logger)
+Metrics.injectMetricsRoute(app);
+app.use(Metrics.http.monitor(logger));

-# Compile requests can take longer than the default two
-# minutes (including file download time), so bump up the
-# timeout a bit.
-TIMEOUT = 10 * 60 * 1000
-app.use (req, res, next) ->
-  req.setTimeout TIMEOUT
-  res.setTimeout TIMEOUT
-  res.removeHeader("X-Powered-By")
-  next()
+// Compile requests can take longer than the default two
+// minutes (including file download time), so bump up the
+// timeout a bit.
+const TIMEOUT = 10 * 60 * 1000;
+app.use(function(req, res, next) {
+  req.setTimeout(TIMEOUT);
+  res.setTimeout(TIMEOUT);
+  res.removeHeader("X-Powered-By");
+  return next();
+});

-app.param 'project_id', (req, res, next, project_id) ->
-  if project_id?.match /^[a-zA-Z0-9_-]+$/
-    next()
-  else
-    next new Error("invalid project id")
+app.param('project_id', function(req, res, next, project_id) {
+  if ((project_id != null ? project_id.match(/^[a-zA-Z0-9_-]+$/) : undefined)) {
+    return next();
+  } else {
+    return next(new Error("invalid project id"));
+  }
+});

-app.param 'user_id', (req, res, next, user_id) ->
-  if user_id?.match /^[0-9a-f]{24}$/
-    next()
-  else
-    next new Error("invalid user id")
+app.param('user_id', function(req, res, next, user_id) {
+  if ((user_id != null ? user_id.match(/^[0-9a-f]{24}$/) : undefined)) {
+    return next();
+  } else {
+    return next(new Error("invalid user id"));
+  }
+});

-app.param 'build_id', (req, res, next, build_id) ->
-  if build_id?.match OutputCacheManager.BUILD_REGEX
-    next()
-  else
-    next new Error("invalid build id #{build_id}")
+app.param('build_id', function(req, res, next, build_id) {
+  if ((build_id != null ? build_id.match(OutputCacheManager.BUILD_REGEX) : undefined)) {
+    return next();
+  } else {
+    return next(new Error(`invalid build id ${build_id}`));
+  }
+});
-app.post "/project/:project_id/compile", bodyParser.json(limit: Settings.compileSizeLimit), CompileController.compile
-app.post "/project/:project_id/compile/stop", CompileController.stopCompile
-app.delete "/project/:project_id", CompileController.clearCache
+app.post("/project/:project_id/compile", bodyParser.json({limit: Settings.compileSizeLimit}), CompileController.compile);
+app.post("/project/:project_id/compile/stop", CompileController.stopCompile);
+app.delete("/project/:project_id", CompileController.clearCache);

-app.get "/project/:project_id/sync/code", CompileController.syncFromCode
-app.get "/project/:project_id/sync/pdf", CompileController.syncFromPdf
-app.get "/project/:project_id/wordcount", CompileController.wordcount
-app.get "/project/:project_id/status", CompileController.status
+app.get("/project/:project_id/sync/code", CompileController.syncFromCode);
+app.get("/project/:project_id/sync/pdf", CompileController.syncFromPdf);
+app.get("/project/:project_id/wordcount", CompileController.wordcount);
+app.get("/project/:project_id/status", CompileController.status);

-# Per-user containers
-app.post "/project/:project_id/user/:user_id/compile", bodyParser.json(limit: Settings.compileSizeLimit), CompileController.compile
-app.post "/project/:project_id/user/:user_id/compile/stop", CompileController.stopCompile
-app.delete "/project/:project_id/user/:user_id", CompileController.clearCache
+// Per-user containers
+app.post("/project/:project_id/user/:user_id/compile", bodyParser.json({limit: Settings.compileSizeLimit}), CompileController.compile);
+app.post("/project/:project_id/user/:user_id/compile/stop", CompileController.stopCompile);
+app.delete("/project/:project_id/user/:user_id", CompileController.clearCache);

-app.get "/project/:project_id/user/:user_id/sync/code", CompileController.syncFromCode
-app.get "/project/:project_id/user/:user_id/sync/pdf", CompileController.syncFromPdf
-app.get "/project/:project_id/user/:user_id/wordcount", CompileController.wordcount
+app.get("/project/:project_id/user/:user_id/sync/code", CompileController.syncFromCode);
+app.get("/project/:project_id/user/:user_id/sync/pdf", CompileController.syncFromPdf);
+app.get("/project/:project_id/user/:user_id/wordcount", CompileController.wordcount);

-ForbidSymlinks = require "./app/js/StaticServerForbidSymlinks"
+const ForbidSymlinks = require("./app/js/StaticServerForbidSymlinks");

-# create a static server which does not allow access to any symlinks
-# avoids possible mismatch of root directory between middleware check
-# and serving the files
-staticServer = ForbidSymlinks express.static, Settings.path.compilesDir, setHeaders: (res, path, stat) ->
-  if Path.basename(path) == "output.pdf"
-    # Calculate an etag in the same way as nginx
-    # https://github.com/tj/send/issues/65
-    etag = (path, stat) ->
-      '"' + Math.ceil(+stat.mtime / 1000).toString(16) +
-        '-' + Number(stat.size).toString(16) + '"'
-    res.set("Etag", etag(path, stat))
-  res.set("Content-Type", ContentTypeMapper.map(path))
+// create a static server which does not allow access to any symlinks
+// avoids possible mismatch of root directory between middleware check
+// and serving the files
+const staticServer = ForbidSymlinks(express.static, Settings.path.compilesDir, { setHeaders(res, path, stat) {
+  if (Path.basename(path) === "output.pdf") {
+    // Calculate an etag in the same way as nginx
+    // https://github.com/tj/send/issues/65
+    const etag = (path, stat) =>
+      `"${Math.ceil(+stat.mtime / 1000).toString(16)}` +
+      '-' + Number(stat.size).toString(16) + '"'
+    ;
+    res.set("Etag", etag(path, stat));
+  }
+  return res.set("Content-Type", ContentTypeMapper.map(path));
+}
+}
+);
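For context on the etag logic carried through the conversion: the handler reproduces nginx's etag format, i.e. the file's mtime in whole seconds and its size, both in hexadecimal, joined by a dash and wrapped in quotes. A small stand-alone sketch with hypothetical stat values (not the real fs.Stats object the static server passes in):

    // Hypothetical stat fixture; the real handler receives fs.Stats from express.static.
    const stat = { mtime: new Date(1514764800000), size: 123456 };
    const etag = (stat) =>
      '"' + Math.ceil(+stat.mtime / 1000).toString(16) +
      '-' + Number(stat.size).toString(16) + '"';
    console.log(etag(stat)); // -> "5a497a00-1e240"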
-app.get "/project/:project_id/user/:user_id/build/:build_id/output/*", (req, res, next) ->
-  # for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
-  req.url = "/#{req.params.project_id}-#{req.params.user_id}/" + OutputCacheManager.path(req.params.build_id, "/#{req.params[0]}")
-  staticServer(req, res, next)
+app.get("/project/:project_id/user/:user_id/build/:build_id/output/*", function(req, res, next) {
+  // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
+  req.url = `/${req.params.project_id}-${req.params.user_id}/` + OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`);
+  return staticServer(req, res, next);
+});

-app.get "/project/:project_id/build/:build_id/output/*", (req, res, next) ->
-  # for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
-  req.url = "/#{req.params.project_id}/" + OutputCacheManager.path(req.params.build_id, "/#{req.params[0]}")
-  staticServer(req, res, next)
+app.get("/project/:project_id/build/:build_id/output/*", function(req, res, next) {
+  // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
+  req.url = `/${req.params.project_id}/` + OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`);
+  return staticServer(req, res, next);
+});

-app.get "/project/:project_id/user/:user_id/output/*", (req, res, next) ->
-  # for specific user get the path to the top level file
-  req.url = "/#{req.params.project_id}-#{req.params.user_id}/#{req.params[0]}"
-  staticServer(req, res, next)
+app.get("/project/:project_id/user/:user_id/output/*", function(req, res, next) {
+  // for specific user get the path to the top level file
+  req.url = `/${req.params.project_id}-${req.params.user_id}/${req.params[0]}`;
+  return staticServer(req, res, next);
+});

-app.get "/project/:project_id/output/*", (req, res, next) ->
-  if req.query?.build? && req.query.build.match(OutputCacheManager.BUILD_REGEX)
-    # for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
-    req.url = "/#{req.params.project_id}/" + OutputCacheManager.path(req.query.build, "/#{req.params[0]}")
-  else
-    req.url = "/#{req.params.project_id}/#{req.params[0]}"
-  staticServer(req, res, next)
+app.get("/project/:project_id/output/*", function(req, res, next) {
+  if (((req.query != null ? req.query.build : undefined) != null) && req.query.build.match(OutputCacheManager.BUILD_REGEX)) {
+    // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
+    req.url = `/${req.params.project_id}/` + OutputCacheManager.path(req.query.build, `/${req.params[0]}`);
+  } else {
+    req.url = `/${req.params.project_id}/${req.params[0]}`;
+  }
+  return staticServer(req, res, next);
+});

-app.get "/oops", (req, res, next) ->
-  logger.error {err: "hello"}, "test error"
-  res.send "error\n"
+app.get("/oops", function(req, res, next) {
+  logger.error({err: "hello"}, "test error");
+  return res.send("error\n");
+});

-app.get "/status", (req, res, next) ->
-  res.send "CLSI is alive\n"
+app.get("/status", (req, res, next) => res.send("CLSI is alive\n"));

-resCacher =
-  contentType:(@setContentType)->
-  send:(@code, @body)->
-  #default the server to be down
-  code:500
-  body:{}
-  setContentType:"application/json"
+const resCacher = {
+  contentType(setContentType){
+    this.setContentType = setContentType;
+  },
+  send(code, body){
+    this.code = code;
+    this.body = body;
+  },
+  //default the server to be down
+  code:500,
+  body:{},
+  setContentType:"application/json"
+};

-if Settings.smokeTest
-  do runSmokeTest = ->
-    logger.log("running smoke tests")
-    smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))({}, resCacher)
-    setTimeout(runSmokeTest, 30 * 1000)
+if (Settings.smokeTest) {
+  let runSmokeTest;
+  (runSmokeTest = function() {
+    logger.log("running smoke tests");
+    smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))({}, resCacher);
+    return setTimeout(runSmokeTest, 30 * 1000);
+  })();
+}

-app.get "/health_check", (req, res)->
-  res.contentType(resCacher?.setContentType)
-  res.status(resCacher?.code).send(resCacher?.body)
+app.get("/health_check", function(req, res){
+  res.contentType(resCacher != null ? resCacher.setContentType : undefined);
+  return res.status(resCacher != null ? resCacher.code : undefined).send(resCacher != null ? resCacher.body : undefined);
+});
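How the pieces above fit together: resCacher acts as a stand-in for an Express response. Each periodic smoke-test run "responds" into it, and /health_check simply replays whatever status, body and content type were captured last, so health checks stay cheap and report the most recent smoke-test outcome (defaulting to 500 until a run has succeeded). A minimal sketch of that capture-and-replay idea, with hypothetical names:

    // A response-like cache: whatever "responds" into it can be replayed later.
    const cache = {
      code: 500, body: {},
      contentType(t) { this.type = t; },
      send(code, body) { this.code = code; this.body = body; }
    };
    // A background check "responds" into the cache instead of to a real client...
    cache.contentType("application/json");
    cache.send(200, { ok: true });
    // ...and a later health-check request just replays the captured result.
    console.log(cache.type, cache.code, cache.body); // application/json 200 { ok: true }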
-app.get "/smoke_test_force", (req, res)->
-  smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))(req, res)
+app.get("/smoke_test_force", (req, res)=> smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))(req, res));

-profiler = require "v8-profiler-node8"
-app.get "/profile", (req, res) ->
-  time = parseInt(req.query.time || "1000")
-  profiler.startProfiling("test")
-  setTimeout () ->
-    profile = profiler.stopProfiling("test")
-    res.json(profile)
-  , time
+const profiler = require("v8-profiler-node8");
+app.get("/profile", function(req, res) {
+  const time = parseInt(req.query.time || "1000");
+  profiler.startProfiling("test");
+  return setTimeout(function() {
+    const profile = profiler.stopProfiling("test");
+    return res.json(profile);
+  }
+  , time);
+});

-app.get "/heapdump", (req, res)->
-  require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.clsi.heapsnapshot', (err, filename)->
-    res.send filename
+app.get("/heapdump", (req, res)=>
+  require('heapdump').writeSnapshot(`/tmp/${Date.now()}.clsi.heapsnapshot`, (err, filename)=> res.send(filename))
+);

-app.use (error, req, res, next) ->
-  if error instanceof Errors.NotFoundError
-    logger.warn {err: error, url: req.url}, "not found error"
-    return res.sendStatus(404)
-  else
-    logger.error {err: error, url: req.url}, "server error"
-    res.sendStatus(error?.statusCode || 500)
+app.use(function(error, req, res, next) {
+  if (error instanceof Errors.NotFoundError) {
+    logger.warn({err: error, url: req.url}, "not found error");
+    return res.sendStatus(404);
+  } else {
+    logger.error({err: error, url: req.url}, "server error");
+    return res.sendStatus((error != null ? error.statusCode : undefined) || 500);
+  }
+});

-net = require "net"
-os = require "os"
+const net = require("net");
+const os = require("os");

-STATE = "up"
+let STATE = "up";

-loadTcpServer = net.createServer (socket) ->
-  socket.on "error", (err)->
-    if err.code == "ECONNRESET"
-      # this always comes up, we don't know why
-      return
-    logger.err err:err, "error with socket on load check"
-    socket.destroy()
+const loadTcpServer = net.createServer(function(socket) {
+  socket.on("error", function(err){
+    if (err.code === "ECONNRESET") {
+      // this always comes up, we don't know why
+      return;
+    }
+    logger.err({err}, "error with socket on load check");
+    return socket.destroy();
+  });

-  if STATE == "up" and Settings.internal.load_balancer_agent.report_load
-    currentLoad = os.loadavg()[0]
+  if ((STATE === "up") && Settings.internal.load_balancer_agent.report_load) {
+    let availableWorkingCpus;
+    const currentLoad = os.loadavg()[0];

-    # staging clis's have 1 cpu core only
-    if os.cpus().length == 1
-      availableWorkingCpus = 1
-    else
-      availableWorkingCpus = os.cpus().length - 1
+    // staging clis's have 1 cpu core only
+    if (os.cpus().length === 1) {
+      availableWorkingCpus = 1;
+    } else {
+      availableWorkingCpus = os.cpus().length - 1;
+    }

-    freeLoad = availableWorkingCpus - currentLoad
-    freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100)
-    if freeLoadPercentage <= 0
-      freeLoadPercentage = 1 # when its 0 the server is set to drain and will move projects to different servers
-    socket.write("up, #{freeLoadPercentage}%\n", "ASCII")
-    socket.end()
-  else
-    socket.write("#{STATE}\n", "ASCII")
-    socket.end()
+    const freeLoad = availableWorkingCpus - currentLoad;
+    let freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100);
+    if (freeLoadPercentage <= 0) {
+      freeLoadPercentage = 1; // when its 0 the server is set to drain and will move projects to different servers
+    }
+    socket.write(`up, ${freeLoadPercentage}%\n`, "ASCII");
+    return socket.end();
+  } else {
+    socket.write(`${STATE}\n`, "ASCII");
+    return socket.end();
+  }
+});

-loadHttpServer = express()
+const loadHttpServer = express();

-loadHttpServer.post "/state/up", (req, res, next) ->
-  STATE = "up"
-  logger.info "getting message to set server to down"
-  res.sendStatus 204
+loadHttpServer.post("/state/up", function(req, res, next) {
+  STATE = "up";
+  logger.info("getting message to set server to down");
+  return res.sendStatus(204);
+});

-loadHttpServer.post "/state/down", (req, res, next) ->
-  STATE = "down"
-  logger.info "getting message to set server to down"
-  res.sendStatus 204
+loadHttpServer.post("/state/down", function(req, res, next) {
+  STATE = "down";
+  logger.info("getting message to set server to down");
+  return res.sendStatus(204);
+});

-loadHttpServer.post "/state/maint", (req, res, next) ->
-  STATE = "maint"
-  logger.info "getting message to set server to maint"
-  res.sendStatus 204
+loadHttpServer.post("/state/maint", function(req, res, next) {
+  STATE = "maint";
+  logger.info("getting message to set server to maint");
+  return res.sendStatus(204);
+});

-port = (Settings.internal?.clsi?.port or 3013)
-host = (Settings.internal?.clsi?.host or "localhost")
+const port = (__guard__(Settings.internal != null ? Settings.internal.clsi : undefined, x => x.port) || 3013);
+const host = (__guard__(Settings.internal != null ? Settings.internal.clsi : undefined, x1 => x1.host) || "localhost");

-load_tcp_port = Settings.internal.load_balancer_agent.load_port
-load_http_port = Settings.internal.load_balancer_agent.local_port
+const load_tcp_port = Settings.internal.load_balancer_agent.load_port;
+const load_http_port = Settings.internal.load_balancer_agent.local_port;

-if !module.parent # Called directly
-  app.listen port, host, (error) ->
-    logger.info "CLSI starting up, listening on #{host}:#{port}"
+if (!module.parent) { // Called directly
+  app.listen(port, host, error => logger.info(`CLSI starting up, listening on ${host}:${port}`));

-  loadTcpServer.listen load_tcp_port, host, (error) ->
-    throw error if error?
-    logger.info "Load tcp agent listening on load port #{load_tcp_port}"
+  loadTcpServer.listen(load_tcp_port, host, function(error) {
+    if (error != null) { throw error; }
+    return logger.info(`Load tcp agent listening on load port ${load_tcp_port}`);
+  });

-  loadHttpServer.listen load_http_port, host, (error) ->
-    throw error if error?
-    logger.info "Load http agent listening on load port #{load_http_port}"
+  loadHttpServer.listen(load_http_port, host, function(error) {
+    if (error != null) { throw error; }
+    return logger.info(`Load http agent listening on load port ${load_http_port}`);
+  });
+}

-module.exports = app
+module.exports = app;

-setInterval () ->
-  ProjectPersistenceManager.clearExpiredProjects()
-, tenMinutes = 10 * 60 * 1000
+setInterval(() => ProjectPersistenceManager.clearExpiredProjects()
+, (tenMinutes = 10 * 60 * 1000));

+function __guard__(value, transform) {
+  return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
+}
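The __guard__ function appended to the converted file is decaffeinate's generic replacement for chained soak access (a?.b?.c): it applies the transform only when the value is neither null nor undefined. A small stand-alone illustration with hypothetical settings objects (the DS103 suggestion is precisely about replacing this pattern by hand):

    function __guard__(value, transform) {
      return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
    }
    const fullSettings = { internal: { clsi: { port: 4000 } } }; // hypothetical
    const emptySettings = {};                                    // hypothetical
    const portA = __guard__(fullSettings.internal != null ? fullSettings.internal.clsi : undefined, x => x.port) || 3013;
    const portB = __guard__(emptySettings.internal != null ? emptySettings.internal.clsi : undefined, x => x.port) || 3013;
    console.log(portA, portB); // 4000 3013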
@@ -1,71 +1,89 @@
-Path = require "path"
+const Path = require("path");

-module.exports =
-  # Options are passed to Sequelize.
-  # See http://sequelizejs.com/documentation#usage-options for details
-  mysql:
-    clsi:
-      database: "clsi"
-      username: "clsi"
-      dialect: "sqlite"
-      storage: process.env["SQLITE_PATH"] or Path.resolve(__dirname + "/../db.sqlite")
-      pool:
-        max: 1
-        min: 1
-      retry:
-        max: 10
+module.exports = {
+  // Options are passed to Sequelize.
+  // See http://sequelizejs.com/documentation#usage-options for details
+  mysql: {
+    clsi: {
+      database: "clsi",
+      username: "clsi",
+      dialect: "sqlite",
+      storage: process.env["SQLITE_PATH"] || Path.resolve(__dirname + "/../db.sqlite"),
+      pool: {
+        max: 1,
+        min: 1
+      },
+      retry: {
+        max: 10
+      }
+    }
+  },

-  compileSizeLimit: process.env["COMPILE_SIZE_LIMIT"] or "7mb"
+  compileSizeLimit: process.env["COMPILE_SIZE_LIMIT"] || "7mb",

-  path:
-    compilesDir: Path.resolve(__dirname + "/../compiles")
-    clsiCacheDir: Path.resolve(__dirname + "/../cache")
-    synctexBaseDir: (project_id) -> Path.join(@compilesDir, project_id)
+  path: {
+    compilesDir: Path.resolve(__dirname + "/../compiles"),
+    clsiCacheDir: Path.resolve(__dirname + "/../cache"),
+    synctexBaseDir(project_id) { return Path.join(this.compilesDir, project_id); }
+  },

-  internal:
-    clsi:
-      port: 3013
-      host: process.env["LISTEN_ADDRESS"] or "localhost"
+  internal: {
+    clsi: {
+      port: 3013,
+      host: process.env["LISTEN_ADDRESS"] || "localhost"
+    },

-    load_balancer_agent:
-      report_load:true
-      load_port: 3048
-      local_port: 3049
-  apis:
-    clsi:
-      url: "http://#{process.env['CLSI_HOST'] or 'localhost'}:3013"
+    load_balancer_agent: {
+      report_load:true,
+      load_port: 3048,
+      local_port: 3049
+    }
+  },
+  apis: {
+    clsi: {
+      url: `http://${process.env['CLSI_HOST'] || 'localhost'}:3013`
+    }
+  },

-  smokeTest: process.env["SMOKE_TEST"] or false
-  project_cache_length_ms: 1000 * 60 * 60 * 24
-  parallelFileDownloads: process.env["FILESTORE_PARALLEL_FILE_DOWNLOADS"] or 1
-  parallelSqlQueryLimit: process.env["FILESTORE_PARALLEL_SQL_QUERY_LIMIT"] or 1
-  filestoreDomainOveride: process.env["FILESTORE_DOMAIN_OVERRIDE"]
-  texliveImageNameOveride: process.env["TEX_LIVE_IMAGE_NAME_OVERRIDE"]
-  sentry:
-    dsn: process.env['SENTRY_DSN']
+  smokeTest: process.env["SMOKE_TEST"] || false,
+  project_cache_length_ms: 1000 * 60 * 60 * 24,
+  parallelFileDownloads: process.env["FILESTORE_PARALLEL_FILE_DOWNLOADS"] || 1,
+  parallelSqlQueryLimit: process.env["FILESTORE_PARALLEL_SQL_QUERY_LIMIT"] || 1,
+  filestoreDomainOveride: process.env["FILESTORE_DOMAIN_OVERRIDE"],
+  texliveImageNameOveride: process.env["TEX_LIVE_IMAGE_NAME_OVERRIDE"],
+  sentry: {
+    dsn: process.env['SENTRY_DSN']
+  }
+};

-if process.env["DOCKER_RUNNER"]
-  module.exports.clsi =
-    dockerRunner: process.env["DOCKER_RUNNER"] == "true"
-    docker:
-      image: process.env["TEXLIVE_IMAGE"] or "quay.io/sharelatex/texlive-full:2017.1"
-      env:
-        HOME: "/tmp"
-      socketPath: "/var/run/docker.sock"
-      user: process.env["TEXLIVE_IMAGE_USER"] or "tex"
-    expireProjectAfterIdleMs: 24 * 60 * 60 * 1000
-    checkProjectsIntervalMs: 10 * 60 * 1000
+if (process.env["DOCKER_RUNNER"]) {
+  let seccomp_profile_path;
+  module.exports.clsi = {
+    dockerRunner: process.env["DOCKER_RUNNER"] === "true",
+    docker: {
+      image: process.env["TEXLIVE_IMAGE"] || "quay.io/sharelatex/texlive-full:2017.1",
+      env: {
+        HOME: "/tmp"
+      },
+      socketPath: "/var/run/docker.sock",
+      user: process.env["TEXLIVE_IMAGE_USER"] || "tex"
+    },
+    expireProjectAfterIdleMs: 24 * 60 * 60 * 1000,
+    checkProjectsIntervalMs: 10 * 60 * 1000
+  };

-  try
-    seccomp_profile_path = Path.resolve(__dirname + "/../seccomp/clsi-profile.json")
-    module.exports.clsi.docker.seccomp_profile = JSON.stringify(JSON.parse(require("fs").readFileSync(seccomp_profile_path)))
-  catch error
-    console.log error, "could not load seccom profile from #{seccomp_profile_path}"
+  try {
+    seccomp_profile_path = Path.resolve(__dirname + "/../seccomp/clsi-profile.json");
+    module.exports.clsi.docker.seccomp_profile = JSON.stringify(JSON.parse(require("fs").readFileSync(seccomp_profile_path)));
+  } catch (error) {
+    console.log(error, `could not load seccom profile from ${seccomp_profile_path}`);
+  }

-  module.exports.path.synctexBaseDir = -> "/compile"
+  module.exports.path.synctexBaseDir = () => "/compile";

-  module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"]
+  module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"];

-  module.exports.path.synctexBinHostPath = process.env["SYNCTEX_BIN_HOST_PATH"]
+  module.exports.path.synctexBinHostPath = process.env["SYNCTEX_BIN_HOST_PATH"];
+}
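The converted settings keep the same environment-variable overrides as the CoffeeScript version: each `process.env[...] || default` expression is evaluated once, when the module is required. A tiny sketch of that fallback behaviour, using the COMPILE_SIZE_LIMIT variable from the file above (the values here are purely illustrative):

    // With the variable unset, the default applies; with it set, the override wins.
    delete process.env.COMPILE_SIZE_LIMIT;
    console.log(process.env["COMPILE_SIZE_LIMIT"] || "7mb"); // "7mb"
    process.env.COMPILE_SIZE_LIMIT = "10mb";
    console.log(process.env["COMPILE_SIZE_LIMIT"] || "7mb"); // "10mb"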