mirror of
https://github.com/overleaf/overleaf.git
synced 2025-04-05 02:46:55 +00:00
decaffeinate: convert individual files to js
This commit is contained in:
parent
0c8873fd2e
commit
0a8e936c47
2 changed files with 154 additions and 113 deletions
|
@ -1,98 +1,121 @@
|
|||
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// Service bootstrap: metrics, settings and logging for track-changes.
const Metrics = require("metrics-sharelatex");
Metrics.initialize("track-changes");
const Settings = require("settings-sharelatex");
const logger = require("logger-sharelatex");
const TrackChangesLogger = logger.initialize("track-changes").logger;

// Enable Sentry error reporting only when a DSN is configured (DS207: shorter null check).
if (Settings.sentry && (Settings.sentry.dsn != null)) {
  logger.initializeErrorReporting(Settings.sentry.dsn);
}
|
||||
|
||||
// log updates as truncated strings
// Logger serializer: deep-copies `updates` via a JSON round-trip, shortening any
// string longer than 80 chars to its first 32 chars + a length marker + its last 32 chars.
const truncateFn = (updates) =>
  JSON.parse(
    JSON.stringify(updates, function(key, value) {
      if ((typeof value === 'string') && (value.length > 80)) {
        const len = value.length;
        return value.substr(0, 32) + `...(message of length ${len} truncated)...` + value.substr(-32);
      }
      return value;
    })
  );
|
||||
|
||||
// Truncate noisy update payloads before they reach the log output.
TrackChangesLogger.addSerializers({
  rawUpdate: truncateFn,
  rawUpdates: truncateFn,
  newUpdates: truncateFn,
  lastUpdate: truncateFn
});
|
||||
|
||||
const Path = require("path");

Metrics.memory.monitor(logger);

const child_process = require("child_process");

const HttpController = require("./app/js/HttpController");
const express = require("express");
const app = express();

// Per-request metrics middleware plus a scrapeable metrics route.
app.use(Metrics.http.monitor(logger));
Metrics.injectMetricsRoute(app);

// Document-level history endpoints.
app.post("/project/:project_id/doc/:doc_id/flush", HttpController.flushDoc);
app.get("/project/:project_id/doc/:doc_id/diff", HttpController.getDiff);
app.get("/project/:project_id/doc/:doc_id/check", HttpController.checkDoc);

// Project-level history endpoints.
app.get("/project/:project_id/updates", HttpController.getUpdates);
app.post("/project/:project_id/flush", HttpController.flushProject);
app.post("/project/:project_id/doc/:doc_id/version/:version/restore", HttpController.restore);

app.post('/project/:project_id/doc/:doc_id/push', HttpController.pushDocHistory);
app.post('/project/:project_id/doc/:doc_id/pull', HttpController.pullDocHistory);

// Maintenance endpoints.
app.post('/flush/all', HttpController.flushAll);
app.post('/check/dangling', HttpController.checkDanglingUpdates);
|
||||
|
||||
let packWorker = null; // use a single packing worker

// Kick off a background PackWorker child process unless one is already in flight.
app.post("/pack", function(req, res, next) {
  if (packWorker != null) {
    return res.send("pack already running");
  }
  logger.log("running pack");
  // limit/delay/timeout are tunable via query params; defaults 1000 / 1000ms / 30min.
  packWorker = child_process.fork(__dirname + '/app/js/PackWorker.js',
    [req.query.limit || 1000, req.query.delay || 1000, req.query.timeout || (30*60*1000)]);
  packWorker.on('exit', function(code, signal) {
    logger.log({code, signal}, "history auto pack exited");
    // Clear the slot so the next /pack request can start a fresh worker.
    return packWorker = null;
  });
  return res.send("pack started");
});
|
||||
|
||||
// Liveness / debug endpoints.
app.get("/status", (req, res, next) => res.send("track-changes is alive"));

// Deliberately throws so error handling/reporting can be exercised.
app.get("/oops", function(req, res, next) {
  throw new Error("dummy test error");
});

app.get("/check_lock", HttpController.checkLock);

app.get("/health_check", HttpController.healthCheck);
|
||||
|
||||
// Last-resort Express error handler: log the failure and reply with a 500.
app.use(function(error, req, res, next) {
  logger.error({err: error, req}, "an internal error occured");
  // NOTE(review): res.send(500) is the legacy Express status shorthand; Express 4+
  // prefers res.sendStatus(500) — left unchanged to preserve behavior.
  return res.send(500);
});
|
||||
|
||||
// DS103 resolved: plain null-safe lookups instead of the __guard__ helper.
// Falls back to the defaults whenever Settings.internal.trackchanges is absent.
const port = (Settings.internal && Settings.internal.trackchanges && Settings.internal.trackchanges.port) || 3015;
const host = (Settings.internal && Settings.internal.trackchanges && Settings.internal.trackchanges.host) || "localhost";
|
||||
|
||||
// Start the HTTP server only when this file is run directly (not require()d).
if (!module.parent) { // Called directly
  app.listen(port, host, function(error) {
    if (error != null) {
      return logger.error({err: error}, "could not start track-changes server");
    }
    return logger.info(`trackchanges starting up, listening on ${host}:${port}`);
  });
}
|
||||
|
||||
// Expose the Express app to callers that require() this module.
module.exports = app;
|
||||
|
||||
|
||||
// decaffeinate helper: apply `transform` to `value` unless it is null/undefined.
function __guard__(value, transform) {
  if (value === undefined || value === null) {
    return undefined;
  }
  return transform(value);
}
|
|
@ -1,51 +1,69 @@
|
|||
Path = require('path')
|
||||
TMP_DIR = process.env["TMP_PATH"] or Path.resolve(Path.join(__dirname, "../../", "tmp"))
|
||||
const Path = require('path');
|
||||
const TMP_DIR = process.env["TMP_PATH"] || Path.resolve(Path.join(__dirname, "../../", "tmp"));
|
||||
|
||||
module.exports =
|
||||
mongo:
|
||||
url: process.env['MONGO_CONNECTION_STRING'] or "mongodb://#{process.env["MONGO_HOST"] or "localhost"}/sharelatex"
|
||||
module.exports = {
|
||||
mongo: {
|
||||
url: process.env['MONGO_CONNECTION_STRING'] || `mongodb://${process.env["MONGO_HOST"] || "localhost"}/sharelatex`
|
||||
},
|
||||
|
||||
internal:
|
||||
trackchanges:
|
||||
port: 3015
|
||||
host: process.env["LISTEN_ADDRESS"] or "localhost"
|
||||
apis:
|
||||
documentupdater:
|
||||
url: "http://#{process.env["DOCUMENT_UPDATER_HOST"] or process.env["DOCUPDATER_HOST"] or "localhost"}:3003"
|
||||
docstore:
|
||||
url: "http://#{process.env["DOCSTORE_HOST"] or "localhost"}:3016"
|
||||
web:
|
||||
url: "http://#{process.env['WEB_API_HOST'] or process.env['WEB_HOST'] or "localhost"}:#{process.env['WEB_API_PORT'] or process.env['WEB_PORT'] or 3000}"
|
||||
user: process.env['WEB_API_USER'] or "sharelatex"
|
||||
pass: process.env['WEB_API_PASSWORD'] or "password"
|
||||
redis:
|
||||
lock:
|
||||
host: process.env["REDIS_HOST"] or "localhost"
|
||||
port: process.env['REDIS_PORT'] or 6379
|
||||
password: process.env["REDIS_PASSWORD"] or ""
|
||||
key_schema:
|
||||
historyLock: ({doc_id}) -> "HistoryLock:{#{doc_id}}"
|
||||
historyIndexLock: ({project_id}) -> "HistoryIndexLock:{#{project_id}}"
|
||||
history:
|
||||
host: process.env["REDIS_HOST"] or "localhost"
|
||||
port: process.env['REDIS_PORT'] or 6379
|
||||
password: process.env["REDIS_PASSWORD"] or ""
|
||||
key_schema:
|
||||
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:{#{doc_id}}"
|
||||
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:{#{project_id}}"
|
||||
internal: {
|
||||
trackchanges: {
|
||||
port: 3015,
|
||||
host: process.env["LISTEN_ADDRESS"] || "localhost"
|
||||
}
|
||||
},
|
||||
apis: {
|
||||
documentupdater: {
|
||||
url: `http://${process.env["DOCUMENT_UPDATER_HOST"] || process.env["DOCUPDATER_HOST"] || "localhost"}:3003`
|
||||
},
|
||||
docstore: {
|
||||
url: `http://${process.env["DOCSTORE_HOST"] || "localhost"}:3016`
|
||||
},
|
||||
web: {
|
||||
url: `http://${process.env['WEB_API_HOST'] || process.env['WEB_HOST'] || "localhost"}:${process.env['WEB_API_PORT'] || process.env['WEB_PORT'] || 3000}`,
|
||||
user: process.env['WEB_API_USER'] || "sharelatex",
|
||||
pass: process.env['WEB_API_PASSWORD'] || "password"
|
||||
}
|
||||
},
|
||||
redis: {
|
||||
lock: {
|
||||
host: process.env["REDIS_HOST"] || "localhost",
|
||||
port: process.env['REDIS_PORT'] || 6379,
|
||||
password: process.env["REDIS_PASSWORD"] || "",
|
||||
key_schema: {
|
||||
historyLock({doc_id}) { return `HistoryLock:{${doc_id}}`; },
|
||||
historyIndexLock({project_id}) { return `HistoryIndexLock:{${project_id}}`; }
|
||||
}
|
||||
},
|
||||
history: {
|
||||
host: process.env["REDIS_HOST"] || "localhost",
|
||||
port: process.env['REDIS_PORT'] || 6379,
|
||||
password: process.env["REDIS_PASSWORD"] || "",
|
||||
key_schema: {
|
||||
uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:{${doc_id}}`; },
|
||||
docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:{${project_id}}`; }
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
trackchanges:
|
||||
s3:
|
||||
key: process.env['AWS_ACCESS_KEY_ID']
|
||||
secret: process.env['AWS_SECRET_ACCESS_KEY']
|
||||
endpoint: process.env['AWS_S3_ENDPOINT']
|
||||
pathStyle: process.env['AWS_S3_PATH_STYLE'] == 'true'
|
||||
stores:
|
||||
trackchanges: {
|
||||
s3: {
|
||||
key: process.env['AWS_ACCESS_KEY_ID'],
|
||||
secret: process.env['AWS_SECRET_ACCESS_KEY'],
|
||||
endpoint: process.env['AWS_S3_ENDPOINT'],
|
||||
pathStyle: process.env['AWS_S3_PATH_STYLE'] === 'true'
|
||||
},
|
||||
stores: {
|
||||
doc_history: process.env['AWS_BUCKET']
|
||||
continueOnError: process.env['TRACK_CHANGES_CONTINUE_ON_ERROR'] or false
|
||||
},
|
||||
continueOnError: process.env['TRACK_CHANGES_CONTINUE_ON_ERROR'] || false
|
||||
},
|
||||
|
||||
path:
|
||||
path: {
|
||||
dumpFolder: Path.join(TMP_DIR, "dumpFolder")
|
||||
},
|
||||
|
||||
sentry:
|
||||
sentry: {
|
||||
dsn: process.env.SENTRY_DSN
|
||||
}
|
||||
};
|
||||
|
|
Loading…
Reference in a new issue