prettier: convert individual decaffeinated files to Prettier format

This commit is contained in:
mserranom 2020-02-17 18:36:02 +01:00
parent 0a8e936c47
commit 572446956e
2 changed files with 183 additions and 143 deletions

View file

@@ -5,117 +5,140 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Metrics = require("metrics-sharelatex");
Metrics.initialize("track-changes");
const Settings = require("settings-sharelatex");
const logger = require("logger-sharelatex");
const TrackChangesLogger = logger.initialize("track-changes").logger;
const Metrics = require('metrics-sharelatex')
Metrics.initialize('track-changes')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const TrackChangesLogger = logger.initialize('track-changes').logger
if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
logger.initializeErrorReporting(Settings.sentry.dsn);
logger.initializeErrorReporting(Settings.sentry.dsn)
}
// log updates as truncated strings
const truncateFn = updates =>
JSON.parse(
JSON.stringify(updates, function(key, value) {
let len;
if ((typeof value === 'string') && ((len = value.length) > 80)) {
return value.substr(0,32) + `...(message of length ${len} truncated)...` + value.substr(-32);
} else {
return value;
}
})
)
;
JSON.parse(
JSON.stringify(updates, function(key, value) {
let len
if (typeof value === 'string' && (len = value.length) > 80) {
return (
value.substr(0, 32) +
`...(message of length ${len} truncated)...` +
value.substr(-32)
)
} else {
return value
}
})
)
TrackChangesLogger.addSerializers({
rawUpdate: truncateFn,
rawUpdates: truncateFn,
newUpdates: truncateFn,
lastUpdate: truncateFn
});
rawUpdate: truncateFn,
rawUpdates: truncateFn,
newUpdates: truncateFn,
lastUpdate: truncateFn
})
const Path = require("path");
const Path = require('path')
Metrics.memory.monitor(logger);
Metrics.memory.monitor(logger)
const child_process = require("child_process");
const child_process = require('child_process')
const HttpController = require("./app/js/HttpController");
const express = require("express");
const app = express();
const HttpController = require('./app/js/HttpController')
const express = require('express')
const app = express()
app.use(Metrics.http.monitor(logger));
app.use(Metrics.http.monitor(logger))
Metrics.injectMetricsRoute(app);
Metrics.injectMetricsRoute(app)
app.post("/project/:project_id/doc/:doc_id/flush", HttpController.flushDoc);
app.post('/project/:project_id/doc/:doc_id/flush', HttpController.flushDoc)
app.get("/project/:project_id/doc/:doc_id/diff", HttpController.getDiff);
app.get('/project/:project_id/doc/:doc_id/diff', HttpController.getDiff)
app.get("/project/:project_id/doc/:doc_id/check", HttpController.checkDoc);
app.get('/project/:project_id/doc/:doc_id/check', HttpController.checkDoc)
app.get("/project/:project_id/updates", HttpController.getUpdates);
app.get('/project/:project_id/updates', HttpController.getUpdates)
app.post("/project/:project_id/flush", HttpController.flushProject);
app.post('/project/:project_id/flush', HttpController.flushProject)
app.post("/project/:project_id/doc/:doc_id/version/:version/restore", HttpController.restore);
app.post(
'/project/:project_id/doc/:doc_id/version/:version/restore',
HttpController.restore
)
app.post('/project/:project_id/doc/:doc_id/push', HttpController.pushDocHistory);
app.post('/project/:project_id/doc/:doc_id/pull', HttpController.pullDocHistory);
app.post('/project/:project_id/doc/:doc_id/push', HttpController.pushDocHistory)
app.post('/project/:project_id/doc/:doc_id/pull', HttpController.pullDocHistory)
app.post('/flush/all', HttpController.flushAll);
app.post('/check/dangling', HttpController.checkDanglingUpdates);
app.post('/flush/all', HttpController.flushAll)
app.post('/check/dangling', HttpController.checkDanglingUpdates)
let packWorker = null; // use a single packing worker
let packWorker = null // use a single packing worker
app.post("/pack", function(req, res, next) {
if (packWorker != null) {
return res.send("pack already running");
} else {
logger.log("running pack");
packWorker = child_process.fork(__dirname + '/app/js/PackWorker.js',
[req.query.limit || 1000, req.query.delay || 1000, req.query.timeout || (30*60*1000)]);
packWorker.on('exit', function(code, signal) {
logger.log({code, signal}, "history auto pack exited");
return packWorker = null;
});
return res.send("pack started");
}
});
app.post('/pack', function(req, res, next) {
if (packWorker != null) {
return res.send('pack already running')
} else {
logger.log('running pack')
packWorker = child_process.fork(__dirname + '/app/js/PackWorker.js', [
req.query.limit || 1000,
req.query.delay || 1000,
req.query.timeout || 30 * 60 * 1000
])
packWorker.on('exit', function(code, signal) {
logger.log({ code, signal }, 'history auto pack exited')
return (packWorker = null)
})
return res.send('pack started')
}
})
app.get("/status", (req, res, next) => res.send("track-changes is alive"));
app.get('/status', (req, res, next) => res.send('track-changes is alive'))
app.get("/oops", function(req, res, next) {
throw new Error("dummy test error");
});
app.get('/oops', function(req, res, next) {
throw new Error('dummy test error')
})
app.get("/check_lock", HttpController.checkLock);
app.get('/check_lock', HttpController.checkLock)
app.get("/health_check", HttpController.healthCheck);
app.get('/health_check', HttpController.healthCheck)
app.use(function(error, req, res, next) {
logger.error({err: error, req}, "an internal error occured");
return res.send(500);
});
logger.error({ err: error, req }, 'an internal error occured')
return res.send(500)
})
const port = __guard__(Settings.internal != null ? Settings.internal.trackchanges : undefined, x => x.port) || 3015;
const host = __guard__(Settings.internal != null ? Settings.internal.trackchanges : undefined, x1 => x1.host) || "localhost";
const port =
__guard__(
Settings.internal != null ? Settings.internal.trackchanges : undefined,
x => x.port
) || 3015
const host =
__guard__(
Settings.internal != null ? Settings.internal.trackchanges : undefined,
x1 => x1.host
) || 'localhost'
if (!module.parent) { // Called directly
app.listen(port, host, function(error) {
if (error != null) {
return logger.error({err: error}, "could not start track-changes server");
} else {
return logger.info(`trackchanges starting up, listening on ${host}:${port}`);
}
});
if (!module.parent) {
// Called directly
app.listen(port, host, function(error) {
if (error != null) {
return logger.error(
{ err: error },
'could not start track-changes server'
)
} else {
return logger.info(
`trackchanges starting up, listening on ${host}:${port}`
)
}
})
}
module.exports = app;
module.exports = app
function __guard__(value, transform) {
return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
}
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

View file

@@ -1,69 +1,86 @@
const Path = require('path');
const TMP_DIR = process.env["TMP_PATH"] || Path.resolve(Path.join(__dirname, "../../", "tmp"));
const Path = require('path')
const TMP_DIR =
process.env.TMP_PATH || Path.resolve(Path.join(__dirname, '../../', 'tmp'))
module.exports = {
mongo: {
url: process.env['MONGO_CONNECTION_STRING'] || `mongodb://${process.env["MONGO_HOST"] || "localhost"}/sharelatex`
},
mongo: {
url:
process.env.MONGO_CONNECTION_STRING ||
`mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`
},
internal: {
trackchanges: {
port: 3015,
host: process.env["LISTEN_ADDRESS"] || "localhost"
}
},
apis: {
documentupdater: {
url: `http://${process.env["DOCUMENT_UPDATER_HOST"] || process.env["DOCUPDATER_HOST"] || "localhost"}:3003`
},
docstore: {
url: `http://${process.env["DOCSTORE_HOST"] || "localhost"}:3016`
},
web: {
url: `http://${process.env['WEB_API_HOST'] || process.env['WEB_HOST'] || "localhost"}:${process.env['WEB_API_PORT'] || process.env['WEB_PORT'] || 3000}`,
user: process.env['WEB_API_USER'] || "sharelatex",
pass: process.env['WEB_API_PASSWORD'] || "password"
}
},
redis: {
lock: {
host: process.env["REDIS_HOST"] || "localhost",
port: process.env['REDIS_PORT'] || 6379,
password: process.env["REDIS_PASSWORD"] || "",
key_schema: {
historyLock({doc_id}) { return `HistoryLock:{${doc_id}}`; },
historyIndexLock({project_id}) { return `HistoryIndexLock:{${project_id}}`; }
}
},
history: {
host: process.env["REDIS_HOST"] || "localhost",
port: process.env['REDIS_PORT'] || 6379,
password: process.env["REDIS_PASSWORD"] || "",
key_schema: {
uncompressedHistoryOps({doc_id}) { return `UncompressedHistoryOps:{${doc_id}}`; },
docsWithHistoryOps({project_id}) { return `DocsWithHistoryOps:{${project_id}}`; }
}
}
},
internal: {
trackchanges: {
port: 3015,
host: process.env.LISTEN_ADDRESS || 'localhost'
}
},
apis: {
documentupdater: {
url: `http://${process.env.DOCUMENT_UPDATER_HOST ||
process.env.DOCUPDATER_HOST ||
'localhost'}:3003`
},
docstore: {
url: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`
},
web: {
url: `http://${process.env.WEB_API_HOST ||
process.env.WEB_HOST ||
'localhost'}:${process.env.WEB_API_PORT ||
process.env.WEB_PORT ||
3000}`,
user: process.env.WEB_API_USER || 'sharelatex',
pass: process.env.WEB_API_PASSWORD || 'password'
}
},
redis: {
lock: {
host: process.env.REDIS_HOST || 'localhost',
port: process.env.REDIS_PORT || 6379,
password: process.env.REDIS_PASSWORD || '',
key_schema: {
historyLock({ doc_id }) {
return `HistoryLock:{${doc_id}}`
},
historyIndexLock({ project_id }) {
return `HistoryIndexLock:{${project_id}}`
}
}
},
history: {
host: process.env.REDIS_HOST || 'localhost',
port: process.env.REDIS_PORT || 6379,
password: process.env.REDIS_PASSWORD || '',
key_schema: {
uncompressedHistoryOps({ doc_id }) {
return `UncompressedHistoryOps:{${doc_id}}`
},
docsWithHistoryOps({ project_id }) {
return `DocsWithHistoryOps:{${project_id}}`
}
}
}
},
trackchanges: {
s3: {
key: process.env['AWS_ACCESS_KEY_ID'],
secret: process.env['AWS_SECRET_ACCESS_KEY'],
endpoint: process.env['AWS_S3_ENDPOINT'],
pathStyle: process.env['AWS_S3_PATH_STYLE'] === 'true'
},
stores: {
doc_history: process.env['AWS_BUCKET']
},
continueOnError: process.env['TRACK_CHANGES_CONTINUE_ON_ERROR'] || false
},
path: {
dumpFolder: Path.join(TMP_DIR, "dumpFolder")
},
trackchanges: {
s3: {
key: process.env.AWS_ACCESS_KEY_ID,
secret: process.env.AWS_SECRET_ACCESS_KEY,
endpoint: process.env.AWS_S3_ENDPOINT,
pathStyle: process.env.AWS_S3_PATH_STYLE === 'true'
},
stores: {
doc_history: process.env.AWS_BUCKET
},
continueOnError: process.env.TRACK_CHANGES_CONTINUE_ON_ERROR || false
},
sentry: {
dsn: process.env.SENTRY_DSN
}
};
path: {
dumpFolder: Path.join(TMP_DIR, 'dumpFolder')
},
sentry: {
dsn: process.env.SENTRY_DSN
}
}