diff --git a/libraries/metrics/memory.js b/libraries/metrics/memory.js index aa8f667fb7..73dc18cc50 100644 --- a/libraries/metrics/memory.js +++ b/libraries/metrics/memory.js @@ -87,13 +87,13 @@ module.exports = MemoryMonitor = { Metrics.gauge('memory.gc-interval', gcInterval) // Metrics.gauge("memory.cpu-time-bucket", CpuTimeBucket) - logger.log(mem, 'process.memoryUsage()') + logger.debug(mem, 'process.memoryUsage()') if (global.gc != null && readyToGc()) { const gcTime = executeAndTime(global.gc).toFixed(2) const memAfterGc = inMegaBytes(process.memoryUsage()) const deltaMem = updateMemoryStats(memBeforeGc, memAfterGc) - logger.log( + logger.debug( { gcTime, memBeforeGc, diff --git a/libraries/metrics/mongodb.js b/libraries/metrics/mongodb.js index 130a5d07fa..bc0a910366 100644 --- a/libraries/metrics/mongodb.js +++ b/libraries/metrics/mongodb.js @@ -53,7 +53,7 @@ module.exports = { const start = new Date() return _method.call(this, dbCommand, options, function () { timer.done() - logger.log( + logger.debug( { query: dbCommand.query, query_type: type, @@ -89,7 +89,7 @@ module.exports = { const start = new Date() return _method.call(this, ns, ops, options, function () { timer.done() - logger.log( + logger.debug( { query: ops[0].q, query_type: type, diff --git a/libraries/metrics/test/unit/js/timeAsyncMethodTests.js b/libraries/metrics/test/unit/js/timeAsyncMethodTests.js index 15bd5fc194..f39d257408 100644 --- a/libraries/metrics/test/unit/js/timeAsyncMethodTests.js +++ b/libraries/metrics/test/unit/js/timeAsyncMethodTests.js @@ -130,10 +130,10 @@ describe('timeAsyncMethod', function () { describe('when a logger is supplied', function () { beforeEach(function () { - return (this.logger = { log: sinon.stub() }) + return (this.logger = { debug: sinon.stub(), log: sinon.stub() }) }) - return it('should also call logger.log', function (done) { + return it('should also call logger.debug', function (done) { this.timeAsyncMethod( this.testObject, 'nextNumber', @@ -145,7 +145,7 @@ describe('timeAsyncMethod', function () { expect(result).to.equal(3) expect(this.TimerConstructor.callCount).to.equal(1) expect(this.Timer.done.callCount).to.equal(1) - expect(this.logger.log.callCount).to.equal(1) + expect(this.logger.debug.callCount).to.equal(1) return done() }) }) diff --git a/libraries/metrics/timeAsyncMethod.js b/libraries/metrics/timeAsyncMethod.js index beaba83564..3bf0eb40d6 100644 --- a/libraries/metrics/timeAsyncMethod.js +++ b/libraries/metrics/timeAsyncMethod.js @@ -36,7 +36,7 @@ module.exports = function (obj, methodName, prefix, logger) { if (callback == null || typeof callback !== 'function') { if (logger != null) { - logger.log( + logger.debug( `[Metrics] expected wrapped method '${methodName}' to be invoked with a callback` ) } @@ -74,7 +74,7 @@ module.exports = function (obj, methodName, prefix, logger) { } } } catch (error) {} - logger.log( + logger.debug( { key, args: loggableArgs, elapsedTime }, '[Metrics] timed async method call' ) diff --git a/services/chat/app.js b/services/chat/app.js index f7d8167e61..d0f1a6dba4 100644 --- a/services/chat/app.js +++ b/services/chat/app.js @@ -16,7 +16,7 @@ if (!module.parent) { logger.fatal({ err }, `Cannot bind to ${host}:${port}. 
Exiting.`) process.exit(1) } - logger.info(`Chat starting up, listening on ${host}:${port}`) + logger.debug(`Chat starting up, listening on ${host}:${port}`) }) }) .catch(err => { diff --git a/services/chat/app/js/Features/Messages/MessageHttpController.js b/services/chat/app/js/Features/Messages/MessageHttpController.js index 9c9815ed5e..e05dacf611 100644 --- a/services/chat/app/js/Features/Messages/MessageHttpController.js +++ b/services/chat/app/js/Features/Messages/MessageHttpController.js @@ -22,7 +22,7 @@ async function sendThreadMessage(req, res) { async function getAllThreads(req, res) { const { projectId } = req.params - logger.log({ projectId }, 'getting all threads') + logger.debug({ projectId }, 'getting all threads') const rooms = await ThreadManager.findAllThreadRooms(projectId) const roomIds = rooms.map(r => r._id) const messages = await MessageManager.findAllMessagesInRooms(roomIds) @@ -33,21 +33,21 @@ async function getAllThreads(req, res) { async function resolveThread(req, res) { const { projectId, threadId } = req.params const { user_id: userId } = req.body - logger.log({ userId, projectId, threadId }, 'marking thread as resolved') + logger.debug({ userId, projectId, threadId }, 'marking thread as resolved') await ThreadManager.resolveThread(projectId, threadId, userId) res.sendStatus(204) } async function reopenThread(req, res) { const { projectId, threadId } = req.params - logger.log({ projectId, threadId }, 'reopening thread') + logger.debug({ projectId, threadId }, 'reopening thread') await ThreadManager.reopenThread(projectId, threadId) res.sendStatus(204) } async function deleteThread(req, res) { const { projectId, threadId } = req.params - logger.log({ projectId, threadId }, 'deleting thread') + logger.debug({ projectId, threadId }, 'deleting thread') const roomId = await ThreadManager.deleteThread(projectId, threadId) await MessageManager.deleteAllMessagesInRoom(roomId) res.sendStatus(204) @@ -56,7 +56,7 @@ async function deleteThread(req, res) { async function editMessage(req, res) { const { content, userId } = req.body const { projectId, threadId, messageId } = req.params - logger.log({ projectId, threadId, messageId, content }, 'editing message') + logger.debug({ projectId, threadId, messageId, content }, 'editing message') const room = await ThreadManager.findOrCreateThread(projectId, threadId) const found = await MessageManager.updateMessage( room._id, @@ -73,7 +73,7 @@ async function editMessage(req, res) { async function deleteMessage(req, res) { const { projectId, threadId, messageId } = req.params - logger.log({ projectId, threadId, messageId }, 'deleting message') + logger.debug({ projectId, threadId, messageId }, 'deleting message') const room = await ThreadManager.findOrCreateThread(projectId, threadId) await MessageManager.deleteMessage(room._id, messageId) res.sendStatus(204) @@ -81,12 +81,12 @@ async function deleteMessage(req, res) { async function destroyProject(req, res) { const { projectId } = req.params - logger.log({ projectId }, 'destroying project') + logger.debug({ projectId }, 'destroying project') const rooms = await ThreadManager.findAllThreadRoomsAndGlobalThread(projectId) const roomIds = rooms.map(r => r._id) - logger.log({ projectId, roomIds }, 'deleting all messages in rooms') + logger.debug({ projectId, roomIds }, 'deleting all messages in rooms') await MessageManager.deleteAllMessagesInRooms(roomIds) - logger.log({ projectId }, 'deleting all threads in project') + logger.debug({ projectId }, 'deleting all threads in project') 
await ThreadManager.deleteAllThreadsInProject(projectId) res.sendStatus(204) } @@ -105,7 +105,7 @@ async function _sendMessage(clientThreadId, req, res) { .status(400) .send(`Content too long (> ${MAX_MESSAGE_LENGTH} bytes)`) } - logger.log( + logger.debug( { clientThreadId, projectId, userId, content }, 'new message received' ) @@ -137,7 +137,7 @@ async function _getMessages(clientThreadId, req, res) { } else { limit = DEFAULT_MESSAGE_LIMIT } - logger.log( + logger.debug( { limit, before, projectId, clientThreadId }, 'get message request received' ) @@ -146,13 +146,13 @@ async function _getMessages(clientThreadId, req, res) { clientThreadId ) const threadObjectId = thread._id - logger.log( + logger.debug( { limit, before, projectId, clientThreadId, threadObjectId }, 'found or created thread' ) let messages = await MessageManager.getMessages(threadObjectId, limit, before) messages = MessageFormatter.formatMessagesForClientSide(messages) - logger.log({ projectId, messages }, 'got messages') + logger.debug({ projectId, messages }, 'got messages') res.status(200).send(messages) } diff --git a/services/clsi/app.js b/services/clsi/app.js index bd1e9419cf..623fa9b3f2 100644 --- a/services/clsi/app.js +++ b/services/clsi/app.js @@ -258,13 +258,13 @@ Settings.processTooOld = false if (Settings.processLifespanLimitMs) { Settings.processLifespanLimitMs += Settings.processLifespanLimitMs * (Math.random() / 10) - logger.info( + logger.debug( 'Lifespan limited to ', Date.now() + Settings.processLifespanLimitMs ) setTimeout(() => { - logger.log('shutting down, process is too old') + logger.debug('shutting down, process is too old') Settings.processTooOld = true }, Settings.processLifespanLimitMs) } @@ -276,10 +276,10 @@ function runSmokeTest() { smokeTest.lastRunSuccessful() && Date.now() - CompileController.lastSuccessfulCompile < INTERVAL / 2 ) { - logger.log('skipping smoke tests, got recent successful user compile') + logger.debug('skipping smoke tests, got recent successful user compile') return setTimeout(runSmokeTest, INTERVAL / 2) } - logger.log('running smoke tests') + logger.debug('running smoke tests') smokeTest.triggerRun(err => { if (err) logger.error({ err }, 'smoke tests failed') setTimeout(runSmokeTest, INTERVAL) @@ -300,7 +300,7 @@ app.get('/smoke_test_force', (req, res) => smokeTest.sendNewResult(res)) app.use(function (error, req, res, next) { if (error instanceof Errors.NotFoundError) { - logger.log({ err: error, url: req.url }, 'not found error') + logger.debug({ err: error, url: req.url }, 'not found error') return res.sendStatus(404) } else if (error.code === 'EPIPE') { // inspect container returns EPIPE when shutting down @@ -362,19 +362,19 @@ const loadHttpServer = express() loadHttpServer.post('/state/up', function (req, res, next) { STATE = 'up' - logger.info('getting message to set server to down') + logger.debug('getting message to set server to down') return res.sendStatus(204) }) loadHttpServer.post('/state/down', function (req, res, next) { STATE = 'down' - logger.info('getting message to set server to down') + logger.debug('getting message to set server to down') return res.sendStatus(204) }) loadHttpServer.post('/state/maint', function (req, res, next) { STATE = 'maint' - logger.info('getting message to set server to maint') + logger.debug('getting message to set server to maint') return res.sendStatus(204) }) @@ -407,7 +407,7 @@ if (!module.parent) { if (error) { logger.fatal({ error }, `Error starting CLSI on ${host}:${port}`) } else { - logger.info(`CLSI starting up, 
listening on ${host}:${port}`) + logger.debug(`CLSI starting up, listening on ${host}:${port}`) } }) @@ -415,14 +415,16 @@ if (!module.parent) { if (error != null) { throw error } - return logger.info(`Load tcp agent listening on load port ${loadTcpPort}`) + return logger.debug(`Load tcp agent listening on load port ${loadTcpPort}`) }) loadHttpServer.listen(loadHttpPort, host, function (error) { if (error != null) { throw error } - return logger.info(`Load http agent listening on load port ${loadHttpPort}`) + return logger.debug( + `Load http agent listening on load port ${loadHttpPort}` + ) }) } diff --git a/services/clsi/app/js/CommandRunner.js b/services/clsi/app/js/CommandRunner.js index 2dce56a942..61d58ddd14 100644 --- a/services/clsi/app/js/CommandRunner.js +++ b/services/clsi/app/js/CommandRunner.js @@ -14,7 +14,7 @@ if ((Settings.clsi != null ? Settings.clsi.dockerRunner : undefined) === true) { } else { commandRunnerPath = './LocalCommandRunner' } -logger.info({ commandRunnerPath }, 'selecting command runner for clsi') +logger.debug({ commandRunnerPath }, 'selecting command runner for clsi') const CommandRunner = require(commandRunnerPath) module.exports = CommandRunner diff --git a/services/clsi/app/js/CompileController.js b/services/clsi/app/js/CompileController.js index e570838051..dcf56d695b 100644 --- a/services/clsi/app/js/CompileController.js +++ b/services/clsi/app/js/CompileController.js @@ -72,7 +72,7 @@ module.exports = CompileController = { status = `validation-${error.validate}` } else if (error != null ? error.timedout : undefined) { status = 'timedout' - logger.log( + logger.debug( { err: error, project_id: request.project_id }, 'timeout running compile' ) @@ -245,7 +245,7 @@ module.exports = CompileController = { if (image && !isImageNameAllowed(image)) { return res.status(400).send('invalid image') } - logger.log({ image, file, project_id }, 'word count request') + logger.debug({ image, file, project_id }, 'word count request') return CompileManager.wordcount( project_id, diff --git a/services/clsi/app/js/CompileManager.js b/services/clsi/app/js/CompileManager.js index 1cf362cd1f..cf61f16ded 100644 --- a/services/clsi/app/js/CompileManager.js +++ b/services/clsi/app/js/CompileManager.js @@ -69,7 +69,7 @@ function doCompile(request, callback) { COMPILE_TIME_BUCKETS ) const timer = new Metrics.Timer('write-to-disk', 1, request.metricsOpts) - logger.log( + logger.debug( { projectId: request.project_id, userId: request.user_id }, 'syncing resources to disk' ) @@ -95,7 +95,7 @@ function doCompile(request, callback) { ) return callback(error) } - logger.log( + logger.debug( { projectId: request.project_id, userId: request.user_id, @@ -257,7 +257,7 @@ function doCompile(request, callback) { Metrics.gauge('load-avg', loadavg[0]) } const ts = timer.done() - logger.log( + logger.debug( { projectId: request.project_id, userId: request.user_id, @@ -498,7 +498,10 @@ function syncFromPdf(projectId, userId, page, h, v, imageName, callback) { if (error != null) { return callback(error) } - logger.log({ projectId, userId, page, h, v, stdout }, 'synctex pdf output') + logger.debug( + { projectId, userId, page, h, v, stdout }, + 'synctex pdf output' + ) callback(null, SynctexOutputParser.parseEditOutput(stdout, baseDir)) }) } @@ -561,7 +564,7 @@ function _runSynctex(projectId, userId, command, imageName, callback) { } function wordcount(projectId, userId, filename, image, callback) { - logger.log({ projectId, userId, filename, image }, 'running wordcount') + logger.debug({ 
projectId, userId, filename, image }, 'running wordcount') const filePath = `$COMPILE_DIR/${filename}` const command = [ 'texcount', @@ -607,7 +610,7 @@ function wordcount(projectId, userId, filename, image, callback) { return callback(err) } const results = _parseWordcountFromOutput(stdout) - logger.log( + logger.debug( { projectId, userId, wordcount: results }, 'word count results' ) diff --git a/services/clsi/app/js/DockerRunner.js b/services/clsi/app/js/DockerRunner.js index d876705c8d..f699ebf16e 100644 --- a/services/clsi/app/js/DockerRunner.js +++ b/services/clsi/app/js/DockerRunner.js @@ -10,7 +10,7 @@ const Path = require('path') const _ = require('lodash') const ONE_HOUR_IN_MS = 60 * 60 * 1000 -logger.info('using docker runner') +logger.debug('using docker runner') function usingSiblingContainers() { return ( @@ -36,7 +36,7 @@ const DockerRunner = { ) { if (usingSiblingContainers()) { const _newPath = Settings.path.sandboxedCompilesHostDir - logger.log( + logger.debug( { path: _newPath }, 'altering bind path for sibling containers' ) @@ -85,14 +85,14 @@ const DockerRunner = { // logOptions = _.clone(options) // logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging" - logger.log({ projectId }, 'running docker container') + logger.debug({ projectId }, 'running docker container') DockerRunner._runAndWaitForContainer( options, volumes, timeout, (error, output) => { if (error && error.statusCode === 500) { - logger.log( + logger.debug( { err: error, projectId }, 'error running container so destroying and retrying' ) @@ -118,7 +118,7 @@ const DockerRunner = { }, kill(containerId, callback) { - logger.log({ containerId }, 'sending kill signal to container') + logger.debug({ containerId }, 'sending kill signal to container') const container = dockerode.getContainer(containerId) container.kill(error => { if ( @@ -193,7 +193,7 @@ const DockerRunner = { if (options != null && options.HostConfig != null) { options.HostConfig.SecurityOpt = null } - logger.log({ exitCode, options }, 'docker container has exited') + logger.debug({ exitCode, options }, 'docker container has exited') callbackIfFinished() }) } @@ -352,7 +352,7 @@ const DockerRunner = { callback = _.once(callback) const { name } = options - logger.log({ container_name: name }, 'starting container') + logger.debug({ container_name: name }, 'starting container') const container = dockerode.getContainer(name) function createAndStartContainer() { @@ -412,7 +412,7 @@ const DockerRunner = { attachStartCallback() } - logger.log({ containerId }, 'attached to container') + logger.debug({ containerId }, 'attached to container') const MAX_OUTPUT = 1024 * 1024 // limit output to 1MB function createStringOutputStream(name) { @@ -469,13 +469,13 @@ const DockerRunner = { let timedOut = false const timeoutId = setTimeout(() => { timedOut = true - logger.log({ containerId }, 'timeout reached, killing container') + logger.debug({ containerId }, 'timeout reached, killing container') container.kill(err => { logger.warn({ err, containerId }, 'failed to kill container') }) }, timeout) - logger.log({ containerId }, 'waiting for docker container') + logger.debug({ containerId }, 'waiting for docker container') container.wait((error, res) => { if (error != null) { clearTimeout(timeoutId) @@ -483,13 +483,13 @@ const DockerRunner = { return callback(error) } if (timedOut) { - logger.log({ containerId }, 'docker container timed out') + logger.debug({ containerId }, 'docker container timed out') error = new Error('container timed out') 
error.timedout = true callback(error) } else { clearTimeout(timeoutId) - logger.log( + logger.debug( { containerId, exitCode: res.StatusCode }, 'docker container returned' ) @@ -518,7 +518,7 @@ const DockerRunner = { }, _destroyContainer(containerId, shouldForce, callback) { - logger.log({ containerId }, 'destroying docker container') + logger.debug({ containerId }, 'destroying docker container') const container = dockerode.getContainer(containerId) container.remove({ force: shouldForce === true, v: true }, error => { if (error != null && error.statusCode === 404) { @@ -531,7 +531,7 @@ const DockerRunner = { if (error != null) { logger.error({ err: error, containerId }, 'error destroying container') } else { - logger.log({ containerId }, 'destroyed container') + logger.debug({ containerId }, 'destroyed container') } callback(error) }) @@ -548,7 +548,7 @@ const DockerRunner = { const age = now - created const maxAge = DockerRunner.MAX_CONTAINER_AGE const ttl = maxAge - age - logger.log( + logger.debug( { containerName: name, created, now, age, maxAge, ttl }, 'checking whether to destroy container' ) @@ -579,7 +579,7 @@ const DockerRunner = { }, startContainerMonitor() { - logger.log( + logger.debug( { maxAge: DockerRunner.MAX_CONTAINER_AGE }, 'starting container expiry' ) diff --git a/services/clsi/app/js/DraftModeManager.js b/services/clsi/app/js/DraftModeManager.js index b421fb2e62..a3b078fc0f 100644 --- a/services/clsi/app/js/DraftModeManager.js +++ b/services/clsi/app/js/DraftModeManager.js @@ -32,7 +32,7 @@ module.exports = DraftModeManager = { return callback() } const modified_content = DraftModeManager._injectDraftOption(content) - logger.log( + logger.debug( { content: content.slice(0, 1024), // \documentclass is normally v near the top modified_content: modified_content.slice(0, 1024), diff --git a/services/clsi/app/js/LatexRunner.js b/services/clsi/app/js/LatexRunner.js index fc8670047a..68bf6a299f 100644 --- a/services/clsi/app/js/LatexRunner.js +++ b/services/clsi/app/js/LatexRunner.js @@ -51,7 +51,7 @@ module.exports = LatexRunner = { timeout = 60000 } // milliseconds - logger.log( + logger.debug( { directory, compiler, @@ -169,7 +169,7 @@ module.exports = LatexRunner = { callback = function () {} } const id = `${project_id}` - logger.log({ id }, 'killing running compile') + logger.debug({ id }, 'killing running compile') if (ProcessTable[id] == null) { logger.warn({ id }, 'no such project to kill') return callback(null) diff --git a/services/clsi/app/js/LocalCommandRunner.js b/services/clsi/app/js/LocalCommandRunner.js index f8470f7b05..d1a4e7250a 100644 --- a/services/clsi/app/js/LocalCommandRunner.js +++ b/services/clsi/app/js/LocalCommandRunner.js @@ -17,7 +17,7 @@ const { spawn } = require('child_process') const _ = require('lodash') const logger = require('@overleaf/logger') -logger.info('using standard command runner') +logger.debug('using standard command runner') module.exports = CommandRunner = { run( @@ -35,7 +35,7 @@ module.exports = CommandRunner = { command = Array.from(command).map(arg => arg.toString().replace('$COMPILE_DIR', directory) ) - logger.log({ project_id, command, directory }, 'running command') + logger.debug({ project_id, command, directory }, 'running command') logger.warn('timeouts and sandboxing are not enabled with CommandRunner') // merge environment settings @@ -69,7 +69,7 @@ module.exports = CommandRunner = { proc.on('close', function (code, signal) { let err - logger.info({ code, signal, project_id }, 'command exited') + logger.debug({ code, 
signal, project_id }, 'command exited') if (signal === 'SIGTERM') { // signal from kill method below err = new Error('terminated') diff --git a/services/clsi/app/js/OutputCacheManager.js b/services/clsi/app/js/OutputCacheManager.js index 616b4f6c8d..fa9f0050a6 100644 --- a/services/clsi/app/js/OutputCacheManager.js +++ b/services/clsi/app/js/OutputCacheManager.js @@ -464,7 +464,7 @@ module.exports = OutputCacheManager = { OutputCacheManager.ARCHIVE_SUBDIR, buildId ) - logger.log({ dir: archiveDir }, 'archiving log files for project') + logger.debug({ dir: archiveDir }, 'archiving log files for project') return fse.ensureDir(archiveDir, function (err) { if (err != null) { return callback(err) @@ -577,7 +577,7 @@ module.exports = OutputCacheManager = { const removeDir = (dir, cb) => fse.remove(Path.join(cacheRoot, dir), function (err, result) { - logger.log({ cache: cacheRoot, dir }, 'removed expired cache dir') + logger.debug({ cache: cacheRoot, dir }, 'removed expired cache dir') if (err != null) { logger.error({ err, dir }, 'cache remove error') } diff --git a/services/clsi/app/js/OutputFileFinder.js b/services/clsi/app/js/OutputFileFinder.js index 718fc1852a..c34945fcba 100644 --- a/services/clsi/app/js/OutputFileFinder.js +++ b/services/clsi/app/js/OutputFileFinder.js @@ -53,7 +53,7 @@ module.exports = OutputFileFinder = { 'f', '-print', ] - logger.log({ args }, 'running find command') + logger.debug({ args }, 'running find command') const proc = spawn('find', args) let stdout = '' diff --git a/services/clsi/app/js/OutputFileOptimiser.js b/services/clsi/app/js/OutputFileOptimiser.js index af3a460e7f..6725841529 100644 --- a/services/clsi/app/js/OutputFileOptimiser.js +++ b/services/clsi/app/js/OutputFileOptimiser.js @@ -71,7 +71,7 @@ module.exports = OutputFileOptimiser = { } const tmpOutput = dst + '.opt' const args = ['--linearize', '--newline-before-endstream', src, tmpOutput] - logger.log({ args }, 'running qpdf command') + logger.debug({ args }, 'running qpdf command') const timer = new Metrics.Timer('qpdf') const proc = spawn('qpdf', args) diff --git a/services/clsi/app/js/ProjectPersistenceManager.js b/services/clsi/app/js/ProjectPersistenceManager.js index 6798700f9f..48982314a8 100644 --- a/services/clsi/app/js/ProjectPersistenceManager.js +++ b/services/clsi/app/js/ProjectPersistenceManager.js @@ -121,7 +121,7 @@ module.exports = ProjectPersistenceManager = { if (error != null) { return callback(error) } - logger.log({ project_ids }, 'clearing expired projects') + logger.debug({ project_ids }, 'clearing expired projects') const jobs = Array.from(project_ids || []).map(project_id => ( project_id => callback => @@ -152,7 +152,7 @@ module.exports = ProjectPersistenceManager = { if (callback == null) { callback = function () {} } - logger.log({ project_id, user_id }, 'clearing project for user') + logger.debug({ project_id, user_id }, 'clearing project for user') return CompileManager.clearProject(project_id, user_id, function (error) { if (error != null) { return callback(error) @@ -173,7 +173,7 @@ module.exports = ProjectPersistenceManager = { if (callback == null) { callback = function () {} } - logger.log({ project_id }, 'clearing project from cache') + logger.debug({ project_id }, 'clearing project from cache') return UrlCache.clearProject(project_id, function (error) { if (error != null) { logger.err({ error, project_id }, 'error clearing project from cache') @@ -212,7 +212,7 @@ module.exports = ProjectPersistenceManager = { }, } -logger.log( +logger.debug( { EXPIRY_TIMEOUT: 
ProjectPersistenceManager.EXPIRY_TIMEOUT }, 'project assets kept timeout' ) diff --git a/services/clsi/app/js/ResourceStateManager.js b/services/clsi/app/js/ResourceStateManager.js index 2d92c6c4f6..dbfb3c9fc4 100644 --- a/services/clsi/app/js/ResourceStateManager.js +++ b/services/clsi/app/js/ResourceStateManager.js @@ -26,7 +26,7 @@ module.exports = { const stateFile = Path.join(basePath, this.SYNC_STATE_FILE) if (state == null) { // remove the file if no state passed in - logger.log({ state, basePath }, 'clearing sync state') + logger.debug({ state, basePath }, 'clearing sync state') fs.unlink(stateFile, function (err) { if (err && err.code !== 'ENOENT') { return callback(err) @@ -35,7 +35,7 @@ module.exports = { } }) } else { - logger.log({ state, basePath }, 'writing sync state') + logger.debug({ state, basePath }, 'writing sync state') const resourceList = resources.map(resource => resource.path) fs.writeFile( stateFile, @@ -67,7 +67,7 @@ module.exports = { const resourceList = array.slice(0, adjustedLength - 1) const oldState = array[adjustedLength - 1] const newState = `stateHash:${state}` - logger.log( + logger.debug( { state, oldState, basePath, stateMatches: newState === oldState }, 'checking sync state' ) diff --git a/services/clsi/app/js/ResourceWriter.js b/services/clsi/app/js/ResourceWriter.js index 254d5e5a00..6c897f8d8d 100644 --- a/services/clsi/app/js/ResourceWriter.js +++ b/services/clsi/app/js/ResourceWriter.js @@ -32,7 +32,7 @@ module.exports = ResourceWriter = { callback = function () {} } if (request.syncType === 'incremental') { - logger.log( + logger.debug( { project_id: request.project_id, user_id: request.user_id }, 'incremental sync' ) @@ -77,7 +77,7 @@ module.exports = ResourceWriter = { } ) } - logger.log( + logger.debug( { project_id: request.project_id, user_id: request.user_id }, 'full sync' ) @@ -160,7 +160,7 @@ module.exports = ResourceWriter = { if (err.code === 'EEXIST') { return callback() } else { - logger.log({ err, dir: basePath }, 'error creating directory') + logger.debug({ err, dir: basePath }, 'error creating directory') return callback(err) } } else { diff --git a/services/clsi/app/js/TikzManager.js b/services/clsi/app/js/TikzManager.js index 7a221d8e68..505f1776ba 100644 --- a/services/clsi/app/js/TikzManager.js +++ b/services/clsi/app/js/TikzManager.js @@ -29,7 +29,10 @@ module.exports = TikzManager = { } for (const resource of Array.from(resources)) { if (resource.path === 'output.tex') { - logger.log({ compileDir, mainFile }, 'output.tex already in resources') + logger.debug( + { compileDir, mainFile }, + 'output.tex already in resources' + ) return callback(null, false) } } @@ -55,7 +58,7 @@ module.exports = TikzManager = { : undefined) >= 0 const usesPsTool = (content != null ? 
content.indexOf('{pstool}') : undefined) >= 0 - logger.log( + logger.debug( { compileDir, mainFile, usesTikzExternalize, usesPsTool }, 'checked for packages needing main file as output.tex' ) @@ -82,7 +85,7 @@ module.exports = TikzManager = { if (error != null) { return callback(error) } - logger.log( + logger.debug( { compileDir, mainFile }, 'copied file to output.tex as project uses packages which require it' ) diff --git a/services/clsi/app/js/UrlFetcher.js b/services/clsi/app/js/UrlFetcher.js index b4ebca2c57..b02d9ee8c1 100644 --- a/services/clsi/app/js/UrlFetcher.js +++ b/services/clsi/app/js/UrlFetcher.js @@ -61,7 +61,7 @@ module.exports = UrlFetcher = { 3 * oneMinute ) - logger.log({ url, filePath }, 'started downloading url to cache') + logger.debug({ url, filePath }, 'started downloading url to cache') const urlStream = request.get({ url, timeout: oneMinute }) urlStream.pause() // stop data flowing until we are ready @@ -74,7 +74,7 @@ module.exports = UrlFetcher = { }) urlStream.on('end', () => - logger.log({ url, filePath }, 'finished downloading file into cache') + logger.debug({ url, filePath }, 'finished downloading file into cache') ) return urlStream.on('response', function (res) { @@ -97,7 +97,7 @@ module.exports = UrlFetcher = { }) fileStream.on('finish', function () { - logger.log({ url, filePath }, 'finished writing file into cache') + logger.debug({ url, filePath }, 'finished writing file into cache') fs.rename(atomicWrite, filePath, error => { if (error) { fs.unlink(atomicWrite, () => callbackOnce(error)) @@ -108,7 +108,7 @@ module.exports = UrlFetcher = { }) fileStream.on('pipe', () => - logger.log({ url, filePath }, 'piping into filestream') + logger.debug({ url, filePath }, 'piping into filestream') ) urlStream.pipe(fileStream) diff --git a/services/contacts/app.js b/services/contacts/app.js index 8867e7b268..7b5453a158 100644 --- a/services/contacts/app.js +++ b/services/contacts/app.js @@ -57,7 +57,9 @@ if (!module.parent) { logger.fatal({ err }, `Cannot bind to ${host}:${port}. 
Exiting.`) process.exit(1) } - return logger.info(`contacts starting up, listening on ${host}:${port}`) + return logger.debug( + `contacts starting up, listening on ${host}:${port}` + ) }) }) .catch(err => { diff --git a/services/contacts/app/js/HttpController.js b/services/contacts/app/js/HttpController.js index 785020650d..f1e65f59a5 100644 --- a/services/contacts/app/js/HttpController.js +++ b/services/contacts/app/js/HttpController.js @@ -23,7 +23,7 @@ module.exports = HttpController = { return } - logger.log({ user_id, contact_id }, 'adding contact') + logger.debug({ user_id, contact_id }, 'adding contact') return ContactManager.touchContact(user_id, contact_id, function (error) { if (error != null) { @@ -50,7 +50,7 @@ module.exports = HttpController = { } limit = Math.min(limit, HttpController.CONTACT_LIMIT) - logger.log({ user_id }, 'getting contacts') + logger.debug({ user_id }, 'getting contacts') return ContactManager.getContacts(user_id, function (error, contact_dict) { if (error != null) { diff --git a/services/contacts/test/setup.js b/services/contacts/test/setup.js index 42df904729..cc542536eb 100644 --- a/services/contacts/test/setup.js +++ b/services/contacts/test/setup.js @@ -2,7 +2,13 @@ const SandboxedModule = require('sandboxed-module') SandboxedModule.configure({ requires: { - '@overleaf/logger': { log() {} }, + '@overleaf/logger': { + debug() {}, + info() {}, + log() {}, + warn() {}, + error() {}, + }, '@overleaf/metrics': { timeAsyncMethod() {} }, }, globals: { Buffer, console, process }, diff --git a/services/docstore/app.js b/services/docstore/app.js index 9b318627f3..b09b6d362b 100644 --- a/services/docstore/app.js +++ b/services/docstore/app.js @@ -109,7 +109,9 @@ if (!module.parent) { logger.fatal({ err }, `Cannot bind to ${host}:${port}. 
Exiting.`) process.exit(1) } - return logger.info(`Docstore starting up, listening on ${host}:${port}`) + return logger.debug( + `Docstore starting up, listening on ${host}:${port}` + ) }) server.timeout = 120000 server.keepAliveTimeout = 5000 diff --git a/services/docstore/app/js/DocArchiveManager.js b/services/docstore/app/js/DocArchiveManager.js index 99875c0e07..5ea98f85e7 100644 --- a/services/docstore/app/js/DocArchiveManager.js +++ b/services/docstore/app/js/DocArchiveManager.js @@ -70,7 +70,7 @@ async function archiveDocById(projectId, docId) { } async function archiveDoc(projectId, doc) { - logger.log( + logger.debug( { project_id: projectId, doc_id: doc._id }, 'sending doc to persistor' ) @@ -154,7 +154,7 @@ async function getDoc(projectId, docId) { // get the doc and unarchive it to mongo async function unarchiveDoc(projectId, docId) { - logger.log({ projectId, docId }, 'getting doc from persistor') + logger.debug({ projectId, docId }, 'getting doc from persistor') const mongoDoc = await MongoManager.findDoc(projectId, docId, { inS3: 1, rev: 1, diff --git a/services/docstore/app/js/DocManager.js b/services/docstore/app/js/DocManager.js index d3d21769f4..15b1efabff 100644 --- a/services/docstore/app/js/DocManager.js +++ b/services/docstore/app/js/DocManager.js @@ -289,7 +289,10 @@ module.exports = DocManager = { if (updateRanges) { update.ranges = ranges } - logger.log({ project_id, doc_id }, 'updating doc lines and ranges') + logger.debug( + { project_id, doc_id }, + 'updating doc lines and ranges' + ) modified = true rev += 1 // rev will be incremented in mongo by MongoManager.upsertIntoDocCollection @@ -300,7 +303,7 @@ module.exports = DocManager = { cb ) } else { - logger.log( + logger.debug( { project_id, doc_id }, 'doc lines have not changed - not updating' ) @@ -310,7 +313,7 @@ module.exports = DocManager = { const updateVersionIfNeeded = function (cb) { if (updateVersion) { - logger.log( + logger.debug( { project_id, doc_id, @@ -322,7 +325,7 @@ module.exports = DocManager = { modified = true return MongoManager.setDocVersion(doc_id, version, cb) } else { - logger.log( + logger.debug( { project_id, doc_id, version }, 'doc version has not changed - not updating' ) diff --git a/services/docstore/app/js/HealthChecker.js b/services/docstore/app/js/HealthChecker.js index 93abd8ec5e..6af4339b4b 100644 --- a/services/docstore/app/js/HealthChecker.js +++ b/services/docstore/app/js/HealthChecker.js @@ -31,7 +31,7 @@ module.exports = { url, timeout: 3000, }) - logger.log({ lines, url, doc_id, project_id }, 'running health check') + logger.debug({ lines, url, doc_id, project_id }, 'running health check') const jobs = [ function (cb) { const opts = getOpts() diff --git a/services/docstore/app/js/HttpController.js b/services/docstore/app/js/HttpController.js index 55f0c1226e..4f3adf2aa4 100644 --- a/services/docstore/app/js/HttpController.js +++ b/services/docstore/app/js/HttpController.js @@ -8,12 +8,12 @@ const Settings = require('@overleaf/settings') function getDoc(req, res, next) { const { doc_id: docId, project_id: projectId } = req.params const includeDeleted = req.query.include_deleted === 'true' - logger.log({ projectId, docId }, 'getting doc') + logger.debug({ projectId, docId }, 'getting doc') DocManager.getFullDoc(projectId, docId, function (error, doc) { if (error) { return next(error) } - logger.log({ docId, projectId }, 'got doc') + logger.debug({ docId, projectId }, 'got doc') if (doc == null) { res.sendStatus(404) } else if (doc.deleted && !includeDeleted) { @@ -26,7 
+26,7 @@ function getDoc(req, res, next) { function peekDoc(req, res, next) { const { doc_id: docId, project_id: projectId } = req.params - logger.log({ projectId, docId }, 'peeking doc') + logger.debug({ projectId, docId }, 'peeking doc') DocManager.peekDoc(projectId, docId, function (error, doc) { if (error) { return next(error) @@ -52,7 +52,7 @@ function isDocDeleted(req, res, next) { function getRawDoc(req, res, next) { const { doc_id: docId, project_id: projectId } = req.params - logger.log({ projectId, docId }, 'getting raw doc') + logger.debug({ projectId, docId }, 'getting raw doc') DocManager.getDocLines(projectId, docId, function (error, doc) { if (error) { return next(error) @@ -68,7 +68,7 @@ function getRawDoc(req, res, next) { function getAllDocs(req, res, next) { const { project_id: projectId } = req.params - logger.log({ projectId }, 'getting all docs') + logger.debug({ projectId }, 'getting all docs') DocManager.getAllNonDeletedDocs( projectId, { lines: true, rev: true }, @@ -86,7 +86,7 @@ function getAllDocs(req, res, next) { function getAllDeletedDocs(req, res, next) { const { project_id: projectId } = req.params - logger.log({ projectId }, 'getting all deleted docs') + logger.debug({ projectId }, 'getting all deleted docs') DocManager.getAllDeletedDocs( projectId, { name: true, deletedAt: true }, @@ -107,7 +107,7 @@ function getAllDeletedDocs(req, res, next) { function getAllRanges(req, res, next) { const { project_id: projectId } = req.params - logger.log({ projectId }, 'getting all ranges') + logger.debug({ projectId }, 'getting all ranges') DocManager.getAllNonDeletedDocs( projectId, { ranges: true }, @@ -154,7 +154,7 @@ function updateDoc(req, res, next) { return } - logger.log({ projectId, docId }, 'got http request to update doc') + logger.debug({ projectId, docId }, 'got http request to update doc') DocManager.updateDoc( projectId, docId, @@ -175,7 +175,7 @@ function updateDoc(req, res, next) { function patchDoc(req, res, next) { const { doc_id: docId, project_id: projectId } = req.params - logger.log({ projectId, docId }, 'patching doc') + logger.debug({ projectId, docId }, 'patching doc') const allowedFields = ['deleted', 'deletedAt', 'name'] const meta = {} @@ -226,7 +226,7 @@ function _buildDocsArrayView(projectId, docs) { function archiveAllDocs(req, res, next) { const { project_id: projectId } = req.params - logger.log({ projectId }, 'archiving all docs') + logger.debug({ projectId }, 'archiving all docs') DocArchive.archiveAllDocs(projectId, function (error) { if (error) { return next(error) @@ -237,7 +237,7 @@ function archiveAllDocs(req, res, next) { function archiveDoc(req, res, next) { const { doc_id: docId, project_id: projectId } = req.params - logger.log({ projectId, docId }, 'archiving a doc') + logger.debug({ projectId, docId }, 'archiving a doc') DocArchive.archiveDocById(projectId, docId, function (error) { if (error) { return next(error) @@ -248,7 +248,7 @@ function archiveDoc(req, res, next) { function unArchiveAllDocs(req, res, next) { const { project_id: projectId } = req.params - logger.log({ projectId }, 'unarchiving all docs') + logger.debug({ projectId }, 'unarchiving all docs') DocArchive.unArchiveAllDocs(projectId, function (err) { if (err) { if (err instanceof Errors.DocRevValueError) { @@ -263,7 +263,7 @@ function unArchiveAllDocs(req, res, next) { function destroyProject(req, res, next) { const { project_id: projectId } = req.params - logger.log({ projectId }, 'destroying all docs') + logger.debug({ projectId }, 'destroying all 
docs') DocArchive.destroyProject(projectId, function (error) { if (error) { return next(error) diff --git a/services/docstore/test/setup.js b/services/docstore/test/setup.js index aadb0f3a41..9fef936003 100644 --- a/services/docstore/test/setup.js +++ b/services/docstore/test/setup.js @@ -15,7 +15,9 @@ chai.use(chaiAsPromised) const sandbox = sinon.createSandbox() const stubs = { logger: { + debug: sandbox.stub(), log: sandbox.stub(), + info: sandbox.stub(), warn: sandbox.stub(), err: sandbox.stub(), error: sandbox.stub(), diff --git a/services/filestore/app.js b/services/filestore/app.js index fc89743cf9..48cbd1071d 100644 --- a/services/filestore/app.js +++ b/services/filestore/app.js @@ -145,7 +145,7 @@ if (!module.parent) { logger.error('Error starting Filestore', error) throw error } - logger.info(`Filestore starting up, listening on ${host}:${port}`) + logger.debug(`Filestore starting up, listening on ${host}:${port}`) }) } diff --git a/services/notifications/app.js b/services/notifications/app.js index eb27a575e1..18de1616e4 100644 --- a/services/notifications/app.js +++ b/services/notifications/app.js @@ -68,7 +68,7 @@ mongodb .waitForDb() .then(() => { app.listen(port, host, () => - logger.info(`notifications starting up, listening on ${host}:${port}`) + logger.debug(`notifications starting up, listening on ${host}:${port}`) ) }) .catch(err => { diff --git a/services/notifications/app/js/HealthCheckController.js b/services/notifications/app/js/HealthCheckController.js index 21b3d5b67e..d7c97df40d 100644 --- a/services/notifications/app/js/HealthCheckController.js +++ b/services/notifications/app/js/HealthCheckController.js @@ -29,7 +29,7 @@ module.exports = { url: `http://localhost:${port}/user/${user_id}${endPath}`, timeout: 5000, }) - logger.log( + logger.debug( { user_id, opts: getOpts(), key: notification_key, user_id }, 'Health Check: running' ) @@ -82,7 +82,7 @@ module.exports = { const notification_id = body[1][0]._id notification_key = body[1][0].key let opts = getOpts(`/notification/${notification_id}`) - logger.log( + logger.debug( { notification_id, notification_key }, 'Health Check: doing cleanup' ) diff --git a/services/notifications/app/js/NotificationsController.js b/services/notifications/app/js/NotificationsController.js index 6c3a59bea7..706c03525b 100644 --- a/services/notifications/app/js/NotificationsController.js +++ b/services/notifications/app/js/NotificationsController.js @@ -15,7 +15,7 @@ const metrics = require('@overleaf/metrics') module.exports = { getUserNotifications(req, res, next) { - logger.log( + logger.debug( { user_id: req.params.user_id }, 'getting user unread notifications' ) @@ -30,7 +30,7 @@ module.exports = { }, addNotification(req, res) { - logger.log( + logger.debug( { user_id: req.params.user_id, notification: req.body }, 'adding notification' ) @@ -49,7 +49,7 @@ module.exports = { }, removeNotificationId(req, res, next) { - logger.log( + logger.debug( { user_id: req.params.user_id, notification_id: req.params.notification_id, @@ -68,7 +68,7 @@ module.exports = { }, removeNotificationKey(req, res, next) { - logger.log( + logger.debug( { user_id: req.params.user_id, notification_key: req.body.key }, 'mark key notification as read' ) @@ -85,7 +85,7 @@ module.exports = { removeNotificationByKeyOnly(req, res, next) { const notification_key = req.params.key - logger.log({ notification_key }, 'mark notification as read by key only') + logger.debug({ notification_key }, 'mark notification as read by key only') 
metrics.inc('removeNotificationKey') return Notifications.removeNotificationByKeyOnly(notification_key, err => { if (err) return next(err) diff --git a/services/spelling/app.js b/services/spelling/app.js index 8dbcd2a30f..70364afefe 100644 --- a/services/spelling/app.js +++ b/services/spelling/app.js @@ -44,7 +44,7 @@ if (!module.parent) { if (error != null) { throw error } - return logger.info(`spelling starting up, listening on ${host}:${port}`) + return logger.debug(`spelling starting up, listening on ${host}:${port}`) }) } diff --git a/services/spelling/app/js/ASpell.js b/services/spelling/app/js/ASpell.js index 658111067a..69926bc83a 100644 --- a/services/spelling/app/js/ASpell.js +++ b/services/spelling/app/js/ASpell.js @@ -28,7 +28,7 @@ try { const oldCache = fs.readFileSync(cacheFsPath) cache.load(JSON.parse(oldCache)) } catch (error) { - logger.log( + logger.debug( OError.tag(error, 'could not load the cache file', { cacheFsPath }) ) } @@ -38,14 +38,14 @@ const cacheDump = setInterval(function () { const dump = JSON.stringify(cache.dump()) return fs.writeFile(cacheFsPathTmp, dump, function (err) { if (err != null) { - logger.log(OError.tag(err, 'error writing cache file')) + logger.debug(OError.tag(err, 'error writing cache file')) fs.unlink(cacheFsPathTmp, () => {}) } else { fs.rename(cacheFsPathTmp, cacheFsPath, err => { if (err) { logger.error(OError.tag(err, 'error renaming cache file')) } else { - logger.log({ len: dump.length, cacheFsPath }, 'wrote cache file') + logger.debug({ len: dump.length, cacheFsPath }, 'wrote cache file') } }) } @@ -96,7 +96,7 @@ class ASpellRunner { cache.set(k, v) } - logger.info( + logger.debug( { hits, total: words.length, diff --git a/services/spelling/app/js/ASpellWorker.js b/services/spelling/app/js/ASpellWorker.js index a5a700508c..86ea15c84f 100644 --- a/services/spelling/app/js/ASpellWorker.js +++ b/services/spelling/app/js/ASpellWorker.js @@ -27,14 +27,14 @@ class ASpellWorker { '-d', language, ]) - logger.info( + logger.debug( { process: this.pipe.pid, lang: this.language }, 'starting new aspell worker' ) metrics.inc('aspellWorker', 1, { status: 'start', method: this.language }) this.pipe.on('exit', () => { this.state = 'killed' - logger.info( + logger.debug( { process: this.pipe.pid, lang: this.language }, 'aspell worker has exited' ) @@ -201,14 +201,14 @@ class ASpellWorker { } shutdown(reason) { - logger.info({ process: this.pipe.pid, reason }, 'shutting down') + logger.debug({ process: this.pipe.pid, reason }, 'shutting down') this.state = 'closing' this.closeReason = reason return this.pipe.stdin.end() } kill(reason) { - logger.info({ process: this.pipe.pid, reason }, 'killing') + logger.debug({ process: this.pipe.pid, reason }, 'killing') this.closeReason = reason if (this.state === 'killed') { return diff --git a/services/spelling/app/js/ASpellWorkerPool.js b/services/spelling/app/js/ASpellWorkerPool.js index c99ba0f3ef..cc08f0cef4 100644 --- a/services/spelling/app/js/ASpellWorkerPool.js +++ b/services/spelling/app/js/ASpellWorkerPool.js @@ -28,7 +28,7 @@ class ASpellWorkerPool { create(language) { if (this.PROCESS_POOL.length >= this.MAX_WORKERS) { - logger.log( + logger.debug( { maxworkers: this.MAX_WORKERS }, 'maximum number of workers already running' ) @@ -44,7 +44,7 @@ class ASpellWorkerPool { clearTimeout(worker.idleTimer) worker.idleTimer = null } - logger.info( + logger.debug( { process: worker.pipe.pid, lang: language }, 'removing aspell worker from pool' ) diff --git a/services/spelling/app/js/HealthCheckController.js 
b/services/spelling/app/js/HealthCheckController.js index 40513c2eb2..453184a518 100644 --- a/services/spelling/app/js/HealthCheckController.js +++ b/services/spelling/app/js/HealthCheckController.js @@ -26,7 +26,7 @@ module.exports = { : 0 if (numberOfSuggestions > 10) { - logger.log('health check passed') + logger.debug('health check passed') res.sendStatus(200) } else { logger.err( diff --git a/services/spelling/app/js/SpellingAPIController.js b/services/spelling/app/js/SpellingAPIController.js index 77d8f52105..c71301ec49 100644 --- a/services/spelling/app/js/SpellingAPIController.js +++ b/services/spelling/app/js/SpellingAPIController.js @@ -14,7 +14,7 @@ module.exports = { check(req, res) { metrics.inc('spelling-check', 0.1) const { token, wordCount } = extractCheckRequestData(req) - logger.info({ token, wordCount }, 'running check') + logger.debug({ token, wordCount }, 'running check') SpellingAPIManager.runRequest(token, req.body, function (error, result) { if (error != null) { logger.error( diff --git a/services/track-changes/app.js b/services/track-changes/app.js index e0f02c2622..5be3447040 100644 --- a/services/track-changes/app.js +++ b/services/track-changes/app.js @@ -87,7 +87,7 @@ app.post('/pack', function (req, res, next) { if (packWorker != null) { return res.send('pack already running') } else { - logger.log('running pack') + logger.debug('running pack') packWorker = childProcess.fork( Path.join(__dirname, '/app/js/PackWorker.js'), [ @@ -97,7 +97,7 @@ app.post('/pack', function (req, res, next) { ] ) packWorker.on('exit', function (code, signal) { - logger.log({ code, signal }, 'history auto pack exited') + logger.debug({ code, signal }, 'history auto pack exited') return (packWorker = null) }) return res.send('pack started') @@ -142,7 +142,7 @@ if (!module.parent) { 'could not start track-changes server' ) } else { - return logger.info( + return logger.debug( `trackchanges starting up, listening on ${host}:${port}` ) } diff --git a/services/track-changes/app/js/DiffManager.js b/services/track-changes/app/js/DiffManager.js index 40e3cb9a3e..a1d5fd8b9d 100644 --- a/services/track-changes/app/js/DiffManager.js +++ b/services/track-changes/app/js/DiffManager.js @@ -127,7 +127,7 @@ module.exports = DiffManager = { if (callback == null) { callback = function () {} } - logger.log( + logger.debug( { project_id, doc_id, version }, 'getting document before version' ) @@ -162,7 +162,7 @@ module.exports = DiffManager = { return callback(error) } - logger.log( + logger.debug( { docVersion: version, lastUpdateVersion: lastUpdate != null ? 
lastUpdate.v : undefined, diff --git a/services/track-changes/app/js/DocstoreManager.js b/services/track-changes/app/js/DocstoreManager.js index 5235cb7372..99337ff29f 100644 --- a/services/track-changes/app/js/DocstoreManager.js +++ b/services/track-changes/app/js/DocstoreManager.js @@ -5,7 +5,7 @@ const Errors = require('./Errors') function peekDocument(projectId, docId, callback) { const url = `${Settings.apis.docstore.url}/project/${projectId}/doc/${docId}/peek` - logger.log( + logger.debug( { project_id: projectId, doc_id: docId }, 'getting doc from docstore' ) @@ -19,7 +19,7 @@ function peekDocument(projectId, docId, callback) { } catch (error) { return callback(error) } - logger.log( + logger.debug( { project_id: projectId, doc_id: docId, version: body.version }, 'got doc from docstore' ) diff --git a/services/track-changes/app/js/DocumentUpdaterManager.js b/services/track-changes/app/js/DocumentUpdaterManager.js index 50d9716263..24ccf96304 100644 --- a/services/track-changes/app/js/DocumentUpdaterManager.js +++ b/services/track-changes/app/js/DocumentUpdaterManager.js @@ -21,7 +21,7 @@ module.exports = DocumentUpdaterManager = { callback = function () {} } - logger.log({ project_id, doc_id }, 'getting doc from document updater') + logger.debug({ project_id, doc_id }, 'getting doc from document updater') return request.get(url, function (error, res, body) { if (error != null) { return callback(error) @@ -33,7 +33,7 @@ module.exports = DocumentUpdaterManager = { error = error1 return callback(error) } - logger.log( + logger.debug( { project_id, doc_id, version: body.version }, 'got doc from document updater' ) @@ -66,7 +66,7 @@ module.exports = DocumentUpdaterManager = { callback = function () {} } const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}` - logger.log({ project_id, doc_id }, 'setting doc in document updater') + logger.debug({ project_id, doc_id }, 'setting doc in document updater') return request.post( { url, diff --git a/services/track-changes/app/js/HealthChecker.js b/services/track-changes/app/js/HealthChecker.js index 9459a1a8f2..0a558674ef 100644 --- a/services/track-changes/app/js/HealthChecker.js +++ b/services/track-changes/app/js/HealthChecker.js @@ -21,7 +21,7 @@ module.exports = { check(callback) { const project_id = ObjectId(settings.trackchanges.healthCheck.project_id) const url = `http://localhost:${port}/project/${project_id}` - logger.log({ project_id }, 'running health check') + logger.debug({ project_id }, 'running health check') const jobs = [ cb => request.get( diff --git a/services/track-changes/app/js/HttpController.js b/services/track-changes/app/js/HttpController.js index 5194d7bcd7..7df4224ed7 100644 --- a/services/track-changes/app/js/HttpController.js +++ b/services/track-changes/app/js/HttpController.js @@ -30,7 +30,7 @@ module.exports = HttpController = { } const { doc_id } = req.params const { project_id } = req.params - logger.log({ project_id, doc_id }, 'compressing doc history') + logger.debug({ project_id, doc_id }, 'compressing doc history') return UpdatesManager.processUncompressedUpdatesWithLock( project_id, doc_id, @@ -48,7 +48,7 @@ module.exports = HttpController = { next = function () {} } const { project_id } = req.params - logger.log({ project_id }, 'compressing project history') + logger.debug({ project_id }, 'compressing project history') return UpdatesManager.processUncompressedUpdatesForProject( project_id, function (error) { @@ -66,7 +66,7 @@ module.exports = HttpController = { next = 
function () {} } const limit = req.query.limit != null ? parseInt(req.query.limit, 10) : -1 - logger.log({ limit }, 'flushing all projects') + logger.debug({ limit }, 'flushing all projects') return UpdatesManager.flushAll(limit, function (error, result) { if (error != null) { return next(error) @@ -78,7 +78,7 @@ module.exports = HttpController = { .status(200) .send(`${status}\nwould flush:\n${all.join('\n')}\n`) } else if (failed.length > 0) { - logger.log({ failed, succeeded }, 'error flushing projects') + logger.debug({ failed, succeeded }, 'error flushing projects') return res .status(500) .send(`${status}\nfailed to flush:\n${failed.join('\n')}\n`) @@ -96,13 +96,13 @@ module.exports = HttpController = { if (next == null) { next = function () {} } - logger.log('checking dangling updates') + logger.debug('checking dangling updates') return UpdatesManager.getDanglingUpdates(function (error, result) { if (error != null) { return next(error) } if (result.length > 0) { - logger.log({ dangling: result }, 'found dangling updates') + logger.debug({ dangling: result }, 'found dangling updates') return res.status(500).send(`dangling updates:\n${result.join('\n')}\n`) } else { return res.status(200).send('no dangling updates found\n') @@ -116,7 +116,7 @@ module.exports = HttpController = { } const { doc_id } = req.params const { project_id } = req.params - logger.log({ project_id, doc_id }, 'checking doc history') + logger.debug({ project_id, doc_id }, 'checking doc history') return DiffManager.getDocumentBeforeVersion( project_id, doc_id, @@ -161,7 +161,7 @@ module.exports = HttpController = { to = null } - logger.log({ project_id, doc_id, from, to }, 'getting diff') + logger.debug({ project_id, doc_id, from, to }, 'getting diff') return DiffManager.getDiff( project_id, doc_id, @@ -207,7 +207,7 @@ module.exports = HttpController = { zipProject(req, res, next) { const { project_id: projectId } = req.params - logger.log({ projectId }, 'exporting project history as zip file') + logger.debug({ projectId }, 'exporting project history as zip file') ZipManager.makeTempDirectory((err, tmpdir) => { if (err) { return next(err) @@ -234,7 +234,7 @@ module.exports = HttpController = { // - multiple updates form a pack // Flush updates per pack onto the wire. 
const { project_id } = req.params - logger.log({ project_id }, 'exporting project history') + logger.debug({ project_id }, 'exporting project history') UpdatesManager.exportProject( project_id, function (err, { updates, userIds }, confirmWrite) { @@ -317,7 +317,7 @@ module.exports = HttpController = { } const { project_id } = req.params const { doc_id } = req.params - logger.log({ project_id, doc_id }, 'pushing all finalised changes to s3') + logger.debug({ project_id, doc_id }, 'pushing all finalised changes to s3') return PackManager.pushOldPacks(project_id, doc_id, function (error) { if (error != null) { return next(error) @@ -332,7 +332,7 @@ module.exports = HttpController = { } const { project_id } = req.params const { doc_id } = req.params - logger.log({ project_id, doc_id }, 'pulling all packs from s3') + logger.debug({ project_id, doc_id }, 'pulling all packs from s3') return PackManager.pullOldPacks(project_id, doc_id, function (error) { if (error != null) { return next(error) diff --git a/services/track-changes/app/js/MongoAWS.js b/services/track-changes/app/js/MongoAWS.js index 75ba2d4fd4..9e905d05b5 100644 --- a/services/track-changes/app/js/MongoAWS.js +++ b/services/track-changes/app/js/MongoAWS.js @@ -64,7 +64,7 @@ module.exports = MongoAWS = { return callback(new Error('invalid pack id')) } - logger.log({ project_id, doc_id, pack_id }, 'uploading data to s3') + logger.debug({ project_id, doc_id, pack_id }, 'uploading data to s3') const upload = createStream(S3S.WriteStream, project_id, doc_id, pack_id) @@ -85,7 +85,7 @@ module.exports = MongoAWS = { return callback(error) } return zlib.gzip(uncompressedData, function (err, buf) { - logger.log( + logger.debug( { project_id, doc_id, @@ -101,7 +101,10 @@ module.exports = MongoAWS = { upload.on('error', err => callback(err)) upload.on('finish', function () { Metrics.inc('archive-pack') - logger.log({ project_id, doc_id, pack_id }, 'upload to s3 completed') + logger.debug( + { project_id, doc_id, pack_id }, + 'upload to s3 completed' + ) return callback(null) }) upload.write(buf) @@ -129,7 +132,7 @@ module.exports = MongoAWS = { return callback(new Error('invalid pack id')) } - logger.log({ project_id, doc_id, pack_id }, 'downloading data from s3') + logger.debug({ project_id, doc_id, pack_id }, 'downloading data from s3') const download = createStream(S3S.ReadStream, project_id, doc_id, pack_id) @@ -140,7 +143,7 @@ module.exports = MongoAWS = { const gunzip = zlib.createGunzip() gunzip.setEncoding('utf8') gunzip.on('error', function (err) { - logger.log( + logger.debug( { project_id, doc_id, pack_id, err }, 'error uncompressing gzip stream' ) @@ -152,7 +155,10 @@ module.exports = MongoAWS = { outputStream.on('error', err => callback(err)) outputStream.on('end', function () { let object - logger.log({ project_id, doc_id, pack_id }, 'download from s3 completed') + logger.debug( + { project_id, doc_id, pack_id }, + 'download from s3 completed' + ) try { object = JSON.parse(parts.join('')) } catch (e) { @@ -186,7 +192,10 @@ module.exports = MongoAWS = { Metrics.inc('unarchive-pack') // allow the object to expire, we can always retrieve it again object.expiresAt = new Date(Date.now() + 7 * DAYS) - logger.log({ project_id, doc_id, pack_id }, 'inserting object from s3') + logger.debug( + { project_id, doc_id, pack_id }, + 'inserting object from s3' + ) return db.docHistory.insertOne(object, (err, confirmation) => { if (err) return callback(err) object._id = confirmation.insertedId diff --git 
a/services/track-changes/app/js/PackManager.js b/services/track-changes/app/js/PackManager.js index bb336e4525..ebe979deca 100644 --- a/services/track-changes/app/js/PackManager.js +++ b/services/track-changes/app/js/PackManager.js @@ -217,7 +217,7 @@ module.exports = PackManager = { newPack.expiresAt = new Date(Date.now() + 7 * DAYS) newPack.last_checked = new Date(Date.now() + 30 * DAYS) // never check temporary packs } - logger.log( + logger.debug( { project_id, doc_id, newUpdates }, 'inserting updates into new pack' ) @@ -271,7 +271,7 @@ module.exports = PackManager = { if (lastUpdate.expiresAt && temporary) { update.$set.expiresAt = new Date(Date.now() + 7 * DAYS) } - logger.log( + logger.debug( { project_id, doc_id, lastUpdate, newUpdates }, 'appending updates to existing pack' ) @@ -402,7 +402,7 @@ module.exports = PackManager = { pack._id.toString() ) const packIdsToFetch = _.difference(allPackIds, loadedPackIds) - logger.log( + logger.debug( { project_id, doc_id, loadedPackIds, allPackIds, packIdsToFetch }, 'analysed packs' ) @@ -418,7 +418,7 @@ module.exports = PackManager = { if (err != null) { return callback(err) } - logger.log({ project_id, doc_id }, 'done unarchiving') + logger.debug({ project_id, doc_id }, 'done unarchiving') return callback() } ) @@ -663,7 +663,7 @@ module.exports = PackManager = { if (err != null) { return callback(err) } - logger.log( + logger.debug( { project_id, doc_id, newPacks }, 'added new packs to index' ) @@ -769,7 +769,7 @@ module.exports = PackManager = { return result1 })() if (newPacks.length) { - logger.log( + logger.debug( { project_id, doc_id, n: newPacks.length }, 'found new packs' ) @@ -959,7 +959,7 @@ module.exports = PackManager = { return callback(err) } ) - logger.log({ project_id, doc_id }, 'processing old packs') + logger.debug({ project_id, doc_id }, 'processing old packs') return db.docHistory.findOne({ _id: pack_id }, function (err, pack) { if (err != null) { return markAsChecked(err) @@ -998,7 +998,7 @@ module.exports = PackManager = { ? 
unarchivedPacks.length : undefined) ) { - logger.log( + logger.debug( { project_id, doc_id }, 'no packs need archiving' ) @@ -1012,7 +1012,7 @@ module.exports = PackManager = { if (err != null) { return markAsChecked(err) } - logger.log({ project_id, doc_id }, 'done processing') + logger.debug({ project_id, doc_id }, 'done processing') return markAsChecked() } ) @@ -1031,13 +1031,16 @@ module.exports = PackManager = { const age = (Date.now() - pack.meta.end_ts) / DAYS if (age < 30) { // always keep if less than 1 month old - logger.log({ project_id, doc_id, pack_id, age }, 'less than 30 days old') + logger.debug( + { project_id, doc_id, pack_id, age }, + 'less than 30 days old' + ) return callback() } // compute an archiving threshold which decreases for each month of age const archive_threshold = 30 / age if (sz > archive_threshold || n > archive_threshold || age > 90) { - logger.log( + logger.debug( { project_id, doc_id, pack_id, age, archive_threshold, sz, n }, 'meets archive threshold' ) @@ -1048,7 +1051,7 @@ module.exports = PackManager = { callback ) } else { - logger.log( + logger.debug( { project_id, doc_id, pack_id, age, archive_threshold, sz, n }, 'does not meet archive threshold' ) @@ -1071,7 +1074,7 @@ module.exports = PackManager = { }, _markPackAsFinalised(project_id, doc_id, pack_id, callback) { - logger.log({ project_id, doc_id, pack_id }, 'marking pack as finalised') + logger.debug({ project_id, doc_id, pack_id }, 'marking pack as finalised') return db.docHistory.updateOne( { _id: pack_id }, { $set: { finalised: true } }, @@ -1080,7 +1083,7 @@ module.exports = PackManager = { }, updateIndexIfNeeded(project_id, doc_id, callback) { - logger.log({ project_id, doc_id }, 'archiving old packs') + logger.debug({ project_id, doc_id }, 'archiving old packs') return PackManager.getIndexWithKeys(doc_id, function (err, index) { if (err != null) { return callback(err) @@ -1094,7 +1097,7 @@ module.exports = PackManager = { }, markPackAsChecked(project_id, doc_id, pack_id, callback) { - logger.log({ project_id, doc_id, pack_id }, 'marking pack as checked') + logger.debug({ project_id, doc_id, pack_id }, 'marking pack as checked') return db.docHistory.updateOne( { _id: pack_id }, { $currentDate: { last_checked: true } }, @@ -1119,7 +1122,7 @@ module.exports = PackManager = { return result })() if (unArchivedPacks.length) { - logger.log( + logger.debug( { project_id, doc_id, n: unArchivedPacks.length }, 'find unarchived packs' ) @@ -1131,7 +1134,7 @@ module.exports = PackManager = { // Archive locking flags checkArchiveNotInProgress(project_id, doc_id, pack_id, callback) { - logger.log( + logger.debug( { project_id, doc_id, pack_id }, 'checking if archive in progress' ) @@ -1157,7 +1160,7 @@ module.exports = PackManager = { }, markPackAsArchiveInProgress(project_id, doc_id, pack_id, callback) { - logger.log( + logger.debug( { project_id, doc_id }, 'marking pack as archive in progress status' ) @@ -1175,7 +1178,7 @@ module.exports = PackManager = { if (!result.value) { return callback(new Error('archive is already in progress')) } - logger.log( + logger.debug( { project_id, doc_id, pack_id }, 'marked as archive in progress' ) @@ -1185,7 +1188,7 @@ module.exports = PackManager = { }, clearPackAsArchiveInProgress(project_id, doc_id, pack_id, callback) { - logger.log( + logger.debug( { project_id, doc_id, pack_id }, 'clearing as archive in progress' ) @@ -1200,7 +1203,7 @@ module.exports = PackManager = { }, markPackAsArchived(project_id, doc_id, pack_id, callback) { - logger.log({ project_id, 
doc_id, pack_id }, 'marking pack as archived') + logger.debug({ project_id, doc_id, pack_id }, 'marking pack as archived') return db.docHistoryIndex.findOneAndUpdate( { _id: ObjectId(doc_id.toString()), @@ -1215,7 +1218,7 @@ module.exports = PackManager = { if (!result.value) { return callback(new Error('archive is not marked as progress')) } - logger.log({ project_id, doc_id, pack_id }, 'marked as archived') + logger.debug({ project_id, doc_id, pack_id }, 'marked as archived') return callback() } ) @@ -1229,7 +1232,7 @@ module.exports = PackManager = { if (err) { return callback(err) } - logger.log({ project_id, doc_id, pack_id }, 'set expiry on pack') + logger.debug({ project_id, doc_id, pack_id }, 'set expiry on pack') return callback() } ) diff --git a/services/track-changes/app/js/PackWorker.js b/services/track-changes/app/js/PackWorker.js index 45f2efa82e..a726c7d9c8 100644 --- a/services/track-changes/app/js/PackWorker.js +++ b/services/track-changes/app/js/PackWorker.js @@ -64,7 +64,7 @@ if (!source.match(/^[0-9]+$/)) { let shutDownRequested = false const shutDownTimer = setTimeout(function () { - logger.log('pack timed out, requesting shutdown') + logger.debug('pack timed out, requesting shutdown') // start the shutdown on the next pack shutDownRequested = true // do a hard shutdown after a further 5 minutes @@ -75,20 +75,20 @@ const shutDownTimer = setTimeout(function () { return hardTimeout.unref() }, TIMEOUT) -logger.log( +logger.debug( `checking for updates, limit=${LIMIT}, delay=${DOCUMENT_PACK_DELAY}, timeout=${TIMEOUT}` ) const finish = function () { if (shutDownTimer != null) { - logger.log('cancelling timeout') + logger.debug('cancelling timeout') clearTimeout(shutDownTimer) } - logger.log('closing db') + logger.debug('closing db') callbackify(closeDb)(function () { - logger.log('closing LockManager Redis Connection') + logger.debug('closing LockManager Redis Connection') return LockManager.close(function () { - logger.log( + logger.debug( { processedCount: COUNT, allCount: TOTAL }, 'ready to exit from pack archive worker' ) @@ -101,7 +101,7 @@ const finish = function () { }) } -process.on('exit', code => logger.log({ code }, 'pack archive worker exited')) +process.on('exit', code => logger.debug({ code }, 'pack archive worker exited')) const processUpdates = pending => async.eachSeries( @@ -110,9 +110,9 @@ const processUpdates = pending => let _id ;({ _id, project_id, doc_id } = result) COUNT++ - logger.log({ project_id, doc_id }, `processing ${COUNT}/${TOTAL}`) + logger.debug({ project_id, doc_id }, `processing ${COUNT}/${TOTAL}`) if (project_id == null || doc_id == null) { - logger.log( + logger.debug( { project_id, doc_id }, 'skipping pack, missing project/doc id' ) @@ -164,7 +164,7 @@ const ObjectIdFromDate = function (date) { waitForDb() .then(() => { if (pending != null) { - logger.log(`got ${pending.length} entries from ${source}`) + logger.debug(`got ${pending.length} entries from ${source}`) processUpdates(pending) } else { processFromOneWeekAgo() @@ -194,13 +194,13 @@ function processFromOneWeekAgo() { .limit(LIMIT) .toArray(function (err, results) { if (err != null) { - logger.log({ err }, 'error checking for updates') + logger.debug({ err }, 'error checking for updates') finish() return } pending = _.uniq(results, false, result => result.doc_id.toString()) TOTAL = pending.length - logger.log(`found ${TOTAL} documents to archive`) + logger.debug(`found ${TOTAL} documents to archive`) return processUpdates(pending) }) } diff --git 
a/services/track-changes/app/js/RestoreManager.js b/services/track-changes/app/js/RestoreManager.js
index 2556b53afc..07d46712e4 100644
--- a/services/track-changes/app/js/RestoreManager.js
+++ b/services/track-changes/app/js/RestoreManager.js
@@ -20,7 +20,7 @@ module.exports = RestoreManager = {
     if (callback == null) {
       callback = function () {}
     }
-    logger.log({ project_id, doc_id, version, user_id }, 'restoring document')
+    logger.debug({ project_id, doc_id, version, user_id }, 'restoring document')
     return DiffManager.getDocumentBeforeVersion(
       project_id,
       doc_id,
diff --git a/services/track-changes/app/js/UpdateTrimmer.js b/services/track-changes/app/js/UpdateTrimmer.js
index 296c62d382..5481b5f487 100644
--- a/services/track-changes/app/js/UpdateTrimmer.js
+++ b/services/track-changes/app/js/UpdateTrimmer.js
@@ -36,7 +36,7 @@ module.exports = UpdateTrimmer = {
         if (error != null) {
           return callback(error)
         }
-        logger.log({ project_id, details }, 'got details')
+        logger.debug({ project_id, details }, 'got details')
         if (
           __guard__(
             details != null ? details.features : undefined,
diff --git a/services/track-changes/app/js/UpdatesManager.js b/services/track-changes/app/js/UpdatesManager.js
index ca52e34773..58962108cf 100644
--- a/services/track-changes/app/js/UpdatesManager.js
+++ b/services/track-changes/app/js/UpdatesManager.js
@@ -182,7 +182,7 @@ module.exports = UpdatesManager = {
           return callback(error)
         }
         if (result != null) {
-          logger.log(
+          logger.debug(
             {
               project_id,
               doc_id,
@@ -257,7 +257,7 @@ module.exports = UpdatesManager = {
           )
           return callback(error)
         }
-        logger.log(
+        logger.debug(
           { project_id, doc_id, rawUpdates },
           'retrieved raw updates from redis'
         )
@@ -270,7 +270,7 @@ module.exports = UpdatesManager = {
             if (error != null) {
               return callback(error)
             }
-            logger.log(
+            logger.debug(
               { project_id, doc_id },
               'compressed and saved doc updates'
             )
@@ -285,7 +285,7 @@ module.exports = UpdatesManager = {
             }
             if (length === UpdatesManager.REDIS_READ_BATCH_SIZE) {
               // There might be more updates
-              logger.log(
+              logger.debug(
                 { project_id, doc_id },
                 'continuing processing updates'
               )
@@ -300,7 +300,7 @@ module.exports = UpdatesManager = {
                 0
               )
             } else {
-              logger.log(
+              logger.debug(
                 { project_id, doc_id },
                 'all raw updates processed'
               )
@@ -416,7 +416,7 @@ module.exports = UpdatesManager = {
       if (error != null) {
         return callback(error)
       }
-      logger.log(
+      logger.debug(
         {
           count: project_ids != null ?
project_ids.length : undefined, project_ids, @@ -497,7 +497,7 @@ module.exports = UpdatesManager = { return task(function (error, project_doc_ids) { if (error) return callback(error) const dangling_doc_ids = _.difference(all_doc_ids, project_doc_ids) - logger.log( + logger.debug( { all_doc_ids, all_project_ids, project_doc_ids, dangling_doc_ids }, 'checking for dangling doc ids' ) diff --git a/services/track-changes/app/js/WebApiManager.js b/services/track-changes/app/js/WebApiManager.js index fda1ad2cb4..598c563ba3 100644 --- a/services/track-changes/app/js/WebApiManager.js +++ b/services/track-changes/app/js/WebApiManager.js @@ -43,7 +43,7 @@ module.exports = WebApiManager = { return callback(error) } if (res.statusCode === 404) { - logger.log({ url }, 'got 404 from web api') + logger.debug({ url }, 'got 404 from web api') return callback(null, null) } if (res.statusCode >= 200 && res.statusCode < 300) { @@ -63,7 +63,7 @@ module.exports = WebApiManager = { callback = function () {} } const url = `/user/${user_id}/personal_info` - logger.log({ user_id }, 'getting user info from web') + logger.debug({ user_id }, 'getting user info from web') return WebApiManager.sendRequest(url, function (error, body) { let user if (error != null) { @@ -95,7 +95,7 @@ module.exports = WebApiManager = { callback = function () {} } const url = `/project/${project_id}/details` - logger.log({ project_id }, 'getting project details from web') + logger.debug({ project_id }, 'getting project details from web') return WebApiManager.sendRequest(url, function (error, body) { let project if (error != null) { diff --git a/services/track-changes/app/js/ZipManager.js b/services/track-changes/app/js/ZipManager.js index b302cede97..26d99e6025 100644 --- a/services/track-changes/app/js/ZipManager.js +++ b/services/track-changes/app/js/ZipManager.js @@ -33,7 +33,7 @@ async function getLatestContent(projectId, docId, lastUpdateVersion) { } async function rewindDoc(projectId, docId, zipfile) { - logger.log({ projectId, docId }, 'rewinding document') + logger.debug({ projectId, docId }, 'rewinding document') // Prepare to rewind content diff --git a/services/web/app.js b/services/web/app.js index 5975e0b04f..6fee4dfc9b 100644 --- a/services/web/app.js +++ b/services/web/app.js @@ -60,8 +60,10 @@ if (!module.parent) { Promise.all([mongodb.waitForDb(), mongoose.connectionPromise]) .then(() => { Server.server.listen(port, host, function () { - logger.info(`web starting up, listening on ${host}:${port}`) - logger.info(`${require('http').globalAgent.maxSockets} sockets enabled`) + logger.debug(`web starting up, listening on ${host}:${port}`) + logger.debug( + `${require('http').globalAgent.maxSockets} sockets enabled` + ) // wait until the process is ready before monitoring the event loop metrics.event_loop.monitor(logger) }) diff --git a/services/web/app/src/Features/Analytics/AnalyticsManager.js b/services/web/app/src/Features/Analytics/AnalyticsManager.js index c4beeb6f23..56d4e0dfaa 100644 --- a/services/web/app/src/Features/Analytics/AnalyticsManager.js +++ b/services/web/app/src/Features/Analytics/AnalyticsManager.js @@ -65,7 +65,7 @@ function recordEventForSession(session, event, segmentation) { if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) { return } - logger.info({ + logger.debug({ analyticsId, userId, event, diff --git a/services/web/app/src/Features/Authentication/AuthenticationController.js b/services/web/app/src/Features/Authentication/AuthenticationController.js index 9bf34345a7..fecaf35299 100644 --- 
a/services/web/app/src/Features/Authentication/AuthenticationController.js +++ b/services/web/app/src/Features/Authentication/AuthenticationController.js @@ -190,7 +190,7 @@ const AuthenticationController = { return done(err) } if (!isAllowed) { - logger.log({ email }, 'too many login requests') + logger.debug({ email }, 'too many login requests') return done(null, null, { text: req.i18n.translate('to_many_login_requests_2_mins'), type: 'error', @@ -222,7 +222,7 @@ const AuthenticationController = { done(null, user) } else { AuthenticationController._recordFailedLogin() - logger.log({ email }, 'failed log in') + logger.debug({ email }, 'failed log in') done(null, false, { text: req.i18n.translate('email_or_password_wrong_try_again'), type: 'error', @@ -372,7 +372,7 @@ const AuthenticationController = { } else if (SessionManager.isUserLoggedIn(req.session)) { next() } else { - logger.log( + logger.debug( { url: req.url }, 'user trying to access endpoint not in global whitelist' ) @@ -475,7 +475,7 @@ const AuthenticationController = { }, _redirectToLoginPage(req, res) { - logger.log( + logger.debug( { url: req.url }, 'user not logged in so redirecting to login page' ) @@ -486,7 +486,7 @@ const AuthenticationController = { }, _redirectToReconfirmPage(req, res, user) { - logger.log( + logger.debug( { url: req.url }, 'user needs to reconfirm so redirecting to reconfirm page' ) @@ -496,7 +496,7 @@ const AuthenticationController = { }, _redirectToRegisterPage(req, res) { - logger.log( + logger.debug( { url: req.url }, 'user not logged in so redirecting to register page' ) @@ -617,7 +617,7 @@ function _loginAsyncHandlers(req, user, anonymousAnalyticsId, isNewUser) { }) Analytics.identifyUser(user._id, anonymousAnalyticsId, isNewUser) - logger.log( + logger.debug( { email: user.email, user_id: user._id.toString() }, 'successful log in' ) diff --git a/services/web/app/src/Features/Authorization/AuthorizationMiddleware.js b/services/web/app/src/Features/Authorization/AuthorizationMiddleware.js index f43fc493a4..d4f639224b 100644 --- a/services/web/app/src/Features/Authorization/AuthorizationMiddleware.js +++ b/services/web/app/src/Features/Authorization/AuthorizationMiddleware.js @@ -64,10 +64,10 @@ async function ensureUserCanReadProject(req, res, next) { token ) if (canRead) { - logger.log({ userId, projectId }, 'allowing user read access to project') + logger.debug({ userId, projectId }, 'allowing user read access to project') return next() } - logger.log({ userId, projectId }, 'denying user read access to project') + logger.debug({ userId, projectId }, 'denying user read access to project') HttpErrorHandler.forbidden(req, res) } @@ -114,13 +114,13 @@ async function ensureUserCanWriteProjectContent(req, res, next) { token ) if (canWrite) { - logger.log( + logger.debug( { userId, projectId }, 'allowing user write access to project content' ) return next() } - logger.log( + logger.debug( { userId, projectId }, 'denying user write access to project settings' ) @@ -137,21 +137,21 @@ async function ensureUserCanAdminProject(req, res, next) { token ) if (canAdmin) { - logger.log({ userId, projectId }, 'allowing user admin access to project') + logger.debug({ userId, projectId }, 'allowing user admin access to project') return next() } - logger.log({ userId, projectId }, 'denying user admin access to project') + logger.debug({ userId, projectId }, 'denying user admin access to project') HttpErrorHandler.forbidden(req, res) } async function ensureUserIsSiteAdmin(req, res, next) { const userId = 
_getUserId(req)
   if (await AuthorizationManager.promises.isUserSiteAdmin(userId)) {
-    logger.log({ userId }, 'allowing user admin access to site')
+    logger.debug({ userId }, 'allowing user admin access to site')
     return next()
   }
   if (handleAdminDomainRedirect(req, res)) return
-  logger.log({ userId }, 'denying user admin access to site')
+  logger.debug({ userId }, 'denying user admin access to site')
   _redirectToRestricted(req, res, next)
 }
@@ -184,7 +184,7 @@ function restricted(req, res, next) {
     return res.render('user/restricted', { title: 'restricted' })
   }
   const { from } = req.query
-  logger.log({ from }, 'redirecting to login')
+  logger.debug({ from }, 'redirecting to login')
   if (from) {
     AuthenticationController.setRedirectInSession(req, from)
   }
diff --git a/services/web/app/src/Features/BetaProgram/BetaProgramController.js b/services/web/app/src/Features/BetaProgram/BetaProgramController.js
index 9c29b87578..64d9dd7070 100644
--- a/services/web/app/src/Features/BetaProgram/BetaProgramController.js
+++ b/services/web/app/src/Features/BetaProgram/BetaProgramController.js
@@ -8,7 +8,7 @@ const SessionManager = require('../Authentication/SessionManager')
 const BetaProgramController = {
   optIn(req, res, next) {
     const userId = SessionManager.getLoggedInUserId(req.session)
-    logger.log({ userId }, 'user opting in to beta program')
+    logger.debug({ userId }, 'user opting in to beta program')
     if (userId == null) {
       return next(new Error('no user id in session'))
     }
@@ -22,7 +22,7 @@ const BetaProgramController = {

   optOut(req, res, next) {
     const userId = SessionManager.getLoggedInUserId(req.session)
-    logger.log({ userId }, 'user opting out of beta program')
+    logger.debug({ userId }, 'user opting out of beta program')
     if (userId == null) {
       return next(new Error('no user id in session'))
     }
@@ -36,7 +36,10 @@ const BetaProgramController = {

   optInPage(req, res, next) {
     const userId = SessionManager.getLoggedInUserId(req.session)
-    logger.log({ user_id: userId }, 'showing beta participation page for user')
+    logger.debug(
+      { user_id: userId },
+      'showing beta participation page for user'
+    )
     UserGetter.getUser(userId, function (err, user) {
       if (err) {
         OError.tag(err, 'error fetching user', {
diff --git a/services/web/app/src/Features/BrandVariations/BrandVariationsHandler.js b/services/web/app/src/Features/BrandVariations/BrandVariationsHandler.js
index f8ab1836b0..a3c3b452e4 100644
--- a/services/web/app/src/Features/BrandVariations/BrandVariationsHandler.js
+++ b/services/web/app/src/Features/BrandVariations/BrandVariationsHandler.js
@@ -13,7 +13,7 @@ function getBrandVariationById(brandVariationId, callback) {
   if (brandVariationId == null || brandVariationId === '') {
     return callback(new Error('Branding variation id not provided'))
   }
-  logger.log({ brandVariationId }, 'fetching brand variation details from v1')
+  logger.debug({ brandVariationId }, 'fetching brand variation details from v1')
   V1Api.request(
     {
       uri: `/api/v2/brand_variations/${brandVariationId}`,
diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsController.js b/services/web/app/src/Features/Collaborators/CollaboratorsController.js
index 330dae2ae6..69a6583693 100644
--- a/services/web/app/src/Features/Collaborators/CollaboratorsController.js
+++ b/services/web/app/src/Features/Collaborators/CollaboratorsController.js
@@ -39,7 +39,7 @@ async function removeSelfFromProject(req, res, next) {

 async function getAllMembers(req, res, next) {
   const projectId = req.params.Project_id
-  logger.log({ projectId }, 'getting all
active members for project') + logger.debug({ projectId }, 'getting all active members for project') let members try { members = await CollaboratorsGetter.promises.getAllInvitedMembers(projectId) diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js b/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js index d8d0d1b6e1..446d6878d9 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js +++ b/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js @@ -110,10 +110,13 @@ async function addUserIdToProject( } if (privilegeLevel === PrivilegeLevels.READ_AND_WRITE) { level = { collaberator_refs: userId } - logger.log({ privileges: 'readAndWrite', userId, projectId }, 'adding user') + logger.debug( + { privileges: 'readAndWrite', userId, projectId }, + 'adding user' + ) } else if (privilegeLevel === PrivilegeLevels.READ_ONLY) { level = { readOnly_refs: userId } - logger.log({ privileges: 'readOnly', userId, projectId }, 'adding user') + logger.debug({ privileges: 'readOnly', userId, projectId }, 'adding user') } else { throw new Error(`unknown privilegeLevel: ${privilegeLevel}`) } @@ -146,7 +149,7 @@ async function transferProjects(fromUserId, toUserId) { { _id: 1 } ).exec() const projectIds = projects.map(p => p._id) - logger.log({ projectIds, fromUserId, toUserId }, 'transferring projects') + logger.debug({ projectIds, fromUserId, toUserId }, 'transferring projects') await Project.updateMany( { owner_ref: fromUserId }, diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.js b/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.js index 3a27f652e3..04bd4fb482 100644 --- a/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.js +++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.js @@ -30,7 +30,7 @@ const rateLimiter = require('../../infrastructure/RateLimiter') module.exports = CollaboratorsInviteController = { getAllInvites(req, res, next) { const projectId = req.params.Project_id - logger.log({ projectId }, 'getting all active invites for project') + logger.debug({ projectId }, 'getting all active invites for project') return CollaboratorsInviteHandler.getAllInvites( projectId, function (err, invites) { @@ -50,7 +50,7 @@ module.exports = CollaboratorsInviteController = { callback = function () {} } if (Settings.restrictInvitesToExistingAccounts === true) { - logger.log({ email }, 'checking if user exists with this email') + logger.debug({ email }, 'checking if user exists with this email') return UserGetter.getUserByAnyEmail( email, { _id: 1 }, @@ -102,13 +102,13 @@ module.exports = CollaboratorsInviteController = { const sendingUser = SessionManager.getSessionUser(req.session) const sendingUserId = sendingUser._id if (email === sendingUser.email) { - logger.log( + logger.debug( { projectId, email, sendingUserId }, 'cannot invite yourself to project' ) return res.json({ invite: null, error: 'cannot_invite_self' }) } - logger.log({ projectId, email, sendingUserId }, 'inviting to project') + logger.debug({ projectId, email, sendingUserId }, 'inviting to project') return LimitationsManager.canAddXCollaborators( projectId, 1, @@ -118,7 +118,7 @@ module.exports = CollaboratorsInviteController = { return next(error) } if (!allowed) { - logger.log( + logger.debug( { projectId, email, sendingUserId }, 'not allowed to invite more users to project' ) @@ -127,7 +127,7 @@ module.exports = CollaboratorsInviteController = { 
;({ email, privileges } = req.body) email = EmailHelper.parseEmail(email) if (email == null || email === '') { - logger.log( + logger.debug( { projectId, email, sendingUserId }, 'invalid email address' ) @@ -158,7 +158,7 @@ module.exports = CollaboratorsInviteController = { return next(err) } if (!shouldAllowInvite) { - logger.log( + logger.debug( { email, projectId, sendingUserId }, 'not allowed to send an invite to this email address' ) @@ -181,7 +181,7 @@ module.exports = CollaboratorsInviteController = { }) return next(err) } - logger.log( + logger.debug( { projectId, email, sendingUserId }, 'invite created' ) @@ -204,7 +204,7 @@ module.exports = CollaboratorsInviteController = { revokeInvite(req, res, next) { const projectId = req.params.Project_id const inviteId = req.params.invite_id - logger.log({ projectId, inviteId }, 'revoking invite') + logger.debug({ projectId, inviteId }, 'revoking invite') return CollaboratorsInviteHandler.revokeInvite( projectId, inviteId, @@ -229,7 +229,7 @@ module.exports = CollaboratorsInviteController = { resendInvite(req, res, next) { const projectId = req.params.Project_id const inviteId = req.params.invite_id - logger.log({ projectId, inviteId }, 'resending invite') + logger.debug({ projectId, inviteId }, 'resending invite') const sendingUser = SessionManager.getSessionUser(req.session) return CollaboratorsInviteController._checkRateLimit( sendingUser._id, @@ -263,7 +263,7 @@ module.exports = CollaboratorsInviteController = { const projectId = req.params.Project_id const { token } = req.params const _renderInvalidPage = function () { - logger.log({ projectId }, 'invite not valid, rendering not-valid page') + logger.debug({ projectId }, 'invite not valid, rendering not-valid page') return res.render('project/invite/not-valid', { title: 'Invalid Invite' }) } // check if the user is already a member of the project @@ -279,7 +279,7 @@ module.exports = CollaboratorsInviteController = { return next(err) } if (isMember) { - logger.log( + logger.debug( { projectId, userId: currentUser._id }, 'user is already a member of this project, redirecting' ) @@ -298,7 +298,7 @@ module.exports = CollaboratorsInviteController = { } // check if invite is gone, or otherwise non-existent if (invite == null) { - logger.log({ projectId }, 'no invite found for this token') + logger.debug({ projectId }, 'no invite found for this token') return _renderInvalidPage() } // check the user who sent the invite exists @@ -313,7 +313,7 @@ module.exports = CollaboratorsInviteController = { return next(err) } if (owner == null) { - logger.log({ projectId }, 'no project owner found') + logger.debug({ projectId }, 'no project owner found') return _renderInvalidPage() } // fetch the project name @@ -328,7 +328,7 @@ module.exports = CollaboratorsInviteController = { return next(err) } if (project == null) { - logger.log({ projectId }, 'no project found') + logger.debug({ projectId }, 'no project found') return _renderInvalidPage() } // finally render the invite @@ -352,7 +352,7 @@ module.exports = CollaboratorsInviteController = { const projectId = req.params.Project_id const { token } = req.params const currentUser = SessionManager.getSessionUser(req.session) - logger.log( + logger.debug( { projectId, userId: currentUser._id }, 'got request to accept invite' ) diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.js b/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.js index d117a3891e..42eecb9135 100644 --- 
a/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.js +++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.js @@ -28,7 +28,7 @@ const CollaboratorsInviteHandler = { if (callback == null) { callback = function () {} } - logger.log({ projectId }, 'fetching invites for project') + logger.debug({ projectId }, 'fetching invites for project') return ProjectInvite.find({ projectId }, function (err, invites) { if (err != null) { OError.tag(err, 'error getting invites from mongo', { @@ -36,7 +36,7 @@ const CollaboratorsInviteHandler = { }) return callback(err) } - logger.log( + logger.debug( { projectId, count: invites.length }, 'found invites for project' ) @@ -48,7 +48,7 @@ const CollaboratorsInviteHandler = { if (callback == null) { callback = function () {} } - logger.log({ projectId }, 'counting invites for project') + logger.debug({ projectId }, 'counting invites for project') return ProjectInvite.countDocuments({ projectId }, function (err, count) { if (err != null) { OError.tag(err, 'error getting invites from mongo', { @@ -77,7 +77,10 @@ const CollaboratorsInviteHandler = { return callback(err) } if (existingUser == null) { - logger.log({ projectId, email }, 'no existing user found, returning') + logger.debug( + { projectId, email }, + 'no existing user found, returning' + ) return callback(null) } return ProjectGetter.getProject( @@ -92,7 +95,7 @@ const CollaboratorsInviteHandler = { return callback(err) } if (project == null) { - logger.log( + logger.debug( { projectId }, 'no project found while sending notification, returning' ) @@ -126,7 +129,7 @@ const CollaboratorsInviteHandler = { if (callback == null) { callback = function () {} } - logger.log( + logger.debug( { projectId, inviteId: invite._id }, 'sending notification and email for invite' ) @@ -158,7 +161,7 @@ const CollaboratorsInviteHandler = { if (callback == null) { callback = function () {} } - logger.log( + logger.debug( { projectId, sendingUserId: sendingUser._id, email, privileges }, 'adding invite' ) @@ -211,7 +214,7 @@ const CollaboratorsInviteHandler = { if (callback == null) { callback = function () {} } - logger.log({ projectId, inviteId }, 'removing invite') + logger.debug({ projectId, inviteId }, 'removing invite') return ProjectInvite.deleteOne( { projectId, _id: inviteId }, function (err) { @@ -235,7 +238,7 @@ const CollaboratorsInviteHandler = { if (callback == null) { callback = function () {} } - logger.log({ projectId, inviteId }, 'resending invite email') + logger.debug({ projectId, inviteId }, 'resending invite email') return ProjectInvite.findOne( { _id: inviteId, projectId }, function (err, invite) { @@ -276,7 +279,7 @@ const CollaboratorsInviteHandler = { if (callback == null) { callback = function () {} } - logger.log({ projectId }, 'fetching invite by token') + logger.debug({ projectId }, 'fetching invite by token') return ProjectInvite.findOne( { projectId, token: tokenString }, function (err, invite) { @@ -299,7 +302,7 @@ const CollaboratorsInviteHandler = { if (callback == null) { callback = function () {} } - logger.log({ projectId, userId: user._id }, 'accepting invite') + logger.debug({ projectId, userId: user._id }, 'accepting invite') return CollaboratorsInviteHandler.getInviteByToken( projectId, tokenString, @@ -313,7 +316,7 @@ const CollaboratorsInviteHandler = { } if (!invite) { err = new Errors.NotFoundError('no matching invite found') - logger.log({ err, projectId }, 'no matching invite found') + logger.debug({ err, projectId }, 'no matching 
invite found') return callback(err) } const inviteId = invite._id @@ -332,7 +335,7 @@ const CollaboratorsInviteHandler = { return callback(err) } // Remove invite - logger.log({ projectId, inviteId }, 'removing invite') + logger.debug({ projectId, inviteId }, 'removing invite') return ProjectInvite.deleteOne({ _id: inviteId }, function (err) { if (err != null) { OError.tag(err, 'error removing invite', { diff --git a/services/web/app/src/Features/Compile/ClsiManager.js b/services/web/app/src/Features/Compile/ClsiManager.js index e8c8923cec..c9a73e9eae 100644 --- a/services/web/app/src/Features/Compile/ClsiManager.js +++ b/services/web/app/src/Features/Compile/ClsiManager.js @@ -220,7 +220,7 @@ const ClsiManager = { ) } if (validationProblems != null) { - logger.log( + logger.debug( { projectId, validationProblems }, 'problems with users latex before compile was attempted' ) @@ -395,7 +395,7 @@ const ClsiManager = { const currentCompileTime = results.currentBackend.finishTime const newBackendCompileTime = results.newBackend.finishTime || 0 const timeDifference = newBackendCompileTime - currentCompileTime - logger.log( + logger.debug( { statusCodeSame, timeDifference, diff --git a/services/web/app/src/Features/Compile/CompileController.js b/services/web/app/src/Features/Compile/CompileController.js index 451cc52cb8..11d2475138 100644 --- a/services/web/app/src/Features/Compile/CompileController.js +++ b/services/web/app/src/Features/Compile/CompileController.js @@ -229,7 +229,7 @@ module.exports = CompileController = { logger.err({ err }, 'error checking rate limit for pdf download') return res.sendStatus(500) } else if (!canContinue) { - logger.log( + logger.debug( { project_id, ip: req.ip }, 'rate limit hit downloading pdf' ) diff --git a/services/web/app/src/Features/Cooldown/CooldownManager.js b/services/web/app/src/Features/Cooldown/CooldownManager.js index 70cb3b4b1a..336f9fad81 100644 --- a/services/web/app/src/Features/Cooldown/CooldownManager.js +++ b/services/web/app/src/Features/Cooldown/CooldownManager.js @@ -26,7 +26,7 @@ module.exports = CooldownManager = { if (callback == null) { callback = function () {} } - logger.log( + logger.debug( { projectId }, `[Cooldown] putting project on cooldown for ${COOLDOWN_IN_SECONDS} seconds` ) diff --git a/services/web/app/src/Features/Cooldown/CooldownMiddleware.js b/services/web/app/src/Features/Cooldown/CooldownMiddleware.js index 8d1b456cb4..d9cef35155 100644 --- a/services/web/app/src/Features/Cooldown/CooldownMiddleware.js +++ b/services/web/app/src/Features/Cooldown/CooldownMiddleware.js @@ -27,7 +27,7 @@ module.exports = CooldownMiddleware = { return next(err) } if (projectIsOnCooldown) { - logger.log( + logger.debug( { projectId }, '[Cooldown] project is on cooldown, denying request' ) diff --git a/services/web/app/src/Features/Docstore/DocstoreManager.js b/services/web/app/src/Features/Docstore/DocstoreManager.js index 05f22783cb..6490530971 100644 --- a/services/web/app/src/Features/Docstore/DocstoreManager.js +++ b/services/web/app/src/Features/Docstore/DocstoreManager.js @@ -130,7 +130,7 @@ function getDoc(projectId, docId, options, callback) { return callback(error) } if (res.statusCode >= 200 && res.statusCode < 300) { - logger.log( + logger.debug( { docId, projectId, version: doc.version, rev: doc.rev }, 'got doc from docstore api' ) @@ -199,7 +199,10 @@ function updateDoc(projectId, docId, lines, version, ranges, callback) { return callback(error) } if (res.statusCode >= 200 && res.statusCode < 300) { - logger.log({ 
projectId, docId }, 'update doc in docstore url finished') + logger.debug( + { projectId, docId }, + 'update doc in docstore url finished' + ) callback(null, result.modified, result.rev) } else { error = new OError( @@ -226,7 +229,7 @@ function destroyProject(projectId, callback) { function _operateOnProject(projectId, method, callback) { const url = `${settings.apis.docstore.url}/project/${projectId}/${method}` - logger.log({ projectId }, `calling ${method} for project in docstore`) + logger.debug({ projectId }, `calling ${method} for project in docstore`) // use default timeout for archiving/unarchiving/destroying request.post(url, (err, res, docs) => { if (err) { diff --git a/services/web/app/src/Features/Documents/DocumentController.js b/services/web/app/src/Features/Documents/DocumentController.js index 069f139f91..9624bea4fd 100644 --- a/services/web/app/src/Features/Documents/DocumentController.js +++ b/services/web/app/src/Features/Documents/DocumentController.js @@ -102,7 +102,7 @@ function setDocument(req, res, next) { }) return next(error) } - logger.log( + logger.debug( { docId, projectId }, 'finished receiving set document request from api (docupdater)' ) diff --git a/services/web/app/src/Features/Downloads/ProjectZipStreamManager.js b/services/web/app/src/Features/Downloads/ProjectZipStreamManager.js index 735b57dacb..a5e57a4f9c 100644 --- a/services/web/app/src/Features/Downloads/ProjectZipStreamManager.js +++ b/services/web/app/src/Features/Downloads/ProjectZipStreamManager.js @@ -24,13 +24,13 @@ module.exports = ProjectZipStreamManager = { return cb(error) } if (!project) { - logger.log( + logger.debug( { projectId }, 'cannot append project to zip stream: project not found' ) return cb() } - logger.log( + logger.debug( { projectId, name: project.name }, 'appending project to zip stream' ) @@ -42,7 +42,10 @@ module.exports = ProjectZipStreamManager = { } archive.append(stream, { name: `${project.name}.zip` }) stream.on('end', () => { - logger.log({ projectId, name: project.name }, 'zip stream ended') + logger.debug( + { projectId, name: project.name }, + 'zip stream ended' + ) cb() }) } @@ -51,7 +54,7 @@ module.exports = ProjectZipStreamManager = { }) async.series(jobs, () => { - logger.log( + logger.debug( { projectIds }, 'finished creating zip stream of multiple projects' ) @@ -97,7 +100,7 @@ module.exports = ProjectZipStreamManager = { if (path[0] === '/') { path = path.slice(1) } - logger.log({ projectId }, 'Adding doc') + logger.debug({ projectId }, 'Adding doc') archive.append(doc.lines.join('\n'), { name: path }) setImmediate(cb) }) diff --git a/services/web/app/src/Features/Editor/EditorController.js b/services/web/app/src/Features/Editor/EditorController.js index 6fc8959bdd..2598ae7c1b 100644 --- a/services/web/app/src/Features/Editor/EditorController.js +++ b/services/web/app/src/Features/Editor/EditorController.js @@ -356,7 +356,7 @@ const EditorController = { if (callback == null) { callback = function () {} } - logger.log({ project_id, path }, "making directories if they don't exist") + logger.debug({ project_id, path }, "making directories if they don't exist") return ProjectEntityUpdateHandler.mkdirp( project_id, path, @@ -402,7 +402,7 @@ const EditorController = { }) return callback(err) } - logger.log( + logger.debug( { project_id, entity_id, entityType }, 'telling users entity has been deleted' ) @@ -441,7 +441,7 @@ const EditorController = { if (callback == null) { callback = function () {} } - logger.log({ project_id, description }, 'updating project 
description') + logger.debug({ project_id, description }, 'updating project description') return ProjectDetailsHandler.setProjectDescription( project_id, description, diff --git a/services/web/app/src/Features/Email/EmailSender.js b/services/web/app/src/Features/Email/EmailSender.js index 9a9aa5380b..93ee7a5724 100644 --- a/services/web/app/src/Features/Email/EmailSender.js +++ b/services/web/app/src/Features/Email/EmailSender.js @@ -25,14 +25,14 @@ function getClient() { if (EMAIL_SETTINGS.parameters) { const emailParameters = EMAIL_SETTINGS.parameters if (emailParameters.AWSAccessKeyID || EMAIL_SETTINGS.driver === 'ses') { - logger.log('using aws ses for email') + logger.debug('using aws ses for email') client = nodemailer.createTransport(sesTransport(emailParameters)) } else if (emailParameters.sendgridApiKey) { throw new OError( 'sendgridApiKey configuration option is deprecated, use SMTP instead' ) } else if (emailParameters.MandrillApiKey) { - logger.log('using mandril for email') + logger.debug('using mandril for email') client = nodemailer.createTransport( mandrillTransport({ auth: { @@ -41,7 +41,7 @@ function getClient() { }) ) } else { - logger.log('using smtp for email') + logger.debug('using smtp for email') const smtp = _.pick( emailParameters, 'host', @@ -60,7 +60,7 @@ function getClient() { ) client = { async sendMail(options) { - logger.log({ options }, 'Would send email if enabled.') + logger.debug({ options }, 'Would send email if enabled.') }, } } @@ -71,7 +71,7 @@ async function sendEmail(options) { try { const canContinue = await checkCanSendEmail(options) if (!canContinue) { - logger.log( + logger.debug( { sendingUser_id: options.sendingUser_id, to: options.to, diff --git a/services/web/app/src/Features/Exports/ExportsController.js b/services/web/app/src/Features/Exports/ExportsController.js index 01e1a358c0..51899f620c 100644 --- a/services/web/app/src/Features/Exports/ExportsController.js +++ b/services/web/app/src/Features/Exports/ExportsController.js @@ -54,7 +54,7 @@ module.exports = { function (err, export_data) { if (err != null) { if (err.forwardResponse != null) { - logger.log( + logger.debug( { responseError: err.forwardResponse }, 'forwarding response' ) @@ -64,7 +64,7 @@ module.exports = { return next(err) } } - logger.log( + logger.debug( { user_id, project_id, diff --git a/services/web/app/src/Features/FileStore/FileStoreHandler.js b/services/web/app/src/Features/FileStore/FileStoreHandler.js index 85aac4d748..def67f5823 100644 --- a/services/web/app/src/Features/FileStore/FileStoreHandler.js +++ b/services/web/app/src/Features/FileStore/FileStoreHandler.js @@ -30,7 +30,7 @@ const FileStoreHandler = { return callback(new Error('error getting stat, not available')) } if (!stat.isFile()) { - logger.log( + logger.debug( { projectId, fileArgs, fsPath }, 'tried to upload symlink, not continuing' ) @@ -162,7 +162,7 @@ const FileStoreHandler = { }, deleteFile(projectId, fileId, callback) { - logger.log({ projectId, fileId }, 'telling file store to delete file') + logger.debug({ projectId, fileId }, 'telling file store to delete file') const opts = { method: 'delete', uri: this._buildUrl(projectId, fileId), @@ -202,7 +202,7 @@ const FileStoreHandler = { }, copyFile(oldProjectId, oldFileId, newProjectId, newFileId, callback) { - logger.log( + logger.debug( { oldProjectId, oldFileId, newProjectId, newFileId }, 'telling filestore to copy a file' ) diff --git a/services/web/app/src/Features/InactiveData/InactiveProjectManager.js 
b/services/web/app/src/Features/InactiveData/InactiveProjectManager.js index afc808ca9f..6190f831df 100644 --- a/services/web/app/src/Features/InactiveData/InactiveProjectManager.js +++ b/services/web/app/src/Features/InactiveData/InactiveProjectManager.js @@ -34,7 +34,7 @@ module.exports = InactiveProjectManager = { }) return callback(err) } - logger.log( + logger.debug( { project_id, active: project.active }, 'seeing if need to reactivate project' ) @@ -94,7 +94,7 @@ module.exports = InactiveProjectManager = { } ) ) - logger.log( + logger.debug( { numberOfProjects: projects && projects.length }, 'deactivating projects' ) @@ -108,7 +108,7 @@ module.exports = InactiveProjectManager = { }, deactivateProject(project_id, callback) { - logger.log({ project_id }, 'deactivating inactive project') + logger.debug({ project_id }, 'deactivating inactive project') const jobs = [ cb => DocstoreManager.archiveProject(project_id, cb), cb => ProjectUpdateHandler.markAsInactive(project_id, cb), diff --git a/services/web/app/src/Features/Metadata/MetaController.js b/services/web/app/src/Features/Metadata/MetaController.js index 44173b461c..6697af93df 100644 --- a/services/web/app/src/Features/Metadata/MetaController.js +++ b/services/web/app/src/Features/Metadata/MetaController.js @@ -20,7 +20,7 @@ const logger = require('@overleaf/logger') module.exports = MetaController = { getMetadata(req, res, next) { const { project_id } = req.params - logger.log({ project_id }, 'getting all labels for project') + logger.debug({ project_id }, 'getting all labels for project') return MetaHandler.getAllMetaForProject( project_id, function (err, projectMeta) { @@ -43,7 +43,7 @@ module.exports = MetaController = { const { project_id } = req.params const { doc_id } = req.params const { broadcast } = req.body - logger.log({ project_id, doc_id, broadcast }, 'getting labels for doc') + logger.debug({ project_id, doc_id, broadcast }, 'getting labels for doc') return MetaHandler.getMetaForDoc( project_id, doc_id, diff --git a/services/web/app/src/Features/Newsletter/NewsletterManager.js b/services/web/app/src/Features/Newsletter/NewsletterManager.js index 7f05d5b7b4..18ac40dd5f 100644 --- a/services/web/app/src/Features/Newsletter/NewsletterManager.js +++ b/services/web/app/src/Features/Newsletter/NewsletterManager.js @@ -23,10 +23,10 @@ class NonFatalEmailUpdateError extends OError { function getProvider() { if (mailchimpIsConfigured()) { - logger.info('Using newsletter provider: mailchimp') + logger.debug('Using newsletter provider: mailchimp') return makeMailchimpProvider() } else { - logger.info('Using newsletter provider: none') + logger.debug('Using newsletter provider: none') return makeNullProvider() } } @@ -70,7 +70,7 @@ function makeMailchimpProvider() { status_if_new: 'subscribed', merge_fields: getMergeFields(user), }) - logger.info({ user }, 'finished subscribing user to newsletter') + logger.debug({ user }, 'finished subscribing user to newsletter') } catch (err) { throw OError.tag(err, 'error subscribing user to newsletter', { userId: user._id, @@ -89,7 +89,7 @@ function makeMailchimpProvider() { merge_fields: getMergeFields(user), }) } - logger.info( + logger.debug( { user, options }, 'finished unsubscribing user from newsletter' ) @@ -100,7 +100,7 @@ function makeMailchimpProvider() { } if (err.message.includes('looks fake or invalid')) { - logger.info( + logger.debug( { err, user, options }, 'Mailchimp declined to unsubscribe user because it finds the email looks fake' ) @@ -121,7 +121,7 @@ function 
makeMailchimpProvider() { } catch (updateError) { // if we failed to update the user, delete their old email address so that // we don't leave it stuck in mailchimp - logger.info( + logger.debug( { oldEmail, newEmail, updateError }, 'unable to change email in newsletter, removing old mail' ) @@ -161,7 +161,7 @@ function makeMailchimpProvider() { email_address: newEmail, merge_fields: getMergeFields(user), }) - logger.info('finished changing email in the newsletter') + logger.debug('finished changing email in the newsletter') } catch (err) { // silently ignore users who were never subscribed if (err.status === 404) { @@ -173,7 +173,7 @@ function makeMailchimpProvider() { if (err.message.includes(key)) { const message = `unable to change email in newsletter, ${errors[key]}` - logger.info({ oldEmail, newEmail }, message) + logger.debug({ oldEmail, newEmail }, message) throw new NonFatalEmailUpdateError( message, @@ -218,7 +218,7 @@ function makeNullProvider() { } async function subscribed(user) { - logger.info( + logger.debug( { user }, 'Not checking user because no newsletter provider is configured' ) @@ -226,21 +226,21 @@ function makeNullProvider() { } async function subscribe(user) { - logger.info( + logger.debug( { user }, 'Not subscribing user to newsletter because no newsletter provider is configured' ) } async function unsubscribe(user) { - logger.info( + logger.debug( { user }, 'Not unsubscribing user from newsletter because no newsletter provider is configured' ) } async function changeEmail(oldEmail, newEmail) { - logger.info( + logger.debug( { oldEmail, newEmail }, 'Not changing email in newsletter for user because no newsletter provider is configured' ) diff --git a/services/web/app/src/Features/Project/ProjectController.js b/services/web/app/src/Features/Project/ProjectController.js index b4bae7d2ec..5f5402f9c6 100644 --- a/services/web/app/src/Features/Project/ProjectController.js +++ b/services/web/app/src/Features/Project/ProjectController.js @@ -247,7 +247,7 @@ const ProjectController = { metrics.inc('cloned-project') const projectId = req.params.Project_id const { projectName } = req.body - logger.log({ projectId, projectName }, 'cloning project') + logger.debug({ projectId, projectName }, 'cloning project') if (!SessionManager.isUserLoggedIn(req.session)) { return res.json({ redir: '/register' }) } @@ -743,7 +743,7 @@ const ProjectController = { if (err) { return cb(err) } - logger.log({ projectId, userId }, 'got user') + logger.debug({ projectId, userId }, 'got user') if (FeaturesUpdater.featuresEpochIsCurrent(user)) { return cb(null, user) } diff --git a/services/web/app/src/Features/Project/ProjectDeleter.js b/services/web/app/src/Features/Project/ProjectDeleter.js index fd290e1ac2..33db60479d 100644 --- a/services/web/app/src/Features/Project/ProjectDeleter.js +++ b/services/web/app/src/Features/Project/ProjectDeleter.js @@ -56,7 +56,7 @@ module.exports = { } async function markAsDeletedByExternalSource(projectId) { - logger.log( + logger.debug( { project_id: projectId }, 'marking project as deleted by external data source' ) @@ -277,7 +277,7 @@ async function deleteProject(projectId, options = {}) { throw err } - logger.log({ project_id: projectId }, 'successfully deleted project') + logger.debug({ project_id: projectId }, 'successfully deleted project') } async function undeleteProject(projectId, options = {}) { diff --git a/services/web/app/src/Features/Project/ProjectDetailsHandler.js b/services/web/app/src/Features/Project/ProjectDetailsHandler.js index 
6abd6a7c68..48de5f01a3 100644 --- a/services/web/app/src/Features/Project/ProjectDetailsHandler.js +++ b/services/web/app/src/Features/Project/ProjectDetailsHandler.js @@ -85,7 +85,7 @@ async function getProjectDescription(projectId) { async function setProjectDescription(projectId, description) { const conditions = { _id: projectId } const update = { description } - logger.log( + logger.debug( { conditions, update, projectId, description }, 'setting project description' ) @@ -99,7 +99,7 @@ async function setProjectDescription(projectId, description) { async function renameProject(projectId, newName) { newName = newName.trim() await validateProjectName(newName) - logger.log({ projectId, newName }, 'renaming project') + logger.debug({ projectId, newName }, 'renaming project') let project try { project = await ProjectGetter.promises.getProject(projectId, { name: true }) diff --git a/services/web/app/src/Features/Project/ProjectEntityUpdateHandler.js b/services/web/app/src/Features/Project/ProjectEntityUpdateHandler.js index 50a41d1a96..5bb7ec5d37 100644 --- a/services/web/app/src/Features/Project/ProjectEntityUpdateHandler.js +++ b/services/web/app/src/Features/Project/ProjectEntityUpdateHandler.js @@ -117,7 +117,7 @@ function getDocContext(projectId, docId, callback) { // 3. web triggers (soft)-delete in docstore // Specifically when an update comes in after 1 // and before 3 completes. - logger.info( + logger.debug( { projectId, docId }, 'updating doc that is in process of getting soft-deleted' ) @@ -174,7 +174,10 @@ const ProjectEntityUpdateHandler = { return callback(err) } const { projectName, isDeletedDoc, path } = ctx - logger.log({ projectId, docId }, 'telling docstore manager to update doc') + logger.debug( + { projectId, docId }, + 'telling docstore manager to update doc' + ) DocstoreManager.updateDoc( projectId, docId, @@ -189,7 +192,7 @@ const ProjectEntityUpdateHandler = { }) return callback(err) } - logger.log( + logger.debug( { projectId, docId, modified }, 'finished updating doc lines' ) @@ -219,7 +222,7 @@ const ProjectEntityUpdateHandler = { }, setRootDoc(projectId, newRootDocID, callback) { - logger.log({ projectId, rootDocId: newRootDocID }, 'setting root doc') + logger.debug({ projectId, rootDocId: newRootDocID }, 'setting root doc') if (projectId == null || newRootDocID == null) { return callback( new Errors.InvalidError('missing arguments (project or doc)') @@ -251,7 +254,7 @@ const ProjectEntityUpdateHandler = { }, unsetRootDoc(projectId, callback) { - logger.log({ projectId }, 'removing root doc') + logger.debug({ projectId }, 'removing root doc') Project.updateOne( { _id: projectId }, { $unset: { rootDoc_id: true } }, @@ -758,7 +761,7 @@ const ProjectEntityUpdateHandler = { if (err != null) { return callback(err) } - logger.log( + logger.debug( { projectId, docId: existingDoc._id }, 'notifying users that the document has been updated' ) @@ -1109,7 +1112,7 @@ const ProjectEntityUpdateHandler = { userId, callback ) { - logger.log({ entityId, entityType, projectId }, 'deleting project entity') + logger.debug({ entityId, entityType, projectId }, 'deleting project entity') if (entityType == null) { logger.warn({ err: 'No entityType set', projectId, entityId }) return callback(new Error('No entityType set')) @@ -1227,7 +1230,7 @@ const ProjectEntityUpdateHandler = { userId, callback ) { - logger.log( + logger.debug( { entityType, entityId, projectId, destFolderId }, 'moving entity' ) @@ -1283,7 +1286,7 @@ const ProjectEntityUpdateHandler = { if 
(!SafePath.isCleanFilename(newName)) { return callback(new Errors.InvalidNameError('invalid element name')) } - logger.log({ entityId, projectId }, `renaming ${entityType}`) + logger.debug({ entityId, projectId }, `renaming ${entityType}`) if (entityType == null) { logger.warn({ err: 'No entityType set', projectId, entityId }) return callback(new Error('No entityType set')) diff --git a/services/web/app/src/Features/References/ReferencesHandler.js b/services/web/app/src/Features/References/ReferencesHandler.js index d8312d9e1b..7b86e36d9a 100644 --- a/services/web/app/src/Features/References/ReferencesHandler.js +++ b/services/web/app/src/Features/References/ReferencesHandler.js @@ -29,7 +29,7 @@ const oneMinInMs = 60 * 1000 const fiveMinsInMs = oneMinInMs * 5 if (!Features.hasFeature('references')) { - logger.log('references search not enabled') + logger.debug('references search not enabled') } module.exports = ReferencesHandler = { @@ -117,7 +117,7 @@ module.exports = ReferencesHandler = { new Errors.NotFoundError(`project does not exist: ${projectId}`) ) } - logger.log({ projectId }, 'indexing all bib files in project') + logger.debug({ projectId }, 'indexing all bib files in project') const docIds = ReferencesHandler._findBibDocIds(project) const fileIds = ReferencesHandler._findBibFileIds(project) return ReferencesHandler._doIndexOperation( @@ -172,7 +172,7 @@ module.exports = ReferencesHandler = { }) return callback(err) } - logger.log( + logger.debug( { projectId, docIds }, 'flushing docs to mongo before calling references service' ) @@ -213,7 +213,7 @@ module.exports = ReferencesHandler = { return callback(err) } if (res.statusCode >= 200 && res.statusCode < 300) { - logger.log({ projectId }, 'got keys from references api') + logger.debug({ projectId }, 'got keys from references api') return callback(null, data) } else { err = new Error( diff --git a/services/web/app/src/Features/ServerAdmin/AdminController.js b/services/web/app/src/Features/ServerAdmin/AdminController.js index 11f09b5313..b42ec2d7f0 100644 --- a/services/web/app/src/Features/ServerAdmin/AdminController.js +++ b/services/web/app/src/Features/ServerAdmin/AdminController.js @@ -127,10 +127,10 @@ const AdminController = { }, writeAllToMongo(req, res) { - logger.log('writing all docs to mongo') + logger.debug('writing all docs to mongo') Settings.mongo.writeAll = true return DocumentUpdaterHandler.flushAllDocsToMongo(function () { - logger.log('all docs have been saved to mongo') + logger.debug('all docs have been saved to mongo') return res.sendStatus(200) }) }, diff --git a/services/web/app/src/Features/Spelling/SpellingController.js b/services/web/app/src/Features/Spelling/SpellingController.js index 22daa50b3f..84efab4f55 100644 --- a/services/web/app/src/Features/Spelling/SpellingController.js +++ b/services/web/app/src/Features/Spelling/SpellingController.js @@ -33,7 +33,7 @@ module.exports = { if (!languageCodeIsSupported(language)) { // this log statement can be changed to 'error' once projects with // unsupported languages are removed from the DB - logger.info({ language }, 'language not supported') + logger.debug({ language }, 'language not supported') return res.status(422).json({ misspellings: [] }) } } diff --git a/services/web/app/src/Features/Subscription/FeaturesUpdater.js b/services/web/app/src/Features/Subscription/FeaturesUpdater.js index 0f418fceb5..91e8ef4e23 100644 --- a/services/web/app/src/Features/Subscription/FeaturesUpdater.js +++ 
b/services/web/app/src/Features/Subscription/FeaturesUpdater.js @@ -39,7 +39,7 @@ async function refreshFeatures(userId, reason) { }) const oldFeatures = _.clone(user.features) const features = await computeFeatures(userId) - logger.log({ userId, features }, 'updating user features') + logger.debug({ userId, features }, 'updating user features') const matchedFeatureSet = FeaturesHelper.getMatchedFeatureSet(features) AnalyticsManager.setUserPropertyForUser( @@ -51,7 +51,7 @@ async function refreshFeatures(userId, reason) { const { features: newFeatures, featuresChanged } = await UserFeaturesUpdater.promises.updateFeatures(userId, features) if (oldFeatures.dropbox === true && features.dropbox === false) { - logger.log({ userId }, '[FeaturesUpdater] must unlink dropbox') + logger.debug({ userId }, '[FeaturesUpdater] must unlink dropbox') const Modules = require('../../infrastructure/Modules') try { await Modules.promises.hooks.fire('removeDropbox', userId, reason) @@ -77,7 +77,7 @@ async function computeFeatures(userId) { const v1Features = await _getV1Features(user) const bonusFeatures = await ReferalFeatures.promises.getBonusFeatures(userId) const featuresOverrides = await _getFeaturesOverrides(user) - logger.log( + logger.debug( { userId, individualFeatures, @@ -161,7 +161,7 @@ function _planCodeToFeatures(planCode) { } async function doSyncFromV1(v1UserId) { - logger.log({ v1UserId }, '[AccountSync] starting account sync') + logger.debug({ v1UserId }, '[AccountSync] starting account sync') const user = await UserGetter.promises.getUser( { 'overleaf.id': v1UserId }, { _id: 1 } @@ -170,7 +170,7 @@ async function doSyncFromV1(v1UserId) { logger.warn({ v1UserId }, '[AccountSync] no user found for v1 id') return } - logger.log( + logger.debug( { v1UserId, userId: user._id }, '[AccountSync] updating user subscription and features' ) diff --git a/services/web/app/src/Features/Subscription/RecurlyClient.js b/services/web/app/src/Features/Subscription/RecurlyClient.js index 5be35ee39d..ab8f7c7db3 100644 --- a/services/web/app/src/Features/Subscription/RecurlyClient.js +++ b/services/web/app/src/Features/Subscription/RecurlyClient.js @@ -36,7 +36,7 @@ async function createAccountForUserId(userId) { lastName: user.last_name, } const account = await client.createAccount(accountCreate) - logger.log({ userId, account }, 'created recurly account') + logger.debug({ userId, account }, 'created recurly account') return account } @@ -46,7 +46,7 @@ async function getSubscription(subscriptionId) { async function changeSubscription(subscriptionId, body) { const change = await client.createSubscriptionChange(subscriptionId, body) - logger.log( + logger.debug( { subscriptionId, changeId: change.id }, 'created subscription change' ) @@ -59,7 +59,7 @@ async function changeSubscriptionByUuid(subscriptionUuid, ...args) { async function removeSubscriptionChange(subscriptionId) { const removed = await client.removeSubscriptionChange(subscriptionId) - logger.log({ subscriptionId }, 'removed pending subscription change') + logger.debug({ subscriptionId }, 'removed pending subscription change') return removed } @@ -79,7 +79,7 @@ async function cancelSubscriptionByUuid(subscriptionUuid) { if ( err.message === 'Only active and future subscriptions can be canceled.' 
) { - logger.log( + logger.debug( { subscriptionUuid }, 'subscription cancellation failed, subscription not active' ) diff --git a/services/web/app/src/Features/Subscription/RecurlyWrapper.js b/services/web/app/src/Features/Subscription/RecurlyWrapper.js index a3c4400b23..ec5f1f405d 100644 --- a/services/web/app/src/Features/Subscription/RecurlyWrapper.js +++ b/services/web/app/src/Features/Subscription/RecurlyWrapper.js @@ -62,7 +62,7 @@ const RecurlyWrapper = { checkAccountExists(cache, next) { const { user } = cache const { subscriptionDetails } = cache - logger.log( + logger.debug( { user_id: user._id }, 'checking if recurly account exists for user' ) @@ -85,14 +85,17 @@ const RecurlyWrapper = { } if (response.statusCode === 404) { // actually not an error in this case, just no existing account - logger.log( + logger.debug( { user_id: user._id }, 'user does not currently exist in recurly, proceed' ) cache.userExists = false return next(null, cache) } - logger.log({ user_id: user._id }, 'user appears to exist in recurly') + logger.debug( + { user_id: user._id }, + 'user appears to exist in recurly' + ) return RecurlyWrapper._parseAccountXml( responseBody, function (err, account) { @@ -176,7 +179,7 @@ const RecurlyWrapper = { const { user } = cache const { recurlyTokenIds } = cache const { subscriptionDetails } = cache - logger.log({ user_id: user._id }, 'creating billing info in recurly') + logger.debug({ user_id: user._id }, 'creating billing info in recurly') const accountCode = __guard__( cache != null ? cache.account : undefined, x1 => x1.account_code @@ -231,7 +234,7 @@ const RecurlyWrapper = { setAddressAndCompanyBillingInfo(cache, next) { const { user } = cache const { subscriptionDetails } = cache - logger.log( + logger.debug( { user_id: user._id }, 'setting billing address and company info in recurly' ) @@ -303,7 +306,7 @@ const RecurlyWrapper = { createSubscription(cache, next) { const { user } = cache const { subscriptionDetails } = cache - logger.log({ user_id: user._id }, 'creating subscription in recurly') + logger.debug({ user_id: user._id }, 'creating subscription in recurly') const data = { plan_code: subscriptionDetails.plan_code, currency: subscriptionDetails.currencyCode, @@ -367,7 +370,7 @@ const RecurlyWrapper = { recurlyTokenIds, callback ) { - logger.log( + logger.debug( { user_id: user._id }, 'starting process of creating paypal subscription' ) @@ -397,7 +400,7 @@ const RecurlyWrapper = { }) return callback(err) } - logger.log( + logger.debug( { user_id: user._id }, 'done creating paypal subscription for user' ) @@ -467,7 +470,7 @@ const RecurlyWrapper = { createSubscription(user, subscriptionDetails, recurlyTokenIds, callback) { const { isPaypal } = subscriptionDetails - logger.log( + logger.debug( { user_id: user._id, isPaypal }, 'setting up subscription in recurly' ) @@ -615,7 +618,7 @@ const RecurlyWrapper = { } const items = data[resource] allItems = allItems.concat(items) - logger.log( + logger.debug( `got another ${items.length}, total now ${allItems.length}` ) cursor = __guard__( @@ -736,7 +739,7 @@ const RecurlyWrapper = { }, updateSubscription(subscriptionId, options, callback) { - logger.log( + logger.debug( { subscriptionId, options }, 'telling recurly to update subscription' ) @@ -796,7 +799,7 @@ const RecurlyWrapper = { ) } - logger.log({ coupon_code, requestBody }, 'creating coupon') + logger.debug({ coupon_code, requestBody }, 'creating coupon') return RecurlyWrapper.apiRequest( { url: 'coupons', @@ -840,7 +843,7 @@ const RecurlyWrapper = { ) } - 
logger.log( + logger.debug( { account_code, coupon_code, requestBody }, 'redeeming coupon for user' ) @@ -868,7 +871,7 @@ const RecurlyWrapper = { } const next_renewal_date = new Date() next_renewal_date.setDate(next_renewal_date.getDate() + daysUntilExpire) - logger.log( + logger.debug( { subscriptionId, daysUntilExpire }, 'Exending Free trial for user' ) diff --git a/services/web/app/src/Features/Subscription/SubscriptionController.js b/services/web/app/src/Features/Subscription/SubscriptionController.js index 8d2b95d968..334d1721bd 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionController.js +++ b/services/web/app/src/Features/Subscription/SubscriptionController.js @@ -285,7 +285,7 @@ function successfulSubscription(req, res, next) { function cancelSubscription(req, res, next) { const user = SessionManager.getSessionUser(req.session) - logger.log({ user_id: user._id }, 'canceling subscription') + logger.debug({ user_id: user._id }, 'canceling subscription') SubscriptionHandler.cancelSubscription(user, function (err) { if (err) { OError.tag(err, 'something went wrong canceling subscription', { @@ -307,7 +307,7 @@ function canceledSubscription(req, res, next) { function cancelV1Subscription(req, res, next) { const userId = SessionManager.getLoggedInUserId(req.session) - logger.log({ userId }, 'canceling v1 subscription') + logger.debug({ userId }, 'canceling v1 subscription') V1SubscriptionManager.cancelV1Subscription(userId, function (err) { if (err) { OError.tag(err, 'something went wrong canceling v1 subscription', { @@ -331,7 +331,7 @@ function updateSubscription(req, res, next) { ) return next(err) } - logger.log({ planCode, user_id: user._id }, 'updating subscription') + logger.debug({ planCode, user_id: user._id }, 'updating subscription') SubscriptionHandler.updateSubscription(user, planCode, null, function (err) { if (err) { OError.tag(err, 'something went wrong updating subscription', { @@ -345,7 +345,7 @@ function updateSubscription(req, res, next) { function cancelPendingSubscriptionChange(req, res, next) { const user = SessionManager.getSessionUser(req.session) - logger.log({ user_id: user._id }, 'canceling pending subscription change') + logger.debug({ user_id: user._id }, 'canceling pending subscription change') SubscriptionHandler.cancelPendingSubscriptionChange(user, function (err) { if (err) { OError.tag( @@ -377,7 +377,7 @@ function updateAccountEmailAddress(req, res, next) { function reactivateSubscription(req, res, next) { const user = SessionManager.getSessionUser(req.session) - logger.log({ user_id: user._id }, 'reactivating subscription') + logger.debug({ user_id: user._id }, 'reactivating subscription') SubscriptionHandler.reactivateSubscription(user, function (err) { if (err) { OError.tag(err, 'something went wrong reactivating subscription', { @@ -390,7 +390,7 @@ function reactivateSubscription(req, res, next) { } function recurlyCallback(req, res, next) { - logger.log({ data: req.body }, 'received recurly callback') + logger.debug({ data: req.body }, 'received recurly callback') const event = Object.keys(req.body)[0] const eventData = req.body[event] @@ -473,7 +473,7 @@ function processUpgradeToAnnualPlan(req, res, next) { const { planName } = req.body const couponCode = Settings.coupon_codes.upgradeToAnnualPromo[planName] const annualPlanName = `${planName}-annual` - logger.log( + logger.debug( { user_id: user._id, planName: annualPlanName }, 'user is upgrading to annual billing with discount' ) diff --git 
a/services/web/app/src/Features/Subscription/SubscriptionGroupController.js b/services/web/app/src/Features/Subscription/SubscriptionGroupController.js index 2724727171..32203c3276 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionGroupController.js +++ b/services/web/app/src/Features/Subscription/SubscriptionGroupController.js @@ -7,7 +7,7 @@ const SessionManager = require('../Authentication/SessionManager') function removeUserFromGroup(req, res, next) { const subscription = req.entity const userToRemoveId = req.params.user_id - logger.log( + logger.debug( { subscriptionId: subscription._id, userToRemove_id: userToRemoveId }, 'removing user from group subscription' ) diff --git a/services/web/app/src/Features/Subscription/SubscriptionLocator.js b/services/web/app/src/Features/Subscription/SubscriptionLocator.js index b731bde829..a6359aac4d 100644 --- a/services/web/app/src/Features/Subscription/SubscriptionLocator.js +++ b/services/web/app/src/Features/Subscription/SubscriptionLocator.js @@ -20,7 +20,7 @@ const SubscriptionLocator = { getUsersSubscription(user_or_id, callback) { const user_id = SubscriptionLocator._getUserId(user_or_id) Subscription.findOne({ admin_id: user_id }, function (err, subscription) { - logger.log({ user_id }, 'got users subscription') + logger.debug({ user_id }, 'got users subscription') callback(err, subscription) }) }, @@ -30,7 +30,7 @@ const SubscriptionLocator = { Subscription.findOne( { admin_id: user_id, groupPlan: false }, function (err, subscription) { - logger.log({ user_id }, 'got users individual subscription') + logger.debug({ user_id }, 'got users individual subscription') callback(err, subscription) } ) diff --git a/services/web/app/src/Features/Subscription/TeamInvitesHandler.js b/services/web/app/src/Features/Subscription/TeamInvitesHandler.js index e4e8d12f11..828085370c 100644 --- a/services/web/app/src/Features/Subscription/TeamInvitesHandler.js +++ b/services/web/app/src/Features/Subscription/TeamInvitesHandler.js @@ -233,7 +233,7 @@ const removeLegacyInvite = (subscriptionId, email, callback) => function checkIfInviteIsPossible(subscription, email, callback) { if (!subscription.groupPlan) { - logger.log( + logger.debug( { subscriptionId: subscription.id }, 'can not add members to a subscription that is not in a group plan' ) @@ -241,7 +241,7 @@ function checkIfInviteIsPossible(subscription, email, callback) { } if (LimitationsManager.teamHasReachedMemberLimit(subscription)) { - logger.log( + logger.debug( { subscriptionId: subscription.id }, 'team has reached member limit' ) @@ -261,7 +261,7 @@ function checkIfInviteIsPossible(subscription, email, callback) { ) if (existingMember) { - logger.log( + logger.debug( { subscriptionId: subscription.id, email }, 'user already in team' ) diff --git a/services/web/app/src/Features/ThirdPartyDataStore/UpdateMerger.js b/services/web/app/src/Features/ThirdPartyDataStore/UpdateMerger.js index b6053282f9..04f1886872 100644 --- a/services/web/app/src/Features/ThirdPartyDataStore/UpdateMerger.js +++ b/services/web/app/src/Features/ThirdPartyDataStore/UpdateMerger.js @@ -212,7 +212,7 @@ module.exports = UpdateMerger = { ) return callback(err) } - logger.log({ docLines }, 'processing doc update from tpds') + logger.debug({ docLines }, 'processing doc update from tpds') return EditorController.upsertDocWithPath( project_id, path, diff --git a/services/web/app/src/Features/TokenAccess/TokenAccessController.js b/services/web/app/src/Features/TokenAccess/TokenAccessController.js index 
8ecd1ba965..1472e78aaf 100644 --- a/services/web/app/src/Features/TokenAccess/TokenAccessController.js +++ b/services/web/app/src/Features/TokenAccess/TokenAccessController.js @@ -147,7 +147,7 @@ async function checkAndGetProjectOrResponseAction( if (isAnonymousUser && tokenAccessEnabled) { if (tokenType === TokenAccessHandler.TOKEN_TYPES.READ_AND_WRITE) { if (TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED) { - logger.info({ projectId }, 'granting read-write anonymous access') + logger.debug({ projectId }, 'granting read-write anonymous access') TokenAccessHandler.grantSessionTokenAccess(req, projectId, token) return [ null, @@ -178,7 +178,7 @@ async function checkAndGetProjectOrResponseAction( ] } } else if (tokenType === TokenAccessHandler.TOKEN_TYPES.READ_ONLY) { - logger.info({ projectId }, 'granting read-only anonymous access') + logger.debug({ projectId }, 'granting read-only anonymous access') TokenAccessHandler.grantSessionTokenAccess(req, projectId, token) return [ null, diff --git a/services/web/app/src/Features/Uploads/ArchiveManager.js b/services/web/app/src/Features/Uploads/ArchiveManager.js index 2a8f1aa61a..e8c7e348a0 100644 --- a/services/web/app/src/Features/Uploads/ArchiveManager.js +++ b/services/web/app/src/Features/Uploads/ArchiveManager.js @@ -219,7 +219,7 @@ const ArchiveManager = { } const timer = new metrics.Timer('unzipDirectory') - logger.log({ source, destination }, 'unzipping file') + logger.debug({ source, destination }, 'unzipping file') return ArchiveManager._extractZipFiles( source, diff --git a/services/web/app/src/Features/Uploads/FileSystemImportManager.js b/services/web/app/src/Features/Uploads/FileSystemImportManager.js index bf80372375..f4c656a102 100644 --- a/services/web/app/src/Features/Uploads/FileSystemImportManager.js +++ b/services/web/app/src/Features/Uploads/FileSystemImportManager.js @@ -86,7 +86,7 @@ async function addFolderContents( replace ) { if (!(await _isSafeOnFileSystem(folderPath))) { - logger.log( + logger.debug( { userId, projectId, parentFolderId, folderPath }, 'add folder contents is from symlink, stopping insert' ) @@ -110,7 +110,7 @@ async function addFolderContents( async function addEntity(userId, projectId, folderId, name, fsPath, replace) { if (!(await _isSafeOnFileSystem(fsPath))) { - logger.log( + logger.debug( { userId, projectId, folderId, fsPath }, 'add entry is from symlink, stopping insert' ) diff --git a/services/web/app/src/Features/User/UserController.js b/services/web/app/src/Features/User/UserController.js index be3b8e7431..8495376413 100644 --- a/services/web/app/src/Features/User/UserController.js +++ b/services/web/app/src/Features/User/UserController.js @@ -428,7 +428,7 @@ const UserController = { doLogout(req, cb) { metrics.inc('user.logout') const user = SessionManager.getSessionUser(req.session) - logger.log({ user }, 'logging out') + logger.debug({ user }, 'logging out') const sessionId = req.sessionID if (typeof req.logout === 'function') { req.logout() diff --git a/services/web/app/src/Features/User/UserDeleter.js b/services/web/app/src/Features/User/UserDeleter.js index 159c13bdf4..c5465576cf 100644 --- a/services/web/app/src/Features/User/UserDeleter.js +++ b/services/web/app/src/Features/User/UserDeleter.js @@ -37,7 +37,7 @@ async function deleteUser(userId, options = {}) { try { const user = await User.findById(userId).exec() - logger.log({ user }, 'deleting user') + logger.debug({ user }, 'deleting user') await ensureCanDeleteUser(user) await _cleanupUser(user) diff --git 
a/services/web/app/src/Features/User/UserPagesController.js b/services/web/app/src/Features/User/UserPagesController.js index 922eb99cb3..01d4117a72 100644 --- a/services/web/app/src/Features/User/UserPagesController.js +++ b/services/web/app/src/Features/User/UserPagesController.js @@ -197,7 +197,7 @@ const UserPagesController = { sessionsPage(req, res, next) { const user = SessionManager.getSessionUser(req.session) - logger.log({ userId: user._id }, 'loading sessions page') + logger.debug({ userId: user._id }, 'loading sessions page') const currentSession = { ip_address: user.ip_address, session_created: user.session_created, diff --git a/services/web/app/src/Features/User/UserRegistrationHandler.js b/services/web/app/src/Features/User/UserRegistrationHandler.js index 7f07a11655..ae49b87131 100644 --- a/services/web/app/src/Features/User/UserRegistrationHandler.js +++ b/services/web/app/src/Features/User/UserRegistrationHandler.js @@ -106,7 +106,10 @@ const UserRegistrationHandler = { } if (error && error.message === 'EmailAlreadyRegistered') { - logger.log({ email }, 'user already exists, resending welcome email') + logger.debug( + { email }, + 'user already exists, resending welcome email' + ) } const ONE_WEEK = 7 * 24 * 60 * 60 // seconds diff --git a/services/web/app/src/Features/User/UserSessionsManager.js b/services/web/app/src/Features/User/UserSessionsManager.js index 43f355b8b1..4190a7a3f9 100644 --- a/services/web/app/src/Features/User/UserSessionsManager.js +++ b/services/web/app/src/Features/User/UserSessionsManager.js @@ -88,7 +88,10 @@ const UserSessionsManager = { } sessionKeys = _.filter(sessionKeys, k => !_.contains(exclude, k)) if (sessionKeys.length === 0) { - logger.log({ user_id: user._id }, 'no other sessions found, returning') + logger.debug( + { user_id: user._id }, + 'no other sessions found, returning' + ) return callback(null, []) } @@ -148,13 +151,13 @@ const UserSessionsManager = { k => !Array.from(retain).includes(k) ) if (keysToDelete.length === 0) { - logger.log( + logger.debug( { user_id: user._id }, 'no sessions in UserSessions set to delete, returning' ) return callback(null) } - logger.log( + logger.debug( { user_id: user._id, count: keysToDelete.length }, 'deleting sessions for user' ) diff --git a/services/web/app/src/Features/User/UserUpdater.js b/services/web/app/src/Features/User/UserUpdater.js index 8ab2dd28a2..383572585c 100644 --- a/services/web/app/src/Features/User/UserUpdater.js +++ b/services/web/app/src/Features/User/UserUpdater.js @@ -231,7 +231,7 @@ async function confirmEmail(userId, email) { if (email == null) { throw new Error('invalid email') } - logger.log({ userId, email }, 'confirming user email') + logger.debug({ userId, email }, 'confirming user email') try { await InstitutionsAPI.promises.addAffiliation(userId, email, { diff --git a/services/web/app/src/Features/V1/V1Handler.js b/services/web/app/src/Features/V1/V1Handler.js index a76bfa4220..2fd3481fbf 100644 --- a/services/web/app/src/Features/V1/V1Handler.js +++ b/services/web/app/src/Features/V1/V1Handler.js @@ -37,7 +37,7 @@ module.exports = V1Handler = { if ([200, 403].includes(response.statusCode)) { const isValid = body.valid const userProfile = body.user_profile - logger.log( + logger.debug( { email, isValid, @@ -78,7 +78,7 @@ module.exports = V1Handler = { return callback(err, false) } if ([200].includes(response.statusCode)) { - logger.log( + logger.debug( { v1_user_id, changed: true }, 'got success response from v1 password reset api' ) diff --git 
a/services/web/app/src/infrastructure/ExpressLocals.js b/services/web/app/src/infrastructure/ExpressLocals.js index e3fdd2db11..b66f630468 100644 --- a/services/web/app/src/infrastructure/ExpressLocals.js +++ b/services/web/app/src/infrastructure/ExpressLocals.js @@ -112,7 +112,7 @@ module.exports = function (webRouter, privateApiRouter, publicApiRouter) { const cdnBlocked = req.query.nocdn === 'true' || req.session.cdnBlocked const userId = SessionManager.getLoggedInUserId(req.session) if (cdnBlocked && req.session.cdnBlocked == null) { - logger.log( + logger.debug( { user_id: userId, ip: req != null ? req.ip : undefined }, 'cdnBlocked for user, not using it and turning it off for future requets' ) diff --git a/services/web/app/src/infrastructure/FileWriter.js b/services/web/app/src/infrastructure/FileWriter.js index 5cdf5ca7f1..20364eb7d0 100644 --- a/services/web/app/src/infrastructure/FileWriter.js +++ b/services/web/app/src/infrastructure/FileWriter.js @@ -153,7 +153,7 @@ const FileWriter = { return callback(err) } - logger.log( + logger.debug( { identifier, fsPath }, '[writeStreamToDisk] write stream finished' ) diff --git a/services/web/app/src/infrastructure/GeoIpLookup.js b/services/web/app/src/infrastructure/GeoIpLookup.js index 07b90308a7..274bb1b239 100644 --- a/services/web/app/src/infrastructure/GeoIpLookup.js +++ b/services/web/app/src/infrastructure/GeoIpLookup.js @@ -68,7 +68,7 @@ function getDetails(ip, callback) { timeout: 1000, json: true, } - logger.log({ ip, opts }, 'getting geo ip details') + logger.debug({ ip, opts }, 'getting geo ip details') request.get(opts, function (err, res, ipDetails) { if (err) { logger.warn({ err, ip }, 'error getting ip details') @@ -91,7 +91,7 @@ function getCurrencyCode(ip, callback) { ? ipDetails.country_code.toUpperCase() : undefined const currencyCode = currencyMappings[countryCode] || 'USD' - logger.log({ ip, currencyCode, ipDetails }, 'got currencyCode for ip') + logger.debug({ ip, currencyCode, ipDetails }, 'got currencyCode for ip') callback(err, currencyCode, countryCode) }) } diff --git a/services/web/app/src/infrastructure/LockManager.js b/services/web/app/src/infrastructure/LockManager.js index 4d58b4be9f..ebccc8ec0e 100644 --- a/services/web/app/src/infrastructure/LockManager.js +++ b/services/web/app/src/infrastructure/LockManager.js @@ -15,7 +15,7 @@ let COUNT = 0 const LOCK_QUEUES = new Map() // queue lock requests for each name/id so they get the lock on a first-come first-served basis -logger.log( +logger.debug( { lockManagerSettings: settings.lockManager }, 'LockManager initialising' ) @@ -60,7 +60,7 @@ const LockManager = { // is designed to log if this happens. 
function countIfExceededLockTimeout() { metrics.inc(`lock.${namespace}.exceeded_lock_timeout`) - logger.log('exceeded lock timeout', { + logger.debug('exceeded lock timeout', { namespace, id, slowExecutionError, @@ -77,7 +77,7 @@ const LockManager = { const timeTaken = new Date() - timer.start if (timeTaken > LockManager.SLOW_EXECUTION_THRESHOLD) { - logger.log('slow execution during lock', { + logger.debug('slow execution during lock', { namespace, id, timeTaken, @@ -113,7 +113,7 @@ const LockManager = { callback(err, true, lockValue) } else { metrics.inc(`lock.${namespace}.try.failed`) - logger.log({ key, redis_response: gotLock }, 'lock is locked') + logger.debug({ key, redis_response: gotLock }, 'lock is locked') callback(err, false) } } diff --git a/services/web/app/src/infrastructure/Mongoose.js b/services/web/app/src/infrastructure/Mongoose.js index b4eee7fa80..9a1f2b430e 100644 --- a/services/web/app/src/infrastructure/Mongoose.js +++ b/services/web/app/src/infrastructure/Mongoose.js @@ -27,7 +27,7 @@ const connectionPromise = mongoose.connect( ) mongoose.connection.on('connected', () => - logger.log('mongoose default connection open') + logger.debug('mongoose default connection open') ) mongoose.connection.on('error', err => @@ -35,7 +35,7 @@ mongoose.connection.on('error', err => ) mongoose.connection.on('disconnected', () => - logger.log('mongoose default connection disconnected') + logger.debug('mongoose default connection disconnected') ) if (process.env.MONGOOSE_DEBUG) { diff --git a/services/web/app/src/infrastructure/ProxyManager.js b/services/web/app/src/infrastructure/ProxyManager.js index 6cc0ecebed..d9e6138a96 100644 --- a/services/web/app/src/infrastructure/ProxyManager.js +++ b/services/web/app/src/infrastructure/ProxyManager.js @@ -41,7 +41,7 @@ module.exports = ProxyManager = { createProxy(target) { return function (req, res, next) { const targetUrl = makeTargetUrl(target, req) - logger.log({ targetUrl, reqUrl: req.url }, 'proxying url') + logger.debug({ targetUrl, reqUrl: req.url }, 'proxying url') const options = { url: targetUrl } if (req.headers != null ? 
req.headers.cookie : undefined) { diff --git a/services/web/app/src/infrastructure/Server.js b/services/web/app/src/infrastructure/Server.js index 1863284770..fbccb2f39f 100644 --- a/services/web/app/src/infrastructure/Server.js +++ b/services/web/app/src/infrastructure/Server.js @@ -277,30 +277,30 @@ webRouter.use( // add CSP header to HTML-rendering routes, if enabled if (Settings.csp && Settings.csp.enabled) { - logger.info('adding CSP header to rendered routes', Settings.csp) + logger.debug('adding CSP header to rendered routes', Settings.csp) app.use(csp(Settings.csp)) } -logger.info('creating HTTP server'.yellow) +logger.debug('creating HTTP server'.yellow) const server = require('http').createServer(app) // provide settings for separate web and api processes if (Settings.enabledServices.includes('api')) { - logger.info('providing api router') + logger.debug('providing api router') app.use(privateApiRouter) app.use(Validation.errorMiddleware) app.use(ErrorController.handleApiError) } if (Settings.enabledServices.includes('web')) { - logger.info('providing web router') + logger.debug('providing web router') if (Settings.precompilePugTemplatesAtBootTime) { - logger.info('precompiling views for web in production environment') + logger.debug('precompiling views for web in production environment') Views.precompileViews(app) } if (app.get('env') === 'test') { - logger.info('enabling view cache for acceptance tests') + logger.debug('enabling view cache for acceptance tests') app.enable('view cache') } diff --git a/services/web/app/src/infrastructure/Views.js b/services/web/app/src/infrastructure/Views.js index 1460006eb7..df3725e669 100644 --- a/services/web/app/src/infrastructure/Views.js +++ b/services/web/app/src/infrastructure/Views.js @@ -38,14 +38,14 @@ module.exports = { cache: true, compileDebug: Settings.debugPugTemplates, }) - logger.log({ filename }, 'compiled') + logger.debug({ filename }, 'compiled') success++ } catch (err) { logger.error({ filename, err: err.message }, 'error compiling') failures++ } }) - logger.log( + logger.debug( { timeTaken: Date.now() - startTime, failures, success }, 'compiled templates' ) diff --git a/services/web/modules/launchpad/app/src/LaunchpadController.js b/services/web/modules/launchpad/app/src/LaunchpadController.js index 480bb2f87f..f2b327ffcc 100644 --- a/services/web/modules/launchpad/app/src/LaunchpadController.js +++ b/services/web/modules/launchpad/app/src/LaunchpadController.js @@ -104,12 +104,12 @@ module.exports = LaunchpadController = { sendTestEmail(req, res, next) { const { email } = req.body if (!email) { - logger.log({}, 'no email address supplied') + logger.debug({}, 'no email address supplied') return res.status(400).json({ message: 'no email address supplied', }) } - logger.log({ email }, 'sending test email') + logger.debug({ email }, 'sending test email') const emailOptions = { to: email } return EmailHandler.sendEmail('testEmail', emailOptions, function (err) { if (err != null) { @@ -118,7 +118,7 @@ module.exports = LaunchpadController = { }) return next(err) } - logger.log({ email }, 'sent test email') + logger.debug({ email }, 'sent test email') res.json({ message: res.locals.translate('email_sent') }) }) }, @@ -126,7 +126,7 @@ module.exports = LaunchpadController = { registerExternalAuthAdmin(authMethod) { return function (req, res, next) { if (LaunchpadController._getAuthMethod() !== authMethod) { - logger.log( + logger.debug( { authMethod }, 'trying to register external admin, but that auth service is not enabled, 
disallow' ) @@ -134,18 +134,18 @@ module.exports = LaunchpadController = { } const { email } = req.body if (!email) { - logger.log({ authMethod }, 'no email supplied, disallow') + logger.debug({ authMethod }, 'no email supplied, disallow') return res.sendStatus(400) } - logger.log({ email }, 'attempted register first admin user') + logger.debug({ email }, 'attempted register first admin user') return LaunchpadController._atLeastOneAdminExists(function (err, exists) { if (err != null) { return next(err) } if (exists) { - logger.log( + logger.debug( { email }, 'already have at least one admin user, disallow' ) @@ -158,7 +158,7 @@ module.exports = LaunchpadController = { first_name: email, last_name: '', } - logger.log( + logger.debug( { body, authMethod }, 'creating admin account for specified external-auth user' ) @@ -189,7 +189,7 @@ module.exports = LaunchpadController = { } AuthenticationController.setRedirectInSession(req, '/launchpad') - logger.log( + logger.debug( { email, user_id: user._id, authMethod }, 'created first admin account' ) @@ -207,18 +207,18 @@ module.exports = LaunchpadController = { const { email } = req.body const { password } = req.body if (!email || !password) { - logger.log({}, 'must supply both email and password, disallow') + logger.debug({}, 'must supply both email and password, disallow') return res.sendStatus(400) } - logger.log({ email }, 'attempted register first admin user') + logger.debug({ email }, 'attempted register first admin user') return LaunchpadController._atLeastOneAdminExists(function (err, exists) { if (err != null) { return next(err) } if (exists) { - logger.log( + logger.debug( { email: req.body.email }, 'already have at least one admin user, disallow' ) @@ -233,7 +233,7 @@ module.exports = LaunchpadController = { return next(err) } - logger.log({ user_id: user._id }, 'making user an admin') + logger.debug({ user_id: user._id }, 'making user an admin') User.updateOne( { _id: user._id }, { @@ -250,7 +250,7 @@ module.exports = LaunchpadController = { return next(err) } - logger.log( + logger.debug( { email, user_id: user._id }, 'created first admin account' ) diff --git a/services/web/modules/launchpad/app/src/LaunchpadRouter.js b/services/web/modules/launchpad/app/src/LaunchpadRouter.js index 6ac3008d4b..410d399519 100644 --- a/services/web/modules/launchpad/app/src/LaunchpadRouter.js +++ b/services/web/modules/launchpad/app/src/LaunchpadRouter.js @@ -16,7 +16,7 @@ const AuthorizationMiddleware = require('../../../../app/src/Features/Authorizat module.exports = { apply(webRouter) { - logger.log({}, 'Init launchpad router') + logger.debug({}, 'Init launchpad router') webRouter.get('/launchpad', LaunchpadController.launchpadPage) webRouter.post( diff --git a/services/web/modules/user-activate/app/src/UserActivateRouter.js b/services/web/modules/user-activate/app/src/UserActivateRouter.js index c6294ed04c..a9088dd544 100644 --- a/services/web/modules/user-activate/app/src/UserActivateRouter.js +++ b/services/web/modules/user-activate/app/src/UserActivateRouter.js @@ -5,7 +5,7 @@ const AuthorizationMiddleware = require('../../../../app/src/Features/Authorizat module.exports = { apply(webRouter) { - logger.log({}, 'Init UserActivate router') + logger.debug({}, 'Init UserActivate router') webRouter.get( '/admin/user', diff --git a/services/web/test/unit/src/Authentication/AuthenticationControllerTests.js b/services/web/test/unit/src/Authentication/AuthenticationControllerTests.js index a3c37c4c48..a1a5f08a3c 100644 --- 
a/services/web/test/unit/src/Authentication/AuthenticationControllerTests.js +++ b/services/web/test/unit/src/Authentication/AuthenticationControllerTests.js @@ -471,7 +471,7 @@ describe('AuthenticationController', function () { }) it('should log the failed login', function () { - this.logger.log + this.logger.debug .calledWith({ email: this.email.toLowerCase() }, 'failed log in') .should.equal(true) }) @@ -911,7 +911,7 @@ describe('AuthenticationController', function () { }) it('should log out a message', function () { - this.logger.log + this.logger.debug .calledWith( { url: this.url }, 'user not logged in so redirecting to register page' @@ -1417,7 +1417,7 @@ describe('AuthenticationController', function () { }) it('should log the successful login', function () { - this.logger.log + this.logger.debug .calledWith( { email: this.user.email, user_id: this.user._id.toString() }, 'successful log in'