Merge pull request #7906 from overleaf/em-downgrade-logs

Downgrade all INFO logs to DEBUG

GitOrigin-RevId: 05ed582ef0721fcada059f0ad158565f50feca27
parent efcb06f0df
commit e0d91eaa26
116 changed files with 487 additions and 423 deletions
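The change is mechanical: informational call sites move one level down, from logger.info (and the legacy logger.log alias) to logger.debug, while warn, error, and fatal calls are left untouched. A minimal sketch of the before/after pattern, assuming the bunyan-style API exposed by @overleaf/logger; the project id and message below are illustrative, not taken from any one file:

    const logger = require('@overleaf/logger')

    const projectId = 'abc123' // hypothetical id, for illustration only

    // Before: routine events were emitted at INFO
    logger.info({ projectId }, 'getting all threads')

    // After: the same event at DEBUG, keeping INFO output quiet by default
    logger.debug({ projectId }, 'getting all threads')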
@@ -87,13 +87,13 @@ module.exports = MemoryMonitor = {
     Metrics.gauge('memory.gc-interval', gcInterval)
     // Metrics.gauge("memory.cpu-time-bucket", CpuTimeBucket)

-    logger.log(mem, 'process.memoryUsage()')
+    logger.debug(mem, 'process.memoryUsage()')

     if (global.gc != null && readyToGc()) {
       const gcTime = executeAndTime(global.gc).toFixed(2)
       const memAfterGc = inMegaBytes(process.memoryUsage())
       const deltaMem = updateMemoryStats(memBeforeGc, memAfterGc)
-      logger.log(
+      logger.debug(
         {
           gcTime,
           memBeforeGc,
@@ -53,7 +53,7 @@ module.exports = {
     const start = new Date()
     return _method.call(this, dbCommand, options, function () {
       timer.done()
-      logger.log(
+      logger.debug(
         {
           query: dbCommand.query,
           query_type: type,
@@ -89,7 +89,7 @@ module.exports = {
     const start = new Date()
     return _method.call(this, ns, ops, options, function () {
       timer.done()
-      logger.log(
+      logger.debug(
         {
           query: ops[0].q,
           query_type: type,
@@ -130,10 +130,10 @@ describe('timeAsyncMethod', function () {

   describe('when a logger is supplied', function () {
     beforeEach(function () {
-      return (this.logger = { log: sinon.stub() })
+      return (this.logger = { debug: sinon.stub(), log: sinon.stub() })
     })

-    return it('should also call logger.log', function (done) {
+    return it('should also call logger.debug', function (done) {
       this.timeAsyncMethod(
         this.testObject,
         'nextNumber',
@@ -145,7 +145,7 @@ describe('timeAsyncMethod', function () {
       expect(result).to.equal(3)
       expect(this.TimerConstructor.callCount).to.equal(1)
       expect(this.Timer.done.callCount).to.equal(1)
-      expect(this.logger.log.callCount).to.equal(1)
+      expect(this.logger.debug.callCount).to.equal(1)
       return done()
     })
   })
@@ -36,7 +36,7 @@ module.exports = function (obj, methodName, prefix, logger) {

     if (callback == null || typeof callback !== 'function') {
       if (logger != null) {
-        logger.log(
+        logger.debug(
           `[Metrics] expected wrapped method '${methodName}' to be invoked with a callback`
         )
       }
@@ -74,7 +74,7 @@ module.exports = function (obj, methodName, prefix, logger) {
         }
       }
     } catch (error) {}
-    logger.log(
+    logger.debug(
       { key, args: loggableArgs, elapsedTime },
       '[Metrics] timed async method call'
     )
@@ -16,7 +16,7 @@ if (!module.parent) {
       logger.fatal({ err }, `Cannot bind to ${host}:${port}. Exiting.`)
       process.exit(1)
     }
-    logger.info(`Chat starting up, listening on ${host}:${port}`)
+    logger.debug(`Chat starting up, listening on ${host}:${port}`)
   })
 })
 .catch(err => {
@@ -22,7 +22,7 @@ async function sendThreadMessage(req, res) {

 async function getAllThreads(req, res) {
   const { projectId } = req.params
-  logger.log({ projectId }, 'getting all threads')
+  logger.debug({ projectId }, 'getting all threads')
   const rooms = await ThreadManager.findAllThreadRooms(projectId)
   const roomIds = rooms.map(r => r._id)
   const messages = await MessageManager.findAllMessagesInRooms(roomIds)
@@ -33,21 +33,21 @@ async function getAllThreads(req, res) {
 async function resolveThread(req, res) {
   const { projectId, threadId } = req.params
   const { user_id: userId } = req.body
-  logger.log({ userId, projectId, threadId }, 'marking thread as resolved')
+  logger.debug({ userId, projectId, threadId }, 'marking thread as resolved')
   await ThreadManager.resolveThread(projectId, threadId, userId)
   res.sendStatus(204)
 }

 async function reopenThread(req, res) {
   const { projectId, threadId } = req.params
-  logger.log({ projectId, threadId }, 'reopening thread')
+  logger.debug({ projectId, threadId }, 'reopening thread')
   await ThreadManager.reopenThread(projectId, threadId)
   res.sendStatus(204)
 }

 async function deleteThread(req, res) {
   const { projectId, threadId } = req.params
-  logger.log({ projectId, threadId }, 'deleting thread')
+  logger.debug({ projectId, threadId }, 'deleting thread')
   const roomId = await ThreadManager.deleteThread(projectId, threadId)
   await MessageManager.deleteAllMessagesInRoom(roomId)
   res.sendStatus(204)
@@ -56,7 +56,7 @@ async function deleteThread(req, res) {
 async function editMessage(req, res) {
   const { content, userId } = req.body
   const { projectId, threadId, messageId } = req.params
-  logger.log({ projectId, threadId, messageId, content }, 'editing message')
+  logger.debug({ projectId, threadId, messageId, content }, 'editing message')
   const room = await ThreadManager.findOrCreateThread(projectId, threadId)
   const found = await MessageManager.updateMessage(
     room._id,
@@ -73,7 +73,7 @@ async function editMessage(req, res) {

 async function deleteMessage(req, res) {
   const { projectId, threadId, messageId } = req.params
-  logger.log({ projectId, threadId, messageId }, 'deleting message')
+  logger.debug({ projectId, threadId, messageId }, 'deleting message')
   const room = await ThreadManager.findOrCreateThread(projectId, threadId)
   await MessageManager.deleteMessage(room._id, messageId)
   res.sendStatus(204)
@@ -81,12 +81,12 @@ async function deleteMessage(req, res) {

 async function destroyProject(req, res) {
   const { projectId } = req.params
-  logger.log({ projectId }, 'destroying project')
+  logger.debug({ projectId }, 'destroying project')
   const rooms = await ThreadManager.findAllThreadRoomsAndGlobalThread(projectId)
   const roomIds = rooms.map(r => r._id)
-  logger.log({ projectId, roomIds }, 'deleting all messages in rooms')
+  logger.debug({ projectId, roomIds }, 'deleting all messages in rooms')
   await MessageManager.deleteAllMessagesInRooms(roomIds)
-  logger.log({ projectId }, 'deleting all threads in project')
+  logger.debug({ projectId }, 'deleting all threads in project')
   await ThreadManager.deleteAllThreadsInProject(projectId)
   res.sendStatus(204)
 }
@@ -105,7 +105,7 @@ async function _sendMessage(clientThreadId, req, res) {
       .status(400)
       .send(`Content too long (> ${MAX_MESSAGE_LENGTH} bytes)`)
   }
-  logger.log(
+  logger.debug(
     { clientThreadId, projectId, userId, content },
     'new message received'
   )
@@ -137,7 +137,7 @@ async function _getMessages(clientThreadId, req, res) {
   } else {
     limit = DEFAULT_MESSAGE_LIMIT
   }
-  logger.log(
+  logger.debug(
     { limit, before, projectId, clientThreadId },
     'get message request received'
   )
@@ -146,13 +146,13 @@ async function _getMessages(clientThreadId, req, res) {
     clientThreadId
   )
   const threadObjectId = thread._id
-  logger.log(
+  logger.debug(
     { limit, before, projectId, clientThreadId, threadObjectId },
     'found or created thread'
   )
   let messages = await MessageManager.getMessages(threadObjectId, limit, before)
   messages = MessageFormatter.formatMessagesForClientSide(messages)
-  logger.log({ projectId, messages }, 'got messages')
+  logger.debug({ projectId, messages }, 'got messages')
   res.status(200).send(messages)
 }

@@ -258,13 +258,13 @@ Settings.processTooOld = false
 if (Settings.processLifespanLimitMs) {
   Settings.processLifespanLimitMs +=
     Settings.processLifespanLimitMs * (Math.random() / 10)
-  logger.info(
+  logger.debug(
     'Lifespan limited to ',
     Date.now() + Settings.processLifespanLimitMs
   )

   setTimeout(() => {
-    logger.log('shutting down, process is too old')
+    logger.debug('shutting down, process is too old')
     Settings.processTooOld = true
   }, Settings.processLifespanLimitMs)
 }
@@ -276,10 +276,10 @@ function runSmokeTest() {
     smokeTest.lastRunSuccessful() &&
     Date.now() - CompileController.lastSuccessfulCompile < INTERVAL / 2
   ) {
-    logger.log('skipping smoke tests, got recent successful user compile')
+    logger.debug('skipping smoke tests, got recent successful user compile')
     return setTimeout(runSmokeTest, INTERVAL / 2)
   }
-  logger.log('running smoke tests')
+  logger.debug('running smoke tests')
   smokeTest.triggerRun(err => {
     if (err) logger.error({ err }, 'smoke tests failed')
     setTimeout(runSmokeTest, INTERVAL)
@@ -300,7 +300,7 @@ app.get('/smoke_test_force', (req, res) => smokeTest.sendNewResult(res))

 app.use(function (error, req, res, next) {
   if (error instanceof Errors.NotFoundError) {
-    logger.log({ err: error, url: req.url }, 'not found error')
+    logger.debug({ err: error, url: req.url }, 'not found error')
     return res.sendStatus(404)
   } else if (error.code === 'EPIPE') {
     // inspect container returns EPIPE when shutting down
@@ -362,19 +362,19 @@ const loadHttpServer = express()

 loadHttpServer.post('/state/up', function (req, res, next) {
   STATE = 'up'
-  logger.info('getting message to set server to down')
+  logger.debug('getting message to set server to down')
   return res.sendStatus(204)
 })

 loadHttpServer.post('/state/down', function (req, res, next) {
   STATE = 'down'
-  logger.info('getting message to set server to down')
+  logger.debug('getting message to set server to down')
   return res.sendStatus(204)
 })

 loadHttpServer.post('/state/maint', function (req, res, next) {
   STATE = 'maint'
-  logger.info('getting message to set server to maint')
+  logger.debug('getting message to set server to maint')
   return res.sendStatus(204)
 })

@@ -407,7 +407,7 @@ if (!module.parent) {
     if (error) {
       logger.fatal({ error }, `Error starting CLSI on ${host}:${port}`)
     } else {
-      logger.info(`CLSI starting up, listening on ${host}:${port}`)
+      logger.debug(`CLSI starting up, listening on ${host}:${port}`)
     }
   })

@@ -415,14 +415,16 @@ if (!module.parent) {
     if (error != null) {
       throw error
     }
-    return logger.info(`Load tcp agent listening on load port ${loadTcpPort}`)
+    return logger.debug(`Load tcp agent listening on load port ${loadTcpPort}`)
   })

   loadHttpServer.listen(loadHttpPort, host, function (error) {
     if (error != null) {
       throw error
     }
-    return logger.info(`Load http agent listening on load port ${loadHttpPort}`)
+    return logger.debug(
+      `Load http agent listening on load port ${loadHttpPort}`
+    )
   })
 }

@@ -14,7 +14,7 @@ if ((Settings.clsi != null ? Settings.clsi.dockerRunner : undefined) === true) {
 } else {
   commandRunnerPath = './LocalCommandRunner'
 }
-logger.info({ commandRunnerPath }, 'selecting command runner for clsi')
+logger.debug({ commandRunnerPath }, 'selecting command runner for clsi')
 const CommandRunner = require(commandRunnerPath)

 module.exports = CommandRunner

@@ -72,7 +72,7 @@ module.exports = CompileController = {
       status = `validation-${error.validate}`
     } else if (error != null ? error.timedout : undefined) {
       status = 'timedout'
-      logger.log(
+      logger.debug(
         { err: error, project_id: request.project_id },
         'timeout running compile'
       )
@@ -245,7 +245,7 @@ module.exports = CompileController = {
     if (image && !isImageNameAllowed(image)) {
       return res.status(400).send('invalid image')
     }
-    logger.log({ image, file, project_id }, 'word count request')
+    logger.debug({ image, file, project_id }, 'word count request')

     return CompileManager.wordcount(
       project_id,
@@ -69,7 +69,7 @@ function doCompile(request, callback) {
     COMPILE_TIME_BUCKETS
   )
   const timer = new Metrics.Timer('write-to-disk', 1, request.metricsOpts)
-  logger.log(
+  logger.debug(
     { projectId: request.project_id, userId: request.user_id },
     'syncing resources to disk'
   )
@@ -95,7 +95,7 @@ function doCompile(request, callback) {
       )
       return callback(error)
     }
-    logger.log(
+    logger.debug(
       {
         projectId: request.project_id,
         userId: request.user_id,
@@ -257,7 +257,7 @@ function doCompile(request, callback) {
       Metrics.gauge('load-avg', loadavg[0])
     }
     const ts = timer.done()
-    logger.log(
+    logger.debug(
       {
         projectId: request.project_id,
         userId: request.user_id,
@@ -498,7 +498,10 @@ function syncFromPdf(projectId, userId, page, h, v, imageName, callback) {
     if (error != null) {
       return callback(error)
     }
-    logger.log({ projectId, userId, page, h, v, stdout }, 'synctex pdf output')
+    logger.debug(
+      { projectId, userId, page, h, v, stdout },
+      'synctex pdf output'
+    )
     callback(null, SynctexOutputParser.parseEditOutput(stdout, baseDir))
   })
 }
@@ -561,7 +564,7 @@ function _runSynctex(projectId, userId, command, imageName, callback) {
 }

 function wordcount(projectId, userId, filename, image, callback) {
-  logger.log({ projectId, userId, filename, image }, 'running wordcount')
+  logger.debug({ projectId, userId, filename, image }, 'running wordcount')
   const filePath = `$COMPILE_DIR/${filename}`
   const command = [
     'texcount',
@@ -607,7 +610,7 @@ function wordcount(projectId, userId, filename, image, callback) {
       return callback(err)
     }
     const results = _parseWordcountFromOutput(stdout)
-    logger.log(
+    logger.debug(
       { projectId, userId, wordcount: results },
       'word count results'
     )
@@ -10,7 +10,7 @@ const Path = require('path')
 const _ = require('lodash')

 const ONE_HOUR_IN_MS = 60 * 60 * 1000
-logger.info('using docker runner')
+logger.debug('using docker runner')

 function usingSiblingContainers() {
   return (
@@ -36,7 +36,7 @@ const DockerRunner = {
   ) {
     if (usingSiblingContainers()) {
       const _newPath = Settings.path.sandboxedCompilesHostDir
-      logger.log(
+      logger.debug(
         { path: _newPath },
         'altering bind path for sibling containers'
       )
@@ -85,14 +85,14 @@ const DockerRunner = {

     // logOptions = _.clone(options)
     // logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
-    logger.log({ projectId }, 'running docker container')
+    logger.debug({ projectId }, 'running docker container')
     DockerRunner._runAndWaitForContainer(
       options,
       volumes,
       timeout,
       (error, output) => {
         if (error && error.statusCode === 500) {
-          logger.log(
+          logger.debug(
             { err: error, projectId },
             'error running container so destroying and retrying'
           )
@@ -118,7 +118,7 @@ const DockerRunner = {
   },

   kill(containerId, callback) {
-    logger.log({ containerId }, 'sending kill signal to container')
+    logger.debug({ containerId }, 'sending kill signal to container')
     const container = dockerode.getContainer(containerId)
     container.kill(error => {
       if (
@@ -193,7 +193,7 @@ const DockerRunner = {
       if (options != null && options.HostConfig != null) {
         options.HostConfig.SecurityOpt = null
       }
-      logger.log({ exitCode, options }, 'docker container has exited')
+      logger.debug({ exitCode, options }, 'docker container has exited')
       callbackIfFinished()
     })
   }
@@ -352,7 +352,7 @@ const DockerRunner = {
     callback = _.once(callback)
     const { name } = options

-    logger.log({ container_name: name }, 'starting container')
+    logger.debug({ container_name: name }, 'starting container')
     const container = dockerode.getContainer(name)

     function createAndStartContainer() {
@@ -412,7 +412,7 @@ const DockerRunner = {
         attachStartCallback()
       }

-      logger.log({ containerId }, 'attached to container')
+      logger.debug({ containerId }, 'attached to container')

       const MAX_OUTPUT = 1024 * 1024 // limit output to 1MB
       function createStringOutputStream(name) {
@@ -469,13 +469,13 @@ const DockerRunner = {
     let timedOut = false
     const timeoutId = setTimeout(() => {
       timedOut = true
-      logger.log({ containerId }, 'timeout reached, killing container')
+      logger.debug({ containerId }, 'timeout reached, killing container')
       container.kill(err => {
         logger.warn({ err, containerId }, 'failed to kill container')
       })
     }, timeout)

-    logger.log({ containerId }, 'waiting for docker container')
+    logger.debug({ containerId }, 'waiting for docker container')
     container.wait((error, res) => {
       if (error != null) {
         clearTimeout(timeoutId)
@@ -483,13 +483,13 @@ const DockerRunner = {
         return callback(error)
       }
       if (timedOut) {
-        logger.log({ containerId }, 'docker container timed out')
+        logger.debug({ containerId }, 'docker container timed out')
         error = new Error('container timed out')
         error.timedout = true
         callback(error)
       } else {
         clearTimeout(timeoutId)
-        logger.log(
+        logger.debug(
           { containerId, exitCode: res.StatusCode },
           'docker container returned'
         )
@@ -518,7 +518,7 @@ const DockerRunner = {
   },

   _destroyContainer(containerId, shouldForce, callback) {
-    logger.log({ containerId }, 'destroying docker container')
+    logger.debug({ containerId }, 'destroying docker container')
     const container = dockerode.getContainer(containerId)
     container.remove({ force: shouldForce === true, v: true }, error => {
       if (error != null && error.statusCode === 404) {
@@ -531,7 +531,7 @@ const DockerRunner = {
       if (error != null) {
         logger.error({ err: error, containerId }, 'error destroying container')
       } else {
-        logger.log({ containerId }, 'destroyed container')
+        logger.debug({ containerId }, 'destroyed container')
       }
       callback(error)
     })
@@ -548,7 +548,7 @@ const DockerRunner = {
       const age = now - created
       const maxAge = DockerRunner.MAX_CONTAINER_AGE
       const ttl = maxAge - age
-      logger.log(
+      logger.debug(
         { containerName: name, created, now, age, maxAge, ttl },
         'checking whether to destroy container'
       )
@@ -579,7 +579,7 @@ const DockerRunner = {
   },

   startContainerMonitor() {
-    logger.log(
+    logger.debug(
       { maxAge: DockerRunner.MAX_CONTAINER_AGE },
       'starting container expiry'
     )
@@ -32,7 +32,7 @@ module.exports = DraftModeManager = {
       return callback()
     }
     const modified_content = DraftModeManager._injectDraftOption(content)
-    logger.log(
+    logger.debug(
       {
         content: content.slice(0, 1024), // \documentclass is normally v near the top
         modified_content: modified_content.slice(0, 1024),
@@ -51,7 +51,7 @@ module.exports = LatexRunner = {
       timeout = 60000
     } // milliseconds

-    logger.log(
+    logger.debug(
       {
         directory,
         compiler,
@@ -169,7 +169,7 @@ module.exports = LatexRunner = {
       callback = function () {}
     }
     const id = `${project_id}`
-    logger.log({ id }, 'killing running compile')
+    logger.debug({ id }, 'killing running compile')
     if (ProcessTable[id] == null) {
       logger.warn({ id }, 'no such project to kill')
       return callback(null)
@@ -17,7 +17,7 @@ const { spawn } = require('child_process')
 const _ = require('lodash')
 const logger = require('@overleaf/logger')

-logger.info('using standard command runner')
+logger.debug('using standard command runner')

 module.exports = CommandRunner = {
   run(
@@ -35,7 +35,7 @@ module.exports = CommandRunner = {
     command = Array.from(command).map(arg =>
       arg.toString().replace('$COMPILE_DIR', directory)
     )
-    logger.log({ project_id, command, directory }, 'running command')
+    logger.debug({ project_id, command, directory }, 'running command')
     logger.warn('timeouts and sandboxing are not enabled with CommandRunner')

     // merge environment settings
@@ -69,7 +69,7 @@ module.exports = CommandRunner = {

     proc.on('close', function (code, signal) {
       let err
-      logger.info({ code, signal, project_id }, 'command exited')
+      logger.debug({ code, signal, project_id }, 'command exited')
       if (signal === 'SIGTERM') {
         // signal from kill method below
         err = new Error('terminated')
@@ -464,7 +464,7 @@ module.exports = OutputCacheManager = {
       OutputCacheManager.ARCHIVE_SUBDIR,
       buildId
     )
-    logger.log({ dir: archiveDir }, 'archiving log files for project')
+    logger.debug({ dir: archiveDir }, 'archiving log files for project')
     return fse.ensureDir(archiveDir, function (err) {
       if (err != null) {
         return callback(err)
@@ -577,7 +577,7 @@ module.exports = OutputCacheManager = {

     const removeDir = (dir, cb) =>
       fse.remove(Path.join(cacheRoot, dir), function (err, result) {
-        logger.log({ cache: cacheRoot, dir }, 'removed expired cache dir')
+        logger.debug({ cache: cacheRoot, dir }, 'removed expired cache dir')
         if (err != null) {
           logger.error({ err, dir }, 'cache remove error')
         }
@@ -53,7 +53,7 @@ module.exports = OutputFileFinder = {
       'f',
       '-print',
     ]
-    logger.log({ args }, 'running find command')
+    logger.debug({ args }, 'running find command')

     const proc = spawn('find', args)
     let stdout = ''
@@ -71,7 +71,7 @@ module.exports = OutputFileOptimiser = {
     }
     const tmpOutput = dst + '.opt'
     const args = ['--linearize', '--newline-before-endstream', src, tmpOutput]
-    logger.log({ args }, 'running qpdf command')
+    logger.debug({ args }, 'running qpdf command')

     const timer = new Metrics.Timer('qpdf')
     const proc = spawn('qpdf', args)
@@ -121,7 +121,7 @@ module.exports = ProjectPersistenceManager = {
       if (error != null) {
         return callback(error)
       }
-      logger.log({ project_ids }, 'clearing expired projects')
+      logger.debug({ project_ids }, 'clearing expired projects')
       const jobs = Array.from(project_ids || []).map(project_id =>
         (
           project_id => callback =>
@@ -152,7 +152,7 @@ module.exports = ProjectPersistenceManager = {
     if (callback == null) {
       callback = function () {}
     }
-    logger.log({ project_id, user_id }, 'clearing project for user')
+    logger.debug({ project_id, user_id }, 'clearing project for user')
     return CompileManager.clearProject(project_id, user_id, function (error) {
       if (error != null) {
         return callback(error)
@@ -173,7 +173,7 @@ module.exports = ProjectPersistenceManager = {
     if (callback == null) {
       callback = function () {}
     }
-    logger.log({ project_id }, 'clearing project from cache')
+    logger.debug({ project_id }, 'clearing project from cache')
     return UrlCache.clearProject(project_id, function (error) {
       if (error != null) {
         logger.err({ error, project_id }, 'error clearing project from cache')
@@ -212,7 +212,7 @@ module.exports = ProjectPersistenceManager = {
   },
 }

-logger.log(
+logger.debug(
   { EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT },
   'project assets kept timeout'
 )
@@ -26,7 +26,7 @@ module.exports = {
     const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
     if (state == null) {
       // remove the file if no state passed in
-      logger.log({ state, basePath }, 'clearing sync state')
+      logger.debug({ state, basePath }, 'clearing sync state')
       fs.unlink(stateFile, function (err) {
         if (err && err.code !== 'ENOENT') {
           return callback(err)
@@ -35,7 +35,7 @@ module.exports = {
         }
       })
     } else {
-      logger.log({ state, basePath }, 'writing sync state')
+      logger.debug({ state, basePath }, 'writing sync state')
       const resourceList = resources.map(resource => resource.path)
       fs.writeFile(
         stateFile,
@@ -67,7 +67,7 @@ module.exports = {
     const resourceList = array.slice(0, adjustedLength - 1)
     const oldState = array[adjustedLength - 1]
     const newState = `stateHash:${state}`
-    logger.log(
+    logger.debug(
       { state, oldState, basePath, stateMatches: newState === oldState },
       'checking sync state'
     )
@@ -32,7 +32,7 @@ module.exports = ResourceWriter = {
       callback = function () {}
     }
     if (request.syncType === 'incremental') {
-      logger.log(
+      logger.debug(
         { project_id: request.project_id, user_id: request.user_id },
         'incremental sync'
       )
@@ -77,7 +77,7 @@ module.exports = ResourceWriter = {
         }
       )
     }
-    logger.log(
+    logger.debug(
       { project_id: request.project_id, user_id: request.user_id },
       'full sync'
     )
@@ -160,7 +160,7 @@ module.exports = ResourceWriter = {
         if (err.code === 'EEXIST') {
           return callback()
         } else {
-          logger.log({ err, dir: basePath }, 'error creating directory')
+          logger.debug({ err, dir: basePath }, 'error creating directory')
          return callback(err)
         }
       } else {
@@ -29,7 +29,10 @@ module.exports = TikzManager = {
     }
     for (const resource of Array.from(resources)) {
       if (resource.path === 'output.tex') {
-        logger.log({ compileDir, mainFile }, 'output.tex already in resources')
+        logger.debug(
+          { compileDir, mainFile },
+          'output.tex already in resources'
+        )
         return callback(null, false)
       }
     }
@@ -55,7 +58,7 @@ module.exports = TikzManager = {
         : undefined) >= 0
     const usesPsTool =
       (content != null ? content.indexOf('{pstool}') : undefined) >= 0
-    logger.log(
+    logger.debug(
       { compileDir, mainFile, usesTikzExternalize, usesPsTool },
       'checked for packages needing main file as output.tex'
     )
@@ -82,7 +85,7 @@ module.exports = TikzManager = {
       if (error != null) {
         return callback(error)
       }
-      logger.log(
+      logger.debug(
         { compileDir, mainFile },
         'copied file to output.tex as project uses packages which require it'
       )
@@ -61,7 +61,7 @@ module.exports = UrlFetcher = {
       3 * oneMinute
     )

-    logger.log({ url, filePath }, 'started downloading url to cache')
+    logger.debug({ url, filePath }, 'started downloading url to cache')
     const urlStream = request.get({ url, timeout: oneMinute })
     urlStream.pause() // stop data flowing until we are ready

@@ -74,7 +74,7 @@ module.exports = UrlFetcher = {
     })

     urlStream.on('end', () =>
-      logger.log({ url, filePath }, 'finished downloading file into cache')
+      logger.debug({ url, filePath }, 'finished downloading file into cache')
     )

     return urlStream.on('response', function (res) {
@@ -97,7 +97,7 @@ module.exports = UrlFetcher = {
       })

       fileStream.on('finish', function () {
-        logger.log({ url, filePath }, 'finished writing file into cache')
+        logger.debug({ url, filePath }, 'finished writing file into cache')
         fs.rename(atomicWrite, filePath, error => {
           if (error) {
             fs.unlink(atomicWrite, () => callbackOnce(error))
@@ -108,7 +108,7 @@ module.exports = UrlFetcher = {
       })

       fileStream.on('pipe', () =>
-        logger.log({ url, filePath }, 'piping into filestream')
+        logger.debug({ url, filePath }, 'piping into filestream')
       )

       urlStream.pipe(fileStream)
@@ -57,7 +57,9 @@ if (!module.parent) {
       logger.fatal({ err }, `Cannot bind to ${host}:${port}. Exiting.`)
       process.exit(1)
     }
-    return logger.info(`contacts starting up, listening on ${host}:${port}`)
+    return logger.debug(
+      `contacts starting up, listening on ${host}:${port}`
+    )
   })
 })
 .catch(err => {
@@ -23,7 +23,7 @@ module.exports = HttpController = {
       return
     }

-    logger.log({ user_id, contact_id }, 'adding contact')
+    logger.debug({ user_id, contact_id }, 'adding contact')

     return ContactManager.touchContact(user_id, contact_id, function (error) {
       if (error != null) {
@@ -50,7 +50,7 @@ module.exports = HttpController = {
     }
     limit = Math.min(limit, HttpController.CONTACT_LIMIT)

-    logger.log({ user_id }, 'getting contacts')
+    logger.debug({ user_id }, 'getting contacts')

     return ContactManager.getContacts(user_id, function (error, contact_dict) {
       if (error != null) {
@@ -2,7 +2,13 @@ const SandboxedModule = require('sandboxed-module')

 SandboxedModule.configure({
   requires: {
-    '@overleaf/logger': { log() {} },
+    '@overleaf/logger': {
+      debug() {},
+      info() {},
+      log() {},
+      warn() {},
+      error() {},
+    },
     '@overleaf/metrics': { timeAsyncMethod() {} },
   },
   globals: { Buffer, console, process },
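Because the wrapped code now calls logger.debug, any test harness that injects a fake logger must stub that method as well, or the downgraded calls throw at runtime. A hedged sketch of the stub setup, following the SandboxedModule configuration in the hunk above (sinon stubs swapped in for the no-op methods, purely for illustration):

    const sinon = require('sinon')
    const SandboxedModule = require('sandboxed-module')

    // Stub every level the code under test may call; a missing method here
    // surfaces as "logger.debug is not a function" after the downgrade.
    SandboxedModule.configure({
      requires: {
        '@overleaf/logger': {
          debug: sinon.stub(),
          info: sinon.stub(),
          log: sinon.stub(),
          warn: sinon.stub(),
          error: sinon.stub(),
        },
      },
      globals: { Buffer, console, process },
    })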
@@ -109,7 +109,9 @@ if (!module.parent) {
       logger.fatal({ err }, `Cannot bind to ${host}:${port}. Exiting.`)
       process.exit(1)
     }
-    return logger.info(`Docstore starting up, listening on ${host}:${port}`)
+    return logger.debug(
+      `Docstore starting up, listening on ${host}:${port}`
+    )
   })
   server.timeout = 120000
   server.keepAliveTimeout = 5000
@@ -70,7 +70,7 @@ async function archiveDocById(projectId, docId) {
 }

 async function archiveDoc(projectId, doc) {
-  logger.log(
+  logger.debug(
     { project_id: projectId, doc_id: doc._id },
     'sending doc to persistor'
   )
@@ -154,7 +154,7 @@ async function getDoc(projectId, docId) {

 // get the doc and unarchive it to mongo
 async function unarchiveDoc(projectId, docId) {
-  logger.log({ projectId, docId }, 'getting doc from persistor')
+  logger.debug({ projectId, docId }, 'getting doc from persistor')
   const mongoDoc = await MongoManager.findDoc(projectId, docId, {
     inS3: 1,
     rev: 1,
@@ -289,7 +289,10 @@ module.exports = DocManager = {
         if (updateRanges) {
           update.ranges = ranges
         }
-        logger.log({ project_id, doc_id }, 'updating doc lines and ranges')
+        logger.debug(
+          { project_id, doc_id },
+          'updating doc lines and ranges'
+        )

         modified = true
         rev += 1 // rev will be incremented in mongo by MongoManager.upsertIntoDocCollection
@@ -300,7 +303,7 @@ module.exports = DocManager = {
           cb
         )
       } else {
-        logger.log(
+        logger.debug(
           { project_id, doc_id },
           'doc lines have not changed - not updating'
         )
@@ -310,7 +313,7 @@ module.exports = DocManager = {

     const updateVersionIfNeeded = function (cb) {
       if (updateVersion) {
-        logger.log(
+        logger.debug(
           {
             project_id,
             doc_id,
@@ -322,7 +325,7 @@ module.exports = DocManager = {
         modified = true
         return MongoManager.setDocVersion(doc_id, version, cb)
       } else {
-        logger.log(
+        logger.debug(
           { project_id, doc_id, version },
           'doc version has not changed - not updating'
         )
@@ -31,7 +31,7 @@ module.exports = {
       url,
       timeout: 3000,
     })
-    logger.log({ lines, url, doc_id, project_id }, 'running health check')
+    logger.debug({ lines, url, doc_id, project_id }, 'running health check')
     const jobs = [
       function (cb) {
         const opts = getOpts()
@@ -8,12 +8,12 @@ const Settings = require('@overleaf/settings')
 function getDoc(req, res, next) {
   const { doc_id: docId, project_id: projectId } = req.params
   const includeDeleted = req.query.include_deleted === 'true'
-  logger.log({ projectId, docId }, 'getting doc')
+  logger.debug({ projectId, docId }, 'getting doc')
   DocManager.getFullDoc(projectId, docId, function (error, doc) {
     if (error) {
       return next(error)
     }
-    logger.log({ docId, projectId }, 'got doc')
+    logger.debug({ docId, projectId }, 'got doc')
     if (doc == null) {
       res.sendStatus(404)
     } else if (doc.deleted && !includeDeleted) {
@@ -26,7 +26,7 @@ function getDoc(req, res, next) {

 function peekDoc(req, res, next) {
   const { doc_id: docId, project_id: projectId } = req.params
-  logger.log({ projectId, docId }, 'peeking doc')
+  logger.debug({ projectId, docId }, 'peeking doc')
   DocManager.peekDoc(projectId, docId, function (error, doc) {
     if (error) {
       return next(error)
@@ -52,7 +52,7 @@ function isDocDeleted(req, res, next) {

 function getRawDoc(req, res, next) {
   const { doc_id: docId, project_id: projectId } = req.params
-  logger.log({ projectId, docId }, 'getting raw doc')
+  logger.debug({ projectId, docId }, 'getting raw doc')
   DocManager.getDocLines(projectId, docId, function (error, doc) {
     if (error) {
       return next(error)
@@ -68,7 +68,7 @@ function getRawDoc(req, res, next) {

 function getAllDocs(req, res, next) {
   const { project_id: projectId } = req.params
-  logger.log({ projectId }, 'getting all docs')
+  logger.debug({ projectId }, 'getting all docs')
   DocManager.getAllNonDeletedDocs(
     projectId,
     { lines: true, rev: true },
@@ -86,7 +86,7 @@ function getAllDocs(req, res, next) {

 function getAllDeletedDocs(req, res, next) {
   const { project_id: projectId } = req.params
-  logger.log({ projectId }, 'getting all deleted docs')
+  logger.debug({ projectId }, 'getting all deleted docs')
   DocManager.getAllDeletedDocs(
     projectId,
     { name: true, deletedAt: true },
@@ -107,7 +107,7 @@ function getAllDeletedDocs(req, res, next) {

 function getAllRanges(req, res, next) {
   const { project_id: projectId } = req.params
-  logger.log({ projectId }, 'getting all ranges')
+  logger.debug({ projectId }, 'getting all ranges')
   DocManager.getAllNonDeletedDocs(
     projectId,
     { ranges: true },
@@ -154,7 +154,7 @@ function updateDoc(req, res, next) {
     return
   }

-  logger.log({ projectId, docId }, 'got http request to update doc')
+  logger.debug({ projectId, docId }, 'got http request to update doc')
   DocManager.updateDoc(
     projectId,
     docId,
@@ -175,7 +175,7 @@ function updateDoc(req, res, next) {

 function patchDoc(req, res, next) {
   const { doc_id: docId, project_id: projectId } = req.params
-  logger.log({ projectId, docId }, 'patching doc')
+  logger.debug({ projectId, docId }, 'patching doc')

   const allowedFields = ['deleted', 'deletedAt', 'name']
   const meta = {}
@@ -226,7 +226,7 @@ function _buildDocsArrayView(projectId, docs) {

 function archiveAllDocs(req, res, next) {
   const { project_id: projectId } = req.params
-  logger.log({ projectId }, 'archiving all docs')
+  logger.debug({ projectId }, 'archiving all docs')
   DocArchive.archiveAllDocs(projectId, function (error) {
     if (error) {
       return next(error)
@@ -237,7 +237,7 @@ function archiveAllDocs(req, res, next) {

 function archiveDoc(req, res, next) {
   const { doc_id: docId, project_id: projectId } = req.params
-  logger.log({ projectId, docId }, 'archiving a doc')
+  logger.debug({ projectId, docId }, 'archiving a doc')
   DocArchive.archiveDocById(projectId, docId, function (error) {
     if (error) {
       return next(error)
@@ -248,7 +248,7 @@ function archiveDoc(req, res, next) {

 function unArchiveAllDocs(req, res, next) {
   const { project_id: projectId } = req.params
-  logger.log({ projectId }, 'unarchiving all docs')
+  logger.debug({ projectId }, 'unarchiving all docs')
   DocArchive.unArchiveAllDocs(projectId, function (err) {
     if (err) {
       if (err instanceof Errors.DocRevValueError) {
@@ -263,7 +263,7 @@ function unArchiveAllDocs(req, res, next) {

 function destroyProject(req, res, next) {
   const { project_id: projectId } = req.params
-  logger.log({ projectId }, 'destroying all docs')
+  logger.debug({ projectId }, 'destroying all docs')
   DocArchive.destroyProject(projectId, function (error) {
     if (error) {
       return next(error)
@@ -15,7 +15,9 @@ chai.use(chaiAsPromised)
 const sandbox = sinon.createSandbox()
 const stubs = {
   logger: {
+    debug: sandbox.stub(),
     log: sandbox.stub(),
     info: sandbox.stub(),
     warn: sandbox.stub(),
     err: sandbox.stub(),
+    error: sandbox.stub(),
@@ -145,7 +145,7 @@ if (!module.parent) {
       logger.error('Error starting Filestore', error)
       throw error
     }
-    logger.info(`Filestore starting up, listening on ${host}:${port}`)
+    logger.debug(`Filestore starting up, listening on ${host}:${port}`)
   })
 }

@@ -68,7 +68,7 @@ mongodb
   .waitForDb()
   .then(() => {
     app.listen(port, host, () =>
-      logger.info(`notifications starting up, listening on ${host}:${port}`)
+      logger.debug(`notifications starting up, listening on ${host}:${port}`)
     )
   })
   .catch(err => {
@@ -29,7 +29,7 @@ module.exports = {
       url: `http://localhost:${port}/user/${user_id}${endPath}`,
       timeout: 5000,
     })
-    logger.log(
+    logger.debug(
       { user_id, opts: getOpts(), key: notification_key, user_id },
       'Health Check: running'
     )
@@ -82,7 +82,7 @@ module.exports = {
       const notification_id = body[1][0]._id
       notification_key = body[1][0].key
       let opts = getOpts(`/notification/${notification_id}`)
-      logger.log(
+      logger.debug(
         { notification_id, notification_key },
         'Health Check: doing cleanup'
       )
@@ -15,7 +15,7 @@ const metrics = require('@overleaf/metrics')

 module.exports = {
   getUserNotifications(req, res, next) {
-    logger.log(
+    logger.debug(
       { user_id: req.params.user_id },
       'getting user unread notifications'
     )
@@ -30,7 +30,7 @@ module.exports = {
   },

   addNotification(req, res) {
-    logger.log(
+    logger.debug(
       { user_id: req.params.user_id, notification: req.body },
       'adding notification'
     )
@@ -49,7 +49,7 @@ module.exports = {
   },

   removeNotificationId(req, res, next) {
-    logger.log(
+    logger.debug(
       {
         user_id: req.params.user_id,
         notification_id: req.params.notification_id,
@@ -68,7 +68,7 @@ module.exports = {
   },

   removeNotificationKey(req, res, next) {
-    logger.log(
+    logger.debug(
       { user_id: req.params.user_id, notification_key: req.body.key },
       'mark key notification as read'
     )
@@ -85,7 +85,7 @@ module.exports = {

   removeNotificationByKeyOnly(req, res, next) {
     const notification_key = req.params.key
-    logger.log({ notification_key }, 'mark notification as read by key only')
+    logger.debug({ notification_key }, 'mark notification as read by key only')
     metrics.inc('removeNotificationKey')
     return Notifications.removeNotificationByKeyOnly(notification_key, err => {
       if (err) return next(err)
@@ -44,7 +44,7 @@ if (!module.parent) {
     if (error != null) {
       throw error
     }
-    return logger.info(`spelling starting up, listening on ${host}:${port}`)
+    return logger.debug(`spelling starting up, listening on ${host}:${port}`)
   })
 }

@@ -28,7 +28,7 @@ try {
   const oldCache = fs.readFileSync(cacheFsPath)
   cache.load(JSON.parse(oldCache))
 } catch (error) {
-  logger.log(
+  logger.debug(
     OError.tag(error, 'could not load the cache file', { cacheFsPath })
   )
 }
@@ -38,14 +38,14 @@ const cacheDump = setInterval(function () {
   const dump = JSON.stringify(cache.dump())
   return fs.writeFile(cacheFsPathTmp, dump, function (err) {
     if (err != null) {
-      logger.log(OError.tag(err, 'error writing cache file'))
+      logger.debug(OError.tag(err, 'error writing cache file'))
       fs.unlink(cacheFsPathTmp, () => {})
     } else {
       fs.rename(cacheFsPathTmp, cacheFsPath, err => {
         if (err) {
           logger.error(OError.tag(err, 'error renaming cache file'))
         } else {
-          logger.log({ len: dump.length, cacheFsPath }, 'wrote cache file')
+          logger.debug({ len: dump.length, cacheFsPath }, 'wrote cache file')
         }
       })
     }
@@ -96,7 +96,7 @@ class ASpellRunner {
       cache.set(k, v)
     }

-    logger.info(
+    logger.debug(
       {
         hits,
         total: words.length,
@@ -27,14 +27,14 @@ class ASpellWorker {
       '-d',
       language,
     ])
-    logger.info(
+    logger.debug(
       { process: this.pipe.pid, lang: this.language },
       'starting new aspell worker'
     )
     metrics.inc('aspellWorker', 1, { status: 'start', method: this.language })
     this.pipe.on('exit', () => {
       this.state = 'killed'
-      logger.info(
+      logger.debug(
         { process: this.pipe.pid, lang: this.language },
         'aspell worker has exited'
       )
@@ -201,14 +201,14 @@ class ASpellWorker {
   }

   shutdown(reason) {
-    logger.info({ process: this.pipe.pid, reason }, 'shutting down')
+    logger.debug({ process: this.pipe.pid, reason }, 'shutting down')
     this.state = 'closing'
     this.closeReason = reason
     return this.pipe.stdin.end()
   }

   kill(reason) {
-    logger.info({ process: this.pipe.pid, reason }, 'killing')
+    logger.debug({ process: this.pipe.pid, reason }, 'killing')
     this.closeReason = reason
     if (this.state === 'killed') {
       return
@@ -28,7 +28,7 @@ class ASpellWorkerPool {

   create(language) {
     if (this.PROCESS_POOL.length >= this.MAX_WORKERS) {
-      logger.log(
+      logger.debug(
         { maxworkers: this.MAX_WORKERS },
         'maximum number of workers already running'
       )
@@ -44,7 +44,7 @@ class ASpellWorkerPool {
       clearTimeout(worker.idleTimer)
       worker.idleTimer = null
     }
-    logger.info(
+    logger.debug(
       { process: worker.pipe.pid, lang: language },
       'removing aspell worker from pool'
     )
@@ -26,7 +26,7 @@ module.exports = {
       : 0

     if (numberOfSuggestions > 10) {
-      logger.log('health check passed')
+      logger.debug('health check passed')
       res.sendStatus(200)
     } else {
       logger.err(
@@ -14,7 +14,7 @@ module.exports = {
   check(req, res) {
     metrics.inc('spelling-check', 0.1)
     const { token, wordCount } = extractCheckRequestData(req)
-    logger.info({ token, wordCount }, 'running check')
+    logger.debug({ token, wordCount }, 'running check')
     SpellingAPIManager.runRequest(token, req.body, function (error, result) {
       if (error != null) {
         logger.error(
@@ -87,7 +87,7 @@ app.post('/pack', function (req, res, next) {
   if (packWorker != null) {
     return res.send('pack already running')
   } else {
-    logger.log('running pack')
+    logger.debug('running pack')
     packWorker = childProcess.fork(
       Path.join(__dirname, '/app/js/PackWorker.js'),
       [
@@ -97,7 +97,7 @@ app.post('/pack', function (req, res, next) {
       ]
     )
     packWorker.on('exit', function (code, signal) {
-      logger.log({ code, signal }, 'history auto pack exited')
+      logger.debug({ code, signal }, 'history auto pack exited')
       return (packWorker = null)
     })
     return res.send('pack started')
@@ -142,7 +142,7 @@ if (!module.parent) {
         'could not start track-changes server'
       )
     } else {
-      return logger.info(
+      return logger.debug(
         `trackchanges starting up, listening on ${host}:${port}`
       )
     }
@@ -127,7 +127,7 @@ module.exports = DiffManager = {
     if (callback == null) {
       callback = function () {}
     }
-    logger.log(
+    logger.debug(
       { project_id, doc_id, version },
       'getting document before version'
     )
@@ -162,7 +162,7 @@ module.exports = DiffManager = {
         return callback(error)
       }

-      logger.log(
+      logger.debug(
         {
           docVersion: version,
           lastUpdateVersion: lastUpdate != null ? lastUpdate.v : undefined,
@@ -5,7 +5,7 @@ const Errors = require('./Errors')

 function peekDocument(projectId, docId, callback) {
   const url = `${Settings.apis.docstore.url}/project/${projectId}/doc/${docId}/peek`
-  logger.log(
+  logger.debug(
     { project_id: projectId, doc_id: docId },
     'getting doc from docstore'
   )
@@ -19,7 +19,7 @@ function peekDocument(projectId, docId, callback) {
     } catch (error) {
       return callback(error)
     }
-    logger.log(
+    logger.debug(
       { project_id: projectId, doc_id: docId, version: body.version },
       'got doc from docstore'
     )
@@ -21,7 +21,7 @@ module.exports = DocumentUpdaterManager = {
       callback = function () {}
     }

-    logger.log({ project_id, doc_id }, 'getting doc from document updater')
+    logger.debug({ project_id, doc_id }, 'getting doc from document updater')
     return request.get(url, function (error, res, body) {
       if (error != null) {
         return callback(error)
@@ -33,7 +33,7 @@ module.exports = DocumentUpdaterManager = {
         error = error1
         return callback(error)
       }
-      logger.log(
+      logger.debug(
         { project_id, doc_id, version: body.version },
         'got doc from document updater'
       )
@@ -66,7 +66,7 @@ module.exports = DocumentUpdaterManager = {
       callback = function () {}
     }
     const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}`
-    logger.log({ project_id, doc_id }, 'setting doc in document updater')
+    logger.debug({ project_id, doc_id }, 'setting doc in document updater')
     return request.post(
       {
         url,
@@ -21,7 +21,7 @@ module.exports = {
   check(callback) {
     const project_id = ObjectId(settings.trackchanges.healthCheck.project_id)
     const url = `http://localhost:${port}/project/${project_id}`
-    logger.log({ project_id }, 'running health check')
+    logger.debug({ project_id }, 'running health check')
     const jobs = [
       cb =>
         request.get(
@@ -30,7 +30,7 @@ module.exports = HttpController = {
     }
     const { doc_id } = req.params
     const { project_id } = req.params
-    logger.log({ project_id, doc_id }, 'compressing doc history')
+    logger.debug({ project_id, doc_id }, 'compressing doc history')
     return UpdatesManager.processUncompressedUpdatesWithLock(
       project_id,
       doc_id,
@@ -48,7 +48,7 @@ module.exports = HttpController = {
       next = function () {}
     }
     const { project_id } = req.params
-    logger.log({ project_id }, 'compressing project history')
+    logger.debug({ project_id }, 'compressing project history')
     return UpdatesManager.processUncompressedUpdatesForProject(
       project_id,
       function (error) {
@@ -66,7 +66,7 @@ module.exports = HttpController = {
       next = function () {}
     }
     const limit = req.query.limit != null ? parseInt(req.query.limit, 10) : -1
-    logger.log({ limit }, 'flushing all projects')
+    logger.debug({ limit }, 'flushing all projects')
     return UpdatesManager.flushAll(limit, function (error, result) {
       if (error != null) {
         return next(error)
@@ -78,7 +78,7 @@ module.exports = HttpController = {
           .status(200)
           .send(`${status}\nwould flush:\n${all.join('\n')}\n`)
       } else if (failed.length > 0) {
-        logger.log({ failed, succeeded }, 'error flushing projects')
+        logger.debug({ failed, succeeded }, 'error flushing projects')
         return res
           .status(500)
           .send(`${status}\nfailed to flush:\n${failed.join('\n')}\n`)
@@ -96,13 +96,13 @@ module.exports = HttpController = {
     if (next == null) {
       next = function () {}
     }
-    logger.log('checking dangling updates')
+    logger.debug('checking dangling updates')
     return UpdatesManager.getDanglingUpdates(function (error, result) {
       if (error != null) {
         return next(error)
       }
       if (result.length > 0) {
-        logger.log({ dangling: result }, 'found dangling updates')
+        logger.debug({ dangling: result }, 'found dangling updates')
         return res.status(500).send(`dangling updates:\n${result.join('\n')}\n`)
       } else {
         return res.status(200).send('no dangling updates found\n')
@@ -116,7 +116,7 @@ module.exports = HttpController = {
     }
     const { doc_id } = req.params
     const { project_id } = req.params
-    logger.log({ project_id, doc_id }, 'checking doc history')
+    logger.debug({ project_id, doc_id }, 'checking doc history')
     return DiffManager.getDocumentBeforeVersion(
       project_id,
       doc_id,
@@ -161,7 +161,7 @@ module.exports = HttpController = {
       to = null
     }

-    logger.log({ project_id, doc_id, from, to }, 'getting diff')
+    logger.debug({ project_id, doc_id, from, to }, 'getting diff')
     return DiffManager.getDiff(
       project_id,
       doc_id,
@@ -207,7 +207,7 @@ module.exports = HttpController = {

   zipProject(req, res, next) {
     const { project_id: projectId } = req.params
-    logger.log({ projectId }, 'exporting project history as zip file')
+    logger.debug({ projectId }, 'exporting project history as zip file')
     ZipManager.makeTempDirectory((err, tmpdir) => {
       if (err) {
         return next(err)
@@ -234,7 +234,7 @@ module.exports = HttpController = {
     // - multiple updates form a pack
     // Flush updates per pack onto the wire.
     const { project_id } = req.params
-    logger.log({ project_id }, 'exporting project history')
+    logger.debug({ project_id }, 'exporting project history')
     UpdatesManager.exportProject(
       project_id,
       function (err, { updates, userIds }, confirmWrite) {
@@ -317,7 +317,7 @@ module.exports = HttpController = {
     }
     const { project_id } = req.params
     const { doc_id } = req.params
-    logger.log({ project_id, doc_id }, 'pushing all finalised changes to s3')
+    logger.debug({ project_id, doc_id }, 'pushing all finalised changes to s3')
     return PackManager.pushOldPacks(project_id, doc_id, function (error) {
       if (error != null) {
         return next(error)
@@ -332,7 +332,7 @@ module.exports = HttpController = {
     }
     const { project_id } = req.params
     const { doc_id } = req.params
-    logger.log({ project_id, doc_id }, 'pulling all packs from s3')
+    logger.debug({ project_id, doc_id }, 'pulling all packs from s3')
     return PackManager.pullOldPacks(project_id, doc_id, function (error) {
       if (error != null) {
         return next(error)
@@ -64,7 +64,7 @@ module.exports = MongoAWS = {
       return callback(new Error('invalid pack id'))
     }

-    logger.log({ project_id, doc_id, pack_id }, 'uploading data to s3')
+    logger.debug({ project_id, doc_id, pack_id }, 'uploading data to s3')

     const upload = createStream(S3S.WriteStream, project_id, doc_id, pack_id)

@@ -85,7 +85,7 @@ module.exports = MongoAWS = {
         return callback(error)
       }
       return zlib.gzip(uncompressedData, function (err, buf) {
-        logger.log(
+        logger.debug(
           {
             project_id,
             doc_id,
@@ -101,7 +101,10 @@ module.exports = MongoAWS = {
         upload.on('error', err => callback(err))
         upload.on('finish', function () {
           Metrics.inc('archive-pack')
-          logger.log({ project_id, doc_id, pack_id }, 'upload to s3 completed')
+          logger.debug(
+            { project_id, doc_id, pack_id },
+            'upload to s3 completed'
+          )
           return callback(null)
         })
         upload.write(buf)
@@ -129,7 +132,7 @@ module.exports = MongoAWS = {
       return callback(new Error('invalid pack id'))
     }

-    logger.log({ project_id, doc_id, pack_id }, 'downloading data from s3')
+    logger.debug({ project_id, doc_id, pack_id }, 'downloading data from s3')

     const download = createStream(S3S.ReadStream, project_id, doc_id, pack_id)

@@ -140,7 +143,7 @@ module.exports = MongoAWS = {
     const gunzip = zlib.createGunzip()
     gunzip.setEncoding('utf8')
     gunzip.on('error', function (err) {
-      logger.log(
+      logger.debug(
         { project_id, doc_id, pack_id, err },
         'error uncompressing gzip stream'
       )
@@ -152,7 +155,10 @@ module.exports = MongoAWS = {
     outputStream.on('error', err => callback(err))
     outputStream.on('end', function () {
       let object
-      logger.log({ project_id, doc_id, pack_id }, 'download from s3 completed')
+      logger.debug(
+        { project_id, doc_id, pack_id },
+        'download from s3 completed'
+      )
       try {
         object = JSON.parse(parts.join(''))
       } catch (e) {
@@ -186,7 +192,10 @@ module.exports = MongoAWS = {
       Metrics.inc('unarchive-pack')
       // allow the object to expire, we can always retrieve it again
      object.expiresAt = new Date(Date.now() + 7 * DAYS)
-      logger.log({ project_id, doc_id, pack_id }, 'inserting object from s3')
+      logger.debug(
+        { project_id, doc_id, pack_id },
+        'inserting object from s3'
+      )
       return db.docHistory.insertOne(object, (err, confirmation) => {
         if (err) return callback(err)
         object._id = confirmation.insertedId
@@ -217,7 +217,7 @@ module.exports = PackManager = {
newPack.expiresAt = new Date(Date.now() + 7 * DAYS)
newPack.last_checked = new Date(Date.now() + 30 * DAYS) // never check temporary packs
}
-logger.log(
+logger.debug(
{ project_id, doc_id, newUpdates },
'inserting updates into new pack'
)
@@ -271,7 +271,7 @@
if (lastUpdate.expiresAt && temporary) {
update.$set.expiresAt = new Date(Date.now() + 7 * DAYS)
}
-logger.log(
+logger.debug(
{ project_id, doc_id, lastUpdate, newUpdates },
'appending updates to existing pack'
)
@@ -402,7 +402,7 @@
pack._id.toString()
)
const packIdsToFetch = _.difference(allPackIds, loadedPackIds)
-logger.log(
+logger.debug(
{ project_id, doc_id, loadedPackIds, allPackIds, packIdsToFetch },
'analysed packs'
)
@@ -418,7 +418,7 @@
if (err != null) {
return callback(err)
}
-logger.log({ project_id, doc_id }, 'done unarchiving')
+logger.debug({ project_id, doc_id }, 'done unarchiving')
return callback()
}
)
@@ -663,7 +663,7 @@
if (err != null) {
return callback(err)
}
-logger.log(
+logger.debug(
{ project_id, doc_id, newPacks },
'added new packs to index'
)
@@ -769,7 +769,7 @@
return result1
})()
if (newPacks.length) {
-logger.log(
+logger.debug(
{ project_id, doc_id, n: newPacks.length },
'found new packs'
)
@@ -959,7 +959,7 @@
return callback(err)
}
)
-logger.log({ project_id, doc_id }, 'processing old packs')
+logger.debug({ project_id, doc_id }, 'processing old packs')
return db.docHistory.findOne({ _id: pack_id }, function (err, pack) {
if (err != null) {
return markAsChecked(err)
@@ -998,7 +998,7 @@
? unarchivedPacks.length
: undefined)
) {
-logger.log(
+logger.debug(
{ project_id, doc_id },
'no packs need archiving'
)
@@ -1012,7 +1012,7 @@
if (err != null) {
return markAsChecked(err)
}
-logger.log({ project_id, doc_id }, 'done processing')
+logger.debug({ project_id, doc_id }, 'done processing')
return markAsChecked()
}
)
@@ -1031,13 +1031,16 @@
const age = (Date.now() - pack.meta.end_ts) / DAYS
if (age < 30) {
// always keep if less than 1 month old
-logger.log({ project_id, doc_id, pack_id, age }, 'less than 30 days old')
+logger.debug(
+{ project_id, doc_id, pack_id, age },
+'less than 30 days old'
+)
return callback()
}
// compute an archiving threshold which decreases for each month of age
const archive_threshold = 30 / age
if (sz > archive_threshold || n > archive_threshold || age > 90) {
-logger.log(
+logger.debug(
{ project_id, doc_id, pack_id, age, archive_threshold, sz, n },
'meets archive threshold'
)
@@ -1048,7 +1051,7 @@
callback
)
} else {
-logger.log(
+logger.debug(
{ project_id, doc_id, pack_id, age, archive_threshold, sz, n },
'does not meet archive threshold'
)
@@ -1071,7 +1074,7 @@
},

_markPackAsFinalised(project_id, doc_id, pack_id, callback) {
-logger.log({ project_id, doc_id, pack_id }, 'marking pack as finalised')
+logger.debug({ project_id, doc_id, pack_id }, 'marking pack as finalised')
return db.docHistory.updateOne(
{ _id: pack_id },
{ $set: { finalised: true } },
@@ -1080,7 +1083,7 @@
},

updateIndexIfNeeded(project_id, doc_id, callback) {
-logger.log({ project_id, doc_id }, 'archiving old packs')
+logger.debug({ project_id, doc_id }, 'archiving old packs')
return PackManager.getIndexWithKeys(doc_id, function (err, index) {
if (err != null) {
return callback(err)
@@ -1094,7 +1097,7 @@
},

markPackAsChecked(project_id, doc_id, pack_id, callback) {
-logger.log({ project_id, doc_id, pack_id }, 'marking pack as checked')
+logger.debug({ project_id, doc_id, pack_id }, 'marking pack as checked')
return db.docHistory.updateOne(
{ _id: pack_id },
{ $currentDate: { last_checked: true } },
@@ -1119,7 +1122,7 @@
return result
})()
if (unArchivedPacks.length) {
-logger.log(
+logger.debug(
{ project_id, doc_id, n: unArchivedPacks.length },
'find unarchived packs'
)
@@ -1131,7 +1134,7 @@
// Archive locking flags

checkArchiveNotInProgress(project_id, doc_id, pack_id, callback) {
-logger.log(
+logger.debug(
{ project_id, doc_id, pack_id },
'checking if archive in progress'
)
@@ -1157,7 +1160,7 @@
},

markPackAsArchiveInProgress(project_id, doc_id, pack_id, callback) {
-logger.log(
+logger.debug(
{ project_id, doc_id },
'marking pack as archive in progress status'
)
@@ -1175,7 +1178,7 @@
if (!result.value) {
return callback(new Error('archive is already in progress'))
}
-logger.log(
+logger.debug(
{ project_id, doc_id, pack_id },
'marked as archive in progress'
)
@@ -1185,7 +1188,7 @@
},

clearPackAsArchiveInProgress(project_id, doc_id, pack_id, callback) {
-logger.log(
+logger.debug(
{ project_id, doc_id, pack_id },
'clearing as archive in progress'
)
@@ -1200,7 +1203,7 @@
},

markPackAsArchived(project_id, doc_id, pack_id, callback) {
-logger.log({ project_id, doc_id, pack_id }, 'marking pack as archived')
+logger.debug({ project_id, doc_id, pack_id }, 'marking pack as archived')
return db.docHistoryIndex.findOneAndUpdate(
{
_id: ObjectId(doc_id.toString()),
@@ -1215,7 +1218,7 @@
if (!result.value) {
return callback(new Error('archive is not marked as progress'))
}
-logger.log({ project_id, doc_id, pack_id }, 'marked as archived')
+logger.debug({ project_id, doc_id, pack_id }, 'marked as archived')
return callback()
}
)
@@ -1229,7 +1232,7 @@
if (err) {
return callback(err)
}
-logger.log({ project_id, doc_id, pack_id }, 'set expiry on pack')
+logger.debug({ project_id, doc_id, pack_id }, 'set expiry on pack')
return callback()
}
)

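The archiving rule visible in the PackManager hunks is: keep any pack under 30 days old, then archive once size or update count exceeds 30/age (a threshold that shrinks as the pack ages), or unconditionally after 90 days. A worked restatement of that rule, with the function itself purely illustrative:

// Illustrative re-statement of the archiving rule from the hunks above.
function shouldArchive(ageInDays, sz, n) {
  if (ageInDays < 30) return false // always keep if less than 1 month old
  const archiveThreshold = 30 / ageInDays
  return sz > archiveThreshold || n > archiveThreshold || ageInDays > 90
}

// e.g. at age 60 days the threshold is 0.5, at age 31 days ~0.97:
console.log(shouldArchive(10, 5, 5)) // false: too young
console.log(shouldArchive(60, 1, 0)) // true: 1 > 0.5
console.log(shouldArchive(100, 0, 0)) // true: older than 90 days
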
@@ -64,7 +64,7 @@ if (!source.match(/^[0-9]+$/)) {

let shutDownRequested = false
const shutDownTimer = setTimeout(function () {
-logger.log('pack timed out, requesting shutdown')
+logger.debug('pack timed out, requesting shutdown')
// start the shutdown on the next pack
shutDownRequested = true
// do a hard shutdown after a further 5 minutes
@@ -75,20 +75,20 @@ const shutDownTimer = setTimeout(function () {
return hardTimeout.unref()
}, TIMEOUT)

-logger.log(
+logger.debug(
`checking for updates, limit=${LIMIT}, delay=${DOCUMENT_PACK_DELAY}, timeout=${TIMEOUT}`
)

const finish = function () {
if (shutDownTimer != null) {
-logger.log('cancelling timeout')
+logger.debug('cancelling timeout')
clearTimeout(shutDownTimer)
}
-logger.log('closing db')
+logger.debug('closing db')
callbackify(closeDb)(function () {
-logger.log('closing LockManager Redis Connection')
+logger.debug('closing LockManager Redis Connection')
return LockManager.close(function () {
-logger.log(
+logger.debug(
{ processedCount: COUNT, allCount: TOTAL },
'ready to exit from pack archive worker'
)
@@ -101,7 +101,7 @@ const finish = function () {
})
}

-process.on('exit', code => logger.log({ code }, 'pack archive worker exited'))
+process.on('exit', code => logger.debug({ code }, 'pack archive worker exited'))

const processUpdates = pending =>
async.eachSeries(
@@ -110,9 +110,9 @@ const processUpdates = pending =>
let _id
;({ _id, project_id, doc_id } = result)
COUNT++
-logger.log({ project_id, doc_id }, `processing ${COUNT}/${TOTAL}`)
+logger.debug({ project_id, doc_id }, `processing ${COUNT}/${TOTAL}`)
if (project_id == null || doc_id == null) {
-logger.log(
+logger.debug(
{ project_id, doc_id },
'skipping pack, missing project/doc id'
)
@@ -164,7 +164,7 @@ const ObjectIdFromDate = function (date) {
waitForDb()
.then(() => {
if (pending != null) {
-logger.log(`got ${pending.length} entries from ${source}`)
+logger.debug(`got ${pending.length} entries from ${source}`)
processUpdates(pending)
} else {
processFromOneWeekAgo()
@@ -194,13 +194,13 @@ function processFromOneWeekAgo() {
.limit(LIMIT)
.toArray(function (err, results) {
if (err != null) {
-logger.log({ err }, 'error checking for updates')
+logger.debug({ err }, 'error checking for updates')
finish()
return
}
pending = _.uniq(results, false, result => result.doc_id.toString())
TOTAL = pending.length
-logger.log(`found ${TOTAL} documents to archive`)
+logger.debug(`found ${TOTAL} documents to archive`)
return processUpdates(pending)
})
}

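The worker above uses a two-stage timeout: a soft flag that the processing loop checks between packs, followed by an unref'd hard timer that force-exits. A minimal sketch of that shutdown pattern, with the work queue and processOne helper as assumptions rather than code from this diff:

// Two-stage shutdown sketch: set a flag on the soft timeout so the loop
// stops at the next safe point; force-exit on a later hard timeout.
const TIMEOUT = 5 * 60 * 1000 // illustrative value

let shutDownRequested = false
const shutDownTimer = setTimeout(() => {
  shutDownRequested = true
  // hard shutdown after a further 5 minutes; unref so the timer cannot
  // keep the process alive once the queue drains normally
  const hardTimeout = setTimeout(() => process.exit(1), 5 * 60 * 1000)
  hardTimeout.unref()
}, TIMEOUT)

async function drain(queue, processOne) {
  for (const job of queue) {
    if (shutDownRequested) break // stop at the next pack boundary
    await processOne(job)
  }
  clearTimeout(shutDownTimer)
}
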
@@ -20,7 +20,7 @@ module.exports = RestoreManager = {
if (callback == null) {
callback = function () {}
}
-logger.log({ project_id, doc_id, version, user_id }, 'restoring document')
+logger.debug({ project_id, doc_id, version, user_id }, 'restoring document')
return DiffManager.getDocumentBeforeVersion(
project_id,
doc_id,

@@ -36,7 +36,7 @@ module.exports = UpdateTrimmer = {
if (error != null) {
return callback(error)
}
-logger.log({ project_id, details }, 'got details')
+logger.debug({ project_id, details }, 'got details')
if (
__guard__(
details != null ? details.features : undefined,

@@ -182,7 +182,7 @@ module.exports = UpdatesManager = {
return callback(error)
}
if (result != null) {
-logger.log(
+logger.debug(
{
project_id,
doc_id,
@@ -257,7 +257,7 @@
)
return callback(error)
}
-logger.log(
+logger.debug(
{ project_id, doc_id, rawUpdates },
'retrieved raw updates from redis'
)
@@ -270,7 +270,7 @@
if (error != null) {
return callback(error)
}
-logger.log(
+logger.debug(
{ project_id, doc_id },
'compressed and saved doc updates'
)
@@ -285,7 +285,7 @@
}
if (length === UpdatesManager.REDIS_READ_BATCH_SIZE) {
// There might be more updates
-logger.log(
+logger.debug(
{ project_id, doc_id },
'continuing processing updates'
)
@@ -300,7 +300,7 @@
0
)
} else {
-logger.log(
+logger.debug(
{ project_id, doc_id },
'all raw updates processed'
)
@@ -416,7 +416,7 @@
if (error != null) {
return callback(error)
}
-logger.log(
+logger.debug(
{
count: project_ids != null ? project_ids.length : undefined,
project_ids,
@@ -497,7 +497,7 @@
return task(function (error, project_doc_ids) {
if (error) return callback(error)
const dangling_doc_ids = _.difference(all_doc_ids, project_doc_ids)
-logger.log(
+logger.debug(
{ all_doc_ids, all_project_ids, project_doc_ids, dangling_doc_ids },
'checking for dangling doc ids'
)

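UpdatesManager reads raw updates in fixed-size batches and recurses whenever a full batch comes back, since a full read implies more may remain. A sketch of that drain loop under assumed fetch/persist helpers (neither is part of this diff):

// Sketch of the batch-drain pattern in the UpdatesManager hunks: if a
// read returns exactly BATCH_SIZE items, assume more remain and go again.
const BATCH_SIZE = 100 // stand-in for UpdatesManager.REDIS_READ_BATCH_SIZE

function processAll(fetchBatch, persist, callback) {
  fetchBatch(BATCH_SIZE, (err, updates) => {
    if (err) return callback(err)
    persist(updates, err => {
      if (err) return callback(err)
      if (updates.length === BATCH_SIZE) {
        // there might be more updates
        return processAll(fetchBatch, persist, callback)
      }
      callback() // all raw updates processed
    })
  })
}
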
@@ -43,7 +43,7 @@ module.exports = WebApiManager = {
return callback(error)
}
if (res.statusCode === 404) {
-logger.log({ url }, 'got 404 from web api')
+logger.debug({ url }, 'got 404 from web api')
return callback(null, null)
}
if (res.statusCode >= 200 && res.statusCode < 300) {
@@ -63,7 +63,7 @@
callback = function () {}
}
const url = `/user/${user_id}/personal_info`
-logger.log({ user_id }, 'getting user info from web')
+logger.debug({ user_id }, 'getting user info from web')
return WebApiManager.sendRequest(url, function (error, body) {
let user
if (error != null) {
@@ -95,7 +95,7 @@
callback = function () {}
}
const url = `/project/${project_id}/details`
-logger.log({ project_id }, 'getting project details from web')
+logger.debug({ project_id }, 'getting project details from web')
return WebApiManager.sendRequest(url, function (error, body) {
let project
if (error != null) {

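WebApiManager treats a 404 from the web API as "no result" rather than an error, and only a 2xx status as success. A sketch of that convention, mirroring the callback shape above; the request library injection and URL are illustrative:

// Status-code convention from WebApiManager: 404 maps to a null result,
// 2xx returns the body, anything else is an error.
function sendRequest(request, url, callback) {
  request({ url, json: true }, (error, res, body) => {
    if (error != null) return callback(error)
    if (res.statusCode === 404) return callback(null, null) // not an error
    if (res.statusCode >= 200 && res.statusCode < 300) {
      return callback(null, body)
    }
    callback(new Error(`web api returned ${res.statusCode} for ${url}`))
  })
}
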
@@ -33,7 +33,7 @@ async function getLatestContent(projectId, docId, lastUpdateVersion) {
}

async function rewindDoc(projectId, docId, zipfile) {
-logger.log({ projectId, docId }, 'rewinding document')
+logger.debug({ projectId, docId }, 'rewinding document')

// Prepare to rewind content

@@ -60,8 +60,10 @@ if (!module.parent) {
Promise.all([mongodb.waitForDb(), mongoose.connectionPromise])
.then(() => {
Server.server.listen(port, host, function () {
-logger.info(`web starting up, listening on ${host}:${port}`)
-logger.info(`${require('http').globalAgent.maxSockets} sockets enabled`)
+logger.debug(`web starting up, listening on ${host}:${port}`)
+logger.debug(
+`${require('http').globalAgent.maxSockets} sockets enabled`
+)
// wait until the process is ready before monitoring the event loop
metrics.event_loop.monitor(logger)
})

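The practical effect of the whole commit is that these messages now sit below the default level of a leveled logger: debug records are dropped unless the threshold is lowered, typically via configuration. A sketch of that behaviour using pino as a stand-in; @overleaf/logger's own configuration may differ:

// Why the downgrade silences these lines by default: a leveled logger
// drops records below its threshold.
const pino = require('pino')

const logger = pino({ level: process.env.LOG_LEVEL || 'info' })

logger.debug({ host: 'localhost', port: 3000 }, 'web starting up') // dropped at 'info'
logger.info('still visible at the default level')
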
@@ -65,7 +65,7 @@ function recordEventForSession(session, event, segmentation) {
if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) {
return
}
-logger.info({
+logger.debug({
analyticsId,
userId,
event,

@@ -190,7 +190,7 @@ const AuthenticationController = {
return done(err)
}
if (!isAllowed) {
-logger.log({ email }, 'too many login requests')
+logger.debug({ email }, 'too many login requests')
return done(null, null, {
text: req.i18n.translate('to_many_login_requests_2_mins'),
type: 'error',
@@ -222,7 +222,7 @@
done(null, user)
} else {
AuthenticationController._recordFailedLogin()
-logger.log({ email }, 'failed log in')
+logger.debug({ email }, 'failed log in')
done(null, false, {
text: req.i18n.translate('email_or_password_wrong_try_again'),
type: 'error',
@@ -372,7 +372,7 @@
} else if (SessionManager.isUserLoggedIn(req.session)) {
next()
} else {
-logger.log(
+logger.debug(
{ url: req.url },
'user trying to access endpoint not in global whitelist'
)
@@ -475,7 +475,7 @@
},

_redirectToLoginPage(req, res) {
-logger.log(
+logger.debug(
{ url: req.url },
'user not logged in so redirecting to login page'
)
@@ -486,7 +486,7 @@
},

_redirectToReconfirmPage(req, res, user) {
-logger.log(
+logger.debug(
{ url: req.url },
'user needs to reconfirm so redirecting to reconfirm page'
)
@@ -496,7 +496,7 @@
},

_redirectToRegisterPage(req, res) {
-logger.log(
+logger.debug(
{ url: req.url },
'user not logged in so redirecting to register page'
)
@@ -617,7 +617,7 @@ function _loginAsyncHandlers(req, user, anonymousAnalyticsId, isNewUser) {
})
Analytics.identifyUser(user._id, anonymousAnalyticsId, isNewUser)

-logger.log(
+logger.debug(
{ email: user.email, user_id: user._id.toString() },
'successful log in'
)

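The login path above consults a rate limiter before attempting authentication and records failures separately. A sketch of that pre-auth gate; the limiter interface (consume rejecting on excess, in the style of rate-limiter-flexible) is an assumption, not taken from this diff:

// Sketch of the pre-auth rate-limit gate seen in AuthenticationController.
const { RateLimiterMemory } = require('rate-limiter-flexible')

const loginLimiter = new RateLimiterMemory({ points: 10, duration: 120 })

async function checkLoginAllowed(email) {
  try {
    await loginLimiter.consume(email.trim().toLowerCase())
    return true
  } catch (err) {
    return false // too many login requests in the window
  }
}
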
@@ -64,10 +64,10 @@ async function ensureUserCanReadProject(req, res, next) {
token
)
if (canRead) {
-logger.log({ userId, projectId }, 'allowing user read access to project')
+logger.debug({ userId, projectId }, 'allowing user read access to project')
return next()
}
-logger.log({ userId, projectId }, 'denying user read access to project')
+logger.debug({ userId, projectId }, 'denying user read access to project')
HttpErrorHandler.forbidden(req, res)
}

@@ -114,13 +114,13 @@ async function ensureUserCanWriteProjectContent(req, res, next) {
token
)
if (canWrite) {
-logger.log(
+logger.debug(
{ userId, projectId },
'allowing user write access to project content'
)
return next()
}
-logger.log(
+logger.debug(
{ userId, projectId },
'denying user write access to project settings'
)
@@ -137,21 +137,21 @@ async function ensureUserCanAdminProject(req, res, next) {
token
)
if (canAdmin) {
-logger.log({ userId, projectId }, 'allowing user admin access to project')
+logger.debug({ userId, projectId }, 'allowing user admin access to project')
return next()
}
-logger.log({ userId, projectId }, 'denying user admin access to project')
+logger.debug({ userId, projectId }, 'denying user admin access to project')
HttpErrorHandler.forbidden(req, res)
}

async function ensureUserIsSiteAdmin(req, res, next) {
const userId = _getUserId(req)
if (await AuthorizationManager.promises.isUserSiteAdmin(userId)) {
-logger.log({ userId }, 'allowing user admin access to site')
+logger.debug({ userId }, 'allowing user admin access to site')
return next()
}
if (handleAdminDomainRedirect(req, res)) return
-logger.log({ userId }, 'denying user admin access to site')
+logger.debug({ userId }, 'denying user admin access to site')
_redirectToRestricted(req, res, next)
}

@@ -184,7 +184,7 @@ function restricted(req, res, next) {
return res.render('user/restricted', { title: 'restricted' })
}
const { from } = req.query
-logger.log({ from }, 'redirecting to login')
+logger.debug({ from }, 'redirecting to login')
if (from) {
AuthenticationController.setRedirectInSession(req, from)
}

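Each middleware above follows one shape: resolve the user, ask an async yes/no question, call next() on allow and a forbidden handler on deny. A condensed sketch of that shape; `can` and `forbidden` are assumed stand-ins for the AuthorizationManager predicates and HttpErrorHandler.forbidden used here:

// Generic allow/deny middleware shape from AuthorizationMiddleware.
function requirePermission(can, forbidden) {
  return async function (req, res, next) {
    const userId = req.session && req.session.user && req.session.user._id
    const projectId = req.params.project_id
    try {
      if (await can(userId, projectId)) return next()
      forbidden(req, res)
    } catch (err) {
      next(err)
    }
  }
}
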
@@ -8,7 +8,7 @@ const SessionManager = require('../Authentication/SessionManager')
const BetaProgramController = {
optIn(req, res, next) {
const userId = SessionManager.getLoggedInUserId(req.session)
-logger.log({ userId }, 'user opting in to beta program')
+logger.debug({ userId }, 'user opting in to beta program')
if (userId == null) {
return next(new Error('no user id in session'))
}
@@ -22,7 +22,7 @@ const BetaProgramController = {

optOut(req, res, next) {
const userId = SessionManager.getLoggedInUserId(req.session)
-logger.log({ userId }, 'user opting out of beta program')
+logger.debug({ userId }, 'user opting out of beta program')
if (userId == null) {
return next(new Error('no user id in session'))
}
@@ -36,7 +36,10 @@ const BetaProgramController = {

optInPage(req, res, next) {
const userId = SessionManager.getLoggedInUserId(req.session)
-logger.log({ user_id: userId }, 'showing beta participation page for user')
+logger.debug(
+{ user_id: userId },
+'showing beta participation page for user'
+)
UserGetter.getUser(userId, function (err, user) {
if (err) {
OError.tag(err, 'error fetching user', {

@@ -13,7 +13,7 @@ function getBrandVariationById(brandVariationId, callback) {
if (brandVariationId == null || brandVariationId === '') {
return callback(new Error('Branding variation id not provided'))
}
-logger.log({ brandVariationId }, 'fetching brand variation details from v1')
+logger.debug({ brandVariationId }, 'fetching brand variation details from v1')
V1Api.request(
{
uri: `/api/v2/brand_variations/${brandVariationId}`,

@@ -39,7 +39,7 @@ async function removeSelfFromProject(req, res, next) {

async function getAllMembers(req, res, next) {
const projectId = req.params.Project_id
-logger.log({ projectId }, 'getting all active members for project')
+logger.debug({ projectId }, 'getting all active members for project')
let members
try {
members = await CollaboratorsGetter.promises.getAllInvitedMembers(projectId)
@@ -110,10 +110,13 @@ async function addUserIdToProject(
}
if (privilegeLevel === PrivilegeLevels.READ_AND_WRITE) {
level = { collaberator_refs: userId }
-logger.log({ privileges: 'readAndWrite', userId, projectId }, 'adding user')
+logger.debug(
+{ privileges: 'readAndWrite', userId, projectId },
+'adding user'
+)
} else if (privilegeLevel === PrivilegeLevels.READ_ONLY) {
level = { readOnly_refs: userId }
-logger.log({ privileges: 'readOnly', userId, projectId }, 'adding user')
+logger.debug({ privileges: 'readOnly', userId, projectId }, 'adding user')
} else {
throw new Error(`unknown privilegeLevel: ${privilegeLevel}`)
}
@@ -146,7 +149,7 @@ async function transferProjects(fromUserId, toUserId) {
{ _id: 1 }
).exec()
const projectIds = projects.map(p => p._id)
-logger.log({ projectIds, fromUserId, toUserId }, 'transferring projects')
+logger.debug({ projectIds, fromUserId, toUserId }, 'transferring projects')

await Project.updateMany(
{ owner_ref: fromUserId },

@@ -30,7 +30,7 @@ const rateLimiter = require('../../infrastructure/RateLimiter')
module.exports = CollaboratorsInviteController = {
getAllInvites(req, res, next) {
const projectId = req.params.Project_id
-logger.log({ projectId }, 'getting all active invites for project')
+logger.debug({ projectId }, 'getting all active invites for project')
return CollaboratorsInviteHandler.getAllInvites(
projectId,
function (err, invites) {
@@ -50,7 +50,7 @@ module.exports = CollaboratorsInviteController = {
callback = function () {}
}
if (Settings.restrictInvitesToExistingAccounts === true) {
-logger.log({ email }, 'checking if user exists with this email')
+logger.debug({ email }, 'checking if user exists with this email')
return UserGetter.getUserByAnyEmail(
email,
{ _id: 1 },
@@ -102,13 +102,13 @@
const sendingUser = SessionManager.getSessionUser(req.session)
const sendingUserId = sendingUser._id
if (email === sendingUser.email) {
-logger.log(
+logger.debug(
{ projectId, email, sendingUserId },
'cannot invite yourself to project'
)
return res.json({ invite: null, error: 'cannot_invite_self' })
}
-logger.log({ projectId, email, sendingUserId }, 'inviting to project')
+logger.debug({ projectId, email, sendingUserId }, 'inviting to project')
return LimitationsManager.canAddXCollaborators(
projectId,
1,
@@ -118,7 +118,7 @@
return next(error)
}
if (!allowed) {
-logger.log(
+logger.debug(
{ projectId, email, sendingUserId },
'not allowed to invite more users to project'
)
@@ -127,7 +127,7 @@
;({ email, privileges } = req.body)
email = EmailHelper.parseEmail(email)
if (email == null || email === '') {
-logger.log(
+logger.debug(
{ projectId, email, sendingUserId },
'invalid email address'
)
@@ -158,7 +158,7 @@
return next(err)
}
if (!shouldAllowInvite) {
-logger.log(
+logger.debug(
{ email, projectId, sendingUserId },
'not allowed to send an invite to this email address'
)
@@ -181,7 +181,7 @@
})
return next(err)
}
-logger.log(
+logger.debug(
{ projectId, email, sendingUserId },
'invite created'
)
@@ -204,7 +204,7 @@
revokeInvite(req, res, next) {
const projectId = req.params.Project_id
const inviteId = req.params.invite_id
-logger.log({ projectId, inviteId }, 'revoking invite')
+logger.debug({ projectId, inviteId }, 'revoking invite')
return CollaboratorsInviteHandler.revokeInvite(
projectId,
inviteId,
@@ -229,7 +229,7 @@
resendInvite(req, res, next) {
const projectId = req.params.Project_id
const inviteId = req.params.invite_id
-logger.log({ projectId, inviteId }, 'resending invite')
+logger.debug({ projectId, inviteId }, 'resending invite')
const sendingUser = SessionManager.getSessionUser(req.session)
return CollaboratorsInviteController._checkRateLimit(
sendingUser._id,
@@ -263,7 +263,7 @@
const projectId = req.params.Project_id
const { token } = req.params
const _renderInvalidPage = function () {
-logger.log({ projectId }, 'invite not valid, rendering not-valid page')
+logger.debug({ projectId }, 'invite not valid, rendering not-valid page')
return res.render('project/invite/not-valid', { title: 'Invalid Invite' })
}
// check if the user is already a member of the project
@@ -279,7 +279,7 @@
return next(err)
}
if (isMember) {
-logger.log(
+logger.debug(
{ projectId, userId: currentUser._id },
'user is already a member of this project, redirecting'
)
@@ -298,7 +298,7 @@
}
// check if invite is gone, or otherwise non-existent
if (invite == null) {
-logger.log({ projectId }, 'no invite found for this token')
+logger.debug({ projectId }, 'no invite found for this token')
return _renderInvalidPage()
}
// check the user who sent the invite exists
@@ -313,7 +313,7 @@
return next(err)
}
if (owner == null) {
-logger.log({ projectId }, 'no project owner found')
+logger.debug({ projectId }, 'no project owner found')
return _renderInvalidPage()
}
// fetch the project name
@@ -328,7 +328,7 @@
return next(err)
}
if (project == null) {
-logger.log({ projectId }, 'no project found')
+logger.debug({ projectId }, 'no project found')
return _renderInvalidPage()
}
// finally render the invite
@@ -352,7 +352,7 @@
const projectId = req.params.Project_id
const { token } = req.params
const currentUser = SessionManager.getSessionUser(req.session)
-logger.log(
+logger.debug(
{ projectId, userId: currentUser._id },
'got request to accept invite'
)

@@ -28,7 +28,7 @@ const CollaboratorsInviteHandler = {
if (callback == null) {
callback = function () {}
}
-logger.log({ projectId }, 'fetching invites for project')
+logger.debug({ projectId }, 'fetching invites for project')
return ProjectInvite.find({ projectId }, function (err, invites) {
if (err != null) {
OError.tag(err, 'error getting invites from mongo', {
@@ -36,7 +36,7 @@
})
return callback(err)
}
-logger.log(
+logger.debug(
{ projectId, count: invites.length },
'found invites for project'
)
@@ -48,7 +48,7 @@
if (callback == null) {
callback = function () {}
}
-logger.log({ projectId }, 'counting invites for project')
+logger.debug({ projectId }, 'counting invites for project')
return ProjectInvite.countDocuments({ projectId }, function (err, count) {
if (err != null) {
OError.tag(err, 'error getting invites from mongo', {
@@ -77,7 +77,10 @@
return callback(err)
}
if (existingUser == null) {
-logger.log({ projectId, email }, 'no existing user found, returning')
+logger.debug(
+{ projectId, email },
+'no existing user found, returning'
+)
return callback(null)
}
return ProjectGetter.getProject(
@@ -92,7 +95,7 @@
return callback(err)
}
if (project == null) {
-logger.log(
+logger.debug(
{ projectId },
'no project found while sending notification, returning'
)
@@ -126,7 +129,7 @@
if (callback == null) {
callback = function () {}
}
-logger.log(
+logger.debug(
{ projectId, inviteId: invite._id },
'sending notification and email for invite'
)
@@ -158,7 +161,7 @@
if (callback == null) {
callback = function () {}
}
-logger.log(
+logger.debug(
{ projectId, sendingUserId: sendingUser._id, email, privileges },
'adding invite'
)
@@ -211,7 +214,7 @@
if (callback == null) {
callback = function () {}
}
-logger.log({ projectId, inviteId }, 'removing invite')
+logger.debug({ projectId, inviteId }, 'removing invite')
return ProjectInvite.deleteOne(
{ projectId, _id: inviteId },
function (err) {
@@ -235,7 +238,7 @@
if (callback == null) {
callback = function () {}
}
-logger.log({ projectId, inviteId }, 'resending invite email')
+logger.debug({ projectId, inviteId }, 'resending invite email')
return ProjectInvite.findOne(
{ _id: inviteId, projectId },
function (err, invite) {
@@ -276,7 +279,7 @@
if (callback == null) {
callback = function () {}
}
-logger.log({ projectId }, 'fetching invite by token')
+logger.debug({ projectId }, 'fetching invite by token')
return ProjectInvite.findOne(
{ projectId, token: tokenString },
function (err, invite) {
@@ -299,7 +302,7 @@
if (callback == null) {
callback = function () {}
}
-logger.log({ projectId, userId: user._id }, 'accepting invite')
+logger.debug({ projectId, userId: user._id }, 'accepting invite')
return CollaboratorsInviteHandler.getInviteByToken(
projectId,
tokenString,
@@ -313,7 +316,7 @@
}
if (!invite) {
err = new Errors.NotFoundError('no matching invite found')
-logger.log({ err, projectId }, 'no matching invite found')
+logger.debug({ err, projectId }, 'no matching invite found')
return callback(err)
}
const inviteId = invite._id
@@ -332,7 +335,7 @@
return callback(err)
}
// Remove invite
-logger.log({ projectId, inviteId }, 'removing invite')
+logger.debug({ projectId, inviteId }, 'removing invite')
return ProjectInvite.deleteOne({ _id: inviteId }, function (err) {
if (err != null) {
OError.tag(err, 'error removing invite', {

@@ -220,7 +220,7 @@ const ClsiManager = {
)
}
if (validationProblems != null) {
-logger.log(
+logger.debug(
{ projectId, validationProblems },
'problems with users latex before compile was attempted'
)
@@ -395,7 +395,7 @@
const currentCompileTime = results.currentBackend.finishTime
const newBackendCompileTime = results.newBackend.finishTime || 0
const timeDifference = newBackendCompileTime - currentCompileTime
-logger.log(
+logger.debug(
{
statusCodeSame,
timeDifference,

@@ -229,7 +229,7 @@ module.exports = CompileController = {
logger.err({ err }, 'error checking rate limit for pdf download')
return res.sendStatus(500)
} else if (!canContinue) {
-logger.log(
+logger.debug(
{ project_id, ip: req.ip },
'rate limit hit downloading pdf'
)

@@ -26,7 +26,7 @@ module.exports = CooldownManager = {
if (callback == null) {
callback = function () {}
}
-logger.log(
+logger.debug(
{ projectId },
`[Cooldown] putting project on cooldown for ${COOLDOWN_IN_SECONDS} seconds`
)

@@ -27,7 +27,7 @@ module.exports = CooldownMiddleware = {
return next(err)
}
if (projectIsOnCooldown) {
-logger.log(
+logger.debug(
{ projectId },
'[Cooldown] project is on cooldown, denying request'
)

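CooldownManager's "put a project on cooldown for N seconds" maps naturally onto a Redis key with an expiry, which the middleware then checks. A sketch of a TTL-based implementation with node-redis v4; the key naming and client wiring are assumptions, not details from this diff:

// Sketch of a TTL-based cooldown matching the CooldownManager /
// CooldownMiddleware pair above.
const COOLDOWN_IN_SECONDS = 300

async function putProjectOnCooldown(redis, projectId) {
  // the key expires on its own, so no cleanup job is needed
  await redis.set(`Cooldown:{${projectId}}`, '1', { EX: COOLDOWN_IN_SECONDS })
}

async function isProjectOnCooldown(redis, projectId) {
  return (await redis.exists(`Cooldown:{${projectId}}`)) === 1
}
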
@@ -130,7 +130,7 @@ function getDoc(projectId, docId, options, callback) {
return callback(error)
}
if (res.statusCode >= 200 && res.statusCode < 300) {
-logger.log(
+logger.debug(
{ docId, projectId, version: doc.version, rev: doc.rev },
'got doc from docstore api'
)
@@ -199,7 +199,10 @@ function updateDoc(projectId, docId, lines, version, ranges, callback) {
return callback(error)
}
if (res.statusCode >= 200 && res.statusCode < 300) {
-logger.log({ projectId, docId }, 'update doc in docstore url finished')
+logger.debug(
+{ projectId, docId },
+'update doc in docstore url finished'
+)
callback(null, result.modified, result.rev)
} else {
error = new OError(
@@ -226,7 +229,7 @@ function destroyProject(projectId, callback) {

function _operateOnProject(projectId, method, callback) {
const url = `${settings.apis.docstore.url}/project/${projectId}/${method}`
-logger.log({ projectId }, `calling ${method} for project in docstore`)
+logger.debug({ projectId }, `calling ${method} for project in docstore`)
// use default timeout for archiving/unarchiving/destroying
request.post(url, (err, res, docs) => {
if (err) {

@@ -102,7 +102,7 @@ function setDocument(req, res, next) {
})
return next(error)
}
-logger.log(
+logger.debug(
{ docId, projectId },
'finished receiving set document request from api (docupdater)'
)

@@ -24,13 +24,13 @@ module.exports = ProjectZipStreamManager = {
return cb(error)
}
if (!project) {
-logger.log(
+logger.debug(
{ projectId },
'cannot append project to zip stream: project not found'
)
return cb()
}
-logger.log(
+logger.debug(
{ projectId, name: project.name },
'appending project to zip stream'
)
@@ -42,7 +42,10 @@
}
archive.append(stream, { name: `${project.name}.zip` })
stream.on('end', () => {
-logger.log({ projectId, name: project.name }, 'zip stream ended')
+logger.debug(
+{ projectId, name: project.name },
+'zip stream ended'
+)
cb()
})
}
@@ -51,7 +54,7 @@
})

async.series(jobs, () => {
-logger.log(
+logger.debug(
{ projectIds },
'finished creating zip stream of multiple projects'
)
@@ -97,7 +100,7 @@
if (path[0] === '/') {
path = path.slice(1)
}
-logger.log({ projectId }, 'Adding doc')
+logger.debug({ projectId }, 'Adding doc')
archive.append(doc.lines.join('\n'), { name: path })
setImmediate(cb)
})

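ProjectZipStreamManager appends each member stream to an archive and advances via a series callback once the stream ends. A compact sketch of that pattern with the archiver package; the plain-file sources here stand in for the per-project zip streams above:

// Sketch of the zip-of-streams pattern using archiver.
const archiver = require('archiver')
const fs = require('fs')

function zipFiles(paths, outPath, callback) {
  const archive = archiver('zip')
  const out = fs.createWriteStream(outPath)
  out.on('close', () => callback(null)) // all entries flushed
  archive.on('error', callback)
  archive.pipe(out)
  for (const p of paths) {
    archive.append(fs.createReadStream(p), { name: p })
  }
  archive.finalize()
}
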
@@ -356,7 +356,7 @@ const EditorController = {
if (callback == null) {
callback = function () {}
}
-logger.log({ project_id, path }, "making directories if they don't exist")
+logger.debug({ project_id, path }, "making directories if they don't exist")
return ProjectEntityUpdateHandler.mkdirp(
project_id,
path,
@@ -402,7 +402,7 @@
})
return callback(err)
}
-logger.log(
+logger.debug(
{ project_id, entity_id, entityType },
'telling users entity has been deleted'
)
@@ -441,7 +441,7 @@
if (callback == null) {
callback = function () {}
}
-logger.log({ project_id, description }, 'updating project description')
+logger.debug({ project_id, description }, 'updating project description')
return ProjectDetailsHandler.setProjectDescription(
project_id,
description,

@@ -25,14 +25,14 @@ function getClient() {
if (EMAIL_SETTINGS.parameters) {
const emailParameters = EMAIL_SETTINGS.parameters
if (emailParameters.AWSAccessKeyID || EMAIL_SETTINGS.driver === 'ses') {
-logger.log('using aws ses for email')
+logger.debug('using aws ses for email')
client = nodemailer.createTransport(sesTransport(emailParameters))
} else if (emailParameters.sendgridApiKey) {
throw new OError(
'sendgridApiKey configuration option is deprecated, use SMTP instead'
)
} else if (emailParameters.MandrillApiKey) {
-logger.log('using mandril for email')
+logger.debug('using mandril for email')
client = nodemailer.createTransport(
mandrillTransport({
auth: {
@@ -41,7 +41,7 @@ function getClient() {
})
)
} else {
-logger.log('using smtp for email')
+logger.debug('using smtp for email')
const smtp = _.pick(
emailParameters,
'host',
@@ -60,7 +60,7 @@ function getClient() {
)
client = {
async sendMail(options) {
-logger.log({ options }, 'Would send email if enabled.')
+logger.debug({ options }, 'Would send email if enabled.')
},
}
}
@@ -71,7 +71,7 @@ async function sendEmail(options) {
try {
const canContinue = await checkCanSendEmail(options)
if (!canContinue) {
-logger.log(
+logger.debug(
{
sendingUser_id: options.sendingUser_id,
to: options.to,

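getClient() above picks a nodemailer transport from settings and falls back to a stub client that only logs. A sketch of that fallback shape; the settings keys are illustrative, not the actual Overleaf configuration:

// Transport-selection fallback from EmailSender: a real nodemailer
// transport when configured, otherwise a stub that logs at debug level.
const nodemailer = require('nodemailer')

function getClient(settings, logger) {
  if (settings && settings.smtp) {
    return nodemailer.createTransport(settings.smtp)
  }
  return {
    async sendMail(options) {
      logger.debug({ options }, 'Would send email if enabled.')
    },
  }
}
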
@@ -54,7 +54,7 @@ module.exports = {
function (err, export_data) {
if (err != null) {
if (err.forwardResponse != null) {
-logger.log(
+logger.debug(
{ responseError: err.forwardResponse },
'forwarding response'
)
@@ -64,7 +64,7 @@ module.exports = {
return next(err)
}
}
-logger.log(
+logger.debug(
{
user_id,
project_id,

@@ -30,7 +30,7 @@ const FileStoreHandler = {
return callback(new Error('error getting stat, not available'))
}
if (!stat.isFile()) {
-logger.log(
+logger.debug(
{ projectId, fileArgs, fsPath },
'tried to upload symlink, not continuing'
)
@@ -162,7 +162,7 @@
},

deleteFile(projectId, fileId, callback) {
-logger.log({ projectId, fileId }, 'telling file store to delete file')
+logger.debug({ projectId, fileId }, 'telling file store to delete file')
const opts = {
method: 'delete',
uri: this._buildUrl(projectId, fileId),
@@ -202,7 +202,7 @@
},

copyFile(oldProjectId, oldFileId, newProjectId, newFileId, callback) {
-logger.log(
+logger.debug(
{ oldProjectId, oldFileId, newProjectId, newFileId },
'telling filestore to copy a file'
)

@@ -34,7 +34,7 @@ module.exports = InactiveProjectManager = {
})
return callback(err)
}
-logger.log(
+logger.debug(
{ project_id, active: project.active },
'seeing if need to reactivate project'
)
@@ -94,7 +94,7 @@
}
)
)
-logger.log(
+logger.debug(
{ numberOfProjects: projects && projects.length },
'deactivating projects'
)
@@ -108,7 +108,7 @@
},

deactivateProject(project_id, callback) {
-logger.log({ project_id }, 'deactivating inactive project')
+logger.debug({ project_id }, 'deactivating inactive project')
const jobs = [
cb => DocstoreManager.archiveProject(project_id, cb),
cb => ProjectUpdateHandler.markAsInactive(project_id, cb),

@@ -20,7 +20,7 @@ const logger = require('@overleaf/logger')
module.exports = MetaController = {
getMetadata(req, res, next) {
const { project_id } = req.params
-logger.log({ project_id }, 'getting all labels for project')
+logger.debug({ project_id }, 'getting all labels for project')
return MetaHandler.getAllMetaForProject(
project_id,
function (err, projectMeta) {
@@ -43,7 +43,7 @@ module.exports = MetaController = {
const { project_id } = req.params
const { doc_id } = req.params
const { broadcast } = req.body
-logger.log({ project_id, doc_id, broadcast }, 'getting labels for doc')
+logger.debug({ project_id, doc_id, broadcast }, 'getting labels for doc')
return MetaHandler.getMetaForDoc(
project_id,
doc_id,

@@ -23,10 +23,10 @@ class NonFatalEmailUpdateError extends OError {

function getProvider() {
if (mailchimpIsConfigured()) {
-logger.info('Using newsletter provider: mailchimp')
+logger.debug('Using newsletter provider: mailchimp')
return makeMailchimpProvider()
} else {
-logger.info('Using newsletter provider: none')
+logger.debug('Using newsletter provider: none')
return makeNullProvider()
}
}
@@ -70,7 +70,7 @@ function makeMailchimpProvider() {
status_if_new: 'subscribed',
merge_fields: getMergeFields(user),
})
-logger.info({ user }, 'finished subscribing user to newsletter')
+logger.debug({ user }, 'finished subscribing user to newsletter')
} catch (err) {
throw OError.tag(err, 'error subscribing user to newsletter', {
userId: user._id,
@@ -89,7 +89,7 @@
merge_fields: getMergeFields(user),
})
}
-logger.info(
+logger.debug(
{ user, options },
'finished unsubscribing user from newsletter'
)
@@ -100,7 +100,7 @@
}

if (err.message.includes('looks fake or invalid')) {
-logger.info(
+logger.debug(
{ err, user, options },
'Mailchimp declined to unsubscribe user because it finds the email looks fake'
)
@@ -121,7 +121,7 @@
} catch (updateError) {
// if we failed to update the user, delete their old email address so that
// we don't leave it stuck in mailchimp
-logger.info(
+logger.debug(
{ oldEmail, newEmail, updateError },
'unable to change email in newsletter, removing old mail'
)
@@ -161,7 +161,7 @@
email_address: newEmail,
merge_fields: getMergeFields(user),
})
-logger.info('finished changing email in the newsletter')
+logger.debug('finished changing email in the newsletter')
} catch (err) {
// silently ignore users who were never subscribed
if (err.status === 404) {
@@ -173,7 +173,7 @@
if (err.message.includes(key)) {
const message = `unable to change email in newsletter, ${errors[key]}`

-logger.info({ oldEmail, newEmail }, message)
+logger.debug({ oldEmail, newEmail }, message)

throw new NonFatalEmailUpdateError(
message,
@@ -218,7 +218,7 @@ function makeNullProvider() {
}

async function subscribed(user) {
-logger.info(
+logger.debug(
{ user },
'Not checking user because no newsletter provider is configured'
)
@@ -226,21 +226,21 @@
}

async function subscribe(user) {
-logger.info(
+logger.debug(
{ user },
'Not subscribing user to newsletter because no newsletter provider is configured'
)
}

async function unsubscribe(user) {
-logger.info(
+logger.debug(
{ user },
'Not unsubscribing user from newsletter because no newsletter provider is configured'
)
}

async function changeEmail(oldEmail, newEmail) {
-logger.info(
+logger.debug(
{ oldEmail, newEmail },
'Not changing email in newsletter for user because no newsletter provider is configured'
)

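NewsletterManager returns a null-object provider when no service is configured, so callers use the same async interface without branching. A sketch of that pattern, mirroring makeNullProvider() above with condensed messages:

// Null-object provider sketch: same interface, every method only logs.
function makeNullProvider(logger) {
  return {
    async subscribed(user) {
      logger.debug({ user }, 'no newsletter provider configured')
      return false
    },
    async subscribe(user) {
      logger.debug({ user }, 'no newsletter provider configured')
    },
    async unsubscribe(user) {
      logger.debug({ user }, 'no newsletter provider configured')
    },
    async changeEmail(oldEmail, newEmail) {
      logger.debug({ oldEmail, newEmail }, 'no newsletter provider configured')
    },
  }
}
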
@@ -247,7 +247,7 @@ const ProjectController = {
metrics.inc('cloned-project')
const projectId = req.params.Project_id
const { projectName } = req.body
-logger.log({ projectId, projectName }, 'cloning project')
+logger.debug({ projectId, projectName }, 'cloning project')
if (!SessionManager.isUserLoggedIn(req.session)) {
return res.json({ redir: '/register' })
}
@@ -743,7 +743,7 @@
if (err) {
return cb(err)
}
-logger.log({ projectId, userId }, 'got user')
+logger.debug({ projectId, userId }, 'got user')
if (FeaturesUpdater.featuresEpochIsCurrent(user)) {
return cb(null, user)
}

@@ -56,7 +56,7 @@ module.exports = {
}

async function markAsDeletedByExternalSource(projectId) {
-logger.log(
+logger.debug(
{ project_id: projectId },
'marking project as deleted by external data source'
)
@@ -277,7 +277,7 @@ async function deleteProject(projectId, options = {}) {
throw err
}

-logger.log({ project_id: projectId }, 'successfully deleted project')
+logger.debug({ project_id: projectId }, 'successfully deleted project')
}

async function undeleteProject(projectId, options = {}) {

@@ -85,7 +85,7 @@ async function getProjectDescription(projectId) {
async function setProjectDescription(projectId, description) {
const conditions = { _id: projectId }
const update = { description }
-logger.log(
+logger.debug(
{ conditions, update, projectId, description },
'setting project description'
)
@@ -99,7 +99,7 @@
async function renameProject(projectId, newName) {
newName = newName.trim()
await validateProjectName(newName)
-logger.log({ projectId, newName }, 'renaming project')
+logger.debug({ projectId, newName }, 'renaming project')
let project
try {
project = await ProjectGetter.promises.getProject(projectId, { name: true })

@@ -117,7 +117,7 @@ function getDocContext(projectId, docId, callback) {
// 3. web triggers (soft)-delete in docstore
// Specifically when an update comes in after 1
// and before 3 completes.
-logger.info(
+logger.debug(
{ projectId, docId },
'updating doc that is in process of getting soft-deleted'
)
@@ -174,7 +174,10 @@ const ProjectEntityUpdateHandler = {
return callback(err)
}
const { projectName, isDeletedDoc, path } = ctx
-logger.log({ projectId, docId }, 'telling docstore manager to update doc')
+logger.debug(
+{ projectId, docId },
+'telling docstore manager to update doc'
+)
DocstoreManager.updateDoc(
projectId,
docId,
@@ -189,7 +192,7 @@
})
return callback(err)
}
-logger.log(
+logger.debug(
{ projectId, docId, modified },
'finished updating doc lines'
)
@@ -219,7 +222,7 @@
},

setRootDoc(projectId, newRootDocID, callback) {
-logger.log({ projectId, rootDocId: newRootDocID }, 'setting root doc')
+logger.debug({ projectId, rootDocId: newRootDocID }, 'setting root doc')
if (projectId == null || newRootDocID == null) {
return callback(
new Errors.InvalidError('missing arguments (project or doc)')
@@ -251,7 +254,7 @@
},

unsetRootDoc(projectId, callback) {
-logger.log({ projectId }, 'removing root doc')
+logger.debug({ projectId }, 'removing root doc')
Project.updateOne(
{ _id: projectId },
{ $unset: { rootDoc_id: true } },
@@ -758,7 +761,7 @@
if (err != null) {
return callback(err)
}
-logger.log(
+logger.debug(
{ projectId, docId: existingDoc._id },
'notifying users that the document has been updated'
)
@@ -1109,7 +1112,7 @@
userId,
callback
) {
-logger.log({ entityId, entityType, projectId }, 'deleting project entity')
+logger.debug({ entityId, entityType, projectId }, 'deleting project entity')
if (entityType == null) {
logger.warn({ err: 'No entityType set', projectId, entityId })
return callback(new Error('No entityType set'))
@@ -1227,7 +1230,7 @@
userId,
callback
) {
-logger.log(
+logger.debug(
{ entityType, entityId, projectId, destFolderId },
'moving entity'
)
@@ -1283,7 +1286,7 @@
if (!SafePath.isCleanFilename(newName)) {
return callback(new Errors.InvalidNameError('invalid element name'))
}
-logger.log({ entityId, projectId }, `renaming ${entityType}`)
+logger.debug({ entityId, projectId }, `renaming ${entityType}`)
if (entityType == null) {
logger.warn({ err: 'No entityType set', projectId, entityId })
return callback(new Error('No entityType set'))

@@ -29,7 +29,7 @@ const oneMinInMs = 60 * 1000
const fiveMinsInMs = oneMinInMs * 5

if (!Features.hasFeature('references')) {
-logger.log('references search not enabled')
+logger.debug('references search not enabled')
}

module.exports = ReferencesHandler = {
@@ -117,7 +117,7 @@
new Errors.NotFoundError(`project does not exist: ${projectId}`)
)
}
-logger.log({ projectId }, 'indexing all bib files in project')
+logger.debug({ projectId }, 'indexing all bib files in project')
const docIds = ReferencesHandler._findBibDocIds(project)
const fileIds = ReferencesHandler._findBibFileIds(project)
return ReferencesHandler._doIndexOperation(
@@ -172,7 +172,7 @@
})
return callback(err)
}
-logger.log(
+logger.debug(
{ projectId, docIds },
'flushing docs to mongo before calling references service'
)
@@ -213,7 +213,7 @@
return callback(err)
}
if (res.statusCode >= 200 && res.statusCode < 300) {
-logger.log({ projectId }, 'got keys from references api')
+logger.debug({ projectId }, 'got keys from references api')
return callback(null, data)
} else {
err = new Error(

@@ -127,10 +127,10 @@ const AdminController = {
},

writeAllToMongo(req, res) {
-logger.log('writing all docs to mongo')
+logger.debug('writing all docs to mongo')
Settings.mongo.writeAll = true
return DocumentUpdaterHandler.flushAllDocsToMongo(function () {
-logger.log('all docs have been saved to mongo')
+logger.debug('all docs have been saved to mongo')
return res.sendStatus(200)
})
},

@@ -33,7 +33,7 @@ module.exports = {
if (!languageCodeIsSupported(language)) {
// this log statement can be changed to 'error' once projects with
// unsupported languages are removed from the DB
-logger.info({ language }, 'language not supported')
+logger.debug({ language }, 'language not supported')
return res.status(422).json({ misspellings: [] })
}
}

@@ -39,7 +39,7 @@ async function refreshFeatures(userId, reason) {
})
const oldFeatures = _.clone(user.features)
const features = await computeFeatures(userId)
-logger.log({ userId, features }, 'updating user features')
+logger.debug({ userId, features }, 'updating user features')

const matchedFeatureSet = FeaturesHelper.getMatchedFeatureSet(features)
AnalyticsManager.setUserPropertyForUser(
@@ -51,7 +51,7 @@
const { features: newFeatures, featuresChanged } =
await UserFeaturesUpdater.promises.updateFeatures(userId, features)
if (oldFeatures.dropbox === true && features.dropbox === false) {
-logger.log({ userId }, '[FeaturesUpdater] must unlink dropbox')
+logger.debug({ userId }, '[FeaturesUpdater] must unlink dropbox')
const Modules = require('../../infrastructure/Modules')
try {
await Modules.promises.hooks.fire('removeDropbox', userId, reason)
@@ -77,7 +77,7 @@ async function computeFeatures(userId) {
const v1Features = await _getV1Features(user)
const bonusFeatures = await ReferalFeatures.promises.getBonusFeatures(userId)
const featuresOverrides = await _getFeaturesOverrides(user)
-logger.log(
+logger.debug(
{
userId,
individualFeatures,
@@ -161,7 +161,7 @@ function _planCodeToFeatures(planCode) {
}

async function doSyncFromV1(v1UserId) {
-logger.log({ v1UserId }, '[AccountSync] starting account sync')
+logger.debug({ v1UserId }, '[AccountSync] starting account sync')
const user = await UserGetter.promises.getUser(
{ 'overleaf.id': v1UserId },
{ _id: 1 }
@@ -170,7 +170,7 @@ async function doSyncFromV1(v1UserId) {
logger.warn({ v1UserId }, '[AccountSync] no user found for v1 id')
return
}
-logger.log(
+logger.debug(
{ v1UserId, userId: user._id },
'[AccountSync] updating user subscription and features'
)

@@ -36,7 +36,7 @@ async function createAccountForUserId(userId) {
lastName: user.last_name,
}
const account = await client.createAccount(accountCreate)
logger.log({ userId, account }, 'created recurly account')
logger.debug({ userId, account }, 'created recurly account')
return account
}

@@ -46,7 +46,7 @@ async function getSubscription(subscriptionId) {

async function changeSubscription(subscriptionId, body) {
const change = await client.createSubscriptionChange(subscriptionId, body)
logger.log(
logger.debug(
{ subscriptionId, changeId: change.id },
'created subscription change'
)

@@ -59,7 +59,7 @@ async function changeSubscriptionByUuid(subscriptionUuid, ...args) {

async function removeSubscriptionChange(subscriptionId) {
const removed = await client.removeSubscriptionChange(subscriptionId)
logger.log({ subscriptionId }, 'removed pending subscription change')
logger.debug({ subscriptionId }, 'removed pending subscription change')
return removed
}

@@ -79,7 +79,7 @@ async function cancelSubscriptionByUuid(subscriptionUuid) {
if (
err.message === 'Only active and future subscriptions can be canceled.'
) {
logger.log(
logger.debug(
{ subscriptionUuid },
'subscription cancellation failed, subscription not active'
)

@@ -62,7 +62,7 @@ const RecurlyWrapper = {
checkAccountExists(cache, next) {
const { user } = cache
const { subscriptionDetails } = cache
logger.log(
logger.debug(
{ user_id: user._id },
'checking if recurly account exists for user'
)

@@ -85,14 +85,17 @@ const RecurlyWrapper = {
}
if (response.statusCode === 404) {
// actually not an error in this case, just no existing account
logger.log(
logger.debug(
{ user_id: user._id },
'user does not currently exist in recurly, proceed'
)
cache.userExists = false
return next(null, cache)
}
logger.log({ user_id: user._id }, 'user appears to exist in recurly')
logger.debug(
{ user_id: user._id },
'user appears to exist in recurly'
)
return RecurlyWrapper._parseAccountXml(
responseBody,
function (err, account) {

@@ -176,7 +179,7 @@ const RecurlyWrapper = {
const { user } = cache
const { recurlyTokenIds } = cache
const { subscriptionDetails } = cache
logger.log({ user_id: user._id }, 'creating billing info in recurly')
logger.debug({ user_id: user._id }, 'creating billing info in recurly')
const accountCode = __guard__(
cache != null ? cache.account : undefined,
x1 => x1.account_code

@@ -231,7 +234,7 @@ const RecurlyWrapper = {
setAddressAndCompanyBillingInfo(cache, next) {
const { user } = cache
const { subscriptionDetails } = cache
logger.log(
logger.debug(
{ user_id: user._id },
'setting billing address and company info in recurly'
)

@@ -303,7 +306,7 @@ const RecurlyWrapper = {
createSubscription(cache, next) {
const { user } = cache
const { subscriptionDetails } = cache
logger.log({ user_id: user._id }, 'creating subscription in recurly')
logger.debug({ user_id: user._id }, 'creating subscription in recurly')
const data = {
plan_code: subscriptionDetails.plan_code,
currency: subscriptionDetails.currencyCode,

@@ -367,7 +370,7 @@ const RecurlyWrapper = {
recurlyTokenIds,
callback
) {
logger.log(
logger.debug(
{ user_id: user._id },
'starting process of creating paypal subscription'
)

@@ -397,7 +400,7 @@ const RecurlyWrapper = {
})
return callback(err)
}
logger.log(
logger.debug(
{ user_id: user._id },
'done creating paypal subscription for user'
)

@@ -467,7 +470,7 @@ const RecurlyWrapper = {

createSubscription(user, subscriptionDetails, recurlyTokenIds, callback) {
const { isPaypal } = subscriptionDetails
logger.log(
logger.debug(
{ user_id: user._id, isPaypal },
'setting up subscription in recurly'
)

@@ -615,7 +618,7 @@ const RecurlyWrapper = {
}
const items = data[resource]
allItems = allItems.concat(items)
logger.log(
logger.debug(
`got another ${items.length}, total now ${allItems.length}`
)
cursor = __guard__(

@@ -736,7 +739,7 @@ const RecurlyWrapper = {
},

updateSubscription(subscriptionId, options, callback) {
logger.log(
logger.debug(
{ subscriptionId, options },
'telling recurly to update subscription'
)

@@ -796,7 +799,7 @@ const RecurlyWrapper = {
)
}

logger.log({ coupon_code, requestBody }, 'creating coupon')
logger.debug({ coupon_code, requestBody }, 'creating coupon')
return RecurlyWrapper.apiRequest(
{
url: 'coupons',

@@ -840,7 +843,7 @@ const RecurlyWrapper = {
)
}

logger.log(
logger.debug(
{ account_code, coupon_code, requestBody },
'redeeming coupon for user'
)

@@ -868,7 +871,7 @@ const RecurlyWrapper = {
}
const next_renewal_date = new Date()
next_renewal_date.setDate(next_renewal_date.getDate() + daysUntilExpire)
logger.log(
logger.debug(
{ subscriptionId, daysUntilExpire },
'Exending Free trial for user'
)

@@ -285,7 +285,7 @@ function successfulSubscription(req, res, next) {

function cancelSubscription(req, res, next) {
const user = SessionManager.getSessionUser(req.session)
logger.log({ user_id: user._id }, 'canceling subscription')
logger.debug({ user_id: user._id }, 'canceling subscription')
SubscriptionHandler.cancelSubscription(user, function (err) {
if (err) {
OError.tag(err, 'something went wrong canceling subscription', {

@@ -307,7 +307,7 @@ function canceledSubscription(req, res, next) {

function cancelV1Subscription(req, res, next) {
const userId = SessionManager.getLoggedInUserId(req.session)
logger.log({ userId }, 'canceling v1 subscription')
logger.debug({ userId }, 'canceling v1 subscription')
V1SubscriptionManager.cancelV1Subscription(userId, function (err) {
if (err) {
OError.tag(err, 'something went wrong canceling v1 subscription', {

@@ -331,7 +331,7 @@ function updateSubscription(req, res, next) {
)
return next(err)
}
logger.log({ planCode, user_id: user._id }, 'updating subscription')
logger.debug({ planCode, user_id: user._id }, 'updating subscription')
SubscriptionHandler.updateSubscription(user, planCode, null, function (err) {
if (err) {
OError.tag(err, 'something went wrong updating subscription', {

@@ -345,7 +345,7 @@ function updateSubscription(req, res, next) {

function cancelPendingSubscriptionChange(req, res, next) {
const user = SessionManager.getSessionUser(req.session)
logger.log({ user_id: user._id }, 'canceling pending subscription change')
logger.debug({ user_id: user._id }, 'canceling pending subscription change')
SubscriptionHandler.cancelPendingSubscriptionChange(user, function (err) {
if (err) {
OError.tag(

@@ -377,7 +377,7 @@ function updateAccountEmailAddress(req, res, next) {

function reactivateSubscription(req, res, next) {
const user = SessionManager.getSessionUser(req.session)
logger.log({ user_id: user._id }, 'reactivating subscription')
logger.debug({ user_id: user._id }, 'reactivating subscription')
SubscriptionHandler.reactivateSubscription(user, function (err) {
if (err) {
OError.tag(err, 'something went wrong reactivating subscription', {

@@ -390,7 +390,7 @@ function reactivateSubscription(req, res, next) {
}

function recurlyCallback(req, res, next) {
logger.log({ data: req.body }, 'received recurly callback')
logger.debug({ data: req.body }, 'received recurly callback')
const event = Object.keys(req.body)[0]
const eventData = req.body[event]

@@ -473,7 +473,7 @@ function processUpgradeToAnnualPlan(req, res, next) {
const { planName } = req.body
const couponCode = Settings.coupon_codes.upgradeToAnnualPromo[planName]
const annualPlanName = `${planName}-annual`
logger.log(
logger.debug(
{ user_id: user._id, planName: annualPlanName },
'user is upgrading to annual billing with discount'
)

@@ -7,7 +7,7 @@ const SessionManager = require('../Authentication/SessionManager')
function removeUserFromGroup(req, res, next) {
const subscription = req.entity
const userToRemoveId = req.params.user_id
logger.log(
logger.debug(
{ subscriptionId: subscription._id, userToRemove_id: userToRemoveId },
'removing user from group subscription'
)

@@ -20,7 +20,7 @@ const SubscriptionLocator = {
getUsersSubscription(user_or_id, callback) {
const user_id = SubscriptionLocator._getUserId(user_or_id)
Subscription.findOne({ admin_id: user_id }, function (err, subscription) {
logger.log({ user_id }, 'got users subscription')
logger.debug({ user_id }, 'got users subscription')
callback(err, subscription)
})
},

@@ -30,7 +30,7 @@ const SubscriptionLocator = {
Subscription.findOne(
{ admin_id: user_id, groupPlan: false },
function (err, subscription) {
logger.log({ user_id }, 'got users individual subscription')
logger.debug({ user_id }, 'got users individual subscription')
callback(err, subscription)
}
)

@@ -233,7 +233,7 @@ const removeLegacyInvite = (subscriptionId, email, callback) =>

function checkIfInviteIsPossible(subscription, email, callback) {
if (!subscription.groupPlan) {
logger.log(
logger.debug(
{ subscriptionId: subscription.id },
'can not add members to a subscription that is not in a group plan'
)

@@ -241,7 +241,7 @@ function checkIfInviteIsPossible(subscription, email, callback) {
}

if (LimitationsManager.teamHasReachedMemberLimit(subscription)) {
logger.log(
logger.debug(
{ subscriptionId: subscription.id },
'team has reached member limit'
)

@@ -261,7 +261,7 @@ function checkIfInviteIsPossible(subscription, email, callback) {
)

if (existingMember) {
logger.log(
logger.debug(
{ subscriptionId: subscription.id, email },
'user already in team'
)

@@ -212,7 +212,7 @@ module.exports = UpdateMerger = {
)
return callback(err)
}
logger.log({ docLines }, 'processing doc update from tpds')
logger.debug({ docLines }, 'processing doc update from tpds')
return EditorController.upsertDocWithPath(
project_id,
path,

@@ -147,7 +147,7 @@ async function checkAndGetProjectOrResponseAction(
if (isAnonymousUser && tokenAccessEnabled) {
if (tokenType === TokenAccessHandler.TOKEN_TYPES.READ_AND_WRITE) {
if (TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED) {
logger.info({ projectId }, 'granting read-write anonymous access')
logger.debug({ projectId }, 'granting read-write anonymous access')
TokenAccessHandler.grantSessionTokenAccess(req, projectId, token)
return [
null,

@@ -178,7 +178,7 @@ async function checkAndGetProjectOrResponseAction(
]
}
} else if (tokenType === TokenAccessHandler.TOKEN_TYPES.READ_ONLY) {
logger.info({ projectId }, 'granting read-only anonymous access')
logger.debug({ projectId }, 'granting read-only anonymous access')
TokenAccessHandler.grantSessionTokenAccess(req, projectId, token)
return [
null,

@@ -219,7 +219,7 @@ const ArchiveManager = {
}

const timer = new metrics.Timer('unzipDirectory')
logger.log({ source, destination }, 'unzipping file')
logger.debug({ source, destination }, 'unzipping file')

return ArchiveManager._extractZipFiles(
source,

@@ -86,7 +86,7 @@ async function addFolderContents(
replace
) {
if (!(await _isSafeOnFileSystem(folderPath))) {
logger.log(
logger.debug(
{ userId, projectId, parentFolderId, folderPath },
'add folder contents is from symlink, stopping insert'
)

@@ -110,7 +110,7 @@ async function addFolderContents(

async function addEntity(userId, projectId, folderId, name, fsPath, replace) {
if (!(await _isSafeOnFileSystem(fsPath))) {
logger.log(
logger.debug(
{ userId, projectId, folderId, fsPath },
'add entry is from symlink, stopping insert'
)

@@ -428,7 +428,7 @@ const UserController = {
doLogout(req, cb) {
metrics.inc('user.logout')
const user = SessionManager.getSessionUser(req.session)
logger.log({ user }, 'logging out')
logger.debug({ user }, 'logging out')
const sessionId = req.sessionID
if (typeof req.logout === 'function') {
req.logout()

@@ -37,7 +37,7 @@ async function deleteUser(userId, options = {}) {

try {
const user = await User.findById(userId).exec()
logger.log({ user }, 'deleting user')
logger.debug({ user }, 'deleting user')

await ensureCanDeleteUser(user)
await _cleanupUser(user)

@@ -197,7 +197,7 @@ const UserPagesController = {

sessionsPage(req, res, next) {
const user = SessionManager.getSessionUser(req.session)
logger.log({ userId: user._id }, 'loading sessions page')
logger.debug({ userId: user._id }, 'loading sessions page')
const currentSession = {
ip_address: user.ip_address,
session_created: user.session_created,
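This commit touches 116 files, so stray call sites are easy to miss. As a quick sanity check on a working tree (a sketch, not part of the PR), a short Node script can walk the source and flag any remaining `logger.log(` or `logger.info(` calls for review:

```js
// check-log-levels.js — hypothetical helper, not part of this commit.
// Walks a directory tree and prints logger.log / logger.info call sites.
const fs = require('fs')
const path = require('path')

function scan(dir) {
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, entry.name)
    if (entry.isDirectory() && entry.name !== 'node_modules') {
      scan(full)
    } else if (entry.isFile() && full.endsWith('.js')) {
      fs.readFileSync(full, 'utf8')
        .split('\n')
        .forEach((line, i) => {
          if (/logger\.(log|info)\(/.test(line)) {
            console.log(`${full}:${i + 1}: ${line.trim()}`)
          }
        })
    }
  }
}

scan(process.argv[2] || '.')
```
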
Some files were not shown because too many files have changed in this diff.