Merge pull request #5488 from overleaf/em-refactor-logger

Refactor logger module to separate concerns

GitOrigin-RevId: cf9b1e367d881fb9036b2cb0f5c0529763a44695

parent: 03b4e02c0f
commit: c5778fdd05

19 changed files with 14,097 additions and 890 deletions
libraries/logger/.mocharc.json (new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "require": "test/setup.js"
}
libraries/logger/CHANGELOG.md (new file, 6 lines)
@@ -0,0 +1,6 @@
## v3.0.0

* Improve logging in Google Cloud Platform. Set the environment variable `GCP_LOGGING=true` to enable.

This version of the logger module only works with versions of the `@overleaf/metrics` module at v4.0.0 or above.
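A minimal usage sketch (not part of the diff) of turning on the GCP-formatted output described above; the package name `logger-sharelatex` and the service name `web` are assumptions, since neither is shown in this diff:

// Enable the GCP-formatted stdout stream before the logger is initialized.
process.env.GCP_LOGGING = 'true'

const logger = require('logger-sharelatex').initialize('web')

// Emitted as single-line JSON with `severity`, `message` and
// `logging.googleapis.com/labels` fields that GCP understands.
logger.info({ project_id: 'abc123' }, 'project loaded')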
libraries/logger/gcp-manager.js (new file, 114 lines)
@@ -0,0 +1,114 @@
const bunyan = require('bunyan')

/**
 * When we copy log entry fields, omit some bunyan core fields that are not
 * interesting, that have a special meaning in GCP, or that we will process
 * separately.
 */
const ENTRY_FIELDS_TO_OMIT = [
  'level',
  'name',
  'hostname',
  'v',
  'pid',
  'msg',
  'err',
  'req',
  'res'
]

/**
 * Convert a bunyan log entry to a format that GCP understands
 */
function convertLogEntry(entry) {
  const gcpEntry = omit(entry, ENTRY_FIELDS_TO_OMIT)

  // Error information. In GCP, the stack trace goes in the message property.
  // This enables the error reporting feature.
  if (entry.err) {
    if (entry.err.info) {
      Object.assign(gcpEntry, entry.err.info)
    }
    if (entry.err.code) {
      gcpEntry.code = entry.err.code
    }
    if (entry.err.signal) {
      gcpEntry.signal = entry.err.signal
    }
    if (entry.err.stack) {
      gcpEntry.message = entry.err.stack
    }
    if (entry.name) {
      gcpEntry.serviceContext = { service: entry.name }
    }
  } else {
    gcpEntry.message = entry.msg
  }

  // Severity
  if (entry.level) {
    gcpEntry.severity = bunyan.nameFromLevel[entry.level]
  }

  // HTTP request information
  if (entry.req || entry.res || entry.responseTimeMs) {
    const httpRequest = {}
    if (entry.req) {
      const req = entry.req
      httpRequest.requestMethod = req.method
      httpRequest.requestUrl = req.url
      httpRequest.remoteIp = req.remoteAddress
      if (req.headers) {
        if (req.headers['content-length']) {
          httpRequest.requestSize = parseInt(req.headers['content-length'], 10)
        }
        httpRequest.userAgent = req.headers['user-agent']
        httpRequest.referer = req.headers.referer
      }
    }

    if (entry.res) {
      const res = entry.res
      httpRequest.status = res.statusCode
      if (res.headers && res.headers['content-length']) {
        if (res.headers['content-length']) {
          httpRequest.responseSize = parseInt(res.headers['content-length'], 10)
        }
      }
    }

    if (entry.responseTimeMs) {
      const responseTimeSec = entry.responseTimeMs / 1000
      httpRequest.latency = `${responseTimeSec}s`
    }
    gcpEntry.httpRequest = httpRequest
  }

  // Labels are indexed in GCP. We copy the project, doc and user ids to labels to enable fast filtering
  const projectId = gcpEntry.projectId || gcpEntry.project_id
  const userId = gcpEntry.userId || gcpEntry.user_id
  const docId = gcpEntry.docId || gcpEntry.doc_id
  if (projectId || userId || docId) {
    const labels = {}
    if (projectId) {
      labels.projectId = projectId
    }
    if (userId) {
      labels.userId = userId
    }
    if (docId) {
      labels.docId = docId
    }
    gcpEntry['logging.googleapis.com/labels'] = labels
  }

  return gcpEntry
}

function omit(obj, excludedFields) {
  return Object.fromEntries(
    Object.entries(obj).filter(([key]) => !excludedFields.includes(key))
  )
}

module.exports = { convertLogEntry }
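For illustration only (not part of the diff), roughly what `convertLogEntry` produces for a simple bunyan entry; the field values are made up:

const { convertLogEntry } = require('./gcp-manager')

const entry = {
  name: 'web',
  level: 50, // bunyan 'error'
  msg: 'failed to load project',
  project_id: 'abc123',
  responseTimeMs: 250
}

console.log(convertLogEntry(entry))
// {
//   project_id: 'abc123',
//   responseTimeMs: 250,
//   message: 'failed to load project',
//   severity: 'error',
//   httpRequest: { latency: '0.25s' },
//   'logging.googleapis.com/labels': { projectId: 'abc123' }
// }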
libraries/logger/log-level-checker.js (new file, 63 lines)
@@ -0,0 +1,63 @@
const fetch = require('node-fetch')
const fs = require('fs')

class LogLevelChecker {
  constructor(logger, defaultLevel) {
    this.logger = logger
    this.defaultLevel = defaultLevel
  }

  start() {
    // check for log level override on startup
    this.checkLogLevel()
    // re-check log level every minute
    this.checkInterval = setInterval(this.checkLogLevel.bind(this), 1000 * 60)
  }

  stop() {
    clearInterval(this.checkInterval)
  }

  async checkLogLevel() {
    try {
      const end = await this.getTracingEndTime()
      if (end > Date.now()) {
        this.logger.level('trace')
      } else {
        this.logger.level(this.defaultLevel)
      }
    } catch {
      this.logger.level(this.defaultLevel)
    }
  }

  async getTracingEndTime() {
    return 0
  }
}

class FileLogLevelChecker extends LogLevelChecker {
  async getTracingEndTime() {
    const strEndTime = await fs.promises.readFile('/logging/tracingEndTime')
    return parseInt(strEndTime, 10)
  }
}

class GCEMetadataLogLevelChecker extends LogLevelChecker {
  async getTracingEndTime() {
    const options = {
      headers: {
        'Metadata-Flavor': 'Google'
      }
    }
    const uri = `http://metadata.google.internal/computeMetadata/v1/project/attributes/${this.logger.fields.name}-setLogLevelEndTime`
    const res = await fetch(uri, options)
    if (!res.ok) {
      throw new Error('Metadata not okay')
    }
    const strEndTime = await res.text()
    return parseInt(strEndTime, 10)
  }
}

module.exports = { FileLogLevelChecker, GCEMetadataLogLevelChecker }
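A minimal sketch (not part of the diff) of using a checker directly; in this commit the checkers are created and started by logging-manager.js, so the bunyan logger below is just a stand-in:

const bunyan = require('bunyan')
const { FileLogLevelChecker } = require('./log-level-checker')

const logger = bunyan.createLogger({ name: 'web', level: 'warn' })
const checker = new FileLogLevelChecker(logger, 'warn')

// Reads /logging/tracingEndTime now and then once a minute; while the file
// holds a timestamp in the future, the logger is switched to 'trace',
// otherwise it falls back to the default level.
checker.start()

// On shutdown:
// checker.stop()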
libraries/logger/logging-manager.js (modified)
@@ -1,28 +1,16 @@
|
|||
const Stream = require('stream')
|
||||
const bunyan = require('bunyan')
|
||||
const fetch = require('node-fetch')
|
||||
const fs = require('fs')
|
||||
const yn = require('yn')
|
||||
const OError = require('@overleaf/o-error')
|
||||
const GCPLogging = require('@google-cloud/logging-bunyan')
|
||||
const GCPManager = require('./gcp-manager')
|
||||
const SentryManager = require('./sentry-manager')
|
||||
const Serializers = require('./serializers')
|
||||
const {
|
||||
FileLogLevelChecker,
|
||||
GCEMetadataLogLevelChecker
|
||||
} = require('./log-level-checker')
|
||||
|
||||
// bunyan error serializer
|
||||
const errSerializer = function (err) {
|
||||
if (!err || !err.stack) {
|
||||
return err
|
||||
}
|
||||
return {
|
||||
message: err.message,
|
||||
name: err.name,
|
||||
stack: OError.getFullStack(err),
|
||||
info: OError.getFullInfo(err),
|
||||
code: err.code,
|
||||
signal: err.signal
|
||||
}
|
||||
}
|
||||
|
||||
const Logger = (module.exports = {
|
||||
const LoggingManager = {
|
||||
initialize(name) {
|
||||
this.logLevelSource = (process.env.LOG_LEVEL_SOURCE || 'file').toLowerCase()
|
||||
this.isProduction =
|
||||
(process.env.NODE_ENV || '').toLowerCase() === 'production'
|
||||
this.defaultLevel =
|
||||
|
@@ -31,130 +19,19 @@ const Logger = (module.exports = {
|
|||
this.logger = bunyan.createLogger({
|
||||
name,
|
||||
serializers: {
|
||||
err: errSerializer,
|
||||
req: bunyan.stdSerializers.req,
|
||||
res: bunyan.stdSerializers.res
|
||||
err: Serializers.err,
|
||||
req: Serializers.req,
|
||||
res: Serializers.res
|
||||
},
|
||||
streams: [{ level: this.defaultLevel, stream: process.stdout }]
|
||||
streams: [this._getOutputStreamConfig()]
|
||||
})
|
||||
this._setupRingBuffer()
|
||||
this._setupStackdriver()
|
||||
this._setupLogLevelChecker()
|
||||
return this
|
||||
},
|
||||
|
||||
async checkLogLevel() {
|
||||
try {
|
||||
const end = await this.getTracingEndTime()
|
||||
if (parseInt(end, 10) > Date.now()) {
|
||||
this.logger.level('trace')
|
||||
} else {
|
||||
this.logger.level(this.defaultLevel)
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.level(this.defaultLevel)
|
||||
}
|
||||
},
|
||||
|
||||
async getTracingEndTimeFile() {
|
||||
return fs.promises.readFile('/logging/tracingEndTime')
|
||||
},
|
||||
|
||||
async getTracingEndTimeMetadata() {
|
||||
const options = {
|
||||
headers: {
|
||||
'Metadata-Flavor': 'Google'
|
||||
}
|
||||
}
|
||||
const uri = `http://metadata.google.internal/computeMetadata/v1/project/attributes/${this.loggerName}-setLogLevelEndTime`
|
||||
const res = await fetch(uri, options)
|
||||
if (!res.ok) throw new Error('Metadata not okay')
|
||||
return res.text()
|
||||
},
|
||||
|
||||
initializeErrorReporting(dsn, options) {
|
||||
this.Sentry = require('@sentry/node')
|
||||
this.Sentry.init({ dsn, ...options })
|
||||
this.lastErrorTimeStamp = 0 // for rate limiting on sentry reporting
|
||||
this.lastErrorCount = 0
|
||||
},
|
||||
|
||||
captureException(attributes, message, level) {
|
||||
// handle case of logger.error "message"
|
||||
let key, value
|
||||
if (typeof attributes === 'string') {
|
||||
attributes = { err: new Error(attributes) }
|
||||
}
|
||||
// extract any error object
|
||||
let error = attributes.err || attributes.error
|
||||
// avoid reporting errors twice
|
||||
for (key in attributes) {
|
||||
value = attributes[key]
|
||||
if (value instanceof Error && value.reportedToSentry) {
|
||||
return
|
||||
}
|
||||
}
|
||||
// include our log message in the error report
|
||||
if (error == null) {
|
||||
if (typeof message === 'string') {
|
||||
error = { message }
|
||||
}
|
||||
} else if (message != null) {
|
||||
attributes.description = message
|
||||
}
|
||||
// report the error
|
||||
if (error != null) {
|
||||
// capture attributes and use *_id objects as tags
|
||||
const tags = {}
|
||||
const extra = {}
|
||||
for (key in attributes) {
|
||||
value = attributes[key]
|
||||
if (key.match(/_id/) && typeof value === 'string') {
|
||||
tags[key] = value
|
||||
}
|
||||
extra[key] = value
|
||||
}
|
||||
// capture req object if available
|
||||
const { req } = attributes
|
||||
if (req != null) {
|
||||
extra.req = {
|
||||
method: req.method,
|
||||
url: req.originalUrl,
|
||||
query: req.query,
|
||||
headers: req.headers,
|
||||
ip: req.ip
|
||||
}
|
||||
}
|
||||
// recreate error objects that have been converted to a normal object
|
||||
if (!(error instanceof Error) && typeof error === 'object') {
|
||||
const newError = new Error(error.message)
|
||||
for (key of Object.keys(error || {})) {
|
||||
value = error[key]
|
||||
newError[key] = value
|
||||
}
|
||||
error = newError
|
||||
}
|
||||
// filter paths from the message to avoid duplicate errors in sentry
|
||||
// (e.g. errors from `fs` methods which have a path attribute)
|
||||
try {
|
||||
if (error.path) {
|
||||
error.message = error.message.replace(` '${error.path}'`, '')
|
||||
}
|
||||
|
||||
// send the error to sentry
|
||||
this.Sentry.captureException(error, { tags, extra, level })
|
||||
|
||||
// put a flag on the errors to avoid reporting them multiple times
|
||||
for (key in attributes) {
|
||||
value = attributes[key]
|
||||
if (value instanceof Error) {
|
||||
value.reportedToSentry = true
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// ignore Sentry errors
|
||||
}
|
||||
}
|
||||
this.sentryManager = new SentryManager()
|
||||
},
|
||||
|
||||
debug() {
|
||||
|
@@ -176,26 +53,8 @@ const Logger = (module.exports = {
|
|||
})
|
||||
}
|
||||
this.logger.error(attributes, message, ...Array.from(args))
|
||||
if (this.Sentry) {
|
||||
const MAX_ERRORS = 5 // maximum number of errors in 1 minute
|
||||
const now = new Date()
|
||||
// have we recently reported an error?
|
||||
const recentSentryReport = now - this.lastErrorTimeStamp < 60 * 1000
|
||||
// if so, increment the error count
|
||||
if (recentSentryReport) {
|
||||
this.lastErrorCount++
|
||||
} else {
|
||||
this.lastErrorCount = 0
|
||||
this.lastErrorTimeStamp = now
|
||||
}
|
||||
// only report 5 errors every minute to avoid overload
|
||||
if (this.lastErrorCount < MAX_ERRORS) {
|
||||
// add a note if the rate limit has been hit
|
||||
const note =
|
||||
this.lastErrorCount + 1 === MAX_ERRORS ? '(rate limited)' : ''
|
||||
// report the exception
|
||||
return this.captureException(attributes, message, `error${note}`)
|
||||
}
|
||||
if (this.sentryManager) {
|
||||
this.sentryManager.captureExceptionRateLimited(attributes, message)
|
||||
}
|
||||
},
|
||||
|
||||
|
@@ -209,8 +68,25 @@ const Logger = (module.exports = {
|
|||
|
||||
fatal(attributes, message) {
|
||||
this.logger.fatal(attributes, message)
|
||||
if (this.Sentry) {
|
||||
this.captureException(attributes, message, 'fatal')
|
||||
if (this.sentryManager) {
|
||||
this.sentryManager.captureException(attributes, message, 'fatal')
|
||||
}
|
||||
},
|
||||
|
||||
_getOutputStreamConfig() {
|
||||
const gcpEnabled = yn(process.env.GCP_LOGGING)
|
||||
if (gcpEnabled) {
|
||||
const stream = new Stream.Writable({
|
||||
objectMode: true,
|
||||
write(entry, encoding, callback) {
|
||||
const gcpEntry = GCPManager.convertLogEntry(entry)
|
||||
console.log(JSON.stringify(gcpEntry, bunyan.safeCycles()))
|
||||
setImmediate(callback)
|
||||
}
|
||||
})
|
||||
return { level: this.defaultLevel, type: 'raw', stream }
|
||||
} else {
|
||||
return { level: this.defaultLevel, stream: process.stdout }
|
||||
}
|
||||
},
|
||||
|
||||
|
@@ -228,40 +104,36 @@ const Logger = (module.exports = {
|
|||
}
|
||||
},
|
||||
|
||||
_setupStackdriver() {
|
||||
const stackdriverEnabled = yn(process.env.STACKDRIVER_LOGGING)
|
||||
if (!stackdriverEnabled) {
|
||||
return
|
||||
}
|
||||
const stackdriverClient = new GCPLogging.LoggingBunyan({
|
||||
logName: this.loggerName,
|
||||
serviceContext: { service: this.loggerName }
|
||||
})
|
||||
this.logger.addStream(stackdriverClient.stream(this.defaultLevel))
|
||||
},
|
||||
|
||||
_setupLogLevelChecker() {
|
||||
const logLevelSource = (
|
||||
process.env.LOG_LEVEL_SOURCE || 'file'
|
||||
).toLowerCase()
|
||||
|
||||
if (this.logLevelChecker) {
|
||||
this.logLevelChecker.stop()
|
||||
this.logLevelChecker = null
|
||||
}
|
||||
|
||||
if (this.isProduction) {
|
||||
// clear interval if already set
|
||||
if (this.checkInterval) {
|
||||
clearInterval(this.checkInterval)
|
||||
switch (logLevelSource) {
|
||||
case 'file':
|
||||
this.logLevelChecker = new FileLogLevelChecker(this.logger)
|
||||
break
|
||||
case 'gce_metadata':
|
||||
this.logLevelChecker = new GCEMetadataLogLevelChecker(this.logger)
|
||||
break
|
||||
case 'none':
|
||||
break
|
||||
default:
|
||||
console.log(`Unrecognised log level source: ${logLevelSource}`)
|
||||
}
|
||||
if (this.logLevelSource === 'file') {
|
||||
this.getTracingEndTime = this.getTracingEndTimeFile
|
||||
} else if (this.logLevelSource === 'gce_metadata') {
|
||||
this.getTracingEndTime = this.getTracingEndTimeMetadata
|
||||
} else if (this.logLevelSource === 'none') {
|
||||
return
|
||||
} else {
|
||||
console.log('Unrecognised log level source')
|
||||
return
|
||||
if (this.logLevelChecker) {
|
||||
this.logLevelChecker.start()
|
||||
}
|
||||
// check for log level override on startup
|
||||
this.checkLogLevel()
|
||||
// re-check log level every minute
|
||||
this.checkInterval = setInterval(this.checkLogLevel.bind(this), 1000 * 60)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
Logger.initialize('default-sharelatex')
|
||||
LoggingManager.initialize('default-sharelatex')
|
||||
|
||||
module.exports = LoggingManager
|
||||
|
|
libraries/logger/package-lock.json (generated, 6375 lines)
File diff suppressed because it is too large.
libraries/logger/package.json
@@ -7,7 +7,7 @@
     "url": "https://github.com/overleaf/overleaf"
   },
   "license": "AGPL-3.0-only",
-  "version": "2.3.0",
+  "version": "3.0.0",
   "scripts": {
     "test": "mocha --grep=$MOCHA_GREP test/**/*.js",
     "format": "prettier-eslint $PWD'/**/*.js' --list-different",
@@ -42,5 +42,8 @@
     "sandboxed-module": "^2.0.4",
     "sinon": "^9.0.2",
     "sinon-chai": "^3.5.0"
-  }
+  },
+  "peerDependencies": {
+    "@overleaf/metrics": ">= 4.0.0"
+  }
 }
libraries/logger/sentry-manager.js (new file, 122 lines)
@@ -0,0 +1,122 @@
const RATE_LIMIT_MAX_ERRORS = 5
const RATE_LIMIT_INTERVAL_MS = 60000

class SentryManager {
  constructor(dsn, options) {
    this.Sentry = require('@sentry/node')
    this.Sentry.init({ dsn, ...options })
    // for rate limiting on sentry reporting
    this.lastErrorTimeStamp = 0
    this.lastErrorCount = 0
  }

  captureExceptionRateLimited(attributes, message) {
    const now = Date.now()
    // have we recently reported an error?
    const recentSentryReport =
      now - this.lastErrorTimeStamp < RATE_LIMIT_INTERVAL_MS
    // if so, increment the error count
    if (recentSentryReport) {
      this.lastErrorCount++
    } else {
      this.lastErrorCount = 0
      this.lastErrorTimeStamp = now
    }
    // only report 5 errors every minute to avoid overload
    if (this.lastErrorCount < RATE_LIMIT_MAX_ERRORS) {
      // add a note if the rate limit has been hit
      const note =
        this.lastErrorCount + 1 === RATE_LIMIT_MAX_ERRORS
          ? '(rate limited)'
          : ''
      // report the exception
      this.captureException(attributes, message, `error${note}`)
    }
  }

  captureException(attributes, message, level) {
    // handle case of logger.error "message"
    if (typeof attributes === 'string') {
      attributes = { err: new Error(attributes) }
    }

    // extract any error object
    let error = attributes.err || attributes.error

    // avoid reporting errors twice
    for (const key in attributes) {
      const value = attributes[key]
      if (value instanceof Error && value.reportedToSentry) {
        return
      }
    }

    // include our log message in the error report
    if (error == null) {
      if (typeof message === 'string') {
        error = { message }
      }
    } else if (message != null) {
      attributes.description = message
    }

    // report the error
    if (error != null) {
      // capture attributes and use *_id objects as tags
      const tags = {}
      const extra = {}
      for (const key in attributes) {
        const value = attributes[key]
        if (key.match(/_id/) && typeof value === 'string') {
          tags[key] = value
        }
        extra[key] = value
      }

      // capture req object if available
      const { req } = attributes
      if (req != null) {
        extra.req = {
          method: req.method,
          url: req.originalUrl,
          query: req.query,
          headers: req.headers,
          ip: req.ip
        }
      }

      // recreate error objects that have been converted to a normal object
      if (!(error instanceof Error) && typeof error === 'object') {
        const newError = new Error(error.message)
        for (const key of Object.keys(error || {})) {
          const value = error[key]
          newError[key] = value
        }
        error = newError
      }

      // filter paths from the message to avoid duplicate errors in sentry
      // (e.g. errors from `fs` methods which have a path attribute)
      try {
        if (error.path) {
          error.message = error.message.replace(` '${error.path}'`, '')
        }

        // send the error to sentry
        this.Sentry.captureException(error, { tags, extra, level })

        // put a flag on the errors to avoid reporting them multiple times
        for (const key in attributes) {
          const value = attributes[key]
          if (value instanceof Error) {
            value.reportedToSentry = true
          }
        }
      } catch (err) {
        // ignore Sentry errors
      }
    }
  }
}

module.exports = SentryManager
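A minimal sketch (not part of the diff) of the SentryManager in isolation; the DSN is a placeholder, and in this commit the instance is normally created by initializeErrorReporting in logging-manager.js:

const SentryManager = require('./sentry-manager')

const sentryManager = new SentryManager('https://examplePublicKey@o0.ingest.sentry.io/0')

// Reports at most 5 errors per minute; string values on *_id keys become Sentry tags.
sentryManager.captureExceptionRateLimited(
  { err: new Error('boom'), project_id: 'abc123' },
  'request failed'
)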
libraries/logger/serializers.js (new file, 60 lines)
@@ -0,0 +1,60 @@
const OError = require('@overleaf/o-error')

function errSerializer(err) {
  if (!err) {
    return err
  }
  return {
    message: err.message,
    name: err.name,
    stack: OError.getFullStack(err),
    info: OError.getFullInfo(err),
    code: err.code,
    signal: err.signal
  }
}

function reqSerializer(req) {
  if (!req) {
    return req
  }
  const headers = req.headers || {}
  return {
    method: req.method,
    url: req.originalUrl || req.url,
    remoteAddress: getRemoteIp(req),
    headers: {
      referer: headers.referer || headers.referrer,
      'user-agent': headers['user-agent'],
      'content-length': headers['content-length']
    }
  }
}

function resSerializer(res) {
  if (!res) {
    return res
  }
  return {
    statusCode: res.statusCode,
    headers: {
      'content-length': res.getHeader && res.getHeader('content-length')
    }
  }
}

function getRemoteIp(req) {
  if (req.ip) {
    return req.ip
  }
  if (req.socket) {
    if (req.socket.socket && req.socket.socket.remoteAddress) {
      return req.socket.socket.remoteAddress
    } else if (req.socket.remoteAddress) {
      return req.socket.remoteAddress
    }
  }
  return null
}

module.exports = { err: errSerializer, req: reqSerializer, res: resSerializer }
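For illustration only (not part of the diff), what the request serializer keeps from an Express-style request; the values are made up:

const Serializers = require('./serializers')

const req = {
  method: 'GET',
  originalUrl: '/project/abc123',
  ip: '10.0.0.1',
  headers: {
    referer: 'https://example.com',
    'user-agent': 'curl/7.68.0',
    cookie: 'session=secret'
  }
}

console.log(Serializers.req(req))
// {
//   method: 'GET',
//   url: '/project/abc123',
//   remoteAddress: '10.0.0.1',
//   headers: {
//     referer: 'https://example.com',
//     'user-agent': 'curl/7.68.0',
//     'content-length': undefined
//   }
// }
// Only the whitelisted headers survive; cookies and other headers are dropped.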
libraries/logger/test/setup.js (new file, 11 lines)
@@ -0,0 +1,11 @@
const chai = require('chai')
const sinonChai = require('sinon-chai')
const SandboxedModule = require('sandboxed-module')

// Chai configuration
chai.should()
chai.use(sinonChai)

SandboxedModule.configure({
  globals: { Buffer, JSON, console, process }
})
libraries/logger/test/unit/log-level-checker-tests.js (new file, 199 lines)
@@ -0,0 +1,199 @@
|
|||
const Path = require('path')
|
||||
const { promisify } = require('util')
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const sinon = require('sinon')
|
||||
const { expect } = require('chai')
|
||||
|
||||
const MODULE_PATH = Path.join(__dirname, '../../log-level-checker.js')
|
||||
const DEFAULT_LEVEL = 'warn'
|
||||
const TRACE_LEVEL = 'trace'
|
||||
const TRACING_END_TIME_FILE = '/logging/tracingEndTime'
|
||||
const NOW = 10000
|
||||
const PAST = NOW - 1000
|
||||
const FUTURE = NOW + 1000
|
||||
|
||||
const delay = promisify(setTimeout)
|
||||
|
||||
describe('LogLevelChecker', function () {
|
||||
beforeEach(function () {
|
||||
this.logger = {
|
||||
level: sinon.stub(),
|
||||
fields: { name: 'myapp' }
|
||||
}
|
||||
this.fetchResponse = {
|
||||
text: sinon.stub().resolves(''),
|
||||
status: 200,
|
||||
ok: true
|
||||
}
|
||||
this.fetch = sinon
|
||||
.stub()
|
||||
.withArgs(
|
||||
'http://metadata.google.internal/computeMetadata/v1/project/attributes/myapp-setLogLevelEndTime',
|
||||
{ headers: { 'Metadata-Flavor': 'Google' } }
|
||||
)
|
||||
.resolves(this.fetchResponse)
|
||||
|
||||
this.fs = {
|
||||
promises: {
|
||||
readFile: sinon.stub()
|
||||
}
|
||||
}
|
||||
|
||||
this.clock = sinon.useFakeTimers(NOW)
|
||||
|
||||
this.module = SandboxedModule.require(MODULE_PATH, {
|
||||
requires: {
|
||||
'node-fetch': this.fetch,
|
||||
fs: this.fs
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.clock.restore()
|
||||
})
|
||||
|
||||
describe('FileLogLevelChecker', function () {
|
||||
beforeEach(function () {
|
||||
this.logLevelChecker = new this.module.FileLogLevelChecker(
|
||||
this.logger,
|
||||
DEFAULT_LEVEL
|
||||
)
|
||||
})
|
||||
|
||||
describe('when the file is empty', function () {
|
||||
setupTracingEndTimeFile('')
|
||||
checkLogLevel()
|
||||
expectLevelSetTo(DEFAULT_LEVEL)
|
||||
})
|
||||
|
||||
describe("when the file can't be read", function () {
|
||||
beforeEach(async function () {
|
||||
this.fs.promises.readFile.rejects(new Error('Read error!'))
|
||||
})
|
||||
checkLogLevel()
|
||||
expectLevelSetTo(DEFAULT_LEVEL)
|
||||
})
|
||||
|
||||
describe('when the file has a timestamp in the future', function () {
|
||||
setupTracingEndTimeFile(FUTURE.toString())
|
||||
checkLogLevel()
|
||||
expectLevelSetTo(TRACE_LEVEL)
|
||||
})
|
||||
|
||||
describe('when the file has a timestamp in the past', function () {
|
||||
setupTracingEndTimeFile(PAST.toString())
|
||||
checkLogLevel()
|
||||
expectLevelSetTo(DEFAULT_LEVEL)
|
||||
})
|
||||
|
||||
describe('interval checker', function () {
|
||||
beforeEach(function () {
|
||||
this.fs.promises.readFile.resolves('')
|
||||
this.logLevelChecker.start()
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.logLevelChecker.stop()
|
||||
})
|
||||
|
||||
it('checks the file every minute', async function () {
|
||||
this.clock.tick(1000)
|
||||
// Yield to the event loop
|
||||
await delay(0)
|
||||
expect(this.logger.level).to.have.been.calledOnceWithExactly(
|
||||
DEFAULT_LEVEL
|
||||
)
|
||||
this.logger.level.reset()
|
||||
|
||||
// Trace until 1.5 minutes in the future
|
||||
const traceUntil = NOW + 90000
|
||||
this.fs.promises.readFile.resolves(traceUntil.toString())
|
||||
|
||||
this.clock.tick(61000)
|
||||
await delay(0)
|
||||
expect(this.logger.level).to.have.been.calledOnceWithExactly(
|
||||
TRACE_LEVEL
|
||||
)
|
||||
this.logger.level.reset()
|
||||
|
||||
this.clock.tick(60000)
|
||||
await delay(0)
|
||||
expect(this.logger.level).to.have.been.calledOnceWithExactly(
|
||||
DEFAULT_LEVEL
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('GCEMetadataLogLevelChecker', function () {
|
||||
beforeEach(function () {
|
||||
this.logLevelChecker = new this.module.GCEMetadataLogLevelChecker(
|
||||
this.logger,
|
||||
DEFAULT_LEVEL
|
||||
)
|
||||
})
|
||||
|
||||
describe('when the response is empty', function () {
|
||||
setupTracingEndTimeGCE('')
|
||||
checkLogLevel()
|
||||
expectLevelSetTo(DEFAULT_LEVEL)
|
||||
})
|
||||
|
||||
describe('when the request errors', function () {
|
||||
beforeEach(async function () {
|
||||
this.fetchResponse.text.rejects(new Error('Read error!'))
|
||||
})
|
||||
checkLogLevel()
|
||||
expectLevelSetTo(DEFAULT_LEVEL)
|
||||
})
|
||||
|
||||
describe('when the request returns a failure status code', function () {
|
||||
beforeEach(async function () {
|
||||
this.fetchResponse.status = 500
|
||||
this.fetchResponse.ok = false
|
||||
})
|
||||
checkLogLevel()
|
||||
expectLevelSetTo(DEFAULT_LEVEL)
|
||||
})
|
||||
|
||||
describe('when the response is a timestamp in the future', function () {
|
||||
setupTracingEndTimeGCE(FUTURE.toString())
|
||||
checkLogLevel()
|
||||
expectLevelSetTo(TRACE_LEVEL)
|
||||
})
|
||||
|
||||
describe('when the response is a timestamp in the past', function () {
|
||||
setupTracingEndTimeGCE(PAST.toString())
|
||||
checkLogLevel()
|
||||
expectLevelSetTo(DEFAULT_LEVEL)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
function setupTracingEndTimeFile(contents) {
|
||||
beforeEach(`set tracing end time in file to ${contents}`, function () {
|
||||
this.fs.promises.readFile.withArgs(TRACING_END_TIME_FILE).resolves(contents)
|
||||
})
|
||||
}
|
||||
|
||||
function setupTracingEndTimeGCE(contents) {
|
||||
beforeEach(
|
||||
`set tracing end time in GCE metadata to ${contents}`,
|
||||
function () {
|
||||
this.fetchResponse.text.resolves(contents)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
function checkLogLevel() {
|
||||
beforeEach('Check log level', async function () {
|
||||
await this.logLevelChecker.checkLogLevel()
|
||||
})
|
||||
}
|
||||
|
||||
function expectLevelSetTo(level) {
|
||||
it(`sets the log level to ${level}`, function () {
|
||||
expect(this.logger.level).to.have.been.calledWith(level)
|
||||
})
|
||||
}
|
libraries/logger/test/unit/logging-manager-tests.js (new file, 213 lines)
@@ -0,0 +1,213 @@
|
|||
const SandboxedModule = require('sandboxed-module')
|
||||
const bunyan = require('bunyan')
|
||||
const { expect } = require('chai')
|
||||
const path = require('path')
|
||||
const sinon = require('sinon')
|
||||
|
||||
const MODULE_PATH = path.join(__dirname, '../../logging-manager.js')
|
||||
|
||||
describe('LoggingManager', function () {
|
||||
beforeEach(function () {
|
||||
this.start = Date.now()
|
||||
this.bunyanLogger = {
|
||||
addStream: sinon.stub(),
|
||||
debug: sinon.stub(),
|
||||
error: sinon.stub(),
|
||||
fatal: sinon.stub(),
|
||||
info: sinon.stub(),
|
||||
level: sinon.stub(),
|
||||
warn: sinon.stub()
|
||||
}
|
||||
this.Bunyan = {
|
||||
createLogger: sinon.stub().returns(this.bunyanLogger),
|
||||
RingBuffer: bunyan.RingBuffer
|
||||
}
|
||||
this.stackdriverStreamConfig = { stream: 'stackdriver' }
|
||||
this.stackdriverClient = {
|
||||
stream: sinon.stub().returns(this.stackdriverStreamConfig)
|
||||
}
|
||||
this.GCPLogging = {
|
||||
LoggingBunyan: sinon.stub().returns(this.stackdriverClient)
|
||||
}
|
||||
this.FileLogLevelChecker = {
|
||||
start: sinon.stub(),
|
||||
stop: sinon.stub()
|
||||
}
|
||||
this.GCEMetadataLogLevelChecker = {
|
||||
start: sinon.stub(),
|
||||
stop: sinon.stub()
|
||||
}
|
||||
this.LogLevelChecker = {
|
||||
FileLogLevelChecker: sinon.stub().returns(this.FileLogLevelChecker),
|
||||
GCEMetadataLogLevelChecker: sinon
|
||||
.stub()
|
||||
.returns(this.GCEMetadataLogLevelChecker)
|
||||
}
|
||||
this.SentryManager = {
|
||||
captureException: sinon.stub(),
|
||||
captureExceptionRateLimited: sinon.stub()
|
||||
}
|
||||
this.LoggingManager = SandboxedModule.require(MODULE_PATH, {
|
||||
requires: {
|
||||
bunyan: this.Bunyan,
|
||||
'@google-cloud/logging-bunyan': this.GCPLogging,
|
||||
'./log-level-checker': this.LogLevelChecker,
|
||||
'./sentry-manager': sinon.stub().returns(this.SentryManager)
|
||||
}
|
||||
})
|
||||
this.loggerName = 'test'
|
||||
this.logger = this.LoggingManager.initialize(this.loggerName)
|
||||
this.logger.initializeErrorReporting('test_dsn')
|
||||
})
|
||||
|
||||
describe('initialize', function () {
|
||||
beforeEach(function () {
|
||||
this.Bunyan.createLogger.reset()
|
||||
})
|
||||
|
||||
describe('not in production', function () {
|
||||
beforeEach(function () {
|
||||
this.logger = this.LoggingManager.initialize(this.loggerName)
|
||||
})
|
||||
|
||||
it('should default to log level debug', function () {
|
||||
this.Bunyan.createLogger.firstCall.args[0].streams[0].level.should.equal(
|
||||
'debug'
|
||||
)
|
||||
})
|
||||
|
||||
it('should not instantiate a log level checker', function () {
|
||||
expect(this.LoggingManager.logLevelChecker).not.to.exist
|
||||
})
|
||||
})
|
||||
|
||||
describe('in production', function () {
|
||||
beforeEach(function () {
|
||||
process.env.NODE_ENV = 'production'
|
||||
this.logger = this.LoggingManager.initialize(this.loggerName)
|
||||
})
|
||||
|
||||
afterEach(() => delete process.env.NODE_ENV)
|
||||
|
||||
it('should default to log level warn', function () {
|
||||
this.Bunyan.createLogger.firstCall.args[0].streams[0].level.should.equal(
|
||||
'warn'
|
||||
)
|
||||
})
|
||||
|
||||
it('should set up a file log level checker', function () {
|
||||
expect(this.logger.logLevelChecker).to.equal(this.FileLogLevelChecker)
|
||||
expect(this.FileLogLevelChecker.start).to.have.been.called
|
||||
})
|
||||
})
|
||||
|
||||
describe('when LOG_LEVEL set in env', function () {
|
||||
beforeEach(function () {
|
||||
process.env.LOG_LEVEL = 'trace'
|
||||
this.LoggingManager.initialize()
|
||||
})
|
||||
|
||||
afterEach(() => delete process.env.LOG_LEVEL)
|
||||
|
||||
it('should use custom log level', function () {
|
||||
this.Bunyan.createLogger.firstCall.args[0].streams[0].level.should.equal(
|
||||
'trace'
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('bunyan logging', function () {
|
||||
beforeEach(function () {
|
||||
this.logArgs = [{ foo: 'bar' }, 'foo', 'bar']
|
||||
})
|
||||
|
||||
it('should log debug', function () {
|
||||
this.logger.debug(this.logArgs)
|
||||
this.bunyanLogger.debug.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
|
||||
it('should log error', function () {
|
||||
this.logger.error(this.logArgs)
|
||||
this.bunyanLogger.error.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
|
||||
it('should log fatal', function () {
|
||||
this.logger.fatal(this.logArgs)
|
||||
this.bunyanLogger.fatal.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
|
||||
it('should log info', function () {
|
||||
this.logger.info(this.logArgs)
|
||||
this.bunyanLogger.info.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
|
||||
it('should log warn', function () {
|
||||
this.logger.warn(this.logArgs)
|
||||
this.bunyanLogger.warn.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
|
||||
it('should log err', function () {
|
||||
this.logger.err(this.logArgs)
|
||||
this.bunyanLogger.error.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
|
||||
it('should log log', function () {
|
||||
this.logger.log(this.logArgs)
|
||||
this.bunyanLogger.info.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
})
|
||||
|
||||
describe('logger.error', function () {
|
||||
it('should report errors to Sentry', function () {
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
expect(this.SentryManager.captureExceptionRateLimited).to.have.been.called
|
||||
})
|
||||
})
|
||||
|
||||
describe('ringbuffer', function () {
|
||||
beforeEach(function () {
|
||||
this.logBufferMock = [
|
||||
{ msg: 'log 1' },
|
||||
{ msg: 'log 2' },
|
||||
{ level: 50, msg: 'error' }
|
||||
]
|
||||
})
|
||||
|
||||
describe('when ring buffer size is positive', function () {
|
||||
beforeEach(function () {
|
||||
process.env.LOG_RING_BUFFER_SIZE = '20'
|
||||
this.logger = this.LoggingManager.initialize(this.loggerName)
|
||||
this.logger.ringBuffer.records = this.logBufferMock
|
||||
this.logger.error({}, 'error')
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
process.env.LOG_RING_BUFFER_SIZE = undefined
|
||||
})
|
||||
|
||||
it('should include buffered logs in error log and filter out error logs in buffer', function () {
|
||||
this.bunyanLogger.error.lastCall.args[0].logBuffer.should.deep.equal([
|
||||
{ msg: 'log 1' },
|
||||
{ msg: 'log 2' }
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe('when ring buffer size is zero', function () {
|
||||
beforeEach(function () {
|
||||
process.env.LOG_RING_BUFFER_SIZE = '0'
|
||||
this.logger = this.LoggingManager.initialize(this.loggerName)
|
||||
this.logger.error({}, 'error')
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
process.env.LOG_RING_BUFFER_SIZE = undefined
|
||||
})
|
||||
|
||||
it('should not include buffered logs in error log', function () {
|
||||
expect(this.bunyanLogger.error.lastCall.args[0].logBuffer).be.undefined
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
|
(deleted file, 568 lines removed)
@@ -1,568 +0,0 @@
|
|||
const SandboxedModule = require('sandboxed-module')
|
||||
const bunyan = require('bunyan')
|
||||
const chai = require('chai')
|
||||
const path = require('path')
|
||||
const sinon = require('sinon')
|
||||
const sinonChai = require('sinon-chai')
|
||||
|
||||
chai.use(sinonChai)
|
||||
chai.should()
|
||||
const expect = chai.expect
|
||||
|
||||
const modulePath = path.join(__dirname, '../../logging-manager.js')
|
||||
|
||||
describe('LoggingManager', function () {
|
||||
beforeEach(function () {
|
||||
this.start = Date.now()
|
||||
this.clock = sinon.useFakeTimers(this.start)
|
||||
this.captureException = sinon.stub()
|
||||
this.bunyanLogger = {
|
||||
addStream: sinon.stub(),
|
||||
debug: sinon.stub(),
|
||||
error: sinon.stub(),
|
||||
fatal: sinon.stub(),
|
||||
info: sinon.stub(),
|
||||
level: sinon.stub(),
|
||||
warn: sinon.stub()
|
||||
}
|
||||
this.Sentry = {
|
||||
init: sinon.stub(),
|
||||
captureException: this.captureException
|
||||
}
|
||||
this.fetchResponse = {
|
||||
text: sinon.stub().resolves(''),
|
||||
status: 200,
|
||||
ok: true
|
||||
}
|
||||
this.Bunyan = {
|
||||
createLogger: sinon.stub().returns(this.bunyanLogger),
|
||||
RingBuffer: bunyan.RingBuffer,
|
||||
stdSerializers: {
|
||||
req: sinon.stub(),
|
||||
res: sinon.stub()
|
||||
}
|
||||
}
|
||||
this.Fetch = sinon.stub().resolves(this.fetchResponse)
|
||||
this.Fs = {
|
||||
readFile: sinon.stub(),
|
||||
promises: {
|
||||
readFile: sinon.stub()
|
||||
}
|
||||
}
|
||||
this.stackdriverStreamConfig = { stream: 'stackdriver' }
|
||||
this.stackdriverClient = {
|
||||
stream: sinon.stub().returns(this.stackdriverStreamConfig)
|
||||
}
|
||||
this.GCPLogging = {
|
||||
LoggingBunyan: sinon.stub().returns(this.stackdriverClient)
|
||||
}
|
||||
this.LoggingManager = SandboxedModule.require(modulePath, {
|
||||
globals: { console, process },
|
||||
requires: {
|
||||
bunyan: this.Bunyan,
|
||||
'@sentry/node': this.Sentry,
|
||||
'node-fetch': this.Fetch,
|
||||
fs: this.Fs,
|
||||
'@google-cloud/logging-bunyan': this.GCPLogging
|
||||
}
|
||||
})
|
||||
this.loggerName = 'test'
|
||||
this.logger = this.LoggingManager.initialize(this.loggerName)
|
||||
this.logger.initializeErrorReporting('test_dsn')
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.clock.restore()
|
||||
})
|
||||
|
||||
describe('initialize', function () {
|
||||
beforeEach(function () {
|
||||
this.checkLogLevelStub = sinon
|
||||
.stub(this.LoggingManager, 'checkLogLevel')
|
||||
.resolves('')
|
||||
this.Bunyan.createLogger.reset()
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.checkLogLevelStub.restore()
|
||||
})
|
||||
|
||||
describe('not in production', function () {
|
||||
beforeEach(function () {
|
||||
this.logger = this.LoggingManager.initialize(this.loggerName)
|
||||
})
|
||||
|
||||
it('should default to log level debug', function () {
|
||||
this.Bunyan.createLogger.firstCall.args[0].streams[0].level.should.equal(
|
||||
'debug'
|
||||
)
|
||||
})
|
||||
|
||||
it('should not run checkLogLevel', function () {
|
||||
this.checkLogLevelStub.should.not.have.been.called
|
||||
})
|
||||
})
|
||||
|
||||
describe('in production', function () {
|
||||
beforeEach(function () {
|
||||
process.env.NODE_ENV = 'production'
|
||||
this.logger = this.LoggingManager.initialize(this.loggerName)
|
||||
})
|
||||
|
||||
afterEach(() => delete process.env.NODE_ENV)
|
||||
|
||||
it('should default to log level warn', function () {
|
||||
this.Bunyan.createLogger.firstCall.args[0].streams[0].level.should.equal(
|
||||
'warn'
|
||||
)
|
||||
})
|
||||
|
||||
describe('logLevelSource file', function () {
|
||||
it('should run checkLogLevel', function () {
|
||||
this.checkLogLevelStub.should.have.been.calledOnce
|
||||
})
|
||||
|
||||
describe('after 1 minute', () =>
|
||||
it('should run checkLogLevel again', function () {
|
||||
this.clock.tick(61 * 1000)
|
||||
this.checkLogLevelStub.should.have.been.calledTwice
|
||||
}))
|
||||
|
||||
describe('after 2 minutes', () =>
|
||||
it('should run checkLogLevel again', function () {
|
||||
this.clock.tick(121 * 1000)
|
||||
this.checkLogLevelStub.should.have.been.calledThrice
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
describe('when LOG_LEVEL set in env', function () {
|
||||
beforeEach(function () {
|
||||
process.env.LOG_LEVEL = 'trace'
|
||||
this.LoggingManager.initialize()
|
||||
})
|
||||
|
||||
afterEach(() => delete process.env.LOG_LEVEL)
|
||||
|
||||
it('should use custom log level', function () {
|
||||
this.Bunyan.createLogger.firstCall.args[0].streams[0].level.should.equal(
|
||||
'trace'
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('bunyan logging', function () {
|
||||
beforeEach(function () {
|
||||
this.logArgs = [{ foo: 'bar' }, 'foo', 'bar']
|
||||
})
|
||||
|
||||
it('should log debug', function () {
|
||||
this.logger.debug(this.logArgs)
|
||||
this.bunyanLogger.debug.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
|
||||
it('should log error', function () {
|
||||
this.logger.error(this.logArgs)
|
||||
this.bunyanLogger.error.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
|
||||
it('should log fatal', function () {
|
||||
this.logger.fatal(this.logArgs)
|
||||
this.bunyanLogger.fatal.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
|
||||
it('should log info', function () {
|
||||
this.logger.info(this.logArgs)
|
||||
this.bunyanLogger.info.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
|
||||
it('should log warn', function () {
|
||||
this.logger.warn(this.logArgs)
|
||||
this.bunyanLogger.warn.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
|
||||
it('should log err', function () {
|
||||
this.logger.err(this.logArgs)
|
||||
this.bunyanLogger.error.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
|
||||
it('should log log', function () {
|
||||
this.logger.log(this.logArgs)
|
||||
this.bunyanLogger.info.should.have.been.calledWith(this.logArgs)
|
||||
})
|
||||
})
|
||||
|
||||
describe('logger.error', function () {
|
||||
it('should report a single error to sentry', function () {
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.captureException.called.should.equal(true)
|
||||
})
|
||||
|
||||
it('should report the same error to sentry only once', function () {
|
||||
const error1 = new Error('this is the error')
|
||||
this.logger.error({ foo: error1 }, 'first message')
|
||||
this.logger.error({ bar: error1 }, 'second message')
|
||||
this.captureException.callCount.should.equal(1)
|
||||
})
|
||||
|
||||
it('should report two different errors to sentry individually', function () {
|
||||
const error1 = new Error('this is the error')
|
||||
const error2 = new Error('this is the error')
|
||||
this.logger.error({ foo: error1 }, 'first message')
|
||||
this.logger.error({ bar: error2 }, 'second message')
|
||||
this.captureException.callCount.should.equal(2)
|
||||
})
|
||||
|
||||
it('should remove the path from fs errors', function () {
|
||||
const fsError = new Error(
|
||||
"Error: ENOENT: no such file or directory, stat '/tmp/3279b8d0-da10-11e8-8255-efd98985942b'"
|
||||
)
|
||||
fsError.path = '/tmp/3279b8d0-da10-11e8-8255-efd98985942b'
|
||||
this.logger.error({ err: fsError }, 'message')
|
||||
this.captureException
|
||||
.calledWith(
|
||||
sinon.match.has(
|
||||
'message',
|
||||
'Error: ENOENT: no such file or directory, stat'
|
||||
)
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('for multiple errors should only report a maximum of 5 errors to sentry', function () {
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.captureException.callCount.should.equal(5)
|
||||
})
|
||||
|
||||
it('for multiple errors with a minute delay should report 10 errors to sentry', function () {
|
||||
// the first five errors should be reported to sentry
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
// the following errors should not be reported
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
// allow a minute to pass
|
||||
this.clock.tick(this.start + 61 * 1000)
|
||||
// after a minute the next five errors should be reported to sentry
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
// the following errors should not be reported to sentry
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.logger.error({ foo: 'bar' }, 'message')
|
||||
this.captureException.callCount.should.equal(10)
|
||||
})
|
||||
|
||||
describe('reportedToSentry', function () {
|
||||
it('should mark the error as reported to sentry', function () {
|
||||
const err = new Error()
|
||||
this.logger.error({ err }, 'message')
|
||||
expect(this.captureException.called).to.equal(true)
|
||||
expect(err.reportedToSentry).to.equal(true)
|
||||
})
|
||||
|
||||
it('should mark two errors as reported to sentry', function () {
|
||||
const err1 = new Error()
|
||||
const err2 = new Error()
|
||||
this.logger.error({ err: err1, err2 }, 'message')
|
||||
expect(this.captureException.called).to.equal(true)
|
||||
expect(err1.reportedToSentry).to.equal(true)
|
||||
expect(err2.reportedToSentry).to.equal(true)
|
||||
})
|
||||
|
||||
it('should not mark arbitrary objects as reported to sentry', function () {
|
||||
const err = new Error()
|
||||
const ctx = { foo: 'bar' }
|
||||
this.logger.error({ err, ctx }, 'message')
|
||||
expect(this.captureException.called).to.equal(true)
|
||||
expect(ctx.reportedToSentry).to.equal(undefined)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('checkLogLevelFile', function () {
|
||||
it('should request log level override from the config map', async function () {
|
||||
this.logger.getTracingEndTime = this.logger.getTracingEndTimeFile
|
||||
await this.logger.checkLogLevel()
|
||||
this.Fs.promises.readFile.should.have.been.calledWithMatch(
|
||||
'/logging/tracingEndTime'
|
||||
)
|
||||
})
|
||||
|
||||
describe('when read errors', function () {
|
||||
beforeEach(async function () {
|
||||
this.Fs.promises.readFile.throws(new Error('test read error'))
|
||||
this.logger.getTracingEndTime = this.logger.getTracingEndTimeFile
|
||||
await this.logger.checkLogLevel()
|
||||
})
|
||||
|
||||
it('should only set default level', function () {
|
||||
this.bunyanLogger.level.should.have.been.calledOnce.and.calledWith(
|
||||
'debug'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the file is empty', function () {
|
||||
beforeEach(async function () {
|
||||
this.Fs.promises.readFile.returns('')
|
||||
this.logger.getTracingEndTime = this.logger.getTracingEndTimeFile
|
||||
await this.logger.checkLogLevel()
|
||||
})
|
||||
|
||||
it('should only set default level', function () {
|
||||
this.bunyanLogger.level.should.have.been.calledOnce.and.calledWith(
|
||||
'debug'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when time value returned that is less than current time', function () {
|
||||
beforeEach(async function () {
|
||||
this.Fs.promises.readFile.returns('1')
|
||||
this.logger.getTracingEndTime = this.logger.getTracingEndTimeFile
|
||||
await this.logger.checkLogLevel()
|
||||
})
|
||||
|
||||
it('should only set default level', function () {
|
||||
this.bunyanLogger.level.should.have.been.calledOnce.and.calledWith(
|
||||
'debug'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when time value returned that is more than current time', function () {
|
||||
describe('when level is already set', function () {
|
||||
beforeEach(async function () {
|
||||
this.bunyanLogger.level.returns(10)
|
||||
this.Fs.promises.readFile.returns((this.start + 1000).toString())
|
||||
this.logger.getTracingEndTime = this.logger.getTracingEndTimeFile
|
||||
await this.logger.checkLogLevel()
|
||||
})
|
||||
|
||||
it('should set trace level', function () {
|
||||
this.bunyanLogger.level.should.have.been.calledOnce.and.calledWith(
|
||||
'trace'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when level is not already set', function () {
|
||||
beforeEach(async function () {
|
||||
this.bunyanLogger.level.returns(20)
|
||||
this.Fs.promises.readFile.returns((this.start + 1000).toString())
|
||||
this.logger.getTracingEndTime = this.logger.getTracingEndTimeFile
|
||||
await this.logger.checkLogLevel()
|
||||
})
|
||||
|
||||
it('should set trace level', function () {
|
||||
this.bunyanLogger.level.should.have.been.calledOnce.and.calledWith(
|
||||
'trace'
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('checkLogLevelMetadata', function () {
|
||||
beforeEach(function () {
|
||||
this.logger = this.LoggingManager.initialize(this.loggerName)
|
||||
})
|
||||
|
||||
describe('checkLogLevel', function () {
|
||||
it('should request log level override from google meta data service', async function () {
|
||||
this.logger.getTracingEndTime = this.logger.getTracingEndTimeMetadata
|
||||
await this.logger.checkLogLevel()
|
||||
const options = {
|
||||
headers: {
|
||||
'Metadata-Flavor': 'Google'
|
||||
}
|
||||
}
|
||||
const uri = `http://metadata.google.internal/computeMetadata/v1/project/attributes/${this.loggerName}-setLogLevelEndTime`
|
||||
this.Fetch.should.have.been.calledWithMatch(uri, options)
|
||||
})
|
||||
|
||||
describe('when request has error', function () {
|
||||
beforeEach(async function () {
|
||||
this.Fetch = sinon.stub().throws()
|
||||
this.logger.getTracingEndTime = this.logger.getTracingEndTimeMetadata
|
||||
await this.logger.checkLogLevel()
|
||||
})
|
||||
|
||||
it('should only set default level', function () {
|
||||
this.bunyanLogger.level.should.have.been.calledOnce.and.calledWith(
|
||||
'debug'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when statusCode is not 200', function () {
|
||||
beforeEach(async function () {
|
||||
this.fetchResponse.status = 404
|
||||
this.logger.getTracingEndTime = this.logger.getTracingEndTimeMetadata
|
||||
await this.logger.checkLogLevel()
|
||||
})
|
||||
|
||||
it('should only set default level', function () {
|
||||
this.bunyanLogger.level.should.have.been.calledOnce.and.calledWith(
|
||||
'debug'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when time value returned that is less than current time', function () {
|
||||
beforeEach(async function () {
|
||||
this.logger.getTracingEndTime = this.logger.getTracingEndTimeMetadata
|
||||
this.fetchResponse.text = sinon.stub().resolves('1')
|
||||
await this.logger.checkLogLevel()
|
||||
})
|
||||
|
||||
it('should only set default level', function () {
|
||||
this.bunyanLogger.level.should.have.been.calledOnce.and.calledWith(
|
||||
'debug'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when time value returned that is more than current time', function () {
|
||||
describe('when level is already set', function () {
|
||||
beforeEach(async function () {
|
||||
this.bunyanLogger.level.returns(10)
|
||||
this.fetchResponse.text = sinon
|
||||
.stub()
|
||||
.resolves((this.start + 1000).toString())
|
||||
this.logger.getTracingEndTime =
|
||||
this.logger.getTracingEndTimeMetadata
|
||||
|
||||
await this.logger.checkLogLevel()
|
||||
})
|
||||
|
||||
it('should set trace level', function () {
|
||||
this.bunyanLogger.level.should.have.been.calledOnce.and.calledWith(
|
||||
'trace'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when level is not already set', function () {
|
||||
beforeEach(async function () {
|
||||
this.bunyanLogger.level.returns(20)
|
||||
this.fetchResponse.text = sinon
|
||||
.stub()
|
||||
.resolves((this.start + 1000).toString())
|
||||
this.Fetch.fetch = sinon.stub().resolves(this.fetchResponse)
|
||||
this.logger.getTracingEndTime =
|
||||
this.logger.getTracingEndTimeMetadata
|
||||
|
||||
await this.logger.checkLogLevel()
|
||||
})
|
||||
|
||||
it('should set trace level', function () {
|
||||
this.bunyanLogger.level.should.have.been.calledOnce.and.calledWith(
|
||||
'trace'
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('ringbuffer', function () {
|
||||
beforeEach(function () {
|
||||
this.logBufferMock = [
|
||||
{ msg: 'log 1' },
|
||||
{ msg: 'log 2' },
|
||||
{ level: 50, msg: 'error' }
|
||||
]
|
||||
})
|
||||
|
||||
describe('when ring buffer size is positive', function () {
|
||||
beforeEach(function () {
|
||||
process.env.LOG_RING_BUFFER_SIZE = '20'
|
||||
this.logger = this.LoggingManager.initialize(this.loggerName)
|
||||
this.logger.ringBuffer.records = this.logBufferMock
|
||||
this.logger.error({}, 'error')
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
process.env.LOG_RING_BUFFER_SIZE = undefined
|
||||
})
|
||||
|
||||
it('should include buffered logs in error log and filter out error logs in buffer', function () {
|
||||
this.bunyanLogger.error.lastCall.args[0].logBuffer.should.deep.equal([
|
||||
{ msg: 'log 1' },
|
||||
{ msg: 'log 2' }
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe('when ring buffer size is zero', function () {
|
||||
beforeEach(function () {
|
||||
process.env.LOG_RING_BUFFER_SIZE = '0'
|
||||
this.logger = this.LoggingManager.initialize(this.loggerName)
|
||||
this.logger.error({}, 'error')
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
process.env.LOG_RING_BUFFER_SIZE = undefined
|
||||
})
|
||||
|
||||
it('should not include buffered logs in error log', function () {
|
||||
expect(this.bunyanLogger.error.lastCall.args[0].logBuffer).be.undefined
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('stackdriver logging', function () {
|
||||
describe('when STACKDRIVER_LOGGING is unset', function () {
|
||||
beforeEach(function () {
|
||||
process.env.STACKDRIVER_LOGGING = undefined
|
||||
this.LoggingManager.initialize(this.loggerName)
|
||||
})
|
||||
|
||||
it('is disabled', function () {
|
||||
expect(this.bunyanLogger.addStream).not.to.have.been.calledWith(
|
||||
this.stackdriverStreamConfig
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when STACKDRIVER_LOGGING is true', function () {
|
||||
beforeEach(function () {
|
||||
process.env.STACKDRIVER_LOGGING = 'true'
|
||||
this.LoggingManager.initialize(this.loggerName)
|
||||
})
|
||||
|
||||
it('is enabled', function () {
|
||||
expect(this.bunyanLogger.addStream).to.have.been.calledWith(
|
||||
this.stackdriverStreamConfig
|
||||
)
|
||||
})
|
||||
|
||||
it('is configured properly', function () {
|
||||
expect(this.GCPLogging.LoggingBunyan).to.have.been.calledWith({
|
||||
logName: this.loggerName,
|
||||
serviceContext: { service: this.loggerName }
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
libraries/logger/test/unit/sentry-manager-tests.js (new file, 133 lines)
@@ -0,0 +1,133 @@
|
|||
const Path = require('path')
const SandboxedModule = require('sandboxed-module')
const { expect } = require('chai')
const sinon = require('sinon')

const MODULE_PATH = Path.join(__dirname, '../../sentry-manager.js')

describe('SentryManager', function () {
  beforeEach(function () {
    this.clock = sinon.useFakeTimers(Date.now())
    this.Sentry = {
      init: sinon.stub(),
      captureException: sinon.stub()
    }
    this.SentryManager = SandboxedModule.require(MODULE_PATH, {
      requires: {
        '@sentry/node': this.Sentry
      }
    })
    this.sentryManager = new this.SentryManager('test_dsn')
  })

  afterEach(function () {
    this.clock.restore()
  })

  describe('captureExceptionRateLimited', function () {
    it('should report a single error to sentry', function () {
      this.sentryManager.captureExceptionRateLimited({ foo: 'bar' }, 'message')
      expect(this.Sentry.captureException).to.have.been.calledOnce
    })

    it('should report the same error to sentry only once', function () {
      const error1 = new Error('this is the error')
      this.sentryManager.captureExceptionRateLimited(
        { foo: error1 },
        'first message'
      )
      this.sentryManager.captureExceptionRateLimited(
        { bar: error1 },
        'second message'
      )
      expect(this.Sentry.captureException).to.have.been.calledOnce
    })

    it('should report two different errors to sentry individually', function () {
      const error1 = new Error('this is the error')
      const error2 = new Error('this is the error')
      this.sentryManager.captureExceptionRateLimited(
        { foo: error1 },
        'first message'
      )
      this.sentryManager.captureExceptionRateLimited(
        { bar: error2 },
        'second message'
      )
      expect(this.Sentry.captureException).to.have.been.calledTwice
    })

    it('for multiple errors should only report a maximum of 5 errors to sentry', function () {
      for (let i = 0; i < 10; i++) {
        this.sentryManager.captureExceptionRateLimited(
          { foo: 'bar' },
          'message'
        )
      }
      expect(this.Sentry.captureException).to.have.callCount(5)
    })

    it('for multiple errors with a minute delay should report 10 errors to sentry', function () {
      for (let i = 0; i < 10; i++) {
        this.sentryManager.captureExceptionRateLimited(
          { foo: 'bar' },
          'message'
        )
      }
      expect(this.Sentry.captureException).to.have.callCount(5)

      // allow a minute to pass
      this.clock.tick(61 * 1000)

      for (let i = 0; i < 10; i++) {
        this.sentryManager.captureExceptionRateLimited(
          { foo: 'bar' },
          'message'
        )
      }
      expect(this.Sentry.captureException).to.have.callCount(10)
    })
  })

  describe('captureException', function () {
    it('should remove the path from fs errors', function () {
      const fsError = new Error(
        "Error: ENOENT: no such file or directory, stat '/tmp/3279b8d0-da10-11e8-8255-efd98985942b'"
      )
      fsError.path = '/tmp/3279b8d0-da10-11e8-8255-efd98985942b'
      this.sentryManager.captureException({ err: fsError }, 'message', 'error')
      expect(this.Sentry.captureException).to.have.been.calledWith(
        sinon.match.has(
          'message',
          'Error: ENOENT: no such file or directory, stat'
        )
      )
    })

    describe('reportedToSentry', function () {
      it('should mark the error as reported to sentry', function () {
        const err = new Error()
        this.sentryManager.captureException({ err }, 'message')
        expect(this.Sentry.captureException).to.have.been.called
        expect(err.reportedToSentry).to.equal(true)
      })

      it('should mark two errors as reported to sentry', function () {
        const err1 = new Error()
        const err2 = new Error()
        this.sentryManager.captureException({ err: err1, err2 }, 'message')
        expect(this.Sentry.captureException).to.have.been.called
        expect(err1.reportedToSentry).to.equal(true)
        expect(err2.reportedToSentry).to.equal(true)
      })

      it('should not mark arbitrary objects as reported to sentry', function () {
        const err = new Error()
        const ctx = { foo: 'bar' }
        this.sentryManager.captureException({ err, ctx }, 'message')
        expect(this.Sentry.captureException).to.have.been.called
        expect(ctx.reportedToSentry).not.to.exist
      })
    })
  })
})
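The rate-limiting contract these tests pin down — duplicate error objects reported once, and no more than five reports before a one-minute window resets — could be implemented roughly as follows. This is a sketch only; the class shape mirrors the tests, but the field names and the exact window bookkeeping are assumptions, not the module's source.

// Sketch of rate-limited reporting on top of the real @sentry/node API.
const Sentry = require('@sentry/node')

const MAX_REPORTS_PER_MINUTE = 5 // limit implied by the tests above
const WINDOW_MS = 60 * 1000

class RateLimitedSentry {
  constructor(dsn) {
    Sentry.init({ dsn })
    this.windowStart = Date.now()
    this.reportCount = 0
    this.seenErrors = new WeakSet() // dedupe on error object identity
  }

  captureExceptionRateLimited(attributes, message) {
    const now = Date.now()
    if (now - this.windowStart > WINDOW_MS) {
      // A minute has passed; start a fresh reporting window.
      this.windowStart = now
      this.reportCount = 0
    }

    const errors = Object.values(attributes || {}).filter(
      value => value instanceof Error
    )
    if (errors.some(err => this.seenErrors.has(err))) {
      return // this exact error object was already reported
    }
    if (this.reportCount >= MAX_REPORTS_PER_MINUTE) {
      return // window exhausted; drop the report
    }

    this.reportCount += 1
    errors.forEach(err => this.seenErrors.add(err))
    Sentry.captureException(errors[0] || new Error(message))
  }
}

// Usage: new RateLimitedSentry('test_dsn').captureExceptionRateLimited({ err }, 'message')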

5
libraries/metrics/CHANGELOG.md
Normal file

@@ -0,0 +1,5 @@
## v4.0.0

* Send unmodified request and response to logger.

This version of the metrics module only works with versions of the `@overleaf/logger` module greater than v3.0.0

@@ -1,23 +1,15 @@
-const yn = require('yn')
-
-const STACKDRIVER_LOGGING = yn(process.env.STACKDRIVER_LOGGING)
+const Metrics = require('./index')
 
 module.exports.monitor = logger =>
   function(req, res, next) {
-    const Metrics = require('./index')
-    const startTime = process.hrtime()
+    const startTime = Date.now()
     const { end } = res
-    res.end = function() {
-      end.apply(this, arguments)
-      const responseTime = process.hrtime(startTime)
-      const responseTimeMs = Math.round(
-        responseTime[0] * 1000 + responseTime[1] / 1000000
-      )
+    res.end = function(...args) {
+      end.apply(this, args)
+      const responseTimeMs = Date.now() - startTime
       const requestSize = parseInt(req.headers['content-length'], 10)
       const routePath = getRoutePath(req)
-      const remoteIp = getRemoteIp(req)
       const reqUrl = req.originalUrl || req.url
-      const referrer = req.headers.referer || req.headers.referrer
 
       if (routePath != null) {
         Metrics.timing('http_request', responseTimeMs, null, {
@@ -33,44 +25,7 @@ module.exports.monitor = logger =>
           })
         }
       }
-
-      let info
-      if (STACKDRIVER_LOGGING) {
-        info = {
-          httpRequest: {
-            requestMethod: req.method,
-            requestUrl: reqUrl,
-            requestSize,
-            status: res.statusCode,
-            responseSize: res.getHeader('content-length'),
-            userAgent: req.headers['user-agent'],
-            remoteIp,
-            referer: referrer,
-            latency: {
-              seconds: responseTime[0],
-              nanos: responseTime[1]
-            },
-            protocol: req.protocol
-          }
-        }
-      } else {
-        info = {
-          req: {
-            url: reqUrl,
-            method: req.method,
-            referrer,
-            'remote-addr': remoteIp,
-            'user-agent': req.headers['user-agent'],
-            'content-length': req.headers['content-length']
-          },
-          res: {
-            'content-length': res.getHeader('content-length'),
-            statusCode: res.statusCode
-          },
-          'response-time': responseTimeMs
-        }
-      }
-      logger.info(info, '%s %s', req.method, reqUrl)
+      logger.info({ req, res, responseTimeMs }, '%s %s', req.method, reqUrl)
     }
     next()
   }
@@ -88,17 +43,3 @@ function getRoutePath(req) {
   }
   return null
 }
-
-function getRemoteIp(req) {
-  if (req.ip) {
-    return req.ip
-  }
-  if (req.socket) {
-    if (req.socket.socket && req.socket.socket.remoteAddress) {
-      return req.socket.socket.remoteAddress
-    } else if (req.socket.remoteAddress) {
-      return req.socket.remoteAddress
-    }
-  }
-  return null
-}
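With the Stackdriver-specific shaping gone, the middleware simply hands the raw `req`/`res` pair and the response time to whatever logger is passed in, and serialization becomes the logger's concern. A hedged wiring sketch for an Express service follows; the service name, port, and initialization calls are assumptions based on the modules' test code, not taken from this diff.

// Usage sketch: pairing @overleaf/metrics >= 4.0.0 with @overleaf/logger >= 3.0.0.
const express = require('express')
const logger = require('@overleaf/logger')
const Metrics = require('@overleaf/metrics')

logger.initialize('example-service') // assumed entry point, mirrors LoggingManager.initialize in the tests
Metrics.initialize('example-service')

const app = express()
// Each request is timed; the logger receives the unmodified req/res objects
// plus responseTimeMs and decides how to serialize them.
app.use(Metrics.http.monitor(logger))

app.get('/status', (req, res) => res.send('ok'))

app.listen(3000) // port is illustrative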

@@ -1,3 +1,4 @@
+/* eslint-disable no-console */
 const os = require('os')
 const ExpressCompression = require('compression')
 const promClient = require('prom-client')
@@ -169,30 +170,27 @@ function close() {
   }
 }
 
-module.exports = {
-  configure,
-  initialize,
-  registerDestructor,
-  injectMetricsRoute,
-  buildPromKey,
-  sanitizeValue,
-  set,
-  inc,
-  count,
-  summary,
-  timing,
-  Timer,
-  gauge,
-  globalGauge,
-  close,
-  prom: promClient,
+module.exports.configure = configure
+module.exports.initialize = initialize
+module.exports.registerDestructor = registerDestructor
+module.exports.injectMetricsRoute = injectMetricsRoute
+module.exports.buildPromKey = buildPromKey
+module.exports.sanitizeValue = sanitizeValue
+module.exports.set = set
+module.exports.inc = inc
+module.exports.count = count
+module.exports.summary = summary
+module.exports.timing = timing
+module.exports.Timer = Timer
+module.exports.gauge = gauge
+module.exports.globalGauge = globalGauge
+module.exports.close = close
+module.exports.prom = promClient
+module.exports.register = promWrapper.registry
 
-  register: promWrapper.registry,
-
-  mongodb: require('./mongodb'),
-  http: require('./http'),
-  open_sockets: require('./open_sockets'),
-  event_loop: require('./event_loop'),
-  memory: require('./memory'),
-  timeAsyncMethod: require('./timeAsyncMethod')
-}
+module.exports.http = require('./http')
+module.exports.mongodb = require('./mongodb')
+module.exports.open_sockets = require('./open_sockets')
+module.exports.event_loop = require('./event_loop')
+module.exports.memory = require('./memory')
+module.exports.timeAsyncMethod = require('./timeAsyncMethod')
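Switching from a single object literal to per-property `module.exports` assignments leaves the consumer-facing surface unchanged: callers keep pulling the same helpers off the module. A small hedged usage sketch (the metric names are illustrative, not taken from this diff):

// Sketch of typical calls against the exported helpers.
const Metrics = require('@overleaf/metrics')

Metrics.inc('files_uploaded') // increment a counter
Metrics.gauge('queue_length', 42) // record a gauge value

// Timer measures elapsed time between construction and done().
const timer = new Metrics.Timer('compile_time')
setTimeout(() => timer.done(), 100)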

6734
libraries/metrics/package-lock.json
generated
File diff suppressed because it is too large

@@ -1,6 +1,6 @@
 {
   "name": "@overleaf/metrics",
-  "version": "3.5.2",
+  "version": "4.0.0",
   "description": "A drop-in metrics and monitoring module for node.js apps",
   "repository": {
     "type": "git",
@@ -41,5 +41,8 @@
     "format": "prettier-eslint $PWD'/**/*.js' --list-different",
     "format:fix": "prettier-eslint $PWD'/**/*.js' --write",
     "test:ci": "npm run test"
   },
+  "peerDependencies": {
+    "@overleaf/logger": ">= 3.0.0"
+  }
 }