turn down logging, use logger.info for less important data

Henry Oswald 2018-12-13 11:03:08 +00:00
parent 25e1ad7491
commit 2505be283a
14 changed files with 34 additions and 27 deletions
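
The change is mechanical at every call site below: routine per-request detail moves from logger.log to logger.info, and the stubbed logger-sharelatex module in the unit tests gains a matching info method. A minimal sketch of the pattern follows; ExampleManager and getThing are hypothetical names, only the logger calls mirror the real code, and the idea that info entries can later be dropped by raising the logger's level threshold is an assumption about how the bunyan-style logger-sharelatex is configured, not something this commit changes.

# Hypothetical sketch of the substitution applied throughout this commit.
# ExampleManager and getThing are made-up names; only the logger calls
# mirror the real code. Presumably the logger's level can then be raised
# so that these info entries are filtered out in production.
logger = require "logger-sharelatex"

module.exports = ExampleManager =
  getThing: (project_id, doc_id, callback = (error) ->) ->
    # before this commit: logger.log {project_id, doc_id}, "getting thing"
    logger.info {project_id, doc_id}, "getting thing"
    callback()

# In the unit tests, the sandboxed logger stub needs the new method too:
#   "logger-sharelatex": @logger = { log: sinon.stub(), info: sinon.stub() }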

View file

@ -25,7 +25,7 @@ module.exports = DocumentManager =
logger.log {project_id, doc_id}, "doc not in redis so getting from persistence API"
PersistenceManager.getDoc project_id, doc_id, (error, lines, version, ranges, pathname, projectHistoryId) ->
return callback(error) if error?
-logger.log {project_id, doc_id, lines, version, pathname, projectHistoryId}, "got doc from persistence API"
+logger.info {project_id, doc_id, lines, version, pathname, projectHistoryId}, "got doc from persistence API"
RedisManager.putDocInMemory project_id, doc_id, lines, version, ranges, pathname, projectHistoryId, (error) ->
return callback(error) if error?
callback null, lines, version, ranges, pathname, projectHistoryId, null, false

View file

@ -7,7 +7,7 @@ module.exports = HistoryRedisManager =
recordDocHasHistoryOps: (project_id, doc_id, ops = [], callback = (error) ->) ->
if ops.length == 0
return callback(new Error("cannot push no ops")) # This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush
-logger.log project_id: project_id, doc_id: doc_id, "marking doc in project for history ops"
+logger.info project_id: project_id, doc_id: doc_id, "marking doc in project for history ops"
rclient.sadd Keys.docsWithHistoryOps({project_id}), doc_id, (error) ->
return callback(error) if error?
callback()

View file

@ -11,7 +11,7 @@ module.exports = HttpController =
getDoc: (req, res, next = (error) ->) ->
doc_id = req.params.doc_id
project_id = req.params.project_id
-logger.log project_id: project_id, doc_id: doc_id, "getting doc via http"
+logger.info project_id: project_id, doc_id: doc_id, "getting doc via http"
timer = new Metrics.Timer("http.getDoc")
if req.query?.fromVersion?
@ -22,7 +22,7 @@ module.exports = HttpController =
DocumentManager.getDocAndRecentOpsWithLock project_id, doc_id, fromVersion, (error, lines, version, ops, ranges, pathname) ->
timer.done()
return next(error) if error?
-logger.log project_id: project_id, doc_id: doc_id, "got doc via http"
+logger.info project_id: project_id, doc_id: doc_id, "got doc via http"
if !lines? or !version?
return next(new Errors.NotFoundError("document not found"))
res.send JSON.stringify
@ -44,13 +44,13 @@ module.exports = HttpController =
projectStateHash = req.query?.state
# exclude is string of existing docs "id:version,id:version,..."
excludeItems = req.query?.exclude?.split(',') or []
-logger.log project_id: project_id, exclude: excludeItems, "getting docs via http"
+logger.info project_id: project_id, exclude: excludeItems, "getting docs via http"
timer = new Metrics.Timer("http.getAllDocs")
excludeVersions = {}
for item in excludeItems
[id,version] = item?.split(':')
excludeVersions[id] = version
-logger.log {project_id: project_id, projectStateHash: projectStateHash, excludeVersions: excludeVersions}, "excluding versions"
+logger.info {project_id: project_id, projectStateHash: projectStateHash, excludeVersions: excludeVersions}, "excluding versions"
ProjectManager.getProjectDocsAndFlushIfOld project_id, projectStateHash, excludeVersions, (error, result) ->
timer.done()
if error instanceof Errors.ProjectStateChangedError
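
A side note on the getAllDocs hunk above: the exclude query parameter is a comma-separated list of id:version pairs, and the loop builds excludeVersions, a map from doc id to the version the client already holds. A small worked example, with made-up ids that are not taken from this commit:

# Worked example of the exclude parsing shown above; ids and versions are made up.
excludeItems = "doc-id-1:42,doc-id-2:7".split(',')
excludeVersions = {}
for item in excludeItems
  [id, version] = item?.split(':')
  excludeVersions[id] = version
# excludeVersions is now { "doc-id-1": "42", "doc-id-2": "7" } (note: versions remain strings)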

View file

@ -30,12 +30,12 @@ module.exports = RangesManager =
return callback(error)
response = RangesManager._getRanges rangesTracker
-logger.log {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length}, "applied updates to ranges"
+logger.info {project_id, doc_id, changesCount: response.changes?.length, commentsCount: response.comments?.length}, "applied updates to ranges"
callback null, response
acceptChanges: (change_ids, ranges, callback = (error, ranges) ->) ->
{changes, comments} = ranges
-logger.log "accepting #{ change_ids.length } changes in ranges"
+logger.info "accepting #{ change_ids.length } changes in ranges"
rangesTracker = new RangesTracker(changes, comments)
rangesTracker.removeChangeIds(change_ids)
response = RangesManager._getRanges(rangesTracker)
@ -43,7 +43,7 @@ module.exports = RangesManager =
deleteComment: (comment_id, ranges, callback = (error, ranges) ->) ->
{changes, comments} = ranges
-logger.log {comment_id}, "deleting comment in ranges"
+logger.info {comment_id}, "deleting comment in ranges"
rangesTracker = new RangesTracker(changes, comments)
rangesTracker.removeCommentId(comment_id)
response = RangesManager._getRanges(rangesTracker)

View file

@ -233,7 +233,7 @@ module.exports = RedisManager =
newHash = RedisManager._computeHash(newDocLines)
opVersions = appliedOps.map (op) -> op?.v
-logger.log doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis"
+logger.info doc_id: doc_id, version: newVersion, hash: newHash, op_versions: opVersions, "updating doc in redis"
RedisManager._serializeRanges ranges, (error, ranges) ->
if error?

View file

@ -20,7 +20,7 @@ module.exports = ShareJsUpdateManager =
return model
applyUpdate: (project_id, doc_id, update, lines, version, callback = (error, updatedDocLines) ->) ->
-logger.log project_id: project_id, doc_id: doc_id, update: update, "applying sharejs updates"
+logger.info project_id: project_id, doc_id: doc_id, update: update, "applying sharejs updates"
jobs = []
# We could use a global model for all docs, but we're hitting issues with the
@ -39,7 +39,7 @@ module.exports = ShareJsUpdateManager =
ShareJsUpdateManager._sendOp(project_id, doc_id, update)
else
return callback(error)
-logger.log project_id: project_id, doc_id: doc_id, error: error, "applied update"
+logger.info project_id: project_id, doc_id: doc_id, error: error, "applied update"
model.getSnapshot doc_key, (error, data) =>
return callback(error) if error?
docLines = data.snapshot.split(/\r\n|\n|\r/)

View file

@ -47,7 +47,7 @@ module.exports = UpdateManager =
profile = new Profiler("fetchAndApplyUpdates", {project_id, doc_id})
RealTimeRedisManager.getPendingUpdatesForDoc doc_id, (error, updates) =>
return callback(error) if error?
-logger.log {project_id: project_id, doc_id: doc_id, count: updates.length}, "processing updates"
+logger.info {project_id: project_id, doc_id: doc_id, count: updates.length}, "processing updates"
if updates.length == 0
return callback()
profile.log("getPendingUpdatesForDoc")

View file

@ -16,7 +16,7 @@ describe "DocumentManager", ->
"./HistoryManager": @HistoryManager =
flushDocChangesAsync: sinon.stub()
flushProjectChangesAsync: sinon.stub()
-"logger-sharelatex": @logger = {log: sinon.stub()}
+"logger-sharelatex": @logger = {log: sinon.stub(), info: sinon.stub()}
"./DocOpsManager": @DocOpsManager = {}
"./Metrics": @Metrics =
Timer: class Timer

View file

@ -19,7 +19,9 @@ describe "HistoryRedisManager", ->
key_schema:
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
-"logger-sharelatex": { log: () -> }
+"logger-sharelatex":
+log: ->
+info: ->
@doc_id = "doc-id-123"
@project_id = "project-id-123"
@callback = sinon.stub()

View file

@ -12,7 +12,7 @@ describe "HttpController", ->
"./HistoryManager": @HistoryManager =
flushProjectChangesAsync: sinon.stub()
"./ProjectManager": @ProjectManager = {}
-"logger-sharelatex" : @logger = { log: sinon.stub() }
+"logger-sharelatex" : @logger = { log: sinon.stub(), info: sinon.stub() }
"./Metrics": @Metrics = {}
"./Errors" : Errors
@Metrics.Timer = class Timer
@ -59,7 +59,7 @@ describe "HttpController", ->
.should.equal true
it "should log the request", ->
-@logger.log
+@logger.info
.calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http")
.should.equal true
@ -88,7 +88,7 @@ describe "HttpController", ->
.should.equal true
it "should log the request", ->
-@logger.log
+@logger.info
.calledWith(doc_id: @doc_id, project_id: @project_id, "getting doc via http")
.should.equal true
@ -475,7 +475,7 @@ describe "HttpController", ->
.should.equal true
it "should log the request", ->
-@logger.log
+@logger.info
.calledWith({project_id: @project_id, exclude: []}, "getting docs via http")
.should.equal true

View file

@ -7,10 +7,15 @@ SandboxedModule = require('sandboxed-module')
describe "RangesManager", ->
beforeEach ->
+@logger =
+error: sinon.stub()
+log: sinon.stub()
+warn: sinon.stub()
+info: sinon.stub()
@RangesManager = SandboxedModule.require modulePath,
requires:
-"logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() }
+"logger-sharelatex": @logger
@doc_id = "doc-id-123"
@project_id = "project-id-123"
@user_id = "user-id-123"
@ -184,7 +189,7 @@ describe "RangesManager", ->
beforeEach ->
@RangesManager = SandboxedModule.require modulePath,
requires:
-"logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() }
+"logger-sharelatex": @logger
"./RangesTracker":@RangesTracker = SandboxedModule.require "../../../../app/js/RangesTracker.js"
@ranges = {
@ -226,7 +231,7 @@ describe "RangesManager", ->
done()
it "should log the call with the correct number of changes", ->
-@logger.log
+@logger.info
.calledWith("accepting 1 changes in ranges")
.should.equal true
@ -258,7 +263,7 @@ describe "RangesManager", ->
done()
it "should log the call with the correct number of changes", ->
-@logger.log
+@logger.info
.calledWith("accepting #{ @change_ids.length } changes in ranges")
.should.equal true

View file

@ -14,7 +14,7 @@ describe "RedisManager", ->
tk.freeze(new Date())
@RedisManager = SandboxedModule.require modulePath,
requires:
-"logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub() }
+"logger-sharelatex": @logger = { error: sinon.stub(), log: sinon.stub(), warn: sinon.stub(), info:-> }
"./ProjectHistoryRedisManager": @ProjectHistoryRedisManager = {}
"settings-sharelatex": @settings = {
documentupdater: {logHashErrors: {write:true, read:true}}

View file

@ -16,7 +16,7 @@ describe "ShareJsUpdateManager", ->
constructor: (@db) ->
"./ShareJsDB" : @ShareJsDB = { mockDB: true }
"redis-sharelatex" : createClient: () => @rclient = auth:->
-"logger-sharelatex": @logger = { log: sinon.stub() }
+"logger-sharelatex": @logger = { log: sinon.stub(), info: -> }
"./RealTimeRedisManager": @RealTimeRedisManager = {}
globals:
clearTimeout: @clearTimeout = sinon.stub()

View file

@ -16,7 +16,7 @@ describe "UpdateManager", ->
"./RealTimeRedisManager" : @RealTimeRedisManager = {}
"./ShareJsUpdateManager" : @ShareJsUpdateManager = {}
"./HistoryManager" : @HistoryManager = {}
-"logger-sharelatex": @logger = { log: sinon.stub() }
+"logger-sharelatex": @logger = { log: sinon.stub(), info:-> }
"./Metrics": @Metrics =
Timer: class Timer
done: sinon.stub()