Add limit to log parsing for react log viewer (#6213)

* Add limit to log parsing for react log viewer

GitOrigin-RevId: e2e5ffd6eaf1c207cc4f49fb5c637457990c328e
This commit is contained in:
Miguel Serrano 2022-01-11 18:19:38 +01:00 committed by Copybot
parent cf4269d4ee
commit 35396ab61a
3 changed files with 28 additions and 15 deletions

View file

@@ -104,7 +104,9 @@ export const handleOutputFiles = async (projectId, data) => {
     const log = await response.text()
     try {
-      const { errors, warnings } = new BibLogParser(log, {}).parse()
+      const { errors, warnings } = new BibLogParser(log, {
+        maxErrors: 100,
+      }).parse()
       accumulateResults({ errors, warnings }, 'BibTeX:')
     } catch (e) {
       // BibLog parsing errors are ignored

View file

@@ -18,7 +18,7 @@ const MESSAGE_LEVELS = {
   ERROR: 'error',
 }
-const parserReducer = function (maxErrors) {
+const parserReducer = function (maxErrors, buildMaxErrorsReachedMessage) {
   return function (accumulator, parser) {
     const consume = function (logText, regex, process) {
       let match
@@ -32,16 +32,18 @@ const parserReducer = function (maxErrors) {
       // Too many log entries can cause browser crashes
       // Construct a too many files error from the last match
-      if (iterationCount >= maxErrors) {
-        const level = newEntry.level + 's'
-        newEntry.message = [
-          'Over',
-          maxErrors,
-          level,
-          'returned. Download raw logs to see full list',
-        ].join(' ')
-        newEntry.line = undefined
-        result.unshift(newEntry)
+      if (maxErrors != null && iterationCount >= maxErrors) {
+        if (buildMaxErrorsReachedMessage) {
+          const level = newEntry.level + 's'
+          newEntry.message = [
+            'Over',
+            maxErrors,
+            level,
+            'returned. Download raw logs to see full list',
+          ].join(' ')
+          newEntry.line = undefined
+          result.unshift(newEntry)
+        }
         return [result, '']
       }
@@ -171,11 +173,17 @@ export default class BibLogParser {
     }
     // reduce over the parsers, starting with the log text,
     let [allWarnings, remainingText] = this.warningParsers.reduce(
-      parserReducer(this.options.maxErrors),
+      parserReducer(
+        this.options.maxErrors,
+        this.options.buildMaxErrorsReachedMessage
+      ),
       [[], this.text]
     )
     ;[allErrors, remainingText] = this.errorParsers.reduce(
-      parserReducer(this.options.maxErrors),
+      parserReducer(
+        this.options.maxErrors,
+        this.options.buildMaxErrorsReachedMessage
+      ),
       [[], remainingText]
     )
     result.warnings = allWarnings

View file

@@ -640,7 +640,10 @@ App.controller(
       }
       function processBiber(log) {
-        const bibLogParser = new BibLogParser(log, { maxErrors: 100 })
+        const bibLogParser = new BibLogParser(log, {
+          maxErrors: 100,
+          buildMaxErrorsReachedMessage: true,
+        })
         const { errors, warnings } = bibLogParser.parse(log, {})
         const all = [].concat(errors, warnings)
         accumulateResults({ type: 'BibTeX:', all, errors, warnings })