Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-29 16:33:39 -05:00
Merge pull request #7060 from overleaf/ii-5273-latex-log-parsers

Clean up log parsers code

GitOrigin-RevId: 7dc4dbbb07bba72dda63d75502602570620fe07d
parent 21e753ca21
commit be0774be8f

2 changed files with 69 additions and 91 deletions
@@ -18,32 +18,21 @@ const MESSAGE_LEVELS = {
   ERROR: 'error',
 }

-const parserReducer = function (maxErrors, buildMaxErrorsReachedMessage) {
+const parserReducer = function (maxErrors) {
   return function (accumulator, parser) {
     const consume = function (logText, regex, process) {
       let match
       let text = logText
       const result = []
-      const re = regex
       let iterationCount = 0
-      while ((match = re.exec(text))) {
-        iterationCount += 1
+      while ((match = regex.exec(text))) {
+        iterationCount++
         const newEntry = process(match)

         // Too many log entries can cause browser crashes
         // Construct a too many files error from the last match
         if (maxErrors != null && iterationCount >= maxErrors) {
-          if (buildMaxErrorsReachedMessage) {
-            const level = newEntry.level + 's'
-            newEntry.message = [
-              'Over',
-              maxErrors,
-              level,
-              'returned. Download raw logs to see full list',
-            ].join(' ')
-            newEntry.line = undefined
-            result.unshift(newEntry)
-          }
           return [result, '']
         }
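
For orientation, the consume helper above threads the remaining log text through each [regex, process] parser pair and caps how many entries it produces. A minimal standalone sketch of that reducer pattern; the reduceParsers name, the warning parser, and the log text below are made up for illustration and simplify the real consume logic:

// Sketch of reducing [regex, process] parser pairs over log text.
// Each match becomes an entry; the matched text is removed so later
// parsers do not see it again (assumes each match consumes at least one character).
const reduceParsers = maxEntries =>
  function (accumulator, [regex, process]) {
    const [entries, text] = accumulator
    const result = []
    let remaining = text
    let match
    let count = 0
    while ((match = regex.exec(remaining))) {
      count++
      const entry = process(match)
      if (maxEntries != null && count >= maxEntries) {
        return [entries.concat(result), '']
      }
      result.push(entry)
      remaining =
        remaining.slice(0, match.index) +
        remaining.slice(match.index + match[0].length)
      regex.lastIndex = 0 // rescan from the start of the shortened text
    }
    return [entries.concat(result), remaining]
  }

// Hypothetical parser pair and .blg-style input:
const warningParser = [
  /Warning--(.+)\n/g,
  m => ({ level: 'warning', message: m[1], raw: m[0] }),
]
const [entries, leftover] = [warningParser].reduce(reduceParsers(100), [
  [],
  'Warning--empty journal in foo2021\nDone.\n',
])
console.log(entries.length, JSON.stringify(leftover)) // 1 "Done.\n"
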
@@ -55,6 +44,7 @@ const parserReducer = function (maxErrors, buildMaxErrorsReachedMessage) {
           match.input.length
         )
       }

       return [result, text]
     }

@@ -66,12 +56,12 @@ const parserReducer = function (maxErrors, buildMaxErrorsReachedMessage) {
 }

 export default class BibLogParser {
-  constructor(text, options) {
+  constructor(text, options = {}) {
     if (typeof text !== 'string') {
       throw new Error('BibLogParser Error: text parameter must be a string')
     }
     this.text = text.replace(/(\r\n)|\r/g, '\n')
-    this.options = options || {}
+    this.options = options
     this.lines = text.split('\n')

     // each parser is a pair of [regex, processFunction], where processFunction
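
The constructor change above swaps the runtime `options || {}` fallback for a default parameter. A small sketch of the difference (the class name is illustrative, not from the codebase); note that a default parameter only applies when the argument is undefined, whereas `||` also caught null and other falsy values:

class ParserLike {
  constructor(text, options = {}) {
    this.text = text
    this.options = options // no `options || {}` fallback needed
  }
}

console.log(new ParserLike('log').options) // {}
console.log(new ParserLike('log', { maxErrors: 10 }).options) // { maxErrors: 10 }
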
@@ -163,33 +153,23 @@ export default class BibLogParser {
   }

   parseBibtex() {
-    let allErrors
-    const result = {
-      all: [],
-      errors: [],
-      warnings: [],
+    // reduce over the parsers, starting with the log text,
+    const [allWarnings, remainingText] = this.warningParsers.reduce(
+      parserReducer(this.options.maxErrors),
+      [[], this.text]
+    )
+    const [allErrors] = this.errorParsers.reduce(
+      parserReducer(this.options.maxErrors),
+      [[], remainingText]
+    )
+
+    return {
+      all: allWarnings.concat(allErrors),
+      errors: allErrors,
+      warnings: allWarnings,
       files: [], // not used
       typesetting: [], // not used
     }
-    // reduce over the parsers, starting with the log text,
-    let [allWarnings, remainingText] = this.warningParsers.reduce(
-      parserReducer(
-        this.options.maxErrors,
-        this.options.buildMaxErrorsReachedMessage
-      ),
-      [[], this.text]
-    )
-    ;[allErrors, remainingText] = this.errorParsers.reduce(
-      parserReducer(
-        this.options.maxErrors,
-        this.options.buildMaxErrorsReachedMessage
-      ),
-      [[], remainingText]
-    )
-    result.warnings = allWarnings
-    result.errors = allErrors
-    result.all = allWarnings.concat(allErrors)
-    return result
   }

   parseBiber() {
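
The reworked parseBibtex builds its result directly from two reduce passes, first over the warning parsers and then over the error parsers on whatever text remains. A hedged usage sketch, assuming BibLogParser has been imported from this module; the .blg content below is invented:

const blgText = [
  'This is BibTeX, Version 0.99d',
  "Warning--I didn't find a database entry for 'knuth84'",
  'Done.',
].join('\n')

const result = new BibLogParser(blgText, { maxErrors: 100 }).parseBibtex()
console.log(result.warnings.length) // entries from the warning parsers
console.log(result.errors.length) // entries from the error parsers
console.log(result.all.length) // warnings concatenated with errors
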
@@ -203,9 +183,7 @@ export default class BibLogParser {
     this.lines.forEach(function (line) {
       const match = line.match(LINE_SPLITTER_REGEX)
       if (match) {
-        const fullLine = match[0]
-        const messageType = match[2]
-        const message = match[3]
+        const [fullLine, , messageType, message] = match
         const newEntry = {
           file: '',
           level: MESSAGE_LEVELS[messageType] || 'INFO',
@@ -218,10 +196,8 @@ export default class BibLogParser {
         const lineMatch = newEntry.message.match(
           /^BibTeX subsystem: \/.+\/(\w+\.\w+)_.+, line (\d+), (.+)$/
         )
-        if (lineMatch && lineMatch.length === 4) {
-          const fileName = lineMatch[1]
-          const lineNumber = lineMatch[2]
-          const realMessage = lineMatch[3]
+        if (lineMatch) {
+          const [, fileName, lineNumber, realMessage] = lineMatch
           newEntry.file = fileName
           newEntry.line = lineNumber
           newEntry.message = realMessage
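
Both destructuring hunks above unpack regex match arrays positionally, using a hole to skip an unwanted capture group. A standalone illustration with a made-up pattern and input:

// match[0] is the whole match; the hole skips match[1].
const match = 'error: refs.bib line 12'.match(/^(\w+): (\S+) line (\d+)$/)
const [fullLine, , fileName, lineNumber] = match
console.log(fullLine) // 'error: refs.bib line 12'
console.log(fileName, lineNumber) // 'refs.bib' '12'
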
@@ -15,9 +15,8 @@ const STATE = {
 }

 export default class LatexParser {
-  constructor(text, options) {
+  constructor(text, options = {}) {
     this.state = STATE.NORMAL
-    options = options || {}
     this.fileBaseNames = options.fileBaseNames || [/compiles/, /\/usr\/local/]
     this.ignoreDuplicates = options.ignoreDuplicates
     this.data = []
@@ -159,10 +158,10 @@ export default class LatexParser {
   parseMultipleWarningLine() {
     // Some package warnings are multiple lines, let's parse the first line
     let warningMatch = this.currentLine.match(PACKAGE_WARNING_REGEX)
+    // Something strange happened, return early
     if (!warningMatch) {
       return
     }
-    // Something strange happened, return early
     const warningLines = [warningMatch[1]]
     let lineMatch = this.currentLine.match(LINES_REGEX)
     let line = lineMatch ? parseInt(lineMatch[1], 10) : null
@@ -285,62 +284,60 @@ export default class LatexParser {

   postProcess(data) {
     const all = []
-    const errors = []
-    const warnings = []
-    const typesetting = []
-    const hashes = []
+    const errorsByLevel = {
+      error: [],
+      warning: [],
+      typesetting: [],
+    }
+    const hashes = new Set()

     const hashEntry = entry => entry.raw

-    let i = 0
-    while (i < data.length) {
-      if (this.ignoreDuplicates && hashes.indexOf(hashEntry(data[i])) > -1) {
-        i++
-        continue
-      }
-      if (data[i].level === 'error') {
-        errors.push(data[i])
-      } else if (data[i].level === 'typesetting') {
-        typesetting.push(data[i])
-      } else if (data[i].level === 'warning') {
-        warnings.push(data[i])
-      }
-      all.push(data[i])
-      hashes.push(hashEntry(data[i]))
-      i++
-    }
+    data.forEach(item => {
+      const hash = hashEntry(item)
+
+      if (this.ignoreDuplicates && hashes.has(hash)) {
+        return
+      }
+
+      errorsByLevel[item.level]?.push(item)
+
+      all.push(item)
+      hashes.add(hash)
+    })

     return {
-      errors,
-      warnings,
-      typesetting,
+      errors: errorsByLevel.error,
+      warnings: errorsByLevel.warning,
+      typesetting: errorsByLevel.typesetting,
       all,
       files: this.rootFileList,
     }
   }
 }

-const LogText = class LogText {
+class LogText {
   constructor(text) {
     this.text = text.replace(/(\r\n)|\r/g, '\n')
     // Join any lines which look like they have wrapped.
     const wrappedLines = this.text.split('\n')
     this.lines = [wrappedLines[0]]
-    let i = 1
-    while (i < wrappedLines.length) {
+    for (let i = 1; i < wrappedLines.length; i++) {
       // If the previous line is as long as the wrap limit then
       // append this line to it.
       // Some lines end with ... when LaTeX knows it's hit the limit
       // These shouldn't be wrapped.
-      if (
-        wrappedLines[i - 1].length === LOG_WRAP_LIMIT &&
-        wrappedLines[i - 1].slice(-3) !== '...'
-      ) {
-        this.lines[this.lines.length - 1] += wrappedLines[i]
+      const prevLine = wrappedLines[i - 1]
+      const currentLine = wrappedLines[i]
+
+      if (prevLine.length === LOG_WRAP_LIMIT && prevLine.slice(-3) !== '...') {
+        this.lines[this.lines.length - 1] += currentLine
       } else {
-        this.lines.push(wrappedLines[i])
+        this.lines.push(currentLine)
       }
-      i++
     }

     this.row = 0
   }
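
The postProcess rewrite groups entries by level in one object and deduplicates via a Set keyed on each entry's raw text. A minimal sketch of that pattern with invented entries; unlike the parser, which only deduplicates when ignoreDuplicates is set, this sketch always drops repeats:

const entries = [
  { level: 'error', raw: 'Undefined control sequence' },
  { level: 'warning', raw: 'Overfull \\hbox (badness 10000)' },
  { level: 'error', raw: 'Undefined control sequence' }, // duplicate
]

const byLevel = { error: [], warning: [], typesetting: [] }
const seen = new Set()
const all = []

for (const entry of entries) {
  if (seen.has(entry.raw)) continue
  byLevel[entry.level]?.push(entry) // unknown levels are ignored silently
  all.push(entry)
  seen.add(entry.raw)
}

console.log(byLevel.error.length, byLevel.warning.length, all.length) // 1 1 2
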
@@ -363,16 +360,21 @@ const LogText = class LogText {

   linesUpToNextMatchingLine(match) {
     const lines = []
-    let nextLine = this.nextLine()
-    if (nextLine !== false) {
-      lines.push(nextLine)
-    }
-    while (nextLine !== false && !nextLine.match(match) && nextLine !== false) {
-      nextLine = this.nextLine()
-      if (nextLine !== false) {
-        lines.push(nextLine)
+
+    while (true) {
+      const nextLine = this.nextLine()
+
+      if (nextLine === false) {
+        break
+      }
+
+      lines.push(nextLine)
+
+      if (nextLine.match(match)) {
+        break
       }
     }

     return lines
   }
 }
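
The new linesUpToNextMatchingLine collects lines until the reader runs out or a line matches the given pattern, including the matching line itself. A standalone sketch of the same loop over a hypothetical reader; in the real class, nextLine() returns false at end of input:

// Hypothetical reader mirroring LogText's nextLine() contract.
function makeReader(lines) {
  let row = 0
  return { nextLine: () => (row < lines.length ? lines[row++] : false) }
}

function linesUpToNextMatchingLine(reader, match) {
  const lines = []
  while (true) {
    const nextLine = reader.nextLine()
    if (nextLine === false) {
      break
    }
    lines.push(nextLine)
    if (nextLine.match(match)) {
      break
    }
  }
  return lines
}

const reader = makeReader(['l.10 \\badmacro', 'some context', '', 'next error'])
console.log(linesUpToNextMatchingLine(reader, /^$/))
// ['l.10 \\badmacro', 'some context', ''], stopping at the first blank line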