Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-21 20:47:08 -05:00
[web] Fetch and parse compile log files asynchronously (#6220)
GitOrigin-RevId: 91b4c3551b705e7c07205b1bced3ae4768b10edb
parent 74b76c853d
commit 439ba17bd5
3 changed files with 98 additions and 57 deletions
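In outline, the change splits the old all-in-one handleOutputFiles helper in two: handleOutputFiles now only resolves the PDF URLs from the compiler output, while a new handleLogFiles helper fetches and parses output.log and output.blg on its own, taking an AbortSignal so a newer compile can cancel an in-flight request. A condensed sketch of the resulting call pattern, simplified from the diffs below (helper and setter names follow the diff; the surrounding wiring is illustrative only):

const abortController = new AbortController()

// resolve the PDF URLs from the compile output (no extra requests)
handleOutputFiles(outputFiles, projectId, data).then(result => {
  setPdfDownloadUrl(result.pdfDownloadUrl)
  setPdfUrl(result.pdfUrl)

  // fetch + parse output.log / output.blg separately, letting a newer
  // compile cancel the requests via the signal
  handleLogFiles(outputFiles, data, abortController.signal).then(result => {
    setRawLog(result.log)
    setLogEntries(result.logEntries)
  })
})

// on the next compile (or unmount):
abortController.abort()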
@@ -11,13 +11,18 @@ export const buildFileList = (outputFiles, clsiServerId) => {
     params.set('clsiserverid', clsiServerId)
   }

+  const queryString = params.toString()
+
   const allFiles = []

   // filter out ignored files and set some properties
   for (const file of outputFiles.values()) {
     if (!ignoreFiles.includes(file.path)) {
       file.main = file.path.startsWith('output.')
-      file.url += `?${params}`
+
+      if (queryString.length) {
+        file.url += `?${queryString}`
+      }

       allFiles.push(file)
     }
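In the file-list helper, the query string is now built once with params.toString() and appended only when it is non-empty. URLSearchParams stringifies to an empty string when it holds no entries, so the old unconditional file.url += `?${params}` left a dangling "?" on every file URL whenever there was no clsiserverid to add. A quick illustration (plain JavaScript, not code from the repo):

const params = new URLSearchParams()
console.log(`output.pdf?${params}`) // logs "output.pdf?" with a dangling question mark

params.set('clsiserverid', 'clsi-123') // illustrative value
const queryString = params.toString()
console.log(queryString.length ? `output.pdf?${queryString}` : 'output.pdf')
// logs "output.pdf?clsiserverid=clsi-123"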
@@ -1,21 +1,15 @@
 import getMeta from '../../../utils/meta'
 import HumanReadableLogs from '../../../ide/human-readable-logs/HumanReadableLogs'
 import BibLogParser from '../../../ide/log-parser/bib-log-parser'
 import { buildFileList } from './file-list'
 import { v4 as uuid } from 'uuid'

 const searchParams = new URLSearchParams(window.location.search)

-export const handleOutputFiles = async (projectId, data) => {
+export const handleOutputFiles = async (outputFiles, projectId, data) => {
   const result = {}
-
-  const outputFiles = new Map()
   const pdfDownloadDomain = data.pdfDownloadDomain ?? ''

-  for (const outputFile of data.outputFiles) {
-    outputFiles.set(outputFile.path, outputFile)
-  }
-
   const outputFile = outputFiles.get('output.pdf')

   if (outputFile) {
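handleOutputFiles no longer builds the path-to-file map itself; the caller now passes in a Map keyed by output path (the compile-context hunk further down shows where it is built). For reference, the caller's loop is equivalent to a one-liner like this (illustrative, not taken from the diff):

const outputFiles = new Map(data.outputFiles.map(file => [file.path, file]))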
@@ -46,18 +40,19 @@ export const handleOutputFiles = async (projectId, data) => {
     result.pdfDownloadUrl = `/download/project/${projectId}/build/${outputFile.build}/output/output.pdf?${params}`
   }

-  const params = new URLSearchParams({
-    compileGroup: data.compileGroup,
-  })
+  return result
+}

-  if (data.clsiServerId) {
-    params.set('clsiserverid', data.clsiServerId)
-  }
+export const handleLogFiles = async (outputFiles, data, signal) => {
+  const pdfDownloadDomain = data.pdfDownloadDomain ?? ''

-  result.logEntries = {
-    errors: [],
-    warnings: [],
-    typesetting: [],
-  }
+  const result = {
+    log: null,
+    logEntries: {
+      errors: [],
+      warnings: [],
+      typesetting: [],
+    },
+  }

   function accumulateResults(newEntries, type) {
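handleLogFiles collects everything into a single result object, { log, logEntries: { errors, warnings, typesetting } }, and routes each parser's output through accumulateResults. The accumulator itself is untouched by this diff and not shown here; a hypothetical reduced version, only to illustrate the shape it consumes (the real implementation differs):

function accumulateResults(newEntries, type) {
  // hypothetical shape: merge each category into the shared result,
  // with `type` used to label entries from a secondary parser (e.g. 'BibTeX:')
  for (const key of ['errors', 'warnings', 'typesetting']) {
    if (newEntries[key]) {
      result.logEntries[key].push(...newEntries[key])
    }
  }
}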
@@ -80,38 +75,49 @@ export const handleOutputFiles = async (projectId, data) => {
   const logFile = outputFiles.get('output.log')

   if (logFile) {
-    const response = await fetch(`${pdfDownloadDomain}${logFile.url}?${params}`)
+    try {
+      const response = await fetch(`${pdfDownloadDomain}${logFile.url}`, {
+        signal,
+      })

-    const log = await response.text()
+      result.log = await response.text()

-    result.log = log
+      const { errors, warnings, typesetting } = HumanReadableLogs.parse(
+        result.log,
+        {
+          ignoreDuplicates: true,
+        }
+      )

-    const { errors, warnings, typesetting } = HumanReadableLogs.parse(log, {
-      ignoreDuplicates: true,
-    })
-
-    accumulateResults({ errors, warnings, typesetting })
+      accumulateResults({ errors, warnings, typesetting })
+    } catch (e) {
+      console.warn(e) // ignore failure to fetch/parse the log file, but log a warning
+    }
   }

   const blgFile = outputFiles.get('output.blg')

   if (blgFile) {
-    const response = await fetch(`${pdfDownloadDomain}${blgFile.url}?${params}`)
-
-    const log = await response.text()
-
     try {
-      const { errors, warnings } = new BibLogParser(log, {
-        maxErrors: 100,
-      }).parse()
-      accumulateResults({ errors, warnings }, 'BibTeX:')
+      const response = await fetch(`${pdfDownloadDomain}${blgFile.url}`, {
+        signal,
+      })
+
+      const log = await response.text()
+
+      try {
+        const { errors, warnings } = new BibLogParser(log, {
+          maxErrors: 100,
+        }).parse()
+        accumulateResults({ errors, warnings }, 'BibTeX:')
+      } catch (e) {
+        // BibLog parsing errors are ignored
+      }
     } catch (e) {
-      // BibLog parsing errors are ignored
+      console.warn(e) // ignore failure to fetch/parse the log file, but log a warning
     }
   }

-  result.fileList = buildFileList(outputFiles, data.clsiServerId)
-
   result.logEntries.all = [
     ...result.logEntries.errors,
     ...result.logEntries.warnings,
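Both fetches are now wrapped in a try/catch whose only action is a console.warn: a missing or unparsable log file degrades to "no log entries" instead of rejecting the promise the caller is waiting on. Passing the AbortSignal also means an aborted request lands in the same catch (fetch rejects with an AbortError), so cancelling a stale compile stays silent apart from the warning. A condensed illustration of the pattern (not the repo's code):

async function fetchTextOrNull(url, signal) {
  try {
    const response = await fetch(url, { signal })
    return await response.text()
  } catch (e) {
    console.warn(e) // network failures and AbortError end up here alike
    return null
  }
}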
@@ -16,11 +16,13 @@ import DocumentCompiler from '../../features/pdf-preview/util/compiler'
 import { send, sendMBSampled } from '../../infrastructure/event-tracking'
 import {
   buildLogEntryAnnotations,
+  handleLogFiles,
   handleOutputFiles,
 } from '../../features/pdf-preview/util/output-files'
 import { useIdeContext } from './ide-context'
 import { useProjectContext } from './project-context'
 import { useEditorContext } from './editor-context'
+import { buildFileList } from '../../features/pdf-preview/util/file-list'

 export const CompileContext = createContext()
@@ -214,37 +216,61 @@ export function CompileProvider({ children }) {
   // note: this should _only_ run when `data` changes,
   // the other dependencies must all be static
   useEffect(() => {
+    const abortController = new AbortController()
+
     if (data) {
       if (data.clsiServerId) {
         setClsiServerId(data.clsiServerId) // set in scope, for PdfSynctexController
       }

       if (data.outputFiles) {
-        handleOutputFiles(projectId, data).then(result => {
-          setLogEntryAnnotations(
-            buildLogEntryAnnotations(result.logEntries.all, ide.fileTreeManager)
-          )
+        const outputFiles = new Map()
+
+        for (const outputFile of data.outputFiles) {
+          outputFiles.set(outputFile.path, outputFile)
+        }
+
+        // set the PDF URLs
+        handleOutputFiles(outputFiles, projectId, data).then(result => {
           if (data.status === 'success') {
             setPdfDownloadUrl(result.pdfDownloadUrl)
             setPdfUrl(result.pdfUrl)
           }
-          setLogEntries(result.logEntries)
-          setFileList(result.fileList)
-          setRawLog(result.log)
-
-          // sample compile stats for real users
-          if (!window.user.alphaProgram && data.status === 'success') {
-            sendMBSampled(
-              'compile-result',
-              {
-                errors: result.logEntries.errors.length,
-                warnings: result.logEntries.warnings.length,
-                typesetting: result.logEntries.typesetting.length,
-                newPdfPreview: true, // TODO: is this useful?
-              },
-              0.01
-            )
-          }
+
+          setFileList(buildFileList(outputFiles, data.clsiServerId))
+
+          // handle log files
+          // asynchronous (TODO: cancel on new compile?)
+          setLogEntryAnnotations(null)
+          setLogEntries(null)
+          setRawLog(null)
+
+          handleLogFiles(outputFiles, data, abortController.signal).then(
+            result => {
+              setRawLog(result.log)
+              setLogEntries(result.logEntries)
+              setLogEntryAnnotations(
+                buildLogEntryAnnotations(
+                  result.logEntries.all,
+                  ide.fileTreeManager
+                )
+              )
+
+              // sample compile stats for real users
+              if (!window.user.alphaProgram && data.status === 'success') {
+                sendMBSampled(
+                  'compile-result',
+                  {
+                    errors: result.logEntries.errors.length,
+                    warnings: result.logEntries.warnings.length,
+                    typesetting: result.logEntries.typesetting.length,
+                    newPdfPreview: true, // TODO: is this useful?
+                  },
+                  0.01
+                )
+              }
+            }
+          )
         })
       }
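Because the parsed log entries now arrive some time after the compile result itself, the effect clears the log-related state up front (setLogEntryAnnotations(null), setLogEntries(null), setRawLog(null)) and only fills it back in when handleLogFiles resolves; the PDF URL and the file list need no extra round trip and are set straight away. A stripped-down illustration of that ordering, using a plain variable instead of the component's real state setters:

let logEntries = { errors: ['stale error from the previous compile'] }

function onCompileData(outputFiles, data, signal) {
  logEntries = null // clear stale entries immediately; the UI can show a loading state

  handleLogFiles(outputFiles, data, signal).then(result => {
    logEntries = result.logEntries // filled in whenever fetching + parsing finishes
  })
}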
@@ -298,6 +324,10 @@ export function CompileProvider({ children }) {
           break
       }
     }
+
+    return () => {
+      abortController.abort()
+    }
   }, [
     data,
     ide,
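The cleanup function returned from the effect aborts the controller, so when `data` changes (a new compile arrives) or the component unmounts, any log fetch still in flight is cancelled rather than racing the newer one. This is the standard React pattern for cancellable async work inside useEffect; a minimal standalone sketch (generic hook, not the provider's code, with a simplified handleLogFiles signature):

import { useEffect, useState } from 'react'

function useAsyncLogs(data, handleLogFiles) {
  const [logEntries, setLogEntries] = useState(null)

  useEffect(() => {
    const abortController = new AbortController()

    if (data) {
      handleLogFiles(data, abortController.signal).then(result => {
        setLogEntries(result.logEntries)
      })
    }

    return () => {
      // abort the in-flight fetch when `data` changes or the component unmounts
      abortController.abort()
    }
  }, [data, handleLogFiles])

  return logEntries
}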