[web] Fetch and parse compile log files asynchronously (#6220)

GitOrigin-RevId: 91b4c3551b705e7c07205b1bced3ae4768b10edb
This commit is contained in:
Alf Eaton 2022-03-25 09:42:10 +00:00 committed by Copybot
parent 74b76c853d
commit 439ba17bd5
3 changed files with 98 additions and 57 deletions

View file

@@ -11,13 +11,18 @@ export const buildFileList = (outputFiles, clsiServerId) => {
params.set('clsiserverid', clsiServerId) params.set('clsiserverid', clsiServerId)
} }
const queryString = params.toString()
const allFiles = [] const allFiles = []
// filter out ignored files and set some properties // filter out ignored files and set some properties
for (const file of outputFiles.values()) { for (const file of outputFiles.values()) {
if (!ignoreFiles.includes(file.path)) { if (!ignoreFiles.includes(file.path)) {
file.main = file.path.startsWith('output.') file.main = file.path.startsWith('output.')
file.url += `?${params}`
if (queryString.length) {
file.url += `?${queryString}`
}
allFiles.push(file) allFiles.push(file)
} }

View file

@@ -1,21 +1,15 @@
import getMeta from '../../../utils/meta' import getMeta from '../../../utils/meta'
import HumanReadableLogs from '../../../ide/human-readable-logs/HumanReadableLogs' import HumanReadableLogs from '../../../ide/human-readable-logs/HumanReadableLogs'
import BibLogParser from '../../../ide/log-parser/bib-log-parser' import BibLogParser from '../../../ide/log-parser/bib-log-parser'
import { buildFileList } from './file-list'
import { v4 as uuid } from 'uuid' import { v4 as uuid } from 'uuid'
const searchParams = new URLSearchParams(window.location.search) const searchParams = new URLSearchParams(window.location.search)
export const handleOutputFiles = async (projectId, data) => { export const handleOutputFiles = async (outputFiles, projectId, data) => {
const result = {} const result = {}
const outputFiles = new Map()
const pdfDownloadDomain = data.pdfDownloadDomain ?? '' const pdfDownloadDomain = data.pdfDownloadDomain ?? ''
for (const outputFile of data.outputFiles) {
outputFiles.set(outputFile.path, outputFile)
}
const outputFile = outputFiles.get('output.pdf') const outputFile = outputFiles.get('output.pdf')
if (outputFile) { if (outputFile) {
@@ -46,18 +40,19 @@ export const handleOutputFiles = async (projectId, data) => {
result.pdfDownloadUrl = `/download/project/${projectId}/build/${outputFile.build}/output/output.pdf?${params}` result.pdfDownloadUrl = `/download/project/${projectId}/build/${outputFile.build}/output/output.pdf?${params}`
} }
const params = new URLSearchParams({ return result
compileGroup: data.compileGroup, }
})
if (data.clsiServerId) { export const handleLogFiles = async (outputFiles, data, signal) => {
params.set('clsiserverid', data.clsiServerId) const pdfDownloadDomain = data.pdfDownloadDomain ?? ''
}
result.logEntries = { const result = {
log: null,
logEntries: {
errors: [], errors: [],
warnings: [], warnings: [],
typesetting: [], typesetting: [],
},
} }
function accumulateResults(newEntries, type) { function accumulateResults(newEntries, type) {
@@ -80,23 +75,33 @@ export const handleOutputFiles = async (projectId, data) => {
const logFile = outputFiles.get('output.log') const logFile = outputFiles.get('output.log')
if (logFile) { if (logFile) {
const response = await fetch(`${pdfDownloadDomain}${logFile.url}?${params}`) try {
const response = await fetch(`${pdfDownloadDomain}${logFile.url}`, {
const log = await response.text() signal,
result.log = log
const { errors, warnings, typesetting } = HumanReadableLogs.parse(log, {
ignoreDuplicates: true,
}) })
result.log = await response.text()
const { errors, warnings, typesetting } = HumanReadableLogs.parse(
result.log,
{
ignoreDuplicates: true,
}
)
accumulateResults({ errors, warnings, typesetting }) accumulateResults({ errors, warnings, typesetting })
} catch (e) {
console.warn(e) // ignore failure to fetch/parse the log file, but log a warning
}
} }
const blgFile = outputFiles.get('output.blg') const blgFile = outputFiles.get('output.blg')
if (blgFile) { if (blgFile) {
const response = await fetch(`${pdfDownloadDomain}${blgFile.url}?${params}`) try {
const response = await fetch(`${pdfDownloadDomain}${blgFile.url}`, {
signal,
})
const log = await response.text() const log = await response.text()
@@ -108,9 +113,10 @@ export const handleOutputFiles = async (projectId, data) => {
} catch (e) { } catch (e) {
// BibLog parsing errors are ignored // BibLog parsing errors are ignored
} }
} catch (e) {
console.warn(e) // ignore failure to fetch/parse the log file, but log a warning
}
} }
result.fileList = buildFileList(outputFiles, data.clsiServerId)
result.logEntries.all = [ result.logEntries.all = [
...result.logEntries.errors, ...result.logEntries.errors,

View file

@@ -16,11 +16,13 @@ import DocumentCompiler from '../../features/pdf-preview/util/compiler'
import { send, sendMBSampled } from '../../infrastructure/event-tracking' import { send, sendMBSampled } from '../../infrastructure/event-tracking'
import { import {
buildLogEntryAnnotations, buildLogEntryAnnotations,
handleLogFiles,
handleOutputFiles, handleOutputFiles,
} from '../../features/pdf-preview/util/output-files' } from '../../features/pdf-preview/util/output-files'
import { useIdeContext } from './ide-context' import { useIdeContext } from './ide-context'
import { useProjectContext } from './project-context' import { useProjectContext } from './project-context'
import { useEditorContext } from './editor-context' import { useEditorContext } from './editor-context'
import { buildFileList } from '../../features/pdf-preview/util/file-list'
export const CompileContext = createContext() export const CompileContext = createContext()
@@ -214,23 +216,45 @@ export function CompileProvider({ children }) {
// note: this should _only_ run when `data` changes, // note: this should _only_ run when `data` changes,
// the other dependencies must all be static // the other dependencies must all be static
useEffect(() => { useEffect(() => {
const abortController = new AbortController()
if (data) { if (data) {
if (data.clsiServerId) { if (data.clsiServerId) {
setClsiServerId(data.clsiServerId) // set in scope, for PdfSynctexController setClsiServerId(data.clsiServerId) // set in scope, for PdfSynctexController
} }
if (data.outputFiles) { if (data.outputFiles) {
handleOutputFiles(projectId, data).then(result => { const outputFiles = new Map()
setLogEntryAnnotations(
buildLogEntryAnnotations(result.logEntries.all, ide.fileTreeManager) for (const outputFile of data.outputFiles) {
) outputFiles.set(outputFile.path, outputFile)
}
// set the PDF URLs
handleOutputFiles(outputFiles, projectId, data).then(result => {
if (data.status === 'success') { if (data.status === 'success') {
setPdfDownloadUrl(result.pdfDownloadUrl) setPdfDownloadUrl(result.pdfDownloadUrl)
setPdfUrl(result.pdfUrl) setPdfUrl(result.pdfUrl)
} }
setLogEntries(result.logEntries)
setFileList(result.fileList) setFileList(buildFileList(outputFiles, data.clsiServerId))
// handle log files
// asynchronous (TODO: cancel on new compile?)
setLogEntryAnnotations(null)
setLogEntries(null)
setRawLog(null)
handleLogFiles(outputFiles, data, abortController.signal).then(
result => {
setRawLog(result.log) setRawLog(result.log)
setLogEntries(result.logEntries)
setLogEntryAnnotations(
buildLogEntryAnnotations(
result.logEntries.all,
ide.fileTreeManager
)
)
// sample compile stats for real users // sample compile stats for real users
if (!window.user.alphaProgram && data.status === 'success') { if (!window.user.alphaProgram && data.status === 'success') {
@@ -245,6 +269,8 @@ export function CompileProvider({ children }) {
0.01 0.01
) )
} }
}
)
}) })
} }
@@ -298,6 +324,10 @@ export function CompileProvider({ children }) {
break break
} }
} }
return () => {
abortController.abort()
}
}, [ }, [
data, data,
ide, ide,