diff --git a/services/web/frontend/js/features/pdf-preview/util/output-files.js b/services/web/frontend/js/features/pdf-preview/util/output-files.js
index e273cf037d..49724942fb 100644
--- a/services/web/frontend/js/features/pdf-preview/util/output-files.js
+++ b/services/web/frontend/js/features/pdf-preview/util/output-files.js
@@ -9,6 +9,7 @@ import '@/utils/readable-stream-async-iterator-polyfill'
 
 const TRANSIENT_WARNING_REGEX = /^(Reference|Citation).+undefined on input line/
 const MAX_LOG_SIZE = 1024 * 1024 // 1MB
+const MAX_BIB_LOG_SIZE_PER_FILE = MAX_LOG_SIZE
 
 export function handleOutputFiles(outputFiles, projectId, data) {
   const outputFile = outputFiles.get('output.pdf')
@@ -77,31 +78,11 @@ export const handleLogFiles = async (outputFiles, data, signal) => {
 
   const logFile = outputFiles.get('output.log')
   if (logFile) {
-    try {
-      const logFileAbortController = new AbortController()
-
-      // abort fetching the log file if the main signal is aborted
-      signal.addEventListener('abort', () => {
-        logFileAbortController.abort()
-      })
-
-      const response = await fetch(buildURL(logFile, data.pdfDownloadDomain), {
-        signal: logFileAbortController.signal,
-      })
-
-      result.log = ''
-
-      const reader = response.body.pipeThrough(new TextDecoderStream())
-      for await (const chunk of reader) {
-        result.log += chunk
-        if (result.log.length > MAX_LOG_SIZE) {
-          logFileAbortController.abort()
-        }
-      }
-    } catch (e) {
-      debugConsole.warn(e) // ignore failure to fetch the log file, but log a warning
-    }
-
+    result.log = await fetchFileWithSizeLimit(
+      buildURL(logFile, data.pdfDownloadDomain),
+      signal,
+      MAX_LOG_SIZE
+    )
     try {
       let { errors, warnings, typesetting } = HumanReadableLogs.parse(
         result.log,
@@ -129,23 +110,18 @@ export const handleLogFiles = async (outputFiles, data, signal) => {
     }
   }
 
   for (const blgFile of blgFiles) {
+    const log = await fetchFileWithSizeLimit(
+      buildURL(blgFile, data.pdfDownloadDomain),
+      signal,
+      MAX_BIB_LOG_SIZE_PER_FILE
+    )
     try {
-      const response = await fetch(buildURL(blgFile, data.pdfDownloadDomain), {
-        signal,
-      })
-
-      const log = await response.text()
-
-      try {
-        const { errors, warnings } = new BibLogParser(log, {
-          maxErrors: 100,
-        }).parse()
-        accumulateResults({ errors, warnings }, 'BibTeX:')
-      } catch (e) {
-        // BibLog parsing errors are ignored
-      }
+      const { errors, warnings } = new BibLogParser(log, {
+        maxErrors: 100,
+      }).parse()
+      accumulateResults({ errors, warnings }, 'BibTeX:')
     } catch (e) {
-      debugConsole.warn(e) // ignore failure to fetch/parse the log file, but log a warning
+      // BibLog parsing errors are ignored
     }
   }
@@ -260,3 +236,33 @@ function normalizeFilePath(path, rootDocDirname) {
 function isTransientWarning(warning) {
   return TRANSIENT_WARNING_REGEX.test(warning.message)
 }
+
+async function fetchFileWithSizeLimit(url, signal, maxSize) {
+  let result = ''
+  try {
+    const abortController = new AbortController()
+    // abort fetching the log file if the main signal is aborted
+    signal.addEventListener('abort', () => {
+      abortController.abort()
+    })
+
+    const response = await fetch(url, {
+      signal: abortController.signal,
+    })
+
+    if (!response.ok) {
+      throw new Error('Failed to fetch log file')
+    }
+
+    const reader = response.body.pipeThrough(new TextDecoderStream())
+    for await (const chunk of reader) {
+      result += chunk
+      if (result.length > maxSize) {
+        abortController.abort()
+      }
+    }
+  } catch (e) {
+    debugConsole.warn(e) // ignore failure to fetch the log file, but log a warning
+  }
+  return result
+}
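
Reviewer note: below is a minimal usage sketch of the extracted helper (hypothetical code, not part of the diff). It illustrates the contract the refactored call sites rely on: errors (network failure, non-2xx status, abort) are swallowed inside fetchFileWithSizeLimit, so callers always receive a string, and the size cap is soft because each decoded chunk is appended before the abort fires.

// Hypothetical sketch, not part of the diff. blgFile, data, buildURL and
// MAX_BIB_LOG_SIZE_PER_FILE are the names used in output-files.js above;
// readBibLog itself is an invented wrapper for illustration only.
async function readBibLog(blgFile, data, signal) {
  const log = await fetchFileWithSizeLimit(
    buildURL(blgFile, data.pdfDownloadDomain),
    signal,
    MAX_BIB_LOG_SIZE_PER_FILE
  )
  // The helper logs failures via debugConsole.warn and returns whatever was
  // decoded so far, so log is always a string ('' in the worst case) and may
  // exceed maxSize by up to one chunk before the abort takes effect.
  return log
}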