Merge pull request #14871 from overleaf/bg-fix-linked-files-with-fetch-utils-error

handle "File too large" errors from linked-url-proxy in web

GitOrigin-RevId: f370e8855a9f696bfbff3658f79a1f2fc2f02028
Brian Gough 2023-09-20 09:34:16 +01:00 committed by Copybot
parent 2394a32858
commit 1234534a7f
2 changed files with 48 additions and 50 deletions
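The fix hinges on error-cause chaining: the URL agent re-throws failures with the underlying error attached via .withCause(), and the controller inspects error.cause to decide which 422 message to send. Below is a minimal, self-contained sketch of that pattern; the classes are simplified stand-ins for the OError-based FileTooLargeError and UrlFetchFailedError used in web (only the cause-recording behaviour is reproduced), and the two helper functions are illustrative, not the real code.

// Simplified stand-in error classes: only the cause-recording behaviour of the
// real OError-based classes is reproduced here.
class ChainableError extends Error {
  withCause(cause) {
    this.cause = cause // remember the wrapped error
    return this
  }
}
class FileTooLargeError extends ChainableError {}
class UrlFetchFailedError extends ChainableError {}

// Agent side (sketch): a failure from the fetch/import step is re-thrown with
// the original error attached as `cause`.
function wrapAgentError(error, url) {
  return new UrlFetchFailedError(`url fetch failed: ${url}`).withCause(error)
}

// Controller side (sketch): branch on the wrapped cause to pick the message,
// as the controller hunk below does.
function userMessageFor(error) {
  if (error.cause instanceof FileTooLargeError) {
    return 'File too large'
  }
  return 'Your URL could not be reached. Please check it and try again.'
}

const wrapped = wrapAgentError(
  new FileTooLargeError('file too large'),
  'https://example.com/big-file.pdf'
)
console.log(userMessageFor(wrapped)) // => 'File too large'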

View file

@@ -177,10 +177,16 @@ module.exports = LinkedFilesController = {
       plainTextResponse(res, 'Could not get output file')
     } else if (error instanceof UrlFetchFailedError) {
       res.status(422)
-      plainTextResponse(
-        res,
-        `Your URL could not be reached (${error.statusCode} status code). Please check it and try again.`
-      )
+      if (error.cause instanceof FileTooLargeError) {
+        plainTextResponse(res, 'File too large')
+      } else {
+        plainTextResponse(
+          res,
+          `Your URL could not be reached (${
+            error.info?.status || error.cause?.info?.status
+          } status code). Please check it and try again.`
+        )
+      }
     } else if (error instanceof InvalidUrlError) {
       res.status(422)
       plainTextResponse(
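One detail worth noting in the hunk above: the status code for the generic message is read as error.info?.status || error.cause?.info?.status, since the status may be recorded on the UrlFetchFailedError's own info or only on the error it wraps. A quick illustration with plain-object stand-ins (not the real error classes):

function statusFor(error) {
  return error.info?.status || error.cause?.info?.status
}

// The status may sit on the wrapper's own info...
console.log(statusFor({ info: { status: 403 } })) // => 403

// ...or only on the wrapped fetch error's info.
const wrapped = { info: { url: 'https://example.com' }, cause: { info: { status: 404 } } }
console.log(statusFor(wrapped)) // => 404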

View file

@@ -1,71 +1,61 @@
 const logger = require('@overleaf/logger')
-const request = require('request')
-const _ = require('underscore')
 const urlValidator = require('valid-url')
 const { InvalidUrlError, UrlFetchFailedError } = require('./LinkedFilesErrors')
 const LinkedFilesHandler = require('./LinkedFilesHandler')
 const UrlHelper = require('../Helpers/UrlHelper')
+const { fetchStream, RequestFailedError } = require('@overleaf/fetch-utils')
+const { callbackify } = require('../../util/promises')
+const { FileTooLargeError } = require('../Errors/Errors')
 
-function createLinkedFile(
+async function createLinkedFile(
   projectId,
   linkedFileData,
   name,
   parentFolderId,
-  userId,
-  callback
+  userId
 ) {
   logger.info(
     { projectId, userId, url: linkedFileData.url },
     'create linked file'
   )
   linkedFileData = _sanitizeData(linkedFileData)
-  _getUrlStream(projectId, linkedFileData, userId, (err, readStream) => {
-    if (err) {
-      return callback(err)
+  const fetchUrl = _getUrl(projectId, linkedFileData, userId)
+  try {
+    const readStream = await fetchStream(fetchUrl)
+    const file = await LinkedFilesHandler.promises.importFromStream(
+      projectId,
+      readStream,
+      linkedFileData,
+      name,
+      parentFolderId,
+      userId
+    )
+    return file._id
+  } catch (error) {
+    if (error instanceof RequestFailedError && /too large/.test(error.body)) {
+      throw new FileTooLargeError('file too large', {
+        url: linkedFileData.url,
+      }).withCause(error)
     }
-    readStream.on('error', callback)
-    readStream.on('response', response => {
-      if (response.statusCode >= 200 && response.statusCode < 300) {
-        LinkedFilesHandler.importFromStream(
-          projectId,
-          readStream,
-          linkedFileData,
-          name,
-          parentFolderId,
-          userId,
-          (err, file) => {
-            if (err) {
-              return callback(err)
-            }
-            callback(null, file._id)
-          }
-        ) // Created
-      } else {
-        const error = new UrlFetchFailedError(
-          `url fetch failed: ${linkedFileData.url}`
-        )
-        error.statusCode = response.statusCode
-        callback(error)
-      }
-    })
-  })
+    throw new UrlFetchFailedError('url fetch failed', {
+      url: linkedFileData.url,
+    }).withCause(error)
+  }
 }
 
-function refreshLinkedFile(
+async function refreshLinkedFile(
   projectId,
   linkedFileData,
   name,
   parentFolderId,
-  userId,
-  callback
+  userId
 ) {
-  createLinkedFile(
+  return await createLinkedFile(
     projectId,
     linkedFileData,
     name,
     parentFolderId,
-    userId,
-    callback
+    userId
   )
 }
 
@@ -76,15 +66,17 @@ function _sanitizeData(data) {
   }
 }
 
-function _getUrlStream(projectId, data, currentUserId, callback) {
-  callback = _.once(callback)
+function _getUrl(projectId, data, currentUserId) {
   let { url } = data
   if (!urlValidator.isWebUri(url)) {
-    return callback(new InvalidUrlError(`invalid url: ${url}`))
+    throw new InvalidUrlError(`invalid url: ${url}`)
   }
   url = UrlHelper.wrapUrlWithProxy(url)
-  const readStream = request.get(url)
-  callback(null, readStream)
+  return url
 }
 
-module.exports = { createLinkedFile, refreshLinkedFile }
+module.exports = {
+  createLinkedFile: callbackify(createLinkedFile),
+  refreshLinkedFile: callbackify(refreshLinkedFile),
+  promises: { createLinkedFile, refreshLinkedFile },
+}
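
The new export shape keeps both calling conventions: existing callback-style callers go through callbackify, while new code can use the functions under promises. Here is a generic sketch of that pattern using Node's built-in util.callbackify rather than the project's helper from ../../util/promises; the createThing function and file layout are made up for illustration.

const { callbackify } = require('util')

// An async implementation written once...
async function createThing(name) {
  return `created ${name}`
}

// ...exported in both styles, as in the module above.
module.exports = {
  createThing: callbackify(createThing), // legacy callback interface
  promises: { createThing }, // promise interface for new code
}

// Callback-style caller:
module.exports.createThing('example', (err, result) => {
  if (err) throw err
  console.log(result) // => 'created example'
})

// Promise-style caller:
module.exports.promises.createThing('example').then(console.log)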