2020-02-19 11:14:28 +00:00
|
|
|
/* eslint-disable
|
|
|
|
no-return-assign,
|
|
|
|
no-unused-vars,
|
2022-05-16 14:25:49 +00:00
|
|
|
n/no-deprecated-api,
|
2020-02-19 11:14:28 +00:00
|
|
|
*/
|
|
|
|
// TODO: This file was created by bulk-decaffeinate.
|
|
|
|
// Fix any style issues and re-enable lint.
|
2020-02-19 11:14:14 +00:00
|
|
|
/*
|
|
|
|
* decaffeinate suggestions:
|
|
|
|
* DS102: Remove unnecessary code created because of implicit returns
|
|
|
|
* DS207: Consider shorter variations of null checks
|
|
|
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
|
|
|
*/
|
2020-06-02 08:51:34 +00:00
|
|
|
let UrlFetcher
|
2020-02-19 11:14:37 +00:00
|
|
|
const request = require('request').defaults({ jar: false })
|
|
|
|
const fs = require('fs')
|
2022-03-01 15:09:36 +00:00
|
|
|
const logger = require('@overleaf/logger')
|
2021-07-12 16:47:21 +00:00
|
|
|
const settings = require('@overleaf/settings')
|
2020-05-14 12:09:57 +00:00
|
|
|
const async = require('async')
|
2021-10-20 10:17:59 +00:00
|
|
|
const { URL } = require('url')
|
2021-10-06 08:11:59 +00:00
|
|
|
const { promisify } = require('util')
|
2014-02-12 17:27:43 +00:00
|
|
|
|
2020-02-19 11:14:37 +00:00
|
|
|
const oneMinute = 60 * 1000
|
2015-05-15 13:07:15 +00:00
|
|
|
|
2020-02-19 11:14:37 +00:00
|
|
|
module.exports = UrlFetcher = {
  /**
   * Download `url` into `filePath`, retrying the whole download up to
   * 3 times before giving up.
   *
   * @param {string} url - source URL to fetch
   * @param {string} filePath - destination path on the local filesystem
   * @param {Function} callback - called with (error) after the final attempt
   */
  pipeUrlToFileWithRetry(url, filePath, callback) {
    const doDownload = function (cb) {
      UrlFetcher.pipeUrlToFile(url, filePath, cb)
    }
    // async.retry re-invokes doDownload until it succeeds or 3 attempts fail
    async.retry(3, doDownload, callback)
  },

  /**
   * Stream `url` into `filePath` in a single attempt.
   *
   * The file is written atomically: data is streamed to `filePath + '~'`
   * and renamed into place only after the write stream finishes, so a
   * partial download never appears at `filePath`.
   *
   * @param {string} url - source URL to fetch
   * @param {string} filePath - destination path on the local filesystem
   * @param {Function} [_callback] - called with (error) exactly once
   */
  pipeUrlToFile(url, filePath, _callback) {
    if (_callback == null) {
      _callback = function () {}
    }
    // Multiple stream events ('error', 'finish', timeout) can race; this
    // wrapper guarantees the caller's callback fires at most once by
    // replacing it with a no-op after the first invocation.
    const callbackOnce = function (error) {
      if (timeoutHandler != null) {
        clearTimeout(timeoutHandler)
      }
      _callback(error)
      return (_callback = function () {})
    }

    // Optionally rewrite the URL to go via a local filestore domain,
    // except for requests aimed at the clsi-perf host.
    const u = new URL(url)
    if (
      settings.filestoreDomainOveride &&
      u.host !== settings.apis.clsiPerf.host
    ) {
      url = `${settings.filestoreDomainOveride}${u.pathname}${u.search}`
    }
    // Overall watchdog for the whole download (separate from the per-request
    // `timeout` below, which only covers connection/idle time).
    let timeoutHandler = setTimeout(
      function () {
        // Null out first so callbackOnce does not clearTimeout a fired timer.
        timeoutHandler = null
        logger.error({ url, filePath }, 'Timed out downloading file to cache')
        return callbackOnce(
          new Error(`Timed out downloading file to cache ${url}`)
        )
      },
      // FIXME: maybe need to close fileStream here
      3 * oneMinute
    )

    logger.debug({ url, filePath }, 'started downloading url to cache')
    const urlStream = request.get({ url, timeout: oneMinute })
    urlStream.pause() // stop data flowing until we are ready

    // attach handlers before setting up pipes
    urlStream.on('error', function (error) {
      logger.error({ err: error, url, filePath }, 'error downloading url')
      return callbackOnce(
        error || new Error(`Something went wrong downloading the URL ${url}`)
      )
    })

    urlStream.on('end', () =>
      logger.debug({ url, filePath }, 'finished downloading file into cache')
    )

    return urlStream.on('response', function (res) {
      if (res.statusCode >= 200 && res.statusCode < 300) {
        // Write to a temp file ("~" suffix) and rename into place on
        // success so readers never observe a half-written file.
        const atomicWrite = filePath + '~'
        const fileStream = fs.createWriteStream(atomicWrite)

        // attach handlers before setting up pipes
        fileStream.on('error', function (error) {
          logger.error(
            { err: error, url, filePath },
            'error writing file into cache'
          )
          // Best-effort cleanup of the partial temp file; the original
          // write error (not the unlink error) is reported to the caller.
          return fs.unlink(atomicWrite, function (err) {
            if (err != null) {
              logger.err({ err, filePath }, 'error deleting file from cache')
            }
            return callbackOnce(error)
          })
        })

        fileStream.on('finish', function () {
          logger.debug({ url, filePath }, 'finished writing file into cache')
          // Atomically move the completed temp file into its final location.
          fs.rename(atomicWrite, filePath, error => {
            if (error) {
              // Rename failed: remove the temp file, then report the error.
              fs.unlink(atomicWrite, () => callbackOnce(error))
            } else {
              callbackOnce()
            }
          })
        })

        fileStream.on('pipe', () =>
          logger.debug({ url, filePath }, 'piping into filestream')
        )

        urlStream.pipe(fileStream)
        return urlStream.resume() // now we are ready to handle the data
      } else {
        logger.error(
          { statusCode: res.statusCode, url, filePath },
          'unexpected status code downloading url to cache'
        )
        // https://nodejs.org/api/http.html#http_class_http_clientrequest
        // If you add a 'response' event handler, then you must consume
        // the data from the response object, either by calling
        // response.read() whenever there is a 'readable' event, or by
        // adding a 'data' handler, or by calling the .resume()
        // method. Until the data is consumed, the 'end' event will not
        // fire. Also, until the data is read it will consume memory
        // that can eventually lead to a 'process out of memory' error.
        urlStream.resume() // discard the data
        return callbackOnce(
          new Error(
            `URL returned non-success status code: ${res.statusCode} ${url}`
          )
        )
      }
    })
  },
}
|
2021-10-06 08:11:59 +00:00
|
|
|
|
|
|
|
// Promise-based variant of the public API, for callers using async/await.
// Only the retrying entry point is exposed; it resolves on success and
// rejects with the download error otherwise.
const promises = {
  pipeUrlToFileWithRetry: promisify(UrlFetcher.pipeUrlToFileWithRetry),
}

module.exports.promises = promises
|