Merge pull request #21 from sharelatex/fix-urlfetcher-streams

clean up error handling in UrlFetcher
Brian Gough 2015-05-21 12:21:31 +01:00
commit 6ac7161f19
2 changed files with 51 additions and 35 deletions
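
The change centres on making sure the caller's callback fires exactly once, whichever of the timeout, request error, or write error/finish events arrives first. As a rough standalone sketch of that call-once pattern (the names once and doneOnce are made up for illustration, not code from this repository):

    once = (callback) ->
      called = false
      (error) ->
        return if called
        called = true
        callback(error)

    doneOnce = once (error) -> console.log "finished, error:", error
    setTimeout (-> doneOnce(new Error("timed out"))), 3000
    # urlStream.on "error", doneOnce
    # fileStream.on "finish", -> doneOnce()

The diff achieves the same effect by replacing _callback with a no-op after the first call and clearing the pending timeout; it also pauses the request stream and attaches all handlers before piping, so no events are missed while the pipeline is wired up.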


@@ -2,33 +2,55 @@ request = require("request").defaults(jar: false)
 fs = require("fs")
 logger = require "logger-sharelatex"

+oneMinute = 60 * 1000

 module.exports = UrlFetcher =
   pipeUrlToFile: (url, filePath, _callback = (error) ->) ->
     callbackOnce = (error) ->
-      cleanUp error, (error) ->
-        _callback(error)
-        _callback = () ->
+      clearTimeout timeoutHandler if timeoutHandler?
+      _callback(error)
+      _callback = () ->

-    cleanUp = (error, callback) ->
-      if error?
-        logger.log filePath: filePath, "deleting file from cache due to error"
-        fs.unlink filePath, (err) ->
-          if err?
-            logger.err err: err, filePath: filePath, "error deleting file from cache"
-          callback(error)
-      else
-        callback()

-    fileStream = fs.createWriteStream(filePath)
-    fileStream.on 'error', (error) ->
-      logger.error err: error, url:url, filePath: filePath, "error writing file into cache"
-      callbackOnce(error)

-    logger.log url:url, filePath: filePath, "downloading url to cache"
-    urlStream = request.get(url)
+    timeoutHandler = setTimeout () ->
+      timeoutHandler = null
+      logger.error url:url, filePath: filePath, "Timed out downloading file to cache"
+      callbackOnce(new Error("Timed out downloading file to cache #{url}"))
+    # FIXME: maybe need to close fileStream here
+    , 3 * oneMinute

+    logger.log url:url, filePath: filePath, "started downloading url to cache"
+    urlStream = request.get({url: url, timeout: oneMinute})
+    urlStream.pause() # stop data flowing until we are ready

+    # attach handlers before setting up pipes
+    urlStream.on "error", (error) ->
+      logger.error err: error, url:url, filePath: filePath, "error downloading url"
+      callbackOnce(error or new Error("Something went wrong downloading the URL #{url}"))

+    urlStream.on "end", () ->
+      logger.log url:url, filePath: filePath, "finished downloading file into cache"

     urlStream.on "response", (res) ->
       if res.statusCode >= 200 and res.statusCode < 300
+        fileStream = fs.createWriteStream(filePath)

+        # attach handlers before setting up pipes
+        fileStream.on 'error', (error) ->
+          logger.error err: error, url:url, filePath: filePath, "error writing file into cache"
+          fs.unlink filePath, (err) ->
+            if err?
+              logger.err err: err, filePath: filePath, "error deleting file from cache"
+            callbackOnce(error)

+        fileStream.on 'finish', () ->
+          logger.log url:url, filePath: filePath, "finished writing file into cache"
+          callbackOnce()

+        fileStream.on 'pipe', () ->
+          logger.log url:url, filePath: filePath, "piping into filestream"

         urlStream.pipe(fileStream)
+        urlStream.resume() # now we are ready to handle the data
       else
         logger.error statusCode: res.statusCode, url:url, filePath: filePath, "unexpected status code downloading url to cache"
         # https://nodejs.org/api/http.html#http_class_http_clientrequest
@@ -39,15 +61,5 @@ module.exports = UrlFetcher =
         # method. Until the data is consumed, the 'end' event will not
         # fire. Also, until the data is read it will consume memory
         # that can eventually lead to a 'process out of memory' error.
-        urlStream.on 'data', () -> # discard the data
+        urlStream.resume() # discard the data
         callbackOnce(new Error("URL returned non-success status code: #{res.statusCode} #{url}"))

-    urlStream.on "error", (error) ->
-      logger.error err: error, url:url, filePath: filePath, "error downloading url"
-      callbackOnce(error or new Error("Something went wrong downloading the URL #{url}"))

-    urlStream.on "end", () ->
-      # FIXME: what if we get an error writing the file? Maybe we
-      # should be using the fileStream end event as the point of
-      # callback.
-      callbackOnce()
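
The comment in the non-2xx branch above describes a general node.js behaviour: once a 'response' handler is attached, the response body has to be consumed, otherwise the stream never emits 'end' and the buffered data is held in memory. A minimal standalone sketch of that behaviour (the URL is made up; this is not code from the repository):

    request = require("request").defaults(jar: false)
    urlStream = request.get "http://example.com/missing-file"
    urlStream.on "response", (res) ->
      console.log "status code", res.statusCode
      # discard the body; without this resume() (or a 'data' handler)
      # the 'end' event below would never fire
      urlStream.resume()
    urlStream.on "error", (error) ->
      console.log "request failed:", error.message
    urlStream.on "end", () ->
      console.log "response body fully consumed"

That is why the second hunk swaps the no-op 'data' handler for urlStream.resume(): both drain the unwanted body, the latter more explicitly.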

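In the unit tests below, the write-stream stub changes from a bare object with a no-op on method to a real EventEmitter: the reworked code decides when to call back by listening for events on the file stream, so each test now has to fire those events itself. A small standalone illustration of the difference (not the test code itself):

    EventEmitter = require("events").EventEmitter

    # the old-style stub accepts listeners but can never fire them
    fakeFileStream = { on: () -> }

    # an EventEmitter stub lets the test drive whichever lifecycle it wants to simulate
    fileStream = new EventEmitter
    fileStream.on "finish", () -> console.log "finish handler ran"
    fileStream.emit "finish"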

@@ -22,25 +22,29 @@ describe "UrlFetcher", ->
       @path = "/path/to/file/on/disk"
       @request.get = sinon.stub().returns(@urlStream = new EventEmitter)
       @urlStream.pipe = sinon.stub()
-      @fs.createWriteStream = sinon.stub().returns(@fileStream = { on: () -> })
+      @urlStream.pause = sinon.stub()
+      @urlStream.resume = sinon.stub()
+      @fs.createWriteStream = sinon.stub().returns(@fileStream = new EventEmitter)
+      @fs.unlink = (file, callback) -> callback()
       @UrlFetcher.pipeUrlToFile(@url, @path, @callback)

     it "should request the URL", ->
       @request.get
-        .calledWith(@url)
+        .calledWith(sinon.match {"url": @url})
         .should.equal true

-    it "should open the file for writing", ->
-      @fs.createWriteStream
-        .calledWith(@path)
-        .should.equal true

     describe "successfully", ->
       beforeEach ->
         @res = statusCode: 200
         @urlStream.emit "response", @res
-        @urlStream.emit "end"
+        @fileStream.emit "finish"

+      it "should open the file for writing", ->
+        @fs.createWriteStream
+          .calledWith(@path)
+          .should.equal true

       it "should pipe the URL to the file", ->
         @urlStream.pipe