Simplify logging to log only once per HTTP request

This commit is contained in:
Simon Detheridge 2020-01-07 21:19:26 +00:00
parent c85d211735
commit f40fbe77aa
21 changed files with 204 additions and 254 deletions

View file

@ -12,6 +12,8 @@ const fileController = require('./app/js/FileController')
const keyBuilder = require('./app/js/KeyBuilder')
const healthCheckController = require('./app/js/HealthCheckController')
const RequestLogger = require('./app/js/RequestLogger')
const app = express()
if (settings.sentry && settings.sentry.dsn) {
@ -27,6 +29,7 @@ if (Metrics.event_loop) {
app.use(Metrics.http.monitor(logger))
app.use(function(req, res, next) {
Metrics.inc('http-request')
res.logInfo = {}
next()
})
@ -137,6 +140,9 @@ app.get('/status', function(req, res) {
app.get('/health_check', healthCheckController.check)
app.use(RequestLogger.logRequest)
app.use(RequestLogger.logError)
const port = settings.internal.filestore.port || 3009
const host = '0.0.0.0'

View file

@ -24,6 +24,7 @@ class HealthCheckError extends BackwardCompatibleError {}
class ConversionsDisabledError extends BackwardCompatibleError {}
class ConversionError extends BackwardCompatibleError {}
class SettingsError extends BackwardCompatibleError {}
class TimeoutError extends BackwardCompatibleError {}
class FailedCommandError extends OError {
constructor(command, code, stdout, stderr) {
@ -48,5 +49,6 @@ module.exports = {
ReadError,
ConversionError,
HealthCheckError,
SettingsError
SettingsError,
TimeoutError
}

View file

@ -1,6 +1,5 @@
const fs = require('fs')
const glob = require('glob')
const logger = require('logger-sharelatex')
const path = require('path')
const rimraf = require('rimraf')
const Stream = require('stream')
@ -20,7 +19,6 @@ const filterName = key => key.replace(/\//g, '_')
async function sendFile(location, target, source) {
const filteredTarget = filterName(target)
logger.log({ location, target: filteredTarget, source }, 'sending file')
// actually copy the file (instead of moving it) to maintain consistent behaviour
// between the different implementations
@ -39,8 +37,6 @@ async function sendFile(location, target, source) {
}
async function sendStream(location, target, sourceStream) {
logger.log({ location, target }, 'sending file stream')
const fsPath = await LocalFileWriter.writeStream(sourceStream)
try {
@ -53,13 +49,10 @@ async function sendStream(location, target, sourceStream) {
// opts may be {start: Number, end: Number}
async function getFileStream(location, name, opts) {
const filteredName = filterName(name)
logger.log({ location, filteredName }, 'getting file')
try {
opts.fd = await fsOpen(`${location}/${filteredName}`, 'r')
} catch (err) {
logger.err({ err, location, filteredName: name }, 'Error reading from file')
throw _wrapError(
err,
'failed to open file for streaming',
@ -78,8 +71,6 @@ async function getFileSize(location, filename) {
const stat = await fsStat(fullPath)
return stat.size
} catch (err) {
logger.err({ err, location, filename }, 'failed to stat file')
throw _wrapError(
err,
'failed to stat file',
@ -92,7 +83,6 @@ async function getFileSize(location, filename) {
async function copyFile(location, fromName, toName) {
const filteredFromName = filterName(fromName)
const filteredToName = filterName(toName)
logger.log({ location, filteredFromName, filteredToName }, 'copying file')
try {
const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`)
@ -110,7 +100,6 @@ async function copyFile(location, fromName, toName) {
async function deleteFile(location, name) {
const filteredName = filterName(name)
logger.log({ location, filteredName }, 'delete file')
try {
await fsUnlink(`${location}/${filteredName}`)
} catch (err) {
@ -127,8 +116,6 @@ async function deleteFile(location, name) {
async function deleteDirectory(location, name) {
const filteredName = filterName(name.replace(/\/$/, ''))
logger.log({ location, filteredName }, 'deleting directory')
try {
await rmrf(`${location}/${filteredName}`)
} catch (err) {

View file

@ -1,5 +1,4 @@
const PersistorManager = require('./PersistorManager')
const logger = require('logger-sharelatex')
const FileHandler = require('./FileHandler')
const metrics = require('metrics-sharelatex')
const parseRange = require('range-parser')
@ -26,18 +25,17 @@ function getFile(req, res, next) {
format,
style
}
metrics.inc('getFile')
logger.log({ key, bucket, format, style }, 'receiving request to get file')
res.logMsg = 'getting file'
res.logInfo = { key, bucket, format, style, cacheWarm: req.query.cacheWarm }
if (req.headers.range) {
const range = _getRange(req.headers.range)
if (range) {
options.start = range.start
options.end = range.end
logger.log(
{ start: range.start, end: range.end },
'getting range of bytes from file'
)
res.logInfo.range = range
}
}
@ -45,77 +43,88 @@ function getFile(req, res, next) {
if (err) {
if (err instanceof Errors.NotFoundError) {
res.sendStatus(404)
res.logInfo.notFound = true
next()
} else {
logger.err({ err, key, bucket, format, style }, 'problem getting file')
res.sendStatus(500)
next(err)
}
return
}
if (req.query.cacheWarm) {
logger.log(
{ key, bucket, format, style },
'request is only for cache warm so not sending stream'
)
return res.sendStatus(200)
res.sendStatus(200)
return next()
}
logger.log({ key, bucket, format, style }, 'sending file to response')
pipeline(fileStream, res, err => {
if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
logger.err(
next(
new Errors.ReadError({
message: 'error transferring stream',
info: { bucket, key, format, style }
}).withCause(err)
)
} else {
next()
}
})
})
}
function getFileHead(req, res) {
function getFileHead(req, res, next) {
const { key, bucket } = req
metrics.inc('getFileSize')
logger.log({ key, bucket }, 'receiving request to get file metadata')
res.logMsg = 'getting file size'
res.logInfo = { key, bucket }
FileHandler.getFileSize(bucket, key, function(err, fileSize) {
if (err) {
if (err instanceof Errors.NotFoundError) {
res.sendStatus(404)
res.logInfo.notFound = true
next()
} else {
res.sendStatus(500)
next(err)
}
return
}
res.set('Content-Length', fileSize)
res.status(200).end()
next()
})
}
function insertFile(req, res) {
function insertFile(req, res, next) {
metrics.inc('insertFile')
const { key, bucket } = req
logger.log({ key, bucket }, 'receiving request to insert file')
res.logMsg = 'inserting file'
res.logInfo = { key, bucket }
FileHandler.insertFile(bucket, key, req, function(err) {
if (err) {
logger.log({ err, key, bucket }, 'error inserting file')
res.sendStatus(500)
next(err)
} else {
res.sendStatus(200)
next()
}
})
}
function copyFile(req, res) {
function copyFile(req, res, next) {
metrics.inc('copyFile')
const { key, bucket } = req
const oldProjectId = req.body.source.project_id
const oldFileId = req.body.source.file_id
logger.log(
{ key, bucket, oldProject_id: oldProjectId, oldFile_id: oldFileId },
'receiving request to copy file'
)
req.logInfo = {
key,
bucket,
oldProject_id: oldProjectId,
oldFile_id: oldFileId
}
req.logMsg = 'copying file'
PersistorManager.copyFile(
bucket,
@ -125,46 +134,52 @@ function copyFile(req, res) {
if (err) {
if (err instanceof Errors.NotFoundError) {
res.sendStatus(404)
res.logInfo.notFound = true
next()
} else {
logger.log(
{ err, oldProject_id: oldProjectId, oldFile_id: oldFileId },
'something went wrong copying file'
)
res.sendStatus(500)
next(err)
}
return
}
res.sendStatus(200)
next()
}
)
}
function deleteFile(req, res) {
function deleteFile(req, res, next) {
metrics.inc('deleteFile')
const { key, bucket } = req
logger.log({ key, bucket }, 'receiving request to delete file')
return FileHandler.deleteFile(bucket, key, function(err) {
if (err != null) {
logger.log({ err, key, bucket }, 'something went wrong deleting file')
return res.sendStatus(500)
req.logInfo = { key, bucket }
req.logMsg = 'deleting file'
FileHandler.deleteFile(bucket, key, function(err) {
if (err) {
next(err)
} else {
return res.sendStatus(204)
res.sendStatus(204)
next()
}
})
}
function directorySize(req, res) {
function directorySize(req, res, next) {
metrics.inc('projectSize')
const { project_id: projectId, bucket } = req
logger.log({ projectId, bucket }, 'receiving request to project size')
req.logMsg = 'getting project size'
req.logInfo = { projectId, bucket }
FileHandler.getDirectorySize(bucket, projectId, function(err, size) {
if (err) {
logger.log({ err, projectId, bucket }, 'error inserting file')
return res.sendStatus(500)
return next(err)
}
res.json({ 'total bytes': size })
req.logInfo.size = size
next()
})
}

View file

@ -1,5 +1,4 @@
const metrics = require('metrics-sharelatex')
const logger = require('logger-sharelatex')
const Settings = require('settings-sharelatex')
const { callbackify } = require('util')
@ -69,8 +68,6 @@ async function preview(sourcePath) {
}
async function _convert(sourcePath, requestedFormat, command) {
logger.log({ sourcePath, requestedFormat }, 'converting file format')
if (!APPROVED_FORMATS.includes(requestedFormat)) {
throw new ConversionError({
message: 'invalid format requested',
@ -97,9 +94,5 @@ async function _convert(sourcePath, requestedFormat, command) {
}
timer.done()
logger.log(
{ sourcePath, requestedFormat, destPath },
'finished converting file'
)
return destPath
}

View file

@ -2,17 +2,11 @@ const { promisify } = require('util')
const fs = require('fs')
const PersistorManager = require('./PersistorManager')
const LocalFileWriter = require('./LocalFileWriter')
const logger = require('logger-sharelatex')
const FileConverter = require('./FileConverter')
const KeyBuilder = require('./KeyBuilder')
const async = require('async')
const ImageOptimiser = require('./ImageOptimiser')
const {
WriteError,
ReadError,
ConversionError,
NotFoundError
} = require('./Errors')
const { ConversionError } = require('./Errors')
module.exports = {
insertFile,
@ -33,7 +27,7 @@ function insertFile(bucket, key, stream, callback) {
const convertedKey = KeyBuilder.getConvertedFolderKey(key)
PersistorManager.deleteDirectory(bucket, convertedKey, function(error) {
if (error) {
return callback(new WriteError('error inserting file').withCause(error))
return callback(error)
}
PersistorManager.sendStream(bucket, key, stream, callback)
})
@ -51,13 +45,9 @@ function deleteFile(bucket, key, callback) {
}
function getFile(bucket, key, opts, callback) {
// In this call, opts can contain credentials
if (!opts) {
opts = {}
}
logger.log({ bucket, key, opts: _scrubSecrets(opts) }, 'getting file')
opts = opts || {}
if (!opts.format && !opts.style) {
_getStandardFile(bucket, key, opts, callback)
PersistorManager.getFileStream(bucket, key, opts, callback)
} else {
_getConvertedFile(bucket, key, opts, callback)
}
@ -68,27 +58,7 @@ function getFileSize(bucket, key, callback) {
}
function getDirectorySize(bucket, projectId, callback) {
logger.log({ bucket, project_id: projectId }, 'getting project size')
PersistorManager.directorySize(bucket, projectId, function(err, size) {
if (err) {
return callback(
new ReadError('error getting project size').withCause(err)
)
}
callback(null, size)
})
}
function _getStandardFile(bucket, key, opts, callback) {
PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream) {
if (err && !(err instanceof NotFoundError)) {
logger.err(
{ bucket, key, opts: _scrubSecrets(opts) },
'error getting fileStream'
)
}
callback(err, fileStream)
})
PersistorManager.directorySize(bucket, projectId, callback)
}
function _getConvertedFile(bucket, key, opts, callback) {
@ -124,7 +94,10 @@ function _getConvertedFileAndCache(bucket, key, convertedKey, opts, callback) {
if (err) {
LocalFileWriter.deleteFile(convertedFsPath, function() {})
return callback(
new ConversionError('failed to convert file').withCause(err)
new ConversionError({
message: 'failed to convert file',
info: { opts, bucket, key, convertedKey }
}).withCause(err)
)
}
// Send back the converted file from the local copy to avoid problems
@ -152,26 +125,26 @@ function _convertFile(bucket, originalKey, opts, callback) {
_writeFileToDisk(bucket, originalKey, opts, function(err, originalFsPath) {
if (err) {
return callback(
new ConversionError('unable to write file to disk').withCause(err)
new ConversionError({
message: 'unable to write file to disk',
info: { bucket, originalKey, opts }
}).withCause(err)
)
}
const done = function(err, destPath) {
if (err) {
logger.err(
{ err, bucket, originalKey, opts: _scrubSecrets(opts) },
'error converting file'
)
return callback(
new ConversionError('error converting file').withCause(err)
new ConversionError({
message: 'error converting file',
info: { bucket, originalKey, opts }
}).withCause(err)
)
}
LocalFileWriter.deleteFile(originalFsPath, function() {})
callback(err, destPath)
}
logger.log({ opts }, 'converting file depending on opts')
if (opts.format) {
FileConverter.convert(originalFsPath, opts.format, done)
} else if (opts.style === 'thumbnail') {
@ -180,11 +153,14 @@ function _convertFile(bucket, originalKey, opts, callback) {
FileConverter.preview(originalFsPath, done)
} else {
callback(
new ConversionError(
`should have specified opts to convert file with ${JSON.stringify(
new ConversionError({
message: 'invalid file conversion options',
info: {
bucket,
originalKey,
opts
)}`
)
}
})
)
}
})
@ -193,16 +169,8 @@ function _convertFile(bucket, originalKey, opts, callback) {
function _writeFileToDisk(bucket, key, opts, callback) {
PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream) {
if (err) {
return callback(
new ReadError('unable to get read stream for file').withCause(err)
)
return callback(err)
}
LocalFileWriter.writeStream(fileStream, key, callback)
})
}
function _scrubSecrets(opts) {
const safe = Object.assign({}, opts)
delete safe.credentials
return safe
}

View file

@ -1,6 +1,5 @@
const fs = require('fs-extra')
const path = require('path')
const logger = require('logger-sharelatex')
const Settings = require('settings-sharelatex')
const streamBuffers = require('stream-buffers')
const { promisify } = require('util')
@ -60,13 +59,11 @@ async function checkFileConvert() {
}
module.exports = {
check(req, res) {
logger.log({}, 'performing health check')
check(req, res, next) {
Promise.all([checkCanGetFiles(), checkFileConvert()])
.then(() => res.sendStatus(200))
.catch(err => {
logger.err({ err }, 'Health check: error running')
res.sendStatus(500)
next(err)
})
}
}

View file

@ -12,8 +12,6 @@ module.exports = {
async function compressPng(localPath, callback) {
const timer = new metrics.Timer('compressPng')
logger.log({ localPath }, 'optimising png path')
const args = ['optipng', localPath]
const opts = {
timeout: 30 * 1000,
@ -23,7 +21,6 @@ async function compressPng(localPath, callback) {
try {
await safeExec(args, opts)
timer.done()
logger.log({ localPath }, 'finished compressing png')
} catch (err) {
if (err.code === 'SIGKILL') {
logger.warn(
@ -31,10 +28,6 @@ async function compressPng(localPath, callback) {
'optimiser timeout reached'
)
} else {
logger.err(
{ err, stderr: err.stderr, localPath },
'something went wrong compressing png'
)
throw err
}
}

View file

@ -3,7 +3,6 @@ const uuid = require('node-uuid')
const path = require('path')
const Stream = require('stream')
const { callbackify, promisify } = require('util')
const logger = require('logger-sharelatex')
const metrics = require('metrics-sharelatex')
const Settings = require('settings-sharelatex')
const { WriteError } = require('./Errors')
@ -23,18 +22,14 @@ async function writeStream(stream, key) {
const timer = new metrics.Timer('writingFile')
const fsPath = _getPath(key)
logger.log({ fsPath }, 'writing file locally')
const writeStream = fs.createWriteStream(fsPath)
try {
await pipeline(stream, writeStream)
timer.done()
logger.log({ fsPath }, 'finished writing file locally')
return fsPath
} catch (err) {
await deleteFile(fsPath)
logger.err({ err, fsPath }, 'problem writing file locally')
throw new WriteError({
message: 'problem writing file locally',
info: { err, fsPath }
@ -46,7 +41,6 @@ async function deleteFile(fsPath) {
if (!fsPath) {
return
}
logger.log({ fsPath }, 'removing local temp file')
try {
await promisify(fs.unlink)(fsPath)
} catch (err) {

View file

@ -0,0 +1,32 @@
const logger = require('logger-sharelatex')
module.exports = {
logRequest,
logError
}
function logRequest(req, res) {
// response has already been sent, but we log what happened here
logger.log(
{
info: res.logInfo,
url: req.originalUrl,
params: req.params
},
res.logMsg || 'HTTP request'
)
}
function logError(err, req, res, next) {
logger.err(
{
err,
info: res.logInfo,
url: req.originalUrl,
params: req.params,
msg: res.logMsg
},
err.message
)
next(err) // use the standard error handler to send the response
}

View file

@ -4,7 +4,6 @@ http.globalAgent.maxSockets = 300
https.globalAgent.maxSockets = 300
const settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const metrics = require('metrics-sharelatex')
const meter = require('stream-meter')
@ -64,15 +63,13 @@ async function sendStream(bucketName, key, readStream) {
metrics.count('s3.egress', meteredStream.bytes)
})
const response = await _getClientForBucket(bucketName)
await _getClientForBucket(bucketName)
.upload({
Bucket: bucketName,
Key: key,
Body: readStream.pipe(meteredStream)
})
.promise()
logger.log({ response, bucketName, key }, 'data uploaded to s3')
} catch (err) {
throw _wrapError(
err,
@ -116,7 +113,6 @@ async function getFileStream(bucketName, key, opts) {
}
async function deleteDirectory(bucketName, key) {
logger.log({ key, bucketName }, 'deleting directory')
let response
try {

View file

@ -1,5 +1,4 @@
const _ = require('underscore')
const logger = require('logger-sharelatex')
const childProcess = require('child_process')
const Settings = require('settings-sharelatex')
const { ConversionsDisabledError, FailedCommandError } = require('./Errors')
@ -29,20 +28,6 @@ function safeExec(command, options, callback) {
let killTimer
if (options.timeout) {
killTimer = setTimeout(function() {
try {
// use negative process id to kill process group
process.kill(-child.pid, options.killSignal || 'SIGTERM')
} catch (error) {
logger.log(
{ process: child.pid, kill_error: error },
'error killing process'
)
}
}, options.timeout)
}
const cleanup = _.once(function(err) {
if (killTimer) {
clearTimeout(killTimer)
@ -50,6 +35,26 @@ function safeExec(command, options, callback) {
callback(err, stdout, stderr)
})
if (options.timeout) {
killTimer = setTimeout(function() {
try {
// use negative process id to kill process group
process.kill(-child.pid, options.killSignal || 'SIGTERM')
} catch (error) {
cleanup(
new FailedCommandError({
message: 'failed to kill process after timeout',
info: {
command,
options,
pid: child.pid
}
})
)
}
}, options.timeout)
}
child.on('close', function(code, signal) {
if (code || signal) {
return cleanup(

View file

@ -44,10 +44,6 @@ describe('FSPersistorManagerTests', function() {
FSPersistorManager = SandboxedModule.require(modulePath, {
requires: {
'./LocalFileWriter': LocalFileWriter,
'logger-sharelatex': {
log() {},
err() {}
},
'./Errors': Errors,
fs,
glob,

View file

@ -12,6 +12,7 @@ describe('FileController', function() {
FileController,
req,
res,
next,
stream
const settings = {
s3: {
@ -26,6 +27,7 @@ describe('FileController', function() {
const fileId = 'file_id'
const bucket = 'user_files'
const key = `${projectId}/${fileId}`
const error = new Error('incorrect utensil')
beforeEach(function() {
PersistorManager = {
@ -57,10 +59,6 @@ describe('FileController', function() {
'settings-sharelatex': settings,
'metrics-sharelatex': {
inc() {}
},
'logger-sharelatex': {
log() {},
err() {}
}
},
globals: { console }
@ -82,11 +80,13 @@ describe('FileController', function() {
sendStatus: sinon.stub().returnsThis(),
status: sinon.stub().returnsThis()
}
next = sinon.stub()
})
describe('getFile', function() {
it('should pipe the stream', function() {
FileController.getFile(req, res)
FileController.getFile(req, res, next)
expect(stream.pipeline).to.have.been.calledWith(fileStream, res)
})
@ -96,16 +96,13 @@ describe('FileController', function() {
statusCode.should.equal(200)
done()
}
FileController.getFile(req, res)
FileController.getFile(req, res, next)
})
it('should send a 500 if there is a problem', function(done) {
FileHandler.getFile.yields('error')
res.sendStatus = code => {
code.should.equal(500)
done()
}
FileController.getFile(req, res)
it('should send an error if there is a problem', function() {
FileHandler.getFile.yields(error)
FileController.getFile(req, res, next)
expect(next).to.have.been.calledWith(error)
})
describe('with a range header', function() {
@ -125,7 +122,7 @@ describe('FileController', function() {
expectedOptions.start = 0
expectedOptions.end = 8
FileController.getFile(req, res)
FileController.getFile(req, res, next)
expect(FileHandler.getFile).to.have.been.calledWith(
bucket,
key,
@ -135,7 +132,7 @@ describe('FileController', function() {
it('should ignore an invalid range header', function() {
req.headers.range = 'potato'
FileController.getFile(req, res)
FileController.getFile(req, res, next)
expect(FileHandler.getFile).to.have.been.calledWith(
bucket,
key,
@ -145,7 +142,7 @@ describe('FileController', function() {
it("should ignore any type other than 'bytes'", function() {
req.headers.range = 'wombats=0-8'
FileController.getFile(req, res)
FileController.getFile(req, res, next)
expect(FileHandler.getFile).to.have.been.calledWith(
bucket,
key,
@ -163,7 +160,7 @@ describe('FileController', function() {
done()
}
FileController.getFileHead(req, res)
FileController.getFileHead(req, res, next)
})
it('should return a 404 is the file is not found', function(done) {
@ -174,18 +171,14 @@ describe('FileController', function() {
done()
}
FileController.getFileHead(req, res)
FileController.getFileHead(req, res, next)
})
it('should return a 500 on internal errors', function(done) {
FileHandler.getFileSize.yields(new Error())
it('should send an error on internal errors', function() {
FileHandler.getFileSize.yields(error)
res.sendStatus = code => {
expect(code).to.equal(500)
done()
}
FileController.getFileHead(req, res)
FileController.getFileHead(req, res, next)
expect(next).to.have.been.calledWith(error)
})
})
@ -196,7 +189,7 @@ describe('FileController', function() {
expect(code).to.equal(200)
done()
}
FileController.insertFile(req, res)
FileController.insertFile(req, res, next)
})
})
@ -224,7 +217,7 @@ describe('FileController', function() {
)
done()
}
FileController.copyFile(req, res)
FileController.copyFile(req, res, next)
})
it('should send a 404 if the original file was not found', function(done) {
@ -233,16 +226,13 @@ describe('FileController', function() {
code.should.equal(404)
done()
}
FileController.copyFile(req, res)
FileController.copyFile(req, res, next)
})
it('should send a 500 if there was an error', function(done) {
PersistorManager.copyFile.yields('error')
res.sendStatus = code => {
code.should.equal(500)
done()
}
FileController.copyFile(req, res)
it('should send an error if there was an error', function() {
PersistorManager.copyFile.yields(error)
FileController.copyFile(req, res, next)
expect(next).to.have.been.calledWith(error)
})
})
@ -253,16 +243,13 @@ describe('FileController', function() {
expect(FileHandler.deleteFile).to.have.been.calledWith(bucket, key)
done()
}
FileController.deleteFile(req, res)
FileController.deleteFile(req, res, next)
})
it('should send a 500 if there was an error', function(done) {
FileHandler.deleteFile.yields('error')
res.sendStatus = code => {
code.should.equal(500)
done()
}
FileController.deleteFile(req, res)
it('should send a 500 if there was an error', function() {
FileHandler.deleteFile.yields(error)
FileController.deleteFile(req, res, next)
expect(next).to.have.been.calledWith(error)
})
})
@ -276,13 +263,10 @@ describe('FileController', function() {
})
})
it('should send a 500 if there was an error', function(done) {
FileHandler.getDirectorySize.callsArgWith(2, 'error')
res.sendStatus = code => {
code.should.equal(500)
done()
}
FileController.directorySize(req, res)
it('should send a 500 if there was an error', function() {
FileHandler.getDirectorySize.yields(error)
FileController.directorySize(req, res, next)
expect(next).to.have.been.calledWith(error)
})
})
})

View file

@ -25,10 +25,6 @@ describe('FileConverter', function() {
FileConverter = SandboxedModule.require(modulePath, {
requires: {
'./SafeExec': SafeExec,
'logger-sharelatex': {
log() {},
err() {}
},
'metrics-sharelatex': {
inc: sinon.stub(),
Timer: sinon.stub().returns({ done: sinon.stub() })

View file

@ -67,11 +67,7 @@ describe('FileHandler', function() {
'./FileConverter': FileConverter,
'./KeyBuilder': KeyBuilder,
'./ImageOptimiser': ImageOptimiser,
fs: fs,
'logger-sharelatex': {
log() {},
err() {}
}
fs: fs
},
globals: { console }
})

View file

@ -6,21 +6,20 @@ const { FailedCommandError } = require('../../../app/js/Errors')
const SandboxedModule = require('sandboxed-module')
describe('ImageOptimiser', function() {
let ImageOptimiser, SafeExec
let ImageOptimiser, SafeExec, logger
const sourcePath = '/wombat/potato.eps'
beforeEach(function() {
SafeExec = {
promises: sinon.stub().resolves()
}
logger = {
warn: sinon.stub()
}
ImageOptimiser = SandboxedModule.require(modulePath, {
requires: {
'./SafeExec': SafeExec,
'logger-sharelatex': {
log() {},
err() {},
warn() {}
}
'logger-sharelatex': logger
}
})
})
@ -47,13 +46,23 @@ describe('ImageOptimiser', function() {
})
describe('when optimiser is sigkilled', function() {
it('should not produce an error', function(done) {
const error = new FailedCommandError('', 'SIGKILL', '', '')
SafeExec.promises.rejects(error)
const expectedError = new FailedCommandError('', 'SIGKILL', '', '')
let error
beforeEach(function(done) {
SafeExec.promises.rejects(expectedError)
ImageOptimiser.compressPng(sourcePath, err => {
expect(err).not.to.exist
error = err
done()
})
})
it('should not produce an error', function() {
expect(error).not.to.exist
})
it('should log a warning', function() {
expect(logger.warn).to.have.been.calledOnce
})
})
})

View file

@ -7,14 +7,7 @@ describe('LocalFileWriter', function() {
const key = 'wombat/potato'
beforeEach(function() {
KeyBuilder = SandboxedModule.require(modulePath, {
requires: {
'logger-sharelatex': {
log() {},
err() {}
}
}
})
KeyBuilder = SandboxedModule.require(modulePath)
})
describe('cachedKey', function() {

View file

@ -26,10 +26,6 @@ describe('LocalFileWriter', function() {
requires: {
fs,
stream,
'logger-sharelatex': {
log() {},
err() {}
},
'settings-sharelatex': settings,
'metrics-sharelatex': {
inc: sinon.stub(),

View file

@ -122,10 +122,6 @@ describe('S3PersistorManagerTests', function() {
'./Errors': Errors,
fs: Fs,
'stream-meter': Meter,
'logger-sharelatex': {
log() {},
err() {}
},
'metrics-sharelatex': Metrics
},
globals: { console }

View file

@ -13,10 +13,6 @@ describe('SafeExec', function() {
safeExec = SandboxedModule.require(modulePath, {
requires: {
'logger-sharelatex': {
log() {},
err() {}
},
'settings-sharelatex': settings
}
})