Mirror of https://github.com/overleaf/overleaf.git (synced 2024-11-29 11:03:36 -05:00)

Commit 6225f2f236 (parent 601861ee58)

    Prettier: convert app/js decaffeinated files to Prettier format

14 changed files with 1464 additions and 1139 deletions
@@ -15,137 +15,183 @@
 // latest aws-sdk and delete this module so that PersistorManager would load the
 // same backend for both the 's3' and 'aws-sdk' options.

-const logger = require("logger-sharelatex");
-const aws = require("aws-sdk");
-const _ = require("underscore");
-const fs = require("fs");
-const Errors = require("./Errors");
+const logger = require('logger-sharelatex')
+const aws = require('aws-sdk')
+const _ = require('underscore')
+const fs = require('fs')
+const Errors = require('./Errors')

-const s3 = new aws.S3();
+const s3 = new aws.S3()

 module.exports = {
-  sendFile(bucketName, key, fsPath, callback){
-    logger.log({bucketName, key}, "send file data to s3");
-    const stream = fs.createReadStream(fsPath);
-    return s3.upload({Bucket: bucketName, Key: key, Body: stream}, function(err, data) {
-      if (err != null) {
-        logger.err({err, Bucket: bucketName, Key: key}, "error sending file data to s3");
-      }
-      return callback(err);
-    });
-  },
+  sendFile(bucketName, key, fsPath, callback) {
+    logger.log({ bucketName, key }, 'send file data to s3')
+    const stream = fs.createReadStream(fsPath)
+    return s3.upload({ Bucket: bucketName, Key: key, Body: stream }, function(
+      err,
+      data
+    ) {
+      if (err != null) {
+        logger.err(
+          { err, Bucket: bucketName, Key: key },
+          'error sending file data to s3'
+        )
+      }
+      return callback(err)
+    })
+  },

-  sendStream(bucketName, key, stream, callback){
-    logger.log({bucketName, key}, "send file stream to s3");
-    return s3.upload({Bucket: bucketName, Key: key, Body: stream}, function(err, data) {
-      if (err != null) {
-        logger.err({err, Bucket: bucketName, Key: key}, "error sending file stream to s3");
-      }
-      return callback(err);
-    });
-  },
+  sendStream(bucketName, key, stream, callback) {
+    logger.log({ bucketName, key }, 'send file stream to s3')
+    return s3.upload({ Bucket: bucketName, Key: key, Body: stream }, function(
+      err,
+      data
+    ) {
+      if (err != null) {
+        logger.err(
+          { err, Bucket: bucketName, Key: key },
+          'error sending file stream to s3'
+        )
+      }
+      return callback(err)
+    })
+  },

-  getFileStream(bucketName, key, opts, callback){
-    if (callback == null) { callback = function(err, res){}; }
-    logger.log({bucketName, key}, "get file stream from s3");
-    callback = _.once(callback);
-    const params = {
-      Bucket:bucketName,
-      Key: key
-    };
-    if ((opts.start != null) && (opts.end != null)) {
-      params.Range = `bytes=${opts.start}-${opts.end}`;
-    }
-    const request = s3.getObject(params);
-    const stream = request.createReadStream();
-    stream.on('readable', () => callback(null, stream));
-    return stream.on('error', function(err) {
-      logger.err({err, bucketName, key}, "error getting file stream from s3");
-      if (err.code === 'NoSuchKey') {
-        return callback(new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`));
-      }
-      return callback(err);
-    });
-  },
+  getFileStream(bucketName, key, opts, callback) {
+    if (callback == null) {
+      callback = function(err, res) {}
+    }
+    logger.log({ bucketName, key }, 'get file stream from s3')
+    callback = _.once(callback)
+    const params = {
+      Bucket: bucketName,
+      Key: key
+    }
+    if (opts.start != null && opts.end != null) {
+      params.Range = `bytes=${opts.start}-${opts.end}`
+    }
+    const request = s3.getObject(params)
+    const stream = request.createReadStream()
+    stream.on('readable', () => callback(null, stream))
+    return stream.on('error', function(err) {
+      logger.err({ err, bucketName, key }, 'error getting file stream from s3')
+      if (err.code === 'NoSuchKey') {
+        return callback(
+          new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`)
+        )
+      }
+      return callback(err)
+    })
+  },

-  copyFile(bucketName, sourceKey, destKey, callback){
-    logger.log({bucketName, sourceKey, destKey}, "copying file in s3");
-    const source = bucketName + '/' + sourceKey;
-    return s3.copyObject({Bucket: bucketName, Key: destKey, CopySource: source}, function(err) {
-      if (err != null) {
-        logger.err({err, bucketName, sourceKey, destKey}, "something went wrong copying file in s3");
-      }
-      return callback(err);
-    });
-  },
+  copyFile(bucketName, sourceKey, destKey, callback) {
+    logger.log({ bucketName, sourceKey, destKey }, 'copying file in s3')
+    const source = bucketName + '/' + sourceKey
+    return s3.copyObject(
+      { Bucket: bucketName, Key: destKey, CopySource: source },
+      function(err) {
+        if (err != null) {
+          logger.err(
+            { err, bucketName, sourceKey, destKey },
+            'something went wrong copying file in s3'
+          )
+        }
+        return callback(err)
+      }
+    )
+  },

-  deleteFile(bucketName, key, callback){
-    logger.log({bucketName, key}, "delete file in s3");
-    return s3.deleteObject({Bucket: bucketName, Key: key}, function(err) {
-      if (err != null) {
-        logger.err({err, bucketName, key}, "something went wrong deleting file in s3");
-      }
-      return callback(err);
-    });
-  },
+  deleteFile(bucketName, key, callback) {
+    logger.log({ bucketName, key }, 'delete file in s3')
+    return s3.deleteObject({ Bucket: bucketName, Key: key }, function(err) {
+      if (err != null) {
+        logger.err(
+          { err, bucketName, key },
+          'something went wrong deleting file in s3'
+        )
+      }
+      return callback(err)
+    })
+  },

-  deleteDirectory(bucketName, key, callback){
-    logger.log({bucketName, key}, "delete directory in s3");
-    return s3.listObjects({Bucket: bucketName, Prefix: key}, function(err, data) {
-      if (err != null) {
-        logger.err({err, bucketName, key}, "something went wrong listing prefix in s3");
-        return callback(err);
-      }
-      if (data.Contents.length === 0) {
-        logger.log({bucketName, key}, "the directory is empty");
-        return callback();
-      }
-      const keys = _.map(data.Contents, entry => ({
-        Key: entry.Key
-      }));
-      return s3.deleteObjects({
-        Bucket: bucketName,
-        Delete: {
-          Objects: keys,
-          Quiet: true
-        }
-      }
-      , function(err) {
-        if (err != null) {
-          logger.err({err, bucketName, key:keys}, "something went wrong deleting directory in s3");
-        }
-        return callback(err);
-      });
-    });
-  },
+  deleteDirectory(bucketName, key, callback) {
+    logger.log({ bucketName, key }, 'delete directory in s3')
+    return s3.listObjects({ Bucket: bucketName, Prefix: key }, function(
+      err,
+      data
+    ) {
+      if (err != null) {
+        logger.err(
+          { err, bucketName, key },
+          'something went wrong listing prefix in s3'
+        )
+        return callback(err)
+      }
+      if (data.Contents.length === 0) {
+        logger.log({ bucketName, key }, 'the directory is empty')
+        return callback()
+      }
+      const keys = _.map(data.Contents, entry => ({
+        Key: entry.Key
+      }))
+      return s3.deleteObjects(
+        {
+          Bucket: bucketName,
+          Delete: {
+            Objects: keys,
+            Quiet: true
+          }
+        },
+        function(err) {
+          if (err != null) {
+            logger.err(
+              { err, bucketName, key: keys },
+              'something went wrong deleting directory in s3'
+            )
+          }
+          return callback(err)
+        }
+      )
+    })
+  },

-  checkIfFileExists(bucketName, key, callback){
-    logger.log({bucketName, key}, "check file existence in s3");
-    return s3.headObject({Bucket: bucketName, Key: key}, function(err, data) {
-      if (err != null) {
-        if (err.code === 'NotFound') { return (callback(null, false)); }
-        logger.err({err, bucketName, key}, "something went wrong checking head in s3");
-        return callback(err);
-      }
-      return callback(null, (data.ETag != null));
-    });
-  },
-
-  directorySize(bucketName, key, callback){
-    logger.log({bucketName, key}, "get project size in s3");
-    return s3.listObjects({Bucket: bucketName, Prefix: key}, function(err, data) {
-      if (err != null) {
-        logger.err({err, bucketName, key}, "something went wrong listing prefix in s3");
-        return callback(err);
-      }
-      if (data.Contents.length === 0) {
-        logger.log({bucketName, key}, "the directory is empty");
-        return callback();
-      }
-      let totalSize = 0;
-      _.each(data.Contents, entry => totalSize += entry.Size);
-      return callback(null, totalSize);
-    });
-  }
-};
+  checkIfFileExists(bucketName, key, callback) {
+    logger.log({ bucketName, key }, 'check file existence in s3')
+    return s3.headObject({ Bucket: bucketName, Key: key }, function(err, data) {
+      if (err != null) {
+        if (err.code === 'NotFound') {
+          return callback(null, false)
+        }
+        logger.err(
+          { err, bucketName, key },
+          'something went wrong checking head in s3'
+        )
+        return callback(err)
+      }
+      return callback(null, data.ETag != null)
+    })
+  },
+
+  directorySize(bucketName, key, callback) {
+    logger.log({ bucketName, key }, 'get project size in s3')
+    return s3.listObjects({ Bucket: bucketName, Prefix: key }, function(
+      err,
+      data
+    ) {
+      if (err != null) {
+        logger.err(
+          { err, bucketName, key },
+          'something went wrong listing prefix in s3'
+        )
+        return callback(err)
+      }
+      if (data.Contents.length === 0) {
+        logger.log({ bucketName, key }, 'the directory is empty')
+        return callback()
+      }
+      let totalSize = 0
+      _.each(data.Contents, entry => (totalSize += entry.Size))
+      return callback(null, totalSize)
+    })
+  }
+}
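For context, the module above exposes a Node-style callback API. A minimal usage sketch follows; the require path, bucket, and key are assumptions for illustration, not part of this commit.

// Illustrative sketch only — path and names are assumed.
const S3PersistorManager = require('./app/js/S3PersistorManager')

// Stream bytes 0-1023 of an object. getFileStream calls back once the
// stream becomes readable, or with Errors.NotFoundError for a missing key.
S3PersistorManager.getFileStream(
  'some-bucket',
  'project/file',
  { start: 0, end: 1023 },
  function(err, stream) {
    if (err != null) {
      return console.error('failed to get stream', err)
    }
    stream.pipe(process.stdout)
  }
)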
@@ -9,38 +9,40 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
-let BucketController;
-const settings = require("settings-sharelatex");
-const logger = require("logger-sharelatex");
-const FileHandler = require("./FileHandler");
-const metrics = require("metrics-sharelatex");
-const Errors = require('./Errors');
+let BucketController
+const settings = require('settings-sharelatex')
+const logger = require('logger-sharelatex')
+const FileHandler = require('./FileHandler')
+const metrics = require('metrics-sharelatex')
+const Errors = require('./Errors')

-module.exports = (BucketController = {
-
-  getFile(req, res){
-    const {bucket} = req.params;
-    const key = req.params[0];
-    const credentials = settings.filestore.s3BucketCreds != null ? settings.filestore.s3BucketCreds[bucket] : undefined;
-    const options = {
-      key,
-      bucket,
-      credentials
-    };
-    metrics.inc(`${bucket}.getFile`);
-    logger.log({key, bucket}, "receiving request to get file from bucket");
-    return FileHandler.getFile(bucket, key, options, function(err, fileStream){
-      if (err != null) {
-        logger.err({err, key, bucket}, "problem getting file from bucket");
-        if (err instanceof Errors.NotFoundError) {
-          return res.send(404);
-        } else {
-          return res.send(500);
-        }
-      } else {
-        logger.log({key, bucket}, "sending bucket file to response");
-        return fileStream.pipe(res);
-      }
-    });
-  }
-});
+module.exports = BucketController = {
+  getFile(req, res) {
+    const { bucket } = req.params
+    const key = req.params[0]
+    const credentials =
+      settings.filestore.s3BucketCreds != null
+        ? settings.filestore.s3BucketCreds[bucket]
+        : undefined
+    const options = {
+      key,
+      bucket,
+      credentials
+    }
+    metrics.inc(`${bucket}.getFile`)
+    logger.log({ key, bucket }, 'receiving request to get file from bucket')
+    return FileHandler.getFile(bucket, key, options, function(err, fileStream) {
+      if (err != null) {
+        logger.err({ err, key, bucket }, 'problem getting file from bucket')
+        if (err instanceof Errors.NotFoundError) {
+          return res.send(404)
+        } else {
+          return res.send(500)
+        }
+      } else {
+        logger.log({ key, bucket }, 'sending bucket file to response')
+        return fileStream.pipe(res)
+      }
+    })
+  }
+}
@@ -4,14 +4,13 @@
 */
 // TODO: This file was created by bulk-decaffeinate.
 // Fix any style issues and re-enable lint.
-let Errors;
+let Errors
 var NotFoundError = function(message) {
-  const error = new Error(message);
-  error.name = "NotFoundError";
-  error.__proto__ = NotFoundError.prototype;
-  return error;
-};
-NotFoundError.prototype.__proto__ = Error.prototype;
+  const error = new Error(message)
+  error.name = 'NotFoundError'
+  error.__proto__ = NotFoundError.prototype
+  return error
+}
+NotFoundError.prototype.__proto__ = Error.prototype

-module.exports = (Errors =
-  {NotFoundError});
+module.exports = Errors = { NotFoundError }
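The factory above returns a plain Error whose prototype chain is rewired, which is what lets other modules in this diff test err instanceof Errors.NotFoundError. A minimal sketch of that behaviour (require path assumed for illustration):

// Illustrative sketch only.
const Errors = require('./app/js/Errors')

const err = new Errors.NotFoundError('File not found in S3: bucket:key')
console.log(err instanceof Errors.NotFoundError) // true, via error.__proto__
console.log(err instanceof Error) // true, via NotFoundError.prototype.__proto__
console.log(err.name) // 'NotFoundError'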
@@ -11,161 +11,196 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
-const logger = require("logger-sharelatex");
-const fs = require("fs");
-const path = require("path");
-const LocalFileWriter = require("./LocalFileWriter");
-const Errors = require('./Errors');
-const rimraf = require("rimraf");
-const _ = require("underscore");
-
-const filterName = key => key.replace(/\//g, "_");
+const logger = require('logger-sharelatex')
+const fs = require('fs')
+const path = require('path')
+const LocalFileWriter = require('./LocalFileWriter')
+const Errors = require('./Errors')
+const rimraf = require('rimraf')
+const _ = require('underscore')
+
+const filterName = key => key.replace(/\//g, '_')

 module.exports = {
-  sendFile( location, target, source, callback) {
-    if (callback == null) { callback = function(err){}; }
-    const filteredTarget = filterName(target);
-    logger.log({location, target:filteredTarget, source}, "sending file");
+  sendFile(location, target, source, callback) {
+    if (callback == null) {
+      callback = function(err) {}
+    }
+    const filteredTarget = filterName(target)
+    logger.log({ location, target: filteredTarget, source }, 'sending file')
     const done = _.once(function(err) {
       if (err != null) {
-        logger.err({err, location, target:filteredTarget, source}, "Error on put of file");
+        logger.err(
+          { err, location, target: filteredTarget, source },
+          'Error on put of file'
+        )
       }
-      return callback(err);
-    });
+      return callback(err)
+    })
     // actually copy the file (instead of moving it) to maintain consistent behaviour
     // between the different implementations
-    const sourceStream = fs.createReadStream(source);
-    sourceStream.on('error', done);
-    const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`);
-    targetStream.on('error', done);
-    targetStream.on('finish', () => done());
-    return sourceStream.pipe(targetStream);
+    const sourceStream = fs.createReadStream(source)
+    sourceStream.on('error', done)
+    const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`)
+    targetStream.on('error', done)
+    targetStream.on('finish', () => done())
+    return sourceStream.pipe(targetStream)
   },

-  sendStream( location, target, sourceStream, callback) {
-    if (callback == null) { callback = function(err){}; }
-    logger.log({location, target}, "sending file stream");
-    sourceStream.on("error", err => logger.err({location, target, err:err("error on stream to send")}));
-    return LocalFileWriter.writeStream(sourceStream, null, (err, fsPath)=> {
+  sendStream(location, target, sourceStream, callback) {
+    if (callback == null) {
+      callback = function(err) {}
+    }
+    logger.log({ location, target }, 'sending file stream')
+    sourceStream.on('error', err =>
+      logger.err({ location, target, err: err('error on stream to send') })
+    )
+    return LocalFileWriter.writeStream(sourceStream, null, (err, fsPath) => {
       if (err != null) {
-        logger.err({location, target, fsPath, err}, "something went wrong writing stream to disk");
-        return callback(err);
+        logger.err(
+          { location, target, fsPath, err },
+          'something went wrong writing stream to disk'
+        )
+        return callback(err)
       }
-      return this.sendFile(location, target, fsPath, err => // delete the temporary file created above and return the original error
-        LocalFileWriter.deleteFile(fsPath, () => callback(err)));
-    });
+      return this.sendFile(location, target, fsPath, (
+        err // delete the temporary file created above and return the original error
+      ) => LocalFileWriter.deleteFile(fsPath, () => callback(err)))
+    })
   },

   // opts may be {start: Number, end: Number}
   getFileStream(location, name, opts, callback) {
-    if (callback == null) { callback = function(err, res){}; }
-    const filteredName = filterName(name);
-    logger.log({location, filteredName}, "getting file");
+    if (callback == null) {
+      callback = function(err, res) {}
+    }
+    const filteredName = filterName(name)
+    logger.log({ location, filteredName }, 'getting file')
     return fs.open(`${location}/${filteredName}`, 'r', function(err, fd) {
       if (err != null) {
-        logger.err({err, location, filteredName:name}, "Error reading from file");
+        logger.err(
+          { err, location, filteredName: name },
+          'Error reading from file'
+        )
       }
       if (err.code === 'ENOENT') {
-        return callback(new Errors.NotFoundError(err.message), null);
+        return callback(new Errors.NotFoundError(err.message), null)
       } else {
-        return callback(err, null);
+        return callback(err, null)
       }
-      opts.fd = fd;
-      const sourceStream = fs.createReadStream(null, opts);
-      return callback(null, sourceStream);
-    });
+      opts.fd = fd
+      const sourceStream = fs.createReadStream(null, opts)
+      return callback(null, sourceStream)
+    })
   },

   getFileSize(location, filename, callback) {
-    const fullPath = path.join(location, filterName(filename));
+    const fullPath = path.join(location, filterName(filename))
     return fs.stat(fullPath, function(err, stats) {
       if (err != null) {
         if (err.code === 'ENOENT') {
-          logger.log({location, filename}, "file not found");
-          callback(new Errors.NotFoundError(err.message));
+          logger.log({ location, filename }, 'file not found')
+          callback(new Errors.NotFoundError(err.message))
         } else {
-          logger.err({err, location, filename}, "failed to stat file");
-          callback(err);
+          logger.err({ err, location, filename }, 'failed to stat file')
+          callback(err)
         }
-        return;
+        return
       }
-      return callback(null, stats.size);
-    });
+      return callback(null, stats.size)
+    })
   },

-  copyFile(location, fromName, toName, callback){
-    if (callback == null) { callback = function(err){}; }
-    const filteredFromName=filterName(fromName);
-    const filteredToName=filterName(toName);
-    logger.log({location, fromName:filteredFromName, toName:filteredToName}, "copying file");
-    const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`);
+  copyFile(location, fromName, toName, callback) {
+    if (callback == null) {
+      callback = function(err) {}
+    }
+    const filteredFromName = filterName(fromName)
+    const filteredToName = filterName(toName)
+    logger.log(
+      { location, fromName: filteredFromName, toName: filteredToName },
+      'copying file'
+    )
+    const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`)
     sourceStream.on('error', function(err) {
-      logger.err({err, location, key:filteredFromName}, "Error reading from file");
-      return callback(err);
-    });
-    const targetStream = fs.createWriteStream(`${location}/${filteredToName}`);
+      logger.err(
+        { err, location, key: filteredFromName },
+        'Error reading from file'
+      )
+      return callback(err)
+    })
+    const targetStream = fs.createWriteStream(`${location}/${filteredToName}`)
     targetStream.on('error', function(err) {
-      logger.err({err, location, key:filteredToName}, "Error writing to file");
-      return callback(err);
-    });
-    targetStream.on('finish', () => callback(null));
-    return sourceStream.pipe(targetStream);
+      logger.err(
+        { err, location, key: filteredToName },
+        'Error writing to file'
+      )
+      return callback(err)
+    })
+    targetStream.on('finish', () => callback(null))
+    return sourceStream.pipe(targetStream)
   },

-  deleteFile(location, name, callback){
-    const filteredName = filterName(name);
-    logger.log({location, filteredName}, "delete file");
+  deleteFile(location, name, callback) {
+    const filteredName = filterName(name)
+    logger.log({ location, filteredName }, 'delete file')
     return fs.unlink(`${location}/${filteredName}`, function(err) {
       if (err != null) {
-        logger.err({err, location, filteredName}, "Error on delete.");
-        return callback(err);
+        logger.err({ err, location, filteredName }, 'Error on delete.')
+        return callback(err)
       } else {
-        return callback();
+        return callback()
       }
-    });
+    })
   },

-  deleteDirectory(location, name, callback){
-    if (callback == null) { callback = function(err){}; }
-    const filteredName = filterName(name.replace(/\/$/,''));
+  deleteDirectory(location, name, callback) {
+    if (callback == null) {
+      callback = function(err) {}
+    }
+    const filteredName = filterName(name.replace(/\/$/, ''))
     return rimraf(`${location}/${filteredName}`, function(err) {
       if (err != null) {
-        logger.err({err, location, filteredName}, "Error on rimraf rmdir.");
-        return callback(err);
+        logger.err({ err, location, filteredName }, 'Error on rimraf rmdir.')
+        return callback(err)
       } else {
-        return callback();
+        return callback()
       }
-    });
+    })
   },

-  checkIfFileExists(location, name, callback){
-    if (callback == null) { callback = function(err,exists){}; }
-    const filteredName = filterName(name);
-    logger.log({location, filteredName}, "checking if file exists");
+  checkIfFileExists(location, name, callback) {
+    if (callback == null) {
+      callback = function(err, exists) {}
+    }
+    const filteredName = filterName(name)
+    logger.log({ location, filteredName }, 'checking if file exists')
     return fs.exists(`${location}/${filteredName}`, function(exists) {
-      logger.log({location, filteredName, exists}, "checked if file exists");
-      return callback(null, exists);
-    });
+      logger.log({ location, filteredName, exists }, 'checked if file exists')
+      return callback(null, exists)
+    })
   },

-  directorySize(location, name, callback){
-    const filteredName = filterName(name.replace(/\/$/,''));
-    logger.log({location, filteredName}, "get project size in file system");
+  directorySize(location, name, callback) {
+    const filteredName = filterName(name.replace(/\/$/, ''))
+    logger.log({ location, filteredName }, 'get project size in file system')
     return fs.readdir(`${location}/${filteredName}`, function(err, files) {
       if (err != null) {
-        logger.err({err, location, filteredName}, "something went wrong listing prefix in aws");
-        return callback(err);
+        logger.err(
+          { err, location, filteredName },
+          'something went wrong listing prefix in aws'
+        )
+        return callback(err)
       }
-      let totalSize = 0;
-      _.each(files, function(entry){
-        const fd = fs.openSync(`${location}/${filteredName}/${entry}`, 'r');
-        const fileStats = fs.fstatSync(fd);
-        totalSize += fileStats.size;
-        return fs.closeSync(fd);
-      });
-      logger.log({totalSize}, "total size", {files});
-      return callback(null, totalSize);
-    });
+      let totalSize = 0
+      _.each(files, function(entry) {
+        const fd = fs.openSync(`${location}/${filteredName}/${entry}`, 'r')
+        const fileStats = fs.fstatSync(fd)
+        totalSize += fileStats.size
+        return fs.closeSync(fd)
+      })
+      logger.log({ totalSize }, 'total size', { files })
+      return callback(null, totalSize)
+    })
   }
-};
+}
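The filesystem persistor above flattens object keys into single file names with filterName. A quick sketch of that mapping (sample keys are illustrative):

// Illustrative sketch only — mirrors the filterName helper above.
const filterName = key => key.replace(/\//g, '_')

console.log(filterName('project123/file456')) // 'project123_file456'
console.log(filterName('a/b/c')) // 'a_b_c'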
@@ -10,142 +10,164 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
-let FileController;
-const PersistorManager = require("./PersistorManager");
-const settings = require("settings-sharelatex");
-const logger = require("logger-sharelatex");
-const FileHandler = require("./FileHandler");
-const metrics = require("metrics-sharelatex");
-const parseRange = require('range-parser');
-const Errors = require('./Errors');
+let FileController
+const PersistorManager = require('./PersistorManager')
+const settings = require('settings-sharelatex')
+const logger = require('logger-sharelatex')
+const FileHandler = require('./FileHandler')
+const metrics = require('metrics-sharelatex')
+const parseRange = require('range-parser')
+const Errors = require('./Errors')

-const oneDayInSeconds = 60 * 60 * 24;
-const maxSizeInBytes = 1024 * 1024 * 1024; // 1GB
+const oneDayInSeconds = 60 * 60 * 24
+const maxSizeInBytes = 1024 * 1024 * 1024 // 1GB

-module.exports = (FileController = {
-  getFile(req, res){
-    const {key, bucket} = req;
-    const {format, style} = req.query;
-    const options = {
-      key,
-      bucket,
-      format,
-      style,
-    };
-    metrics.inc("getFile");
-    logger.log({key, bucket, format, style}, "receiving request to get file");
-    if (req.headers.range != null) {
-      const range = FileController._get_range(req.headers.range);
-      options.start = range.start;
-      options.end = range.end;
-      logger.log({start: range.start, end: range.end}, "getting range of bytes from file");
-    }
-    return FileHandler.getFile(bucket, key, options, function(err, fileStream){
-      if (err != null) {
-        if (err instanceof Errors.NotFoundError) {
-          return res.send(404);
-        } else {
-          logger.err({err, key, bucket, format, style}, "problem getting file");
-          return res.send(500);
-        }
-      } else if (req.query.cacheWarm) {
-        logger.log({key, bucket, format, style}, "request is only for cache warm so not sending stream");
-        return res.send(200);
-      } else {
-        logger.log({key, bucket, format, style}, "sending file to response");
-        return fileStream.pipe(res);
-      }
-    });
-  },
+module.exports = FileController = {
+  getFile(req, res) {
+    const { key, bucket } = req
+    const { format, style } = req.query
+    const options = {
+      key,
+      bucket,
+      format,
+      style
+    }
+    metrics.inc('getFile')
+    logger.log({ key, bucket, format, style }, 'receiving request to get file')
+    if (req.headers.range != null) {
+      const range = FileController._get_range(req.headers.range)
+      options.start = range.start
+      options.end = range.end
+      logger.log(
+        { start: range.start, end: range.end },
+        'getting range of bytes from file'
+      )
+    }
+    return FileHandler.getFile(bucket, key, options, function(err, fileStream) {
+      if (err != null) {
+        if (err instanceof Errors.NotFoundError) {
+          return res.send(404)
+        } else {
+          logger.err(
+            { err, key, bucket, format, style },
+            'problem getting file'
+          )
+          return res.send(500)
+        }
+      } else if (req.query.cacheWarm) {
+        logger.log(
+          { key, bucket, format, style },
+          'request is only for cache warm so not sending stream'
+        )
+        return res.send(200)
+      } else {
+        logger.log({ key, bucket, format, style }, 'sending file to response')
+        return fileStream.pipe(res)
+      }
+    })
+  },

-  getFileHead(req, res) {
-    const {key, bucket} = req;
-    metrics.inc("getFileSize");
-    logger.log({ key, bucket }, "receiving request to get file metadata");
-    return FileHandler.getFileSize(bucket, key, function(err, fileSize) {
-      if (err != null) {
-        if (err instanceof Errors.NotFoundError) {
-          res.status(404).end();
-        } else {
-          res.status(500).end();
-        }
-        return;
-      }
-      res.set("Content-Length", fileSize);
-      return res.status(200).end();
-    });
-  },
+  getFileHead(req, res) {
+    const { key, bucket } = req
+    metrics.inc('getFileSize')
+    logger.log({ key, bucket }, 'receiving request to get file metadata')
+    return FileHandler.getFileSize(bucket, key, function(err, fileSize) {
+      if (err != null) {
+        if (err instanceof Errors.NotFoundError) {
+          res.status(404).end()
+        } else {
+          res.status(500).end()
+        }
+        return
+      }
+      res.set('Content-Length', fileSize)
+      return res.status(200).end()
+    })
+  },

-  insertFile(req, res){
-    metrics.inc("insertFile");
-    const {key, bucket} = req;
-    logger.log({key, bucket}, "receiving request to insert file");
-    return FileHandler.insertFile(bucket, key, req, function(err){
-      if (err != null) {
-        logger.log({err, key, bucket}, "error inserting file");
-        return res.send(500);
-      } else {
-        return res.send(200);
-      }
-    });
-  },
+  insertFile(req, res) {
+    metrics.inc('insertFile')
+    const { key, bucket } = req
+    logger.log({ key, bucket }, 'receiving request to insert file')
+    return FileHandler.insertFile(bucket, key, req, function(err) {
+      if (err != null) {
+        logger.log({ err, key, bucket }, 'error inserting file')
+        return res.send(500)
+      } else {
+        return res.send(200)
+      }
+    })
+  },

-  copyFile(req, res){
-    metrics.inc("copyFile");
-    const {key, bucket} = req;
-    const oldProject_id = req.body.source.project_id;
-    const oldFile_id = req.body.source.file_id;
-    logger.log({key, bucket, oldProject_id, oldFile_id}, "receiving request to copy file");
-    return PersistorManager.copyFile(bucket, `${oldProject_id}/${oldFile_id}`, key, function(err){
-      if (err != null) {
-        if (err instanceof Errors.NotFoundError) {
-          return res.send(404);
-        } else {
-          logger.log({err, oldProject_id, oldFile_id}, "something went wrong copying file");
-          return res.send(500);
-        }
-      } else {
-        return res.send(200);
-      }
-    });
-  },
+  copyFile(req, res) {
+    metrics.inc('copyFile')
+    const { key, bucket } = req
+    const oldProject_id = req.body.source.project_id
+    const oldFile_id = req.body.source.file_id
+    logger.log(
+      { key, bucket, oldProject_id, oldFile_id },
+      'receiving request to copy file'
+    )
+    return PersistorManager.copyFile(
+      bucket,
+      `${oldProject_id}/${oldFile_id}`,
+      key,
+      function(err) {
+        if (err != null) {
+          if (err instanceof Errors.NotFoundError) {
+            return res.send(404)
+          } else {
+            logger.log(
+              { err, oldProject_id, oldFile_id },
+              'something went wrong copying file'
+            )
+            return res.send(500)
+          }
+        } else {
+          return res.send(200)
+        }
+      }
+    )
+  },

-  deleteFile(req, res){
-    metrics.inc("deleteFile");
-    const {key, bucket} = req;
-    logger.log({key, bucket}, "receiving request to delete file");
-    return FileHandler.deleteFile(bucket, key, function(err){
-      if (err != null) {
-        logger.log({err, key, bucket}, "something went wrong deleting file");
-        return res.send(500);
-      } else {
-        return res.send(204);
-      }
-    });
-  },
+  deleteFile(req, res) {
+    metrics.inc('deleteFile')
+    const { key, bucket } = req
+    logger.log({ key, bucket }, 'receiving request to delete file')
+    return FileHandler.deleteFile(bucket, key, function(err) {
+      if (err != null) {
+        logger.log({ err, key, bucket }, 'something went wrong deleting file')
+        return res.send(500)
+      } else {
+        return res.send(204)
+      }
+    })
+  },

-  _get_range(header) {
-    const parsed = parseRange(maxSizeInBytes, header);
-    if ((parsed === -1) || (parsed === -2) || (parsed.type !== 'bytes')) {
-      return null;
-    } else {
-      const range = parsed[0];
-      return {start: range.start, end: range.end};
-    }
-  },
+  _get_range(header) {
+    const parsed = parseRange(maxSizeInBytes, header)
+    if (parsed === -1 || parsed === -2 || parsed.type !== 'bytes') {
+      return null
+    } else {
+      const range = parsed[0]
+      return { start: range.start, end: range.end }
+    }
+  },

-  directorySize(req, res){
-    metrics.inc("projectSize");
-    const {project_id, bucket} = req;
-    logger.log({project_id, bucket}, "receiving request to project size");
-    return FileHandler.getDirectorySize(bucket, project_id, function(err, size){
-      if (err != null) {
-        logger.log({err, project_id, bucket}, "error inserting file");
-        return res.send(500);
-      } else {
-        return res.json({'total bytes' : size});
-      }
-    });
-  }
-});
+  directorySize(req, res) {
+    metrics.inc('projectSize')
+    const { project_id, bucket } = req
+    logger.log({ project_id, bucket }, 'receiving request to project size')
+    return FileHandler.getDirectorySize(bucket, project_id, function(
+      err,
+      size
+    ) {
+      if (err != null) {
+        logger.log({ err, project_id, bucket }, 'error inserting file')
+        return res.send(500)
+      } else {
+        return res.json({ 'total bytes': size })
+      }
+    })
+  }
+}
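_get_range above delegates to the range-parser package, whose return values it branches on. A short sketch of those values (sizes and headers are illustrative):

// Illustrative sketch only — range-parser returns an array of ranges with a
// .type property on success, -1 for an unsatisfiable range, and -2 for a
// malformed header.
const parseRange = require('range-parser')

console.log(parseRange(1000, 'bytes=0-499')) // [ { start: 0, end: 499 } ], .type === 'bytes'
console.log(parseRange(1000, 'bytes=9000-')) // -1 (unsatisfiable)
console.log(parseRange(1000, 'not-a-range')) // -2 (malformed)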
@@ -9,77 +9,125 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
-const _ = require("underscore");
-const metrics = require("metrics-sharelatex");
-const logger = require("logger-sharelatex");
-const safe_exec = require("./SafeExec");
-const approvedFormats = ["png"];
-const Settings = require("settings-sharelatex");
+const _ = require('underscore')
+const metrics = require('metrics-sharelatex')
+const logger = require('logger-sharelatex')
+const safe_exec = require('./SafeExec')
+const approvedFormats = ['png']
+const Settings = require('settings-sharelatex')

-const fourtySeconds = 40 * 1000;
+const fourtySeconds = 40 * 1000

 const childProcessOpts = {
-  killSignal: "SIGTERM",
-  timeout: fourtySeconds
-};
+  killSignal: 'SIGTERM',
+  timeout: fourtySeconds
+}

 module.exports = {
-  convert(sourcePath, requestedFormat, callback){
-    logger.log({sourcePath, requestedFormat}, "converting file format");
-    const timer = new metrics.Timer("imageConvert");
-    const destPath = `${sourcePath}.${requestedFormat}`;
-    sourcePath = `${sourcePath}[0]`;
-    if (!_.include(approvedFormats, requestedFormat)) {
-      const err = new Error("invalid format requested");
-      return callback(err);
-    }
-    const width = "600x";
-    let command = ["convert", "-define", `pdf:fit-page=${width}`, "-flatten", "-density", "300", sourcePath, destPath];
-    command = Settings.commands.convertCommandPrefix.concat(command);
-    return safe_exec(command, childProcessOpts, function(err, stdout, stderr){
-      timer.done();
-      if (err != null) {
-        logger.err({err, stderr, sourcePath, requestedFormat, destPath}, "something went wrong converting file");
-      } else {
-        logger.log({sourcePath, requestedFormat, destPath}, "finished converting file");
-      }
-      return callback(err, destPath);
-    });
-  },
+  convert(sourcePath, requestedFormat, callback) {
+    logger.log({ sourcePath, requestedFormat }, 'converting file format')
+    const timer = new metrics.Timer('imageConvert')
+    const destPath = `${sourcePath}.${requestedFormat}`
+    sourcePath = `${sourcePath}[0]`
+    if (!_.include(approvedFormats, requestedFormat)) {
+      const err = new Error('invalid format requested')
+      return callback(err)
+    }
+    const width = '600x'
+    let command = [
+      'convert',
+      '-define',
+      `pdf:fit-page=${width}`,
+      '-flatten',
+      '-density',
+      '300',
+      sourcePath,
+      destPath
+    ]
+    command = Settings.commands.convertCommandPrefix.concat(command)
+    return safe_exec(command, childProcessOpts, function(err, stdout, stderr) {
+      timer.done()
+      if (err != null) {
+        logger.err(
+          { err, stderr, sourcePath, requestedFormat, destPath },
+          'something went wrong converting file'
+        )
+      } else {
+        logger.log(
+          { sourcePath, requestedFormat, destPath },
+          'finished converting file'
+        )
+      }
+      return callback(err, destPath)
+    })
+  },

-  thumbnail(sourcePath, callback){
-    const destPath = `${sourcePath}.png`;
-    sourcePath = `${sourcePath}[0]`;
-    const width = "260x";
-    let command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", `pdf:fit-page=${width}`, sourcePath, "-resize", width, destPath];
-    logger.log({sourcePath, destPath, command}, "thumbnail convert file");
-    command = Settings.commands.convertCommandPrefix.concat(command);
-    return safe_exec(command, childProcessOpts, function(err, stdout, stderr){
-      if (err != null) {
-        logger.err({err, stderr, sourcePath}, "something went wrong converting file to thumbnail");
-      } else {
-        logger.log({sourcePath, destPath}, "finished thumbnailing file");
-      }
-      return callback(err, destPath);
-    });
-  },
+  thumbnail(sourcePath, callback) {
+    const destPath = `${sourcePath}.png`
+    sourcePath = `${sourcePath}[0]`
+    const width = '260x'
+    let command = [
+      'convert',
+      '-flatten',
+      '-background',
+      'white',
+      '-density',
+      '300',
+      '-define',
+      `pdf:fit-page=${width}`,
+      sourcePath,
+      '-resize',
+      width,
+      destPath
+    ]
+    logger.log({ sourcePath, destPath, command }, 'thumbnail convert file')
+    command = Settings.commands.convertCommandPrefix.concat(command)
+    return safe_exec(command, childProcessOpts, function(err, stdout, stderr) {
+      if (err != null) {
+        logger.err(
+          { err, stderr, sourcePath },
+          'something went wrong converting file to thumbnail'
+        )
+      } else {
+        logger.log({ sourcePath, destPath }, 'finished thumbnailing file')
+      }
+      return callback(err, destPath)
+    })
+  },

-  preview(sourcePath, callback){
-    logger.log({sourcePath}, "preview convert file");
-    const destPath = `${sourcePath}.png`;
-    sourcePath = `${sourcePath}[0]`;
-    const width = "548x";
-    let command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", `pdf:fit-page=${width}`, sourcePath, "-resize", width, destPath];
-    command = Settings.commands.convertCommandPrefix.concat(command);
-    return safe_exec(command, childProcessOpts, function(err, stdout, stderr){
-      if (err != null) {
-        logger.err({err, stderr, sourcePath, destPath}, "something went wrong converting file to preview");
-      } else {
-        logger.log({sourcePath, destPath}, "finished converting file to preview");
-      }
-      return callback(err, destPath);
-    });
-  }
-};
+  preview(sourcePath, callback) {
+    logger.log({ sourcePath }, 'preview convert file')
+    const destPath = `${sourcePath}.png`
+    sourcePath = `${sourcePath}[0]`
+    const width = '548x'
+    let command = [
+      'convert',
+      '-flatten',
+      '-background',
+      'white',
+      '-density',
+      '300',
+      '-define',
+      `pdf:fit-page=${width}`,
+      sourcePath,
+      '-resize',
+      width,
+      destPath
+    ]
+    command = Settings.commands.convertCommandPrefix.concat(command)
+    return safe_exec(command, childProcessOpts, function(err, stdout, stderr) {
+      if (err != null) {
+        logger.err(
+          { err, stderr, sourcePath, destPath },
+          'something went wrong converting file to preview'
+        )
+      } else {
+        logger.log(
+          { sourcePath, destPath },
+          'finished converting file to preview'
+        )
+      }
+      return callback(err, destPath)
+    })
+  }
+}
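For reference, the argv list built in convert() above amounts to an ImageMagick invocation like the one below; the input path is an assumption for illustration, and any configured convertCommandPrefix would be prepended.

// Illustrative sketch only.
const width = '600x'
const sourcePath = '/tmp/input.pdf[0]' // "[0]" selects the first PDF page
const destPath = '/tmp/input.pdf.png'

const command = [
  'convert',
  '-define',
  `pdf:fit-page=${width}`,
  '-flatten',
  '-density',
  '300',
  sourcePath,
  destPath
]
console.log(command.join(' '))
// convert -define pdf:fit-page=600x -flatten -density 300 /tmp/input.pdf[0] /tmp/input.pdf.png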
@ -11,166 +11,225 @@
|
|||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let FileHandler;
|
||||
const settings = require("settings-sharelatex");
|
||||
const PersistorManager = require("./PersistorManager");
|
||||
const LocalFileWriter = require("./LocalFileWriter");
|
||||
const logger = require("logger-sharelatex");
|
||||
const FileConverter = require("./FileConverter");
|
||||
const KeyBuilder = require("./KeyBuilder");
|
||||
const async = require("async");
|
||||
const ImageOptimiser = require("./ImageOptimiser");
|
||||
const Errors = require('./Errors');
|
||||
let FileHandler
|
||||
const settings = require('settings-sharelatex')
|
||||
const PersistorManager = require('./PersistorManager')
|
||||
const LocalFileWriter = require('./LocalFileWriter')
|
||||
const logger = require('logger-sharelatex')
|
||||
const FileConverter = require('./FileConverter')
|
||||
const KeyBuilder = require('./KeyBuilder')
|
||||
const async = require('async')
|
||||
const ImageOptimiser = require('./ImageOptimiser')
|
||||
const Errors = require('./Errors')
|
||||
|
||||
module.exports = (FileHandler = {
|
||||
module.exports = FileHandler = {
|
||||
insertFile(bucket, key, stream, callback) {
|
||||
const convertedKey = KeyBuilder.getConvertedFolderKey(key)
|
||||
return PersistorManager.deleteDirectory(bucket, convertedKey, function(
|
||||
error
|
||||
) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return PersistorManager.sendStream(bucket, key, stream, callback)
|
||||
})
|
||||
},
|
||||
|
||||
insertFile(bucket, key, stream, callback){
|
||||
const convertedKey = KeyBuilder.getConvertedFolderKey(key);
|
||||
return PersistorManager.deleteDirectory(bucket, convertedKey, function(error) {
|
||||
if (error != null) { return callback(error); }
|
||||
return PersistorManager.sendStream(bucket, key, stream, callback);
|
||||
});
|
||||
},
|
||||
deleteFile(bucket, key, callback) {
|
||||
const convertedKey = KeyBuilder.getConvertedFolderKey(key)
|
||||
return async.parallel(
|
||||
[
|
||||
done => PersistorManager.deleteFile(bucket, key, done),
|
||||
done => PersistorManager.deleteDirectory(bucket, convertedKey, done)
|
||||
],
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
deleteFile(bucket, key, callback){
|
||||
const convertedKey = KeyBuilder.getConvertedFolderKey(key);
|
||||
return async.parallel([
|
||||
done => PersistorManager.deleteFile(bucket, key, done),
|
||||
done => PersistorManager.deleteDirectory(bucket, convertedKey, done)
|
||||
], callback);
|
||||
},
|
||||
getFile(bucket, key, opts, callback) {
|
||||
// In this call, opts can contain credentials
|
||||
if (opts == null) {
|
||||
opts = {}
|
||||
}
|
||||
logger.log({ bucket, key, opts: this._scrubSecrets(opts) }, 'getting file')
|
||||
if (opts.format == null && opts.style == null) {
|
||||
return this._getStandardFile(bucket, key, opts, callback)
|
||||
} else {
|
||||
return this._getConvertedFile(bucket, key, opts, callback)
|
||||
}
|
||||
},
|
||||
|
||||
getFile(bucket, key, opts, callback){
|
||||
// In this call, opts can contain credentials
|
||||
if (opts == null) { opts = {}; }
|
||||
logger.log({bucket, key, opts:this._scrubSecrets(opts)}, "getting file");
|
||||
if ((opts.format == null) && (opts.style == null)) {
|
||||
return this._getStandardFile(bucket, key, opts, callback);
|
||||
} else {
|
||||
return this._getConvertedFile(bucket, key, opts, callback);
|
||||
}
|
||||
},
|
||||
getFileSize(bucket, key, callback) {
|
||||
return PersistorManager.getFileSize(bucket, key, callback)
|
||||
},
|
||||
|
||||
getFileSize(bucket, key, callback) {
|
||||
return PersistorManager.getFileSize(bucket, key, callback);
|
||||
},
|
||||
_getStandardFile(bucket, key, opts, callback) {
|
||||
return PersistorManager.getFileStream(bucket, key, opts, function(
|
||||
err,
|
||||
fileStream
|
||||
) {
|
||||
if (err != null && !(err instanceof Errors.NotFoundError)) {
|
||||
logger.err(
|
||||
{ bucket, key, opts: FileHandler._scrubSecrets(opts) },
|
||||
'error getting fileStream'
|
||||
)
|
||||
}
|
||||
return callback(err, fileStream)
|
||||
})
|
||||
},
|
||||
|
||||
_getStandardFile(bucket, key, opts, callback){
|
||||
return PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream){
|
||||
if ((err != null) && !(err instanceof Errors.NotFoundError)) {
|
||||
logger.err({bucket, key, opts:FileHandler._scrubSecrets(opts)}, "error getting fileStream");
|
||||
}
|
||||
return callback(err, fileStream);
|
||||
});
|
||||
},
|
||||
_getConvertedFile(bucket, key, opts, callback) {
|
||||
const convertedKey = KeyBuilder.addCachingToKey(key, opts)
|
||||
return PersistorManager.checkIfFileExists(
|
||||
bucket,
|
||||
convertedKey,
|
||||
(err, exists) => {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
if (exists) {
|
||||
return PersistorManager.getFileStream(
|
||||
bucket,
|
||||
convertedKey,
|
||||
opts,
|
||||
callback
|
||||
)
|
||||
} else {
|
||||
return this._getConvertedFileAndCache(
|
||||
bucket,
|
||||
key,
|
||||
convertedKey,
|
||||
opts,
|
||||
callback
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
_getConvertedFile(bucket, key, opts, callback){
|
||||
const convertedKey = KeyBuilder.addCachingToKey(key, opts);
|
||||
return PersistorManager.checkIfFileExists(bucket, convertedKey, (err, exists)=> {
|
||||
if (err != null) {
|
||||
return callback(err);
|
||||
}
|
||||
if (exists) {
|
||||
return PersistorManager.getFileStream(bucket, convertedKey, opts, callback);
|
||||
} else {
|
||||
return this._getConvertedFileAndCache(bucket, key, convertedKey, opts, callback);
|
||||
}
|
||||
});
|
||||
},
|
||||
_getConvertedFileAndCache(bucket, key, convertedKey, opts, callback) {
|
||||
let convertedFsPath = ''
|
||||
const originalFsPath = ''
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return this._convertFile(bucket, key, opts, function(
|
||||
err,
|
||||
fileSystemPath,
|
||||
originalFsPath
|
||||
) {
|
||||
convertedFsPath = fileSystemPath
|
||||
originalFsPath = originalFsPath
|
||||
return cb(err)
|
||||
})
|
||||
},
|
||||
cb => ImageOptimiser.compressPng(convertedFsPath, cb),
|
||||
cb =>
|
||||
PersistorManager.sendFile(bucket, convertedKey, convertedFsPath, cb)
|
||||
],
|
||||
function(err) {
|
||||
if (err != null) {
|
||||
LocalFileWriter.deleteFile(convertedFsPath, function() {})
|
||||
LocalFileWriter.deleteFile(originalFsPath, function() {})
|
||||
return callback(err)
|
||||
}
|
||||
// Send back the converted file from the local copy to avoid problems
|
||||
// with the file not being present in S3 yet. As described in the
|
||||
// documentation below, we have already made a 'HEAD' request in
|
||||
// checkIfFileExists so we only have "eventual consistency" if we try
|
||||
// to stream it from S3 here. This was a cause of many 403 errors.
|
||||
//
|
||||
// "Amazon S3 provides read-after-write consistency for PUTS of new
|
||||
// objects in your S3 bucket in all regions with one caveat. The
|
||||
// caveat is that if you make a HEAD or GET request to the key name
|
||||
// (to find if the object exists) before creating the object, Amazon
|
||||
// S3 provides eventual consistency for read-after-write.""
|
||||
// https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel
|
||||
return LocalFileWriter.getStream(convertedFsPath, function(
|
||||
err,
|
||||
readStream
|
||||
) {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
readStream.on('end', function() {
|
||||
logger.log({ convertedFsPath }, 'deleting temporary file')
|
||||
return LocalFileWriter.deleteFile(convertedFsPath, function() {})
|
||||
})
|
||||
return callback(null, readStream)
|
||||
})
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
_getConvertedFileAndCache(bucket, key, convertedKey, opts, callback){
|
||||
let convertedFsPath = "";
|
||||
const originalFsPath = "";
|
||||
return async.series([
|
||||
cb => {
|
||||
return this._convertFile(bucket, key, opts, function(err, fileSystemPath, originalFsPath) {
|
||||
convertedFsPath = fileSystemPath;
|
||||
originalFsPath = originalFsPath;
|
||||
return cb(err);
|
||||
});
|
||||
},
|
||||
cb => ImageOptimiser.compressPng(convertedFsPath, cb),
|
||||
cb => PersistorManager.sendFile(bucket, convertedKey, convertedFsPath, cb)
|
||||
], function(err){
|
||||
if (err != null) {
|
||||
LocalFileWriter.deleteFile(convertedFsPath, function() {});
|
||||
LocalFileWriter.deleteFile(originalFsPath, function() {});
|
||||
return callback(err);
|
||||
}
|
||||
// Send back the converted file from the local copy to avoid problems
|
||||
// with the file not being present in S3 yet. As described in the
|
||||
// documentation below, we have already made a 'HEAD' request in
|
||||
// checkIfFileExists so we only have "eventual consistency" if we try
|
||||
// to stream it from S3 here. This was a cause of many 403 errors.
|
||||
//
|
||||
// "Amazon S3 provides read-after-write consistency for PUTS of new
|
||||
// objects in your S3 bucket in all regions with one caveat. The
|
||||
// caveat is that if you make a HEAD or GET request to the key name
|
||||
// (to find if the object exists) before creating the object, Amazon
|
||||
// S3 provides eventual consistency for read-after-write.""
|
||||
// https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel
|
||||
return LocalFileWriter.getStream(convertedFsPath, function(err, readStream) {
|
||||
if (err != null) { return callback(err); }
|
||||
readStream.on('end', function() {
|
||||
logger.log({convertedFsPath}, "deleting temporary file");
|
||||
return LocalFileWriter.deleteFile(convertedFsPath, function() {});
|
||||
});
|
||||
return callback(null, readStream);
|
||||
});
|
||||
});
|
||||
},
|
||||
_convertFile(bucket, originalKey, opts, callback) {
|
||||
return this._writeS3FileToDisk(bucket, originalKey, opts, function(
|
||||
err,
|
||||
originalFsPath
|
||||
) {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
const done = function(err, destPath) {
|
||||
if (err != null) {
|
||||
logger.err(
|
||||
{ err, bucket, originalKey, opts: FileHandler._scrubSecrets(opts) },
|
||||
          'error converting file'
        )
        return callback(err)
      }
      LocalFileWriter.deleteFile(originalFsPath, function() {})
      return callback(err, destPath, originalFsPath)
    }

  _convertFile(bucket, originalKey, opts, callback){
    return this._writeS3FileToDisk(bucket, originalKey, opts, function(err, originalFsPath){
      if (err != null) {
        return callback(err);
      }
      const done = function(err, destPath){
        if (err != null) {
          logger.err({err, bucket, originalKey, opts:FileHandler._scrubSecrets(opts)}, "error converting file");
          return callback(err);
        }
        LocalFileWriter.deleteFile(originalFsPath, function() {});
        return callback(err, destPath, originalFsPath);
      };
      logger.log({ opts }, 'converting file depending on opts')

      logger.log({opts}, "converting file depending on opts");
      if (opts.format != null) {
        return FileConverter.convert(originalFsPath, opts.format, done)
      } else if (opts.style === 'thumbnail') {
        return FileConverter.thumbnail(originalFsPath, done)
      } else if (opts.style === 'preview') {
        return FileConverter.preview(originalFsPath, done)
      } else {
        return callback(
          new Error(
            `should have specified opts to convert file with ${JSON.stringify(
              opts
            )}`
          )
        )
      }
    })
  },

      if (opts.format != null) {
        return FileConverter.convert(originalFsPath, opts.format, done);
      } else if (opts.style === "thumbnail") {
        return FileConverter.thumbnail(originalFsPath, done);
      } else if (opts.style === "preview") {
        return FileConverter.preview(originalFsPath, done);
      } else {
        return callback(new Error(`should have specified opts to convert file with ${JSON.stringify(opts)}`));
      }
    });
  },
  _writeS3FileToDisk(bucket, key, opts, callback) {
    return PersistorManager.getFileStream(bucket, key, opts, function(
      err,
      fileStream
    ) {
      if (err != null) {
        return callback(err)
      }
      return LocalFileWriter.writeStream(fileStream, key, callback)
    })
  },

  getDirectorySize(bucket, project_id, callback) {
    logger.log({ bucket, project_id }, 'getting project size')
    return PersistorManager.directorySize(bucket, project_id, function(
      err,
      size
    ) {
      if (err != null) {
        logger.err({ bucket, project_id }, 'error getting size')
      }
      return callback(err, size)
    })
  },

  _writeS3FileToDisk(bucket, key, opts, callback){
    return PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream){
      if (err != null) {
        return callback(err);
      }
      return LocalFileWriter.writeStream(fileStream, key, callback);
    });
  },

  getDirectorySize(bucket, project_id, callback){
    logger.log({bucket, project_id}, "getting project size");
    return PersistorManager.directorySize(bucket, project_id, function(err, size){
      if (err != null) {
        logger.err({bucket, project_id}, "error getting size");
      }
      return callback(err, size);
    });
  },

  _scrubSecrets(opts){
    const safe = Object.assign({}, opts);
    delete safe.credentials;
    return safe;
  }
});
  _scrubSecrets(opts) {
    const safe = Object.assign({}, opts)
    delete safe.credentials
    return safe
  }
}

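The `_convertFile` helper above dispatches on exactly one of `opts.format` or `opts.style`, and hands the callback both the converted path and the original temp path. A minimal sketch of driving it directly (the require path, bucket and key are illustrative assumptions, and this is an internal helper rather than a public API):

const FileHandler = require('./app/js/FileHandler') // path assumed for illustration

// opts.format selects FileConverter.convert; opts.style picks thumbnail/preview.
FileHandler._convertFile(
  'user-files-bucket', // hypothetical bucket
  'project123/file456', // hypothetical key
  { format: 'png' },
  function(err, convertedPath, originalPath) {
    if (err != null) {
      return console.error('conversion failed', err)
    }
    console.log('converted file written to', convertedPath)
  }
)
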
@@ -6,71 +6,75 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const fs = require("fs-extra");
const path = require("path");
const async = require("async");
const fileConverter = require("./FileConverter");
const keyBuilder = require("./KeyBuilder");
const fileController = require("./FileController");
const logger = require('logger-sharelatex');
const settings = require("settings-sharelatex");
const streamBuffers = require("stream-buffers");
const _ = require('underscore');
const fs = require('fs-extra')
const path = require('path')
const async = require('async')
const fileConverter = require('./FileConverter')
const keyBuilder = require('./KeyBuilder')
const fileController = require('./FileController')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const streamBuffers = require('stream-buffers')
const _ = require('underscore')

const checkCanStoreFiles = function(callback) {
  callback = _.once(callback)
  const req = { params: {}, query: {}, headers: {} }
  req.params.project_id = settings.health_check.project_id
  req.params.file_id = settings.health_check.file_id
  const myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer({
    initialSize: 100
  })
  const res = {
    send(code) {
      if (code !== 200) {
        return callback(new Error(`non-200 code from getFile: ${code}`))
      }
    }
  }
  myWritableStreamBuffer.send = res.send
  return keyBuilder.userFileKey(req, res, function() {
    fileController.getFile(req, myWritableStreamBuffer)
    return myWritableStreamBuffer.on('close', function() {
      if (myWritableStreamBuffer.size() > 0) {
        return callback()
      } else {
        const err = 'no data in write stream buffer for health check'
        logger.err({ err }, 'error performing health check')
        return callback(err)
      }
    })
  })
}

const checkCanStoreFiles = function(callback){
  callback = _.once(callback);
  const req = {params:{}, query:{}, headers:{}};
  req.params.project_id = settings.health_check.project_id;
  req.params.file_id = settings.health_check.file_id;
  const myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer({initialSize: 100});
  const res = {
    send(code) {
      if (code !== 200) {
        return callback(new Error(`non-200 code from getFile: ${code}`));
      }
    }
  };
  myWritableStreamBuffer.send = res.send;
  return keyBuilder.userFileKey(req, res, function() {
    fileController.getFile(req, myWritableStreamBuffer);
    return myWritableStreamBuffer.on("close", function() {
      if (myWritableStreamBuffer.size() > 0) {
        return callback();
      } else {
        const err = "no data in write stream buffer for health check";
        logger.err({err,}, "error performing health check");
        return callback(err);
      }
    });
  });
};

const checkFileConvert = function(callback){
  if (!settings.enableConversions) {
    return callback();
  }
  const imgPath = path.join(settings.path.uploadFolder, "/tiny.pdf");
  return async.waterfall([
    cb => fs.copy("./tiny.pdf", imgPath, cb),
    cb => fileConverter.thumbnail(imgPath, cb),
    (resultPath, cb) => fs.unlink(resultPath, cb),
    cb => fs.unlink(imgPath, cb)
  ], callback);
};

const checkFileConvert = function(callback) {
  if (!settings.enableConversions) {
    return callback()
  }
  const imgPath = path.join(settings.path.uploadFolder, '/tiny.pdf')
  return async.waterfall(
    [
      cb => fs.copy('./tiny.pdf', imgPath, cb),
      cb => fileConverter.thumbnail(imgPath, cb),
      (resultPath, cb) => fs.unlink(resultPath, cb),
      cb => fs.unlink(imgPath, cb)
    ],
    callback
  )
}

module.exports = {

  check(req, res) {
    logger.log({}, "performing health check");
    return async.parallel([checkFileConvert, checkCanStoreFiles], function(err){
      if (err != null) {
        logger.err({err}, "Health check: error running");
        return res.send(500);
      } else {
        return res.send(200);
      }
    });
  }
};
  check(req, res) {
    logger.log({}, 'performing health check')
    return async.parallel([checkFileConvert, checkCanStoreFiles], function(
      err
    ) {
      if (err != null) {
        logger.err({ err }, 'Health check: error running')
        return res.send(500)
      } else {
        return res.send(200)
      }
    })
  }
}

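The health check composes the two probes with async.parallel and answers via res.send(code). A hedged sketch of exposing it over HTTP (the express app and route path are assumptions; res.send(statusNumber) matches the Express 3-era style used by this code):

const express = require('express')
const healthCheckController = require('./app/js/HealthCheckController') // path assumed

const app = express()
// 200 when both the conversion probe and the store/retrieve probe pass, 500 otherwise.
app.get('/health_check', (req, res) => healthCheckController.check(req, res))
app.listen(3000)
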
@@ -9,36 +9,36 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const {
  exec
} = require('child_process');
const logger = require("logger-sharelatex");
const Settings = require("settings-sharelatex");
const { exec } = require('child_process')
const logger = require('logger-sharelatex')
const Settings = require('settings-sharelatex')

module.exports = {

  compressPng(localPath, callback){
    const startTime = new Date();
    logger.log({localPath}, "optimising png path");
    const args = `optipng ${localPath}`;
    const opts = {
      timeout: 30 * 1000,
      killSignal: "SIGKILL"
    };
    if (!Settings.enableConversions) {
      const error = new Error("Image conversions are disabled");
      return callback(error);
    }
    return exec(args, opts,function(err, stdout, stderr){
      if ((err != null) && (err.signal === 'SIGKILL')) {
        logger.warn({err, stderr, localPath}, "optimiser timeout reached");
        err = null;
      } else if (err != null) {
        logger.err({err, stderr, localPath}, "something went wrong converting compressPng");
      } else {
        logger.log({localPath}, "finished compressPng file");
      }
      return callback(err);
    });
  }
};
  compressPng(localPath, callback) {
    const startTime = new Date()
    logger.log({ localPath }, 'optimising png path')
    const args = `optipng ${localPath}`
    const opts = {
      timeout: 30 * 1000,
      killSignal: 'SIGKILL'
    }
    if (!Settings.enableConversions) {
      const error = new Error('Image conversions are disabled')
      return callback(error)
    }
    return exec(args, opts, function(err, stdout, stderr) {
      if (err != null && err.signal === 'SIGKILL') {
        logger.warn({ err, stderr, localPath }, 'optimiser timeout reached')
        err = null
      } else if (err != null) {
        logger.err(
          { err, stderr, localPath },
          'something went wrong converting compressPng'
        )
      } else {
        logger.log({ localPath }, 'finished compressPng file')
      }
      return callback(err)
    })
  }
}

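Note the timeout semantics in compressPng: if optipng is killed by the 30-second SIGKILL, the error is downgraded to a warning and swallowed, so the caller sees success and the original (un-optimised) PNG stays intact. A small usage sketch (the require path and file location are assumptions):

const ImageOptimiser = require('./app/js/ImageOptimiser') // path assumed

ImageOptimiser.compressPng('/tmp/uploads/figure.png', function(err) {
  if (err != null) {
    // Real failures (e.g. optipng missing) land here; a timeout does not.
    return console.error('optimisation failed', err)
  }
  console.log('png optimised, or timeout tolerated by design')
})
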
@@ -11,65 +11,61 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const settings = require("settings-sharelatex");
const settings = require('settings-sharelatex')

module.exports = {
  getConvertedFolderKey(key) {
    return (key = `${key}-converted-cache/`)
  },

  addCachingToKey(key, opts) {
    key = this.getConvertedFolderKey(key)
    if (opts.format != null && opts.style == null) {
      key = `${key}format-${opts.format}`
    }
    if (opts.style != null && opts.format == null) {
      key = `${key}style-${opts.style}`
    }
    if (opts.style != null && opts.format != null) {
      key = `${key}format-${opts.format}-style-${opts.style}`
    }
    return key
  },

  getConvertedFolderKey(key){
    return key = `${key}-converted-cache/`;
  },

  userFileKey(req, res, next) {
    const { project_id, file_id } = req.params
    req.key = `${project_id}/${file_id}`
    req.bucket = settings.filestore.stores.user_files
    return next()
  },

  addCachingToKey(key, opts){
    key = this.getConvertedFolderKey(key);
    if ((opts.format != null) && (opts.style == null)) {
      key = `${key}format-${opts.format}`;
    }
    if ((opts.style != null) && (opts.format == null)) {
      key = `${key}style-${opts.style}`;
    }
    if ((opts.style != null) && (opts.format != null)) {
      key = `${key}format-${opts.format}-style-${opts.style}`;
    }
    return key;
  },

  publicFileKey(req, res, next) {
    const { project_id, public_file_id } = req.params
    if (settings.filestore.stores.public_files == null) {
      return res.status(501).send('public files not available')
    } else {
      req.key = `${project_id}/${public_file_id}`
      req.bucket = settings.filestore.stores.public_files
      return next()
    }
  },

  templateFileKey(req, res, next) {
    const { template_id, format, version, sub_type } = req.params
    req.key = `${template_id}/v/${version}/${format}`
    if (sub_type != null) {
      req.key = `${req.key}/${sub_type}`
    }
    req.bucket = settings.filestore.stores.template_files
    req.version = version
    const opts = req.query
    return next()
  },

  userFileKey(req, res, next){
    const {project_id, file_id} = req.params;
    req.key = `${project_id}/${file_id}`;
    req.bucket = settings.filestore.stores.user_files;
    return next();
  },

  publicFileKey(req, res, next){
    const {project_id, public_file_id} = req.params;
    if ((settings.filestore.stores.public_files == null)) {
      return res.status(501).send("public files not available");
    } else {
      req.key = `${project_id}/${public_file_id}`;
      req.bucket = settings.filestore.stores.public_files;
      return next();
    }
  },

  templateFileKey(req, res, next){
    const {template_id, format, version, sub_type} = req.params;
    req.key = `${template_id}/v/${version}/${format}`;
    if (sub_type != null) {
      req.key = `${req.key}/${sub_type}`;
    }
    req.bucket = settings.filestore.stores.template_files;
    req.version = version;
    const opts = req.query;
    return next();
  },

  publicProjectKey(req, res, next){
    const {project_id} = req.params;
    req.project_id = project_id;
    req.bucket = settings.filestore.stores.user_files;
    return next();
  }
};

  publicProjectKey(req, res, next) {
    const { project_id } = req.params
    req.project_id = project_id
    req.bucket = settings.filestore.stores.user_files
    return next()
  }
}

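addCachingToKey composes deterministic cache keys from the conversion options; tracing the rules above, the keys come out like this (the input values are made up for illustration):

// keyBuilder.addCachingToKey('proj/file', { format: 'png' })
//   => 'proj/file-converted-cache/format-png'
// keyBuilder.addCachingToKey('proj/file', { style: 'thumbnail' })
//   => 'proj/file-converted-cache/style-thumbnail'
// keyBuilder.addCachingToKey('proj/file', { format: 'png', style: 'preview' })
//   => 'proj/file-converted-cache/format-png-style-preview'
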
@@ -9,73 +9,83 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const fs = require("fs");
const uuid = require('node-uuid');
const path = require("path");
const _ = require("underscore");
const logger = require("logger-sharelatex");
const metrics = require("metrics-sharelatex");
const Settings = require("settings-sharelatex");
const Errors = require("./Errors");
const fs = require('fs')
const uuid = require('node-uuid')
const path = require('path')
const _ = require('underscore')
const logger = require('logger-sharelatex')
const metrics = require('metrics-sharelatex')
const Settings = require('settings-sharelatex')
const Errors = require('./Errors')

module.exports = {
  writeStream(stream, key, callback) {
    const timer = new metrics.Timer('writingFile')
    callback = _.once(callback)
    const fsPath = this._getPath(key)
    logger.log({ fsPath }, 'writing file locally')
    const writeStream = fs.createWriteStream(fsPath)
    writeStream.on('finish', function() {
      timer.done()
      logger.log({ fsPath }, 'finished writing file locally')
      return callback(null, fsPath)
    })
    writeStream.on('error', function(err) {
      logger.err(
        { err, fsPath },
        'problem writing file locally, with write stream'
      )
      return callback(err)
    })
    stream.on('error', function(err) {
      logger.log(
        { err, fsPath },
        'problem writing file locally, with read stream'
      )
      return callback(err)
    })
    return stream.pipe(writeStream)
  },

  writeStream(stream, key, callback){
    const timer = new metrics.Timer("writingFile");
    callback = _.once(callback);
    const fsPath = this._getPath(key);
    logger.log({fsPath}, "writing file locally");
    const writeStream = fs.createWriteStream(fsPath);
    writeStream.on("finish", function() {
      timer.done();
      logger.log({fsPath}, "finished writing file locally");
      return callback(null, fsPath);
    });
    writeStream.on("error", function(err){
      logger.err({err, fsPath}, "problem writing file locally, with write stream");
      return callback(err);
    });
    stream.on("error", function(err){
      logger.log({err, fsPath}, "problem writing file locally, with read stream");
      return callback(err);
    });
    return stream.pipe(writeStream);
  },

  getStream(fsPath, _callback) {
    if (_callback == null) {
      _callback = function(err, res) {}
    }
    const callback = _.once(_callback)
    const timer = new metrics.Timer('readingFile')
    logger.log({ fsPath }, 'reading file locally')
    const readStream = fs.createReadStream(fsPath)
    readStream.on('end', function() {
      timer.done()
      return logger.log({ fsPath }, 'finished reading file locally')
    })
    readStream.on('error', function(err) {
      logger.err(
        { err, fsPath },
        'problem reading file locally, with read stream'
      )
      if (err.code === 'ENOENT') {
        return callback(new Errors.NotFoundError(err.message), null)
      } else {
        return callback(err)
      }
    })
    return callback(null, readStream)
  },

  getStream(fsPath, _callback) {
    if (_callback == null) { _callback = function(err, res){}; }
    const callback = _.once(_callback);
    const timer = new metrics.Timer("readingFile");
    logger.log({fsPath}, "reading file locally");
    const readStream = fs.createReadStream(fsPath);
    readStream.on("end", function() {
      timer.done();
      return logger.log({fsPath}, "finished reading file locally");
    });
    readStream.on("error", function(err){
      logger.err({err, fsPath}, "problem reading file locally, with read stream");
      if (err.code === 'ENOENT') {
        return callback(new Errors.NotFoundError(err.message), null);
      } else {
        return callback(err);
      }
    });
    return callback(null, readStream);
  },

  deleteFile(fsPath, callback) {
    if (fsPath == null || fsPath === '') {
      return callback()
    }
    logger.log({ fsPath }, 'removing local temp file')
    return fs.unlink(fsPath, callback)
  },

  deleteFile(fsPath, callback){
    if ((fsPath == null) || (fsPath === "")) {
      return callback();
    }
    logger.log({fsPath}, "removing local temp file");
    return fs.unlink(fsPath, callback);
  },

  _getPath(key){
    if ((key == null)) {
      key = uuid.v1();
    }
    key = key.replace(/\//g,"-");
    return path.join(Settings.path.uploadFolder, key);
  }
};

  _getPath(key) {
    if (key == null) {
      key = uuid.v1()
    }
    key = key.replace(/\//g, '-')
    return path.join(Settings.path.uploadFolder, key)
  }
}

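Because _getPath replaces every '/' in the key with '-', S3-style keys collapse to flat filenames under Settings.path.uploadFolder, and a null key gets a fresh v1 UUID. A hedged usage sketch (the require path, upload folder and key are illustrative):

const fs = require('fs')
const LocalFileWriter = require('./app/js/LocalFileWriter') // path assumed

// With Settings.path.uploadFolder = '/tmp/uploads' (assumed), the key
// 'project123/file456' is written to /tmp/uploads/project123-file456.
const source = fs.createReadStream('/tmp/incoming.bin')
LocalFileWriter.writeStream(source, 'project123/file456', function(err, fsPath) {
  if (err != null) {
    return console.error('local write failed', err)
  }
  console.log('written to', fsPath)
})
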
@@ -7,24 +7,46 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const settings = require("settings-sharelatex");
const logger = require("logger-sharelatex");
const settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')

// assume s3 if none specified
__guard__(settings != null ? settings.filestore : undefined, x => x.backend || (settings.filestore.backend = "s3"));
__guard__(
  settings != null ? settings.filestore : undefined,
  x => x.backend || (settings.filestore.backend = 's3')
)

logger.log({backend:__guard__(settings != null ? settings.filestore : undefined, x1 => x1.backend)}, "Loading backend");
module.exports = (() => { switch (__guard__(settings != null ? settings.filestore : undefined, x2 => x2.backend)) {
  case "aws-sdk":
    return require("./AWSSDKPersistorManager");
  case "s3":
    return require("./S3PersistorManager");
  case "fs":
    return require("./FSPersistorManager");
  default:
    throw new Error( `Unknown filestore backend: ${settings.filestore.backend}` );
} })();
logger.log(
  {
    backend: __guard__(
      settings != null ? settings.filestore : undefined,
      x1 => x1.backend
    )
  },
  'Loading backend'
)
module.exports = (() => {
  switch (
    __guard__(
      settings != null ? settings.filestore : undefined,
      x2 => x2.backend
    )
  ) {
    case 'aws-sdk':
      return require('./AWSSDKPersistorManager')
    case 's3':
      return require('./S3PersistorManager')
    case 'fs':
      return require('./FSPersistorManager')
    default:
      throw new Error(
        `Unknown filestore backend: ${settings.filestore.backend}`
      )
  }
})()

function __guard__(value, transform) {
  return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}

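Backend selection here is driven entirely by settings.filestore.backend, defaulting to 's3' when unset. A sketch of the relevant settings shape, inferred from the fields this diff reads (the concrete values are assumptions):

// Illustrative settings fragment -- values are examples only.
module.exports = {
  filestore: {
    backend: 's3', // or 'aws-sdk' / 'fs'; omitted => treated as 's3'
    stores: {
      user_files: 'example-user-files-bucket'
    },
    s3: {
      key: process.env.AWS_ACCESS_KEY_ID,
      secret: process.env.AWS_SECRET_ACCESS_KEY
    }
  }
}
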
@@ -19,282 +19,358 @@
// to use aws-sdk throughout, see the comments in AWSSDKPersistorManager for
// details. The knox library is unmaintained and has bugs.

const http = require('http');
http.globalAgent.maxSockets = 300;
const https = require('https');
https.globalAgent.maxSockets = 300;
const settings = require("settings-sharelatex");
const request = require("request");
const logger = require("logger-sharelatex");
const metrics = require("metrics-sharelatex");
const fs = require("fs");
const knox = require("knox");
const path = require("path");
const LocalFileWriter = require("./LocalFileWriter");
const Errors = require("./Errors");
const _ = require("underscore");
const awsS3 = require("aws-sdk/clients/s3");
const URL = require('url');
const http = require('http')
http.globalAgent.maxSockets = 300
const https = require('https')
https.globalAgent.maxSockets = 300
const settings = require('settings-sharelatex')
const request = require('request')
const logger = require('logger-sharelatex')
const metrics = require('metrics-sharelatex')
const fs = require('fs')
const knox = require('knox')
const path = require('path')
const LocalFileWriter = require('./LocalFileWriter')
const Errors = require('./Errors')
const _ = require('underscore')
const awsS3 = require('aws-sdk/clients/s3')
const URL = require('url')

const thirtySeconds = 30 * 1000;
const thirtySeconds = 30 * 1000

const buildDefaultOptions = function(bucketName, method, key){
  let endpoint;
  if (settings.filestore.s3.endpoint) {
    endpoint = `${settings.filestore.s3.endpoint}/${bucketName}`;
  } else {
    endpoint = `https://${bucketName}.s3.amazonaws.com`;
  }
  return {
    aws: {
      key: settings.filestore.s3.key,
      secret: settings.filestore.s3.secret,
      bucket: bucketName
    },
    method,
    timeout: thirtySeconds,
    uri:`${endpoint}/${key}`
  };
};
const buildDefaultOptions = function(bucketName, method, key) {
  let endpoint
  if (settings.filestore.s3.endpoint) {
    endpoint = `${settings.filestore.s3.endpoint}/${bucketName}`
  } else {
    endpoint = `https://${bucketName}.s3.amazonaws.com`
  }
  return {
    aws: {
      key: settings.filestore.s3.key,
      secret: settings.filestore.s3.secret,
      bucket: bucketName
    },
    method,
    timeout: thirtySeconds,
    uri: `${endpoint}/${key}`
  }
}

const getS3Options = function(credentials) {
  const options = {
    credentials: {
      accessKeyId: credentials.auth_key,
      secretAccessKey: credentials.auth_secret
    }
  };
  const options = {
    credentials: {
      accessKeyId: credentials.auth_key,
      secretAccessKey: credentials.auth_secret
    }
  }

  if (settings.filestore.s3.endpoint) {
    const endpoint = URL.parse(settings.filestore.s3.endpoint);
    options.endpoint = settings.filestore.s3.endpoint;
    options.sslEnabled = endpoint.protocol === 'https';
  }
  if (settings.filestore.s3.endpoint) {
    const endpoint = URL.parse(settings.filestore.s3.endpoint)
    options.endpoint = settings.filestore.s3.endpoint
    options.sslEnabled = endpoint.protocol === 'https'
  }

  return options;
};
  return options
}

const defaultS3Client = new awsS3(getS3Options({
  auth_key: settings.filestore.s3.key,
  auth_secret: settings.filestore.s3.secret
}));
const defaultS3Client = new awsS3(
  getS3Options({
    auth_key: settings.filestore.s3.key,
    auth_secret: settings.filestore.s3.secret
  })
)

const getS3Client = function(credentials) {
  if (credentials != null) {
    return new awsS3(getS3Options(credentials));
  } else {
    return defaultS3Client;
  }
};
  if (credentials != null) {
    return new awsS3(getS3Options(credentials))
  } else {
    return defaultS3Client
  }
}

const getKnoxClient = bucketName => {
  const options = {
    key: settings.filestore.s3.key,
    secret: settings.filestore.s3.secret,
    bucket: bucketName
  };
  if (settings.filestore.s3.endpoint) {
    const endpoint = URL.parse(settings.filestore.s3.endpoint);
    options.endpoint = endpoint.hostname;
    options.port = endpoint.port;
  }
  return knox.createClient(options);
};
  const options = {
    key: settings.filestore.s3.key,
    secret: settings.filestore.s3.secret,
    bucket: bucketName
  }
  if (settings.filestore.s3.endpoint) {
    const endpoint = URL.parse(settings.filestore.s3.endpoint)
    options.endpoint = endpoint.hostname
    options.port = endpoint.port
  }
  return knox.createClient(options)
}

module.exports = {
  sendFile(bucketName, key, fsPath, callback) {
    const s3Client = getKnoxClient(bucketName)
    let uploaded = 0
    const putEventEmiter = s3Client.putFile(fsPath, key, function(err, res) {
      metrics.count('s3.egress', uploaded)
      if (err != null) {
        logger.err(
          { err, bucketName, key, fsPath },
          'something went wrong uploading file to s3'
        )
        return callback(err)
      }
      if (res == null) {
        logger.err(
          { err, res, bucketName, key, fsPath },
          'no response from s3 put file'
        )
        return callback('no response from put file')
      }
      if (res.statusCode !== 200) {
        logger.err(
          { bucketName, key, fsPath },
          'non 200 response from s3 putting file'
        )
        return callback('non 200 response from s3 on put file')
      }
      logger.log({ res, bucketName, key, fsPath }, 'file uploaded to s3')
      return callback(err)
    })
    putEventEmiter.on('error', function(err) {
      logger.err(
        { err, bucketName, key, fsPath },
        'error emmited on put of file'
      )
      return callback(err)
    })
    return putEventEmiter.on(
      'progress',
      progress => (uploaded = progress.written)
    )
  },

  sendFile(bucketName, key, fsPath, callback){
    const s3Client = getKnoxClient(bucketName);
    let uploaded = 0;
    const putEventEmiter = s3Client.putFile(fsPath, key, function(err, res){
      metrics.count('s3.egress', uploaded);
      if (err != null) {
        logger.err({err, bucketName, key, fsPath},"something went wrong uploading file to s3");
        return callback(err);
      }
      if ((res == null)) {
        logger.err({err, res, bucketName, key, fsPath}, "no response from s3 put file");
        return callback("no response from put file");
      }
      if (res.statusCode !== 200) {
        logger.err({bucketName, key, fsPath}, "non 200 response from s3 putting file");
        return callback("non 200 response from s3 on put file");
      }
      logger.log({res, bucketName, key, fsPath},"file uploaded to s3");
      return callback(err);
    });
    putEventEmiter.on("error", function(err){
      logger.err({err, bucketName, key, fsPath}, "error emmited on put of file");
      return callback(err);
    });
    return putEventEmiter.on("progress", progress => uploaded = progress.written);
  },
  sendStream(bucketName, key, readStream, callback) {
    logger.log({ bucketName, key }, 'sending file to s3')
    readStream.on('error', err =>
      logger.err({ bucketName, key }, 'error on stream to send to s3')
    )
    return LocalFileWriter.writeStream(readStream, null, (err, fsPath) => {
      if (err != null) {
        logger.err(
          { bucketName, key, fsPath, err },
          'something went wrong writing stream to disk'
        )
        return callback(err)
      }
      return this.sendFile(bucketName, key, fsPath, (
        err // delete the temporary file created above and return the original error
      ) => LocalFileWriter.deleteFile(fsPath, () => callback(err)))
    })
  },

  sendStream(bucketName, key, readStream, callback){
    logger.log({bucketName, key}, "sending file to s3");
    readStream.on("error", err => logger.err({bucketName, key}, "error on stream to send to s3"));
    return LocalFileWriter.writeStream(readStream, null, (err, fsPath)=> {
      if (err != null) {
        logger.err({bucketName, key, fsPath, err}, "something went wrong writing stream to disk");
        return callback(err);
      }
      return this.sendFile(bucketName, key, fsPath, err => // delete the temporary file created above and return the original error
        LocalFileWriter.deleteFile(fsPath, () => callback(err)));
    });
  },
  // opts may be {start: Number, end: Number}
  getFileStream(bucketName, key, opts, callback) {
    if (callback == null) {
      callback = function(err, res) {}
    }
    opts = opts || {}
    callback = _.once(callback)
    logger.log({ bucketName, key }, 'getting file from s3')

  // opts may be {start: Number, end: Number}
  getFileStream(bucketName, key, opts, callback){
    if (callback == null) { callback = function(err, res){}; }
    opts = opts || {};
    callback = _.once(callback);
    logger.log({bucketName, key}, "getting file from s3");
    const s3 = getS3Client(opts.credentials)
    const s3Params = {
      Bucket: bucketName,
      Key: key
    }
    if (opts.start != null && opts.end != null) {
      s3Params.Range = `bytes=${opts.start}-${opts.end}`
    }
    const s3Request = s3.getObject(s3Params)

    const s3 = getS3Client(opts.credentials);
    const s3Params = {
      Bucket: bucketName,
      Key: key
    };
    if ((opts.start != null) && (opts.end != null)) {
      s3Params.Range = `bytes=${opts.start}-${opts.end}`;
    }
    const s3Request = s3.getObject(s3Params);
    s3Request.on(
      'httpHeaders',
      (statusCode, headers, response, statusMessage) => {
        if ([403, 404].includes(statusCode)) {
          // S3 returns a 403 instead of a 404 when the user doesn't have
          // permission to list the bucket contents.
          logger.log({ bucketName, key }, 'file not found in s3')
          return callback(
            new Errors.NotFoundError(
              `File not found in S3: ${bucketName}:${key}`
            ),
            null
          )
        }
        if (![200, 206].includes(statusCode)) {
          logger.log(
            { bucketName, key },
            `error getting file from s3: ${statusCode}`
          )
          return callback(
            new Error(
              `Got non-200 response from S3: ${statusCode} ${statusMessage}`
            ),
            null
          )
        }
        const stream = response.httpResponse.createUnbufferedStream()
        stream.on('data', data => metrics.count('s3.ingress', data.byteLength))

    s3Request.on('httpHeaders', (statusCode, headers, response, statusMessage) => {
      if ([403, 404].includes(statusCode)) {
        // S3 returns a 403 instead of a 404 when the user doesn't have
        // permission to list the bucket contents.
        logger.log({ bucketName, key }, "file not found in s3");
        return callback(new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`), null);
      }
      if (![200, 206].includes(statusCode)) {
        logger.log({bucketName, key }, `error getting file from s3: ${statusCode}`);
        return callback(new Error(`Got non-200 response from S3: ${statusCode} ${statusMessage}`), null);
      }
      const stream = response.httpResponse.createUnbufferedStream();
      stream.on('data', data => metrics.count('s3.ingress', data.byteLength));
        return callback(null, stream)
      }
    )

      return callback(null, stream);
    });
    s3Request.on('error', err => {
      logger.err({ err, bucketName, key }, 'error getting file stream from s3')
      return callback(err)
    })

    s3Request.on('error', err => {
      logger.err({ err, bucketName, key }, "error getting file stream from s3");
      return callback(err);
    });
    return s3Request.send()
  },

    return s3Request.send();
  },
  getFileSize(bucketName, key, callback) {
    logger.log({ bucketName, key }, 'getting file size from S3')
    const s3 = getS3Client()
    return s3.headObject({ Bucket: bucketName, Key: key }, function(err, data) {
      if (err != null) {
        if ([403, 404].includes(err.statusCode)) {
          // S3 returns a 403 instead of a 404 when the user doesn't have
          // permission to list the bucket contents.
          logger.log(
            {
              bucketName,
              key
            },
            'file not found in s3'
          )
          callback(
            new Errors.NotFoundError(
              `File not found in S3: ${bucketName}:${key}`
            )
          )
        } else {
          logger.err(
            {
              bucketName,
              key,
              err
            },
            'error performing S3 HeadObject'
          )
          callback(err)
        }
        return
      }
      return callback(null, data.ContentLength)
    })
  },

  getFileSize(bucketName, key, callback) {
    logger.log({ bucketName, key }, "getting file size from S3");
    const s3 = getS3Client();
    return s3.headObject({ Bucket: bucketName, Key: key }, function(err, data) {
      if (err != null) {
        if ([403, 404].includes(err.statusCode)) {
          // S3 returns a 403 instead of a 404 when the user doesn't have
          // permission to list the bucket contents.
          logger.log({
            bucketName,
            key
          }, "file not found in s3");
          callback(
            new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`)
          );
        } else {
          logger.err({
            bucketName,
            key,
            err
          }, "error performing S3 HeadObject");
          callback(err);
        }
        return;
      }
      return callback(null, data.ContentLength);
    });
  },
  copyFile(bucketName, sourceKey, destKey, callback) {
    logger.log({ bucketName, sourceKey, destKey }, 'copying file in s3')
    const source = bucketName + '/' + sourceKey
    // use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114)
    const s3 = getS3Client()
    return s3.copyObject(
      { Bucket: bucketName, Key: destKey, CopySource: source },
      function(err) {
        if (err != null) {
          if (err.code === 'NoSuchKey') {
            logger.err(
              { bucketName, sourceKey },
              'original file not found in s3 when copying'
            )
            return callback(
              new Errors.NotFoundError(
                'original file not found in S3 when copying'
              )
            )
          } else {
            logger.err(
              { err, bucketName, sourceKey, destKey },
              'something went wrong copying file in aws'
            )
            return callback(err)
          }
        } else {
          return callback()
        }
      }
    )
  },

  copyFile(bucketName, sourceKey, destKey, callback){
    logger.log({bucketName, sourceKey, destKey}, "copying file in s3");
    const source = bucketName + '/' + sourceKey;
    // use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114)
    const s3 = getS3Client();
    return s3.copyObject({Bucket: bucketName, Key: destKey, CopySource: source}, function(err) {
      if (err != null) {
        if (err.code === 'NoSuchKey') {
          logger.err({bucketName, sourceKey}, "original file not found in s3 when copying");
          return callback(new Errors.NotFoundError("original file not found in S3 when copying"));
        } else {
          logger.err({err, bucketName, sourceKey, destKey}, "something went wrong copying file in aws");
          return callback(err);
        }
      } else {
        return callback();
      }
    });
  },
  deleteFile(bucketName, key, callback) {
    logger.log({ bucketName, key }, 'delete file in s3')
    const options = buildDefaultOptions(bucketName, 'delete', key)
    return request(options, function(err, res) {
      if (err != null) {
        logger.err(
          { err, res, bucketName, key },
          'something went wrong deleting file in aws'
        )
      }
      return callback(err)
    })
  },

  deleteFile(bucketName, key, callback){
    logger.log({bucketName, key}, "delete file in s3");
    const options = buildDefaultOptions(bucketName, "delete", key);
    return request(options, function(err, res){
      if (err != null) {
        logger.err({err, res, bucketName, key}, "something went wrong deleting file in aws");
      }
      return callback(err);
    });
  },
  deleteDirectory(bucketName, key, _callback) {
    // deleteMultiple can call the callback multiple times so protect against this.
    const callback = function(...args) {
      _callback(...Array.from(args || []))
      return (_callback = function() {})
    }

  deleteDirectory(bucketName, key, _callback){
    // deleteMultiple can call the callback multiple times so protect against this.
    const callback = function(...args) {
      _callback(...Array.from(args || []));
      return _callback = function() {};
    };
    logger.log({ key, bucketName }, 'deleting directory')
    const s3Client = getKnoxClient(bucketName)
    return s3Client.list({ prefix: key }, function(err, data) {
      if (err != null) {
        logger.err(
          { err, bucketName, key },
          'something went wrong listing prefix in aws'
        )
        return callback(err)
      }
      const keys = _.map(data.Contents, entry => entry.Key)
      return s3Client.deleteMultiple(keys, callback)
    })
  },

    logger.log({key, bucketName}, "deleting directory");
    const s3Client = getKnoxClient(bucketName);
    return s3Client.list({prefix:key}, function(err, data){
      if (err != null) {
        logger.err({err, bucketName, key}, "something went wrong listing prefix in aws");
        return callback(err);
      }
      const keys = _.map(data.Contents, entry => entry.Key);
      return s3Client.deleteMultiple(keys, callback);
    });
  },
  checkIfFileExists(bucketName, key, callback) {
    logger.log({ bucketName, key }, 'checking if file exists in s3')
    const options = buildDefaultOptions(bucketName, 'head', key)
    return request(options, function(err, res) {
      if (err != null) {
        logger.err(
          { err, res, bucketName, key },
          'something went wrong checking file in aws'
        )
        return callback(err)
      }
      if (res == null) {
        logger.err(
          { err, res, bucketName, key },
          'no response object returned when checking if file exists'
        )
        err = new Error(`no response from s3 ${bucketName} ${key}`)
        return callback(err)
      }
      const exists = res.statusCode === 200
      logger.log({ bucketName, key, exists }, 'checked if file exsists in s3')
      return callback(err, exists)
    })
  },

  checkIfFileExists(bucketName, key, callback){
    logger.log({bucketName, key}, "checking if file exists in s3");
    const options = buildDefaultOptions(bucketName, "head", key);
    return request(options, function(err, res){
      if (err != null) {
        logger.err({err, res, bucketName, key}, "something went wrong checking file in aws");
        return callback(err);
      }
      if ((res == null)) {
        logger.err({err, res, bucketName, key}, "no response object returned when checking if file exists");
        err = new Error(`no response from s3 ${bucketName} ${key}`);
        return callback(err);
      }
      const exists = res.statusCode === 200;
      logger.log({bucketName, key, exists}, "checked if file exsists in s3");
      return callback(err, exists);
    });
  },

  directorySize(bucketName, key, callback){
    logger.log({bucketName, key}, "get project size in s3");
    const s3Client = getKnoxClient(bucketName);
    return s3Client.list({prefix:key}, function(err, data){
      if (err != null) {
        logger.err({err, bucketName, key}, "something went wrong listing prefix in aws");
        return callback(err);
      }
      let totalSize = 0;
      _.each(data.Contents, entry => totalSize += entry.Size);
      logger.log({totalSize}, "total size");
      return callback(null, totalSize);
    });
  }
};
  directorySize(bucketName, key, callback) {
    logger.log({ bucketName, key }, 'get project size in s3')
    const s3Client = getKnoxClient(bucketName)
    return s3Client.list({ prefix: key }, function(err, data) {
      if (err != null) {
        logger.err(
          { err, bucketName, key },
          'something went wrong listing prefix in aws'
        )
        return callback(err)
      }
      let totalSize = 0
      _.each(data.Contents, entry => (totalSize += entry.Size))
      logger.log({ totalSize }, 'total size')
      return callback(null, totalSize)
    })
  }
}

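getFileStream maps opts.start/opts.end straight onto an HTTP Range header on the S3 GET, and folds S3's permission-masking 403 into a NotFoundError alongside genuine 404s. A hedged usage sketch (the require path, bucket and key are placeholders):

const S3PersistorManager = require('./app/js/S3PersistorManager') // path assumed

// Fetch only the first 1024 bytes: opts becomes Range: bytes=0-1023.
S3PersistorManager.getFileStream(
  'example-bucket',
  'project123/file456',
  { start: 0, end: 1023 },
  function(err, stream) {
    if (err != null) {
      // NotFoundError covers both real 404s and permission-masked 403s.
      return console.error('fetch failed', err)
    }
    stream.pipe(process.stdout)
  }
)
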
@@ -12,10 +12,10 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const _ = require("underscore");
const logger = require("logger-sharelatex");
const child_process = require('child_process');
const Settings = require("settings-sharelatex");
const _ = require('underscore')
const logger = require('logger-sharelatex')
const child_process = require('child_process')
const Settings = require('settings-sharelatex')

// execute a command in the same way as 'exec' but with a timeout that
// kills all child processes

@@ -24,44 +24,50 @@ const Settings = require("settings-sharelatex");
// group, then we can kill everything in that process group.

module.exports = function(command, options, callback) {
  if (callback == null) { callback = function(err, stdout, stderr) {}; }
  if (!Settings.enableConversions) {
    const error = new Error("Image conversions are disabled");
    return callback(error);
  }
  if (callback == null) {
    callback = function(err, stdout, stderr) {}
  }
  if (!Settings.enableConversions) {
    const error = new Error('Image conversions are disabled')
    return callback(error)
  }

  // options are {timeout: number-of-milliseconds, killSignal: signal-name}
  const [cmd, ...args] = Array.from(command);
  // options are {timeout: number-of-milliseconds, killSignal: signal-name}
  const [cmd, ...args] = Array.from(command)

  const child = child_process.spawn(cmd, args, {detached:true});
  let stdout = "";
  let stderr = "";
  const child = child_process.spawn(cmd, args, { detached: true })
  let stdout = ''
  let stderr = ''

  const cleanup = _.once(function(err) {
    if (killTimer != null) { clearTimeout(killTimer); }
    return callback(err, stdout, stderr);
  });
  const cleanup = _.once(function(err) {
    if (killTimer != null) {
      clearTimeout(killTimer)
    }
    return callback(err, stdout, stderr)
  })

  if (options.timeout != null) {
    var killTimer = setTimeout(function() {
      try {
        // use negative process id to kill process group
        return process.kill(-child.pid, options.killSignal || "SIGTERM");
      } catch (error) {
        return logger.log({process: child.pid, kill_error: error}, "error killing process");
      }
    }
    , options.timeout);
  }
  if (options.timeout != null) {
    var killTimer = setTimeout(function() {
      try {
        // use negative process id to kill process group
        return process.kill(-child.pid, options.killSignal || 'SIGTERM')
      } catch (error) {
        return logger.log(
          { process: child.pid, kill_error: error },
          'error killing process'
        )
      }
    }, options.timeout)
  }

  child.on('close', function(code, signal) {
    const err = code ? new Error(`exit status ${code}`) : signal;
    return cleanup(err);
  });
  child.on('close', function(code, signal) {
    const err = code ? new Error(`exit status ${code}`) : signal
    return cleanup(err)
  })

  child.on('error', err => cleanup(err));
  child.on('error', err => cleanup(err))

  child.stdout.on('data', chunk => stdout += chunk);
  child.stdout.on('data', chunk => (stdout += chunk))

  return child.stderr.on('data', chunk => stderr += chunk);
};
  return child.stderr.on('data', chunk => (stderr += chunk))
}

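This module spawns the command detached so that, on timeout, killing the negative pid takes down the whole process group rather than just the direct child. A hedged usage sketch (the require path and command are illustrative; Settings.enableConversions must be true or the call fails fast):

const safeExec = require('./app/js/safe_exec') // path assumed

// command is an array destructured as [cmd, ...args].
safeExec(
  ['optipng', '/tmp/uploads/figure.png'],
  { timeout: 10 * 1000, killSignal: 'SIGKILL' },
  function(err, stdout, stderr) {
    if (err != null) {
      return console.error('command failed or timed out', err, stderr)
    }
    console.log(stdout)
  }
)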