decaffeinate: Convert AWSSDKPersistorManager.coffee and 13 other files to JS

decaffeinate 2019-12-16 10:24:35 +00:00 committed by Simon Detheridge
parent 8d2c87420e
commit 6bd8452f19
14 changed files with 1260 additions and 936 deletions

@@ -1,106 +1,145 @@
# This module is not used in production, which currently uses
# S3PersistorManager. The intention is to migrate S3PersistorManager to use the
# latest aws-sdk and delete this module so that PersistorManager would load the
# same backend for both the 's3' and 'aws-sdk' options.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
// This module is not used in production, which currently uses
// S3PersistorManager. The intention is to migrate S3PersistorManager to use the
// latest aws-sdk and delete this module so that PersistorManager would load the
// same backend for both the 's3' and 'aws-sdk' options.
logger = require "logger-sharelatex"
aws = require "aws-sdk"
_ = require "underscore"
fs = require "fs"
Errors = require "./Errors"
const logger = require("logger-sharelatex");
const aws = require("aws-sdk");
const _ = require("underscore");
const fs = require("fs");
const Errors = require("./Errors");
s3 = new aws.S3()
const s3 = new aws.S3();
module.exports =
sendFile: (bucketName, key, fsPath, callback)->
logger.log bucketName:bucketName, key:key, "send file data to s3"
stream = fs.createReadStream fsPath
s3.upload Bucket: bucketName, Key: key, Body: stream, (err, data) ->
if err?
logger.err err: err, Bucket: bucketName, Key: key, "error sending file data to s3"
callback err
module.exports = {
sendFile(bucketName, key, fsPath, callback){
logger.log({bucketName, key}, "send file data to s3");
const stream = fs.createReadStream(fsPath);
return s3.upload({Bucket: bucketName, Key: key, Body: stream}, function(err, data) {
if (err != null) {
logger.err({err, Bucket: bucketName, Key: key}, "error sending file data to s3");
}
return callback(err);
});
},
sendStream: (bucketName, key, stream, callback)->
logger.log bucketName:bucketName, key:key, "send file stream to s3"
s3.upload Bucket: bucketName, Key: key, Body: stream, (err, data) ->
if err?
logger.err err: err, Bucket: bucketName, Key: key, "error sending file stream to s3"
callback err
sendStream(bucketName, key, stream, callback){
logger.log({bucketName, key}, "send file stream to s3");
return s3.upload({Bucket: bucketName, Key: key, Body: stream}, function(err, data) {
if (err != null) {
logger.err({err, Bucket: bucketName, Key: key}, "error sending file stream to s3");
}
return callback(err);
});
},
getFileStream: (bucketName, key, opts, callback = (err, res)->)->
logger.log bucketName:bucketName, key:key, "get file stream from s3"
callback = _.once callback
params =
Bucket:bucketName
getFileStream(bucketName, key, opts, callback){
if (callback == null) { callback = function(err, res){}; }
logger.log({bucketName, key}, "get file stream from s3");
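// the stream below can emit both 'readable' and 'error'; _.once guards the callback so it fires only once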
callback = _.once(callback);
const params = {
Bucket:bucketName,
Key: key
if opts.start? and opts.end?
params['Range'] = "bytes=#{opts.start}-#{opts.end}"
request = s3.getObject params
stream = request.createReadStream()
stream.on 'readable', () ->
callback null, stream
stream.on 'error', (err) ->
logger.err err:err, bucketName:bucketName, key:key, "error getting file stream from s3"
if err.code == 'NoSuchKey'
return callback new Errors.NotFoundError "File not found in S3: #{bucketName}:#{key}"
callback err
};
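// when both bounds are supplied, ask S3 for just that byte range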
if ((opts.start != null) && (opts.end != null)) {
params['Range'] = `bytes=${opts.start}-${opts.end}`;
}
const request = s3.getObject(params);
const stream = request.createReadStream();
stream.on('readable', () => callback(null, stream));
return stream.on('error', function(err) {
logger.err({err, bucketName, key}, "error getting file stream from s3");
if (err.code === 'NoSuchKey') {
return callback(new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`));
}
return callback(err);
});
},
copyFile: (bucketName, sourceKey, destKey, callback)->
logger.log bucketName:bucketName, sourceKey:sourceKey, destKey: destKey, "copying file in s3"
source = bucketName + '/' + sourceKey
s3.copyObject {Bucket: bucketName, Key: destKey, CopySource: source}, (err) ->
if err?
logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in s3"
callback err
copyFile(bucketName, sourceKey, destKey, callback){
logger.log({bucketName, sourceKey, destKey}, "copying file in s3");
const source = bucketName + '/' + sourceKey;
return s3.copyObject({Bucket: bucketName, Key: destKey, CopySource: source}, function(err) {
if (err != null) {
logger.err({err, bucketName, sourceKey, destKey}, "something went wrong copying file in s3");
}
return callback(err);
});
},
deleteFile: (bucketName, key, callback)->
logger.log bucketName:bucketName, key:key, "delete file in s3"
s3.deleteObject {Bucket: bucketName, Key: key}, (err) ->
if err?
logger.err err:err, bucketName:bucketName, key:key, "something went wrong deleting file in s3"
callback err
deleteFile(bucketName, key, callback){
logger.log({bucketName, key}, "delete file in s3");
return s3.deleteObject({Bucket: bucketName, Key: key}, function(err) {
if (err != null) {
logger.err({err, bucketName, key}, "something went wrong deleting file in s3");
}
return callback(err);
});
},
deleteDirectory: (bucketName, key, callback)->
logger.log bucketName:bucketName, key:key, "delete directory in s3"
s3.listObjects {Bucket: bucketName, Prefix: key}, (err, data) ->
if err?
logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in s3"
return callback err
if data.Contents.length == 0
logger.log bucketName:bucketName, key:key, "the directory is empty"
return callback()
keys = _.map data.Contents, (entry)->
deleteDirectory(bucketName, key, callback){
logger.log({bucketName, key}, "delete directory in s3");
return s3.listObjects({Bucket: bucketName, Prefix: key}, function(err, data) {
if (err != null) {
logger.err({err, bucketName, key}, "something went wrong listing prefix in s3");
return callback(err);
}
if (data.Contents.length === 0) {
logger.log({bucketName, key}, "the directory is empty");
return callback();
}
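// collect the object keys under this prefix so they can be removed in a single batch delete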
const keys = _.map(data.Contents, entry => ({
Key: entry.Key
s3.deleteObjects
Bucket: bucketName
Delete:
Objects: keys
}));
return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: keys,
Quiet: true
, (err) ->
if err?
logger.err err:err, bucketName:bucketName, key:keys, "something went wrong deleting directory in s3"
callback err
}
}
, function(err) {
if (err != null) {
logger.err({err, bucketName, key:keys}, "something went wrong deleting directory in s3");
}
return callback(err);
});
});
},
checkIfFileExists:(bucketName, key, callback)->
logger.log bucketName:bucketName, key:key, "check file existence in s3"
s3.headObject {Bucket: bucketName, Key: key}, (err, data) ->
if err?
return (callback null, false) if err.code == 'NotFound'
logger.err err:err, bucketName:bucketName, key:key, "something went wrong checking head in s3"
return callback err
callback null, data.ETag?
checkIfFileExists(bucketName, key, callback){
logger.log({bucketName, key}, "check file existence in s3");
return s3.headObject({Bucket: bucketName, Key: key}, function(err, data) {
if (err != null) {
if (err.code === 'NotFound') { return (callback(null, false)); }
logger.err({err, bucketName, key}, "something went wrong checking head in s3");
return callback(err);
}
return callback(null, (data.ETag != null));
});
},
directorySize:(bucketName, key, callback)->
logger.log bucketName:bucketName, key:key, "get project size in s3"
s3.listObjects {Bucket: bucketName, Prefix: key}, (err, data) ->
if err?
logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in s3"
return callback err
if data.Contents.length == 0
logger.log bucketName:bucketName, key:key, "the directory is empty"
return callback()
totalSize = 0
_.each data.Contents, (entry)->
totalSize += entry.Size
callback null, totalSize
directorySize(bucketName, key, callback){
logger.log({bucketName, key}, "get project size in s3");
return s3.listObjects({Bucket: bucketName, Prefix: key}, function(err, data) {
if (err != null) {
logger.err({err, bucketName, key}, "something went wrong listing prefix in s3");
return callback(err);
}
if (data.Contents.length === 0) {
logger.log({bucketName, key}, "the directory is empty");
return callback();
}
let totalSize = 0;
_.each(data.Contents, entry => totalSize += entry.Size);
return callback(null, totalSize);
});
}
};

@@ -1,29 +1,41 @@
settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
FileHandler = require("./FileHandler")
metrics = require("metrics-sharelatex")
Errors = require('./Errors')
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let BucketController;
const settings = require("settings-sharelatex");
const logger = require("logger-sharelatex");
const FileHandler = require("./FileHandler");
const metrics = require("metrics-sharelatex");
const Errors = require('./Errors');
module.exports = BucketController =
module.exports = (BucketController = {
getFile: (req, res)->
{bucket} = req.params
key = req.params[0]
credentials = settings.filestore.s3BucketCreds?[bucket]
options = {
key: key,
bucket: bucket,
credentials: credentials
getFile(req, res){
const {bucket} = req.params;
const key = req.params[0];
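// look up per-bucket S3 credentials, if any are configured for this bucket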
const credentials = settings.filestore.s3BucketCreds != null ? settings.filestore.s3BucketCreds[bucket] : undefined;
const options = {
key,
bucket,
credentials
};
metrics.inc(`${bucket}.getFile`);
logger.log({key, bucket}, "receiving request to get file from bucket");
return FileHandler.getFile(bucket, key, options, function(err, fileStream){
if (err != null) {
logger.err({err, key, bucket}, "problem getting file from bucket");
if (err instanceof Errors.NotFoundError) {
return res.send(404);
} else {
return res.send(500);
}
metrics.inc "#{bucket}.getFile"
logger.log key:key, bucket:bucket, "receiving request to get file from bucket"
FileHandler.getFile bucket, key, options, (err, fileStream)->
if err?
logger.err err:err, key:key, bucket:bucket, "problem getting file from bucket"
if err instanceof Errors.NotFoundError
return res.send 404
else
return res.send 500
else
logger.log key:key, bucket:bucket, "sending bucket file to response"
fileStream.pipe res
} else {
logger.log({key, bucket}, "sending bucket file to response");
return fileStream.pipe(res);
}
});
}
});

@@ -1,9 +1,11 @@
NotFoundError = (message) ->
error = new Error(message)
error.name = "NotFoundError"
error.__proto__ = NotFoundError.prototype
return error
NotFoundError.prototype.__proto__ = Error.prototype
let Errors;
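// although the constructor builds a plain Error, the prototype assignments below keep instanceof NotFoundError working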
var NotFoundError = function(message) {
const error = new Error(message);
error.name = "NotFoundError";
error.__proto__ = NotFoundError.prototype;
return error;
};
NotFoundError.prototype.__proto__ = Error.prototype;
module.exports = Errors =
NotFoundError: NotFoundError
module.exports = (Errors =
{NotFoundError});

@@ -1,128 +1,164 @@
logger = require("logger-sharelatex")
fs = require("fs")
path = require("path")
LocalFileWriter = require("./LocalFileWriter")
Errors = require('./Errors')
rimraf = require("rimraf")
_ = require "underscore"
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const logger = require("logger-sharelatex");
const fs = require("fs");
const path = require("path");
const LocalFileWriter = require("./LocalFileWriter");
const Errors = require('./Errors');
const rimraf = require("rimraf");
const _ = require("underscore");
filterName = (key) ->
return key.replace /\//g, "_"
const filterName = key => key.replace(/\//g, "_");
module.exports =
sendFile: ( location, target, source, callback = (err)->) ->
filteredTarget = filterName target
logger.log location:location, target:filteredTarget, source:source, "sending file"
done = _.once (err) ->
if err?
logger.err err:err, location:location, target:filteredTarget, source:source, "Error on put of file"
callback(err)
# actually copy the file (instead of moving it) to maintain consistent behaviour
# between the different implementations
sourceStream = fs.createReadStream source
sourceStream.on 'error', done
targetStream = fs.createWriteStream "#{location}/#{filteredTarget}"
targetStream.on 'error', done
targetStream.on 'finish', () ->
done()
sourceStream.pipe targetStream
module.exports = {
sendFile( location, target, source, callback) {
if (callback == null) { callback = function(err){}; }
const filteredTarget = filterName(target);
logger.log({location, target:filteredTarget, source}, "sending file");
const done = _.once(function(err) {
if (err != null) {
logger.err({err, location, target:filteredTarget, source}, "Error on put of file");
}
return callback(err);
});
// actually copy the file (instead of moving it) to maintain consistent behaviour
// between the different implementations
const sourceStream = fs.createReadStream(source);
sourceStream.on('error', done);
const targetStream = fs.createWriteStream(`${location}/${filteredTarget}`);
targetStream.on('error', done);
targetStream.on('finish', () => done());
return sourceStream.pipe(targetStream);
},
sendStream: ( location, target, sourceStream, callback = (err)->) ->
logger.log location:location, target:target, "sending file stream"
sourceStream.on "error", (err)->
logger.err location:location, target:target, err:err "error on stream to send"
LocalFileWriter.writeStream sourceStream, null, (err, fsPath)=>
if err?
logger.err location:location, target:target, fsPath:fsPath, err:err, "something went wrong writing stream to disk"
return callback err
@sendFile location, target, fsPath, (err) ->
# delete the temporary file created above and return the original error
LocalFileWriter.deleteFile fsPath, () ->
callback(err)
sendStream( location, target, sourceStream, callback) {
if (callback == null) { callback = function(err){}; }
logger.log({location, target}, "sending file stream");
sourceStream.on("error", err => logger.err({location, target, err:err("error on stream to send")}));
return LocalFileWriter.writeStream(sourceStream, null, (err, fsPath)=> {
if (err != null) {
logger.err({location, target, fsPath, err}, "something went wrong writing stream to disk");
return callback(err);
}
return this.sendFile(location, target, fsPath, err => // delete the temporary file created above and return the original error
LocalFileWriter.deleteFile(fsPath, () => callback(err)));
});
},
# opts may be {start: Number, end: Number}
getFileStream: (location, name, opts, callback = (err, res)->) ->
filteredName = filterName name
logger.log location:location, filteredName:filteredName, "getting file"
fs.open "#{location}/#{filteredName}", 'r', (err, fd) ->
if err?
logger.err err:err, location:location, filteredName:name, "Error reading from file"
if err.code == 'ENOENT'
return callback new Errors.NotFoundError(err.message), null
else
return callback err, null
opts.fd = fd
sourceStream = fs.createReadStream null, opts
return callback null, sourceStream
// opts may be {start: Number, end: Number}
getFileStream(location, name, opts, callback) {
if (callback == null) { callback = function(err, res){}; }
const filteredName = filterName(name);
logger.log({location, filteredName}, "getting file");
return fs.open(`${location}/${filteredName}`, 'r', function(err, fd) {
if (err != null) {
logger.err({err, location, filteredName:name}, "Error reading from file");
if (err.code === 'ENOENT') {
return callback(new Errors.NotFoundError(err.message), null);
} else {
return callback(err, null);
}
}
opts.fd = fd;
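// a null path tells createReadStream to read from the file descriptor in opts.fd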
const sourceStream = fs.createReadStream(null, opts);
return callback(null, sourceStream);
});
},
getFileSize: (location, filename, callback) ->
fullPath = path.join(location, filterName(filename))
fs.stat fullPath, (err, stats) ->
if err?
if err.code == 'ENOENT'
logger.log({location:location, filename:filename}, "file not found")
callback(new Errors.NotFoundError(err.message))
else
logger.err({err:err, location:location, filename:filename}, "failed to stat file")
callback(err)
return
callback(null, stats.size)
getFileSize(location, filename, callback) {
const fullPath = path.join(location, filterName(filename));
return fs.stat(fullPath, function(err, stats) {
if (err != null) {
if (err.code === 'ENOENT') {
logger.log({location, filename}, "file not found");
callback(new Errors.NotFoundError(err.message));
} else {
logger.err({err, location, filename}, "failed to stat file");
callback(err);
}
return;
}
return callback(null, stats.size);
});
},
copyFile: (location, fromName, toName, callback = (err)->)->
filteredFromName=filterName fromName
filteredToName=filterName toName
logger.log location:location, fromName:filteredFromName, toName:filteredToName, "copying file"
sourceStream = fs.createReadStream "#{location}/#{filteredFromName}"
sourceStream.on 'error', (err) ->
logger.err err:err, location:location, key:filteredFromName, "Error reading from file"
callback err
targetStream = fs.createWriteStream "#{location}/#{filteredToName}"
targetStream.on 'error', (err) ->
logger.err err:err, location:location, key:filteredToName, "Error writing to file"
callback err
targetStream.on 'finish', () ->
callback null
sourceStream.pipe targetStream
copyFile(location, fromName, toName, callback){
if (callback == null) { callback = function(err){}; }
const filteredFromName=filterName(fromName);
const filteredToName=filterName(toName);
logger.log({location, fromName:filteredFromName, toName:filteredToName}, "copying file");
const sourceStream = fs.createReadStream(`${location}/${filteredFromName}`);
sourceStream.on('error', function(err) {
logger.err({err, location, key:filteredFromName}, "Error reading from file");
return callback(err);
});
const targetStream = fs.createWriteStream(`${location}/${filteredToName}`);
targetStream.on('error', function(err) {
logger.err({err, location, key:filteredToName}, "Error writing to file");
return callback(err);
});
targetStream.on('finish', () => callback(null));
return sourceStream.pipe(targetStream);
},
deleteFile: (location, name, callback)->
filteredName = filterName name
logger.log location:location, filteredName:filteredName, "delete file"
fs.unlink "#{location}/#{filteredName}", (err) ->
if err?
logger.err err:err, location:location, filteredName:filteredName, "Error on delete."
callback err
else
callback()
deleteFile(location, name, callback){
const filteredName = filterName(name);
logger.log({location, filteredName}, "delete file");
return fs.unlink(`${location}/${filteredName}`, function(err) {
if (err != null) {
logger.err({err, location, filteredName}, "Error on delete.");
return callback(err);
} else {
return callback();
}
});
},
deleteDirectory: (location, name, callback = (err)->)->
filteredName = filterName name.replace(/\/$/,'')
rimraf "#{location}/#{filteredName}", (err) ->
if err?
logger.err err:err, location:location, filteredName:filteredName, "Error on rimraf rmdir."
callback err
else
callback()
deleteDirectory(location, name, callback){
if (callback == null) { callback = function(err){}; }
const filteredName = filterName(name.replace(/\/$/,''));
return rimraf(`${location}/${filteredName}`, function(err) {
if (err != null) {
logger.err({err, location, filteredName}, "Error on rimraf rmdir.");
return callback(err);
} else {
return callback();
}
});
},
checkIfFileExists:(location, name, callback = (err,exists)->)->
filteredName = filterName name
logger.log location:location, filteredName:filteredName, "checking if file exists"
fs.exists "#{location}/#{filteredName}", (exists) ->
logger.log location:location, filteredName:filteredName, exists:exists, "checked if file exists"
callback null, exists
checkIfFileExists(location, name, callback){
if (callback == null) { callback = function(err,exists){}; }
const filteredName = filterName(name);
logger.log({location, filteredName}, "checking if file exists");
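// fs.exists passes only a boolean to its callback, so there is no error to forward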
return fs.exists(`${location}/${filteredName}`, function(exists) {
logger.log({location, filteredName, exists}, "checked if file exists");
return callback(null, exists);
});
},
directorySize:(location, name, callback)->
filteredName = filterName name.replace(/\/$/,'')
logger.log location:location, filteredName:filteredName, "get project size in file system"
fs.readdir "#{location}/#{filteredName}", (err, files) ->
if err?
logger.err err:err, location:location, filteredName:filteredName, "something went wrong listing prefix in aws"
return callback(err)
totalSize = 0
_.each files, (entry)->
fd = fs.openSync "#{location}/#{filteredName}/#{entry}", 'r'
fileStats = fs.fstatSync(fd)
totalSize += fileStats.size
fs.closeSync fd
logger.log totalSize:totalSize, "total size", files:files
callback null, totalSize
directorySize(location, name, callback){
const filteredName = filterName(name.replace(/\/$/,''));
logger.log({location, filteredName}, "get project size in file system");
return fs.readdir(`${location}/${filteredName}`, function(err, files) {
if (err != null) {
logger.err({err, location, filteredName}, "something went wrong listing prefix in aws");
return callback(err);
}
let totalSize = 0;
_.each(files, function(entry){
const fd = fs.openSync(`${location}/${filteredName}/${entry}`, 'r');
const fileStats = fs.fstatSync(fd);
totalSize += fileStats.size;
return fs.closeSync(fd);
});
logger.log({totalSize}, "total size", {files});
return callback(null, totalSize);
});
}
};

@@ -1,113 +1,145 @@
PersistorManager = require("./PersistorManager")
settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
FileHandler = require("./FileHandler")
metrics = require("metrics-sharelatex")
parseRange = require('range-parser')
Errors = require('./Errors')
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let FileController;
const PersistorManager = require("./PersistorManager");
const settings = require("settings-sharelatex");
const logger = require("logger-sharelatex");
const FileHandler = require("./FileHandler");
const metrics = require("metrics-sharelatex");
const parseRange = require('range-parser');
const Errors = require('./Errors');
oneDayInSeconds = 60 * 60 * 24
maxSizeInBytes = 1024 * 1024 * 1024 # 1GB
const oneDayInSeconds = 60 * 60 * 24;
const maxSizeInBytes = 1024 * 1024 * 1024; // 1GB
module.exports = FileController =
module.exports = (FileController = {
getFile: (req, res)->
{key, bucket} = req
{format, style} = req.query
options = {
key: key,
bucket: bucket,
format: format,
style: style,
getFile(req, res){
const {key, bucket} = req;
const {format, style} = req.query;
const options = {
key,
bucket,
format,
style,
};
metrics.inc("getFile");
logger.log({key, bucket, format, style}, "receiving request to get file");
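// translate an HTTP Range header into start/end byte offsets for the persistor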
if (req.headers.range != null) {
const range = FileController._get_range(req.headers.range);
options.start = range.start;
options.end = range.end;
logger.log({start: range.start, end: range.end}, "getting range of bytes from file");
}
metrics.inc "getFile"
logger.log key:key, bucket:bucket, format:format, style: style, "receiving request to get file"
if req.headers.range?
range = FileController._get_range(req.headers.range)
options.start = range.start
options.end = range.end
logger.log start: range.start, end: range.end, "getting range of bytes from file"
FileHandler.getFile bucket, key, options, (err, fileStream)->
if err?
if err instanceof Errors.NotFoundError
return res.send 404
else
logger.err err:err, key:key, bucket:bucket, format:format, style:style, "problem getting file"
return res.send 500
else if req.query.cacheWarm
logger.log key:key, bucket:bucket, format:format, style:style, "request is only for cache warm so not sending stream"
res.send 200
else
logger.log key:key, bucket:bucket, format:format, style:style, "sending file to response"
fileStream.pipe res
return FileHandler.getFile(bucket, key, options, function(err, fileStream){
if (err != null) {
if (err instanceof Errors.NotFoundError) {
return res.send(404);
} else {
logger.err({err, key, bucket, format, style}, "problem getting file");
return res.send(500);
}
} else if (req.query.cacheWarm) {
logger.log({key, bucket, format, style}, "request is only for cache warm so not sending stream");
return res.send(200);
} else {
logger.log({key, bucket, format, style}, "sending file to response");
return fileStream.pipe(res);
}
});
},
getFileHead: (req, res) ->
{key, bucket} = req
metrics.inc("getFileSize")
logger.log({ key: key, bucket: bucket }, "receiving request to get file metadata")
FileHandler.getFileSize bucket, key, (err, fileSize) ->
if err?
if err instanceof Errors.NotFoundError
res.status(404).end()
else
res.status(500).end()
return
res.set("Content-Length", fileSize)
res.status(200).end()
getFileHead(req, res) {
const {key, bucket} = req;
metrics.inc("getFileSize");
logger.log({ key, bucket }, "receiving request to get file metadata");
return FileHandler.getFileSize(bucket, key, function(err, fileSize) {
if (err != null) {
if (err instanceof Errors.NotFoundError) {
res.status(404).end();
} else {
res.status(500).end();
}
return;
}
res.set("Content-Length", fileSize);
return res.status(200).end();
});
},
insertFile: (req, res)->
metrics.inc "insertFile"
{key, bucket} = req
logger.log key:key, bucket:bucket, "receiving request to insert file"
FileHandler.insertFile bucket, key, req, (err)->
if err?
logger.log err: err, key: key, bucket: bucket, "error inserting file"
res.send 500
else
res.send 200
insertFile(req, res){
metrics.inc("insertFile");
const {key, bucket} = req;
logger.log({key, bucket}, "receiving request to insert file");
return FileHandler.insertFile(bucket, key, req, function(err){
if (err != null) {
logger.log({err, key, bucket}, "error inserting file");
return res.send(500);
} else {
return res.send(200);
}
});
},
copyFile: (req, res)->
metrics.inc "copyFile"
{key, bucket} = req
oldProject_id = req.body.source.project_id
oldFile_id = req.body.source.file_id
logger.log key:key, bucket:bucket, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "receiving request to copy file"
PersistorManager.copyFile bucket, "#{oldProject_id}/#{oldFile_id}", key, (err)->
if err?
if err instanceof Errors.NotFoundError
res.send 404
else
logger.log err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, "something went wrong copying file"
res.send 500
else
res.send 200
copyFile(req, res){
metrics.inc("copyFile");
const {key, bucket} = req;
const oldProject_id = req.body.source.project_id;
const oldFile_id = req.body.source.file_id;
logger.log({key, bucket, oldProject_id, oldFile_id}, "receiving request to copy file");
return PersistorManager.copyFile(bucket, `${oldProject_id}/${oldFile_id}`, key, function(err){
if (err != null) {
if (err instanceof Errors.NotFoundError) {
return res.send(404);
} else {
logger.log({err, oldProject_id, oldFile_id}, "something went wrong copying file");
return res.send(500);
}
} else {
return res.send(200);
}
});
},
deleteFile: (req, res)->
metrics.inc "deleteFile"
{key, bucket} = req
logger.log key:key, bucket:bucket, "receiving request to delete file"
FileHandler.deleteFile bucket, key, (err)->
if err?
logger.log err:err, key:key, bucket:bucket, "something went wrong deleting file"
res.send 500
else
res.send 204
deleteFile(req, res){
metrics.inc("deleteFile");
const {key, bucket} = req;
logger.log({key, bucket}, "receiving request to delete file");
return FileHandler.deleteFile(bucket, key, function(err){
if (err != null) {
logger.log({err, key, bucket}, "something went wrong deleting file");
return res.send(500);
} else {
return res.send(204);
}
});
},
_get_range: (header) ->
parsed = parseRange(maxSizeInBytes, header)
if parsed == -1 or parsed == -2 or parsed.type != 'bytes'
null
else
range = parsed[0]
{start: range.start, end: range.end}
_get_range(header) {
const parsed = parseRange(maxSizeInBytes, header);
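// range-parser returns -2 for a malformed header and -1 for an unsatisfiable range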
if ((parsed === -1) || (parsed === -2) || (parsed.type !== 'bytes')) {
return null;
} else {
const range = parsed[0];
return {start: range.start, end: range.end};
}
},
directorySize: (req, res)->
metrics.inc "projectSize"
{project_id, bucket} = req
logger.log project_id:project_id, bucket:bucket, "receiving request to project size"
FileHandler.getDirectorySize bucket, project_id, (err, size)->
if err?
logger.log err: err, project_id: project_id, bucket: bucket, "error inserting file"
res.send 500
else
res.json {'total bytes' : size}
directorySize(req, res){
metrics.inc("projectSize");
const {project_id, bucket} = req;
logger.log({project_id, bucket}, "receiving request to project size");
return FileHandler.getDirectorySize(bucket, project_id, function(err, size){
if (err != null) {
logger.log({err, project_id, bucket}, "error getting project size");
return res.send(500);
} else {
return res.json({'total bytes' : size});
}
});
}
});

@@ -1,62 +1,80 @@
_ = require("underscore")
metrics = require("metrics-sharelatex")
logger = require("logger-sharelatex")
safe_exec = require("./SafeExec")
approvedFormats = ["png"]
Settings = require "settings-sharelatex"
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const _ = require("underscore");
const metrics = require("metrics-sharelatex");
const logger = require("logger-sharelatex");
const safe_exec = require("./SafeExec");
const approvedFormats = ["png"];
const Settings = require("settings-sharelatex");
fourtySeconds = 40 * 1000
const fourtySeconds = 40 * 1000;
childProcessOpts =
killSignal: "SIGTERM"
const childProcessOpts = {
killSignal: "SIGTERM",
timeout: fourtySeconds
};
module.exports =
module.exports = {
convert: (sourcePath, requestedFormat, callback)->
logger.log sourcePath:sourcePath, requestedFormat:requestedFormat, "converting file format"
timer = new metrics.Timer("imageConvert")
destPath = "#{sourcePath}.#{requestedFormat}"
sourcePath = "#{sourcePath}[0]"
if !_.include approvedFormats, requestedFormat
err = new Error("invalid format requested")
return callback err
width = "600x"
command = ["convert", "-define", "pdf:fit-page=#{width}", "-flatten", "-density", "300", sourcePath, destPath]
command = Settings.commands.convertCommandPrefix.concat(command)
safe_exec command, childProcessOpts, (err, stdout, stderr)->
timer.done()
if err?
logger.err err:err, stderr:stderr, sourcePath:sourcePath, requestedFormat:requestedFormat, destPath:destPath, "something went wrong converting file"
else
logger.log sourcePath:sourcePath, requestedFormat:requestedFormat, destPath:destPath, "finished converting file"
callback(err, destPath)
convert(sourcePath, requestedFormat, callback){
logger.log({sourcePath, requestedFormat}, "converting file format");
const timer = new metrics.Timer("imageConvert");
const destPath = `${sourcePath}.${requestedFormat}`;
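// the "[0]" suffix tells ImageMagick to read only the first page of the input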
sourcePath = `${sourcePath}[0]`;
if (!_.include(approvedFormats, requestedFormat)) {
const err = new Error("invalid format requested");
return callback(err);
}
const width = "600x";
let command = ["convert", "-define", `pdf:fit-page=${width}`, "-flatten", "-density", "300", sourcePath, destPath];
command = Settings.commands.convertCommandPrefix.concat(command);
return safe_exec(command, childProcessOpts, function(err, stdout, stderr){
timer.done();
if (err != null) {
logger.err({err, stderr, sourcePath, requestedFormat, destPath}, "something went wrong converting file");
} else {
logger.log({sourcePath, requestedFormat, destPath}, "finished converting file");
}
return callback(err, destPath);
});
},
thumbnail: (sourcePath, callback)->
destPath = "#{sourcePath}.png"
sourcePath = "#{sourcePath}[0]"
width = "260x"
command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", "pdf:fit-page=#{width}", sourcePath, "-resize", width, destPath]
logger.log sourcePath:sourcePath, destPath:destPath, command:command, "thumbnail convert file"
command = Settings.commands.convertCommandPrefix.concat(command)
safe_exec command, childProcessOpts, (err, stdout, stderr)->
if err?
logger.err err:err, stderr:stderr, sourcePath:sourcePath, "something went wrong converting file to thumbnail"
else
logger.log sourcePath:sourcePath, destPath:destPath, "finished thumbnailing file"
callback(err, destPath)
thumbnail(sourcePath, callback){
const destPath = `${sourcePath}.png`;
sourcePath = `${sourcePath}[0]`;
const width = "260x";
let command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", `pdf:fit-page=${width}`, sourcePath, "-resize", width, destPath];
logger.log({sourcePath, destPath, command}, "thumbnail convert file");
command = Settings.commands.convertCommandPrefix.concat(command);
return safe_exec(command, childProcessOpts, function(err, stdout, stderr){
if (err != null) {
logger.err({err, stderr, sourcePath}, "something went wrong converting file to thumbnail");
} else {
logger.log({sourcePath, destPath}, "finished thumbnailing file");
}
return callback(err, destPath);
});
},
preview: (sourcePath, callback)->
logger.log sourcePath:sourcePath, "preview convert file"
destPath = "#{sourcePath}.png"
sourcePath = "#{sourcePath}[0]"
width = "548x"
command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", "pdf:fit-page=#{width}", sourcePath, "-resize", width, destPath]
command = Settings.commands.convertCommandPrefix.concat(command)
safe_exec command, childProcessOpts, (err, stdout, stderr)->
if err?
logger.err err:err, stderr:stderr, sourcePath:sourcePath, destPath:destPath, "something went wrong converting file to preview"
else
logger.log sourcePath:sourcePath, destPath:destPath, "finished converting file to preview"
callback(err, destPath)
preview(sourcePath, callback){
logger.log({sourcePath}, "preview convert file");
const destPath = `${sourcePath}.png`;
sourcePath = `${sourcePath}[0]`;
const width = "548x";
let command = ["convert", "-flatten", "-background", "white", "-density", "300", "-define", `pdf:fit-page=${width}`, sourcePath, "-resize", width, destPath];
command = Settings.commands.convertCommandPrefix.concat(command);
return safe_exec(command, childProcessOpts, function(err, stdout, stderr){
if (err != null) {
logger.err({err, stderr, sourcePath, destPath}, "something went wrong converting file to preview");
} else {
logger.log({sourcePath, destPath}, "finished converting file to preview");
}
return callback(err, destPath);
});
}
};

@@ -1,129 +1,169 @@
settings = require("settings-sharelatex")
PersistorManager = require("./PersistorManager")
LocalFileWriter = require("./LocalFileWriter")
logger = require("logger-sharelatex")
FileConverter = require("./FileConverter")
KeyBuilder = require("./KeyBuilder")
async = require("async")
ImageOptimiser = require("./ImageOptimiser")
Errors = require('./Errors')
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let FileHandler;
const settings = require("settings-sharelatex");
const PersistorManager = require("./PersistorManager");
const LocalFileWriter = require("./LocalFileWriter");
const logger = require("logger-sharelatex");
const FileConverter = require("./FileConverter");
const KeyBuilder = require("./KeyBuilder");
const async = require("async");
const ImageOptimiser = require("./ImageOptimiser");
const Errors = require('./Errors');
module.exports = FileHandler =
module.exports = (FileHandler = {
insertFile: (bucket, key, stream, callback)->
convertedKey = KeyBuilder.getConvertedFolderKey key
PersistorManager.deleteDirectory bucket, convertedKey, (error) ->
return callback(error) if error?
PersistorManager.sendStream bucket, key, stream, callback
insertFile(bucket, key, stream, callback){
const convertedKey = KeyBuilder.getConvertedFolderKey(key);
return PersistorManager.deleteDirectory(bucket, convertedKey, function(error) {
if (error != null) { return callback(error); }
return PersistorManager.sendStream(bucket, key, stream, callback);
});
},
deleteFile: (bucket, key, callback)->
convertedKey = KeyBuilder.getConvertedFolderKey key
async.parallel [
(done)-> PersistorManager.deleteFile bucket, key, done
(done)-> PersistorManager.deleteDirectory bucket, convertedKey, done
], callback
deleteFile(bucket, key, callback){
const convertedKey = KeyBuilder.getConvertedFolderKey(key);
return async.parallel([
done => PersistorManager.deleteFile(bucket, key, done),
done => PersistorManager.deleteDirectory(bucket, convertedKey, done)
], callback);
},
getFile: (bucket, key, opts = {}, callback)->
# In this call, opts can contain credentials
logger.log bucket:bucket, key:key, opts:@_scrubSecrets(opts), "getting file"
if !opts.format? and !opts.style?
@_getStandardFile bucket, key, opts, callback
else
@_getConvertedFile bucket, key, opts, callback
getFile(bucket, key, opts, callback){
// In this call, opts can contain credentials
if (opts == null) { opts = {}; }
logger.log({bucket, key, opts:this._scrubSecrets(opts)}, "getting file");
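// requests without format/style stream the stored file directly; otherwise go through the conversion cache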
if ((opts.format == null) && (opts.style == null)) {
return this._getStandardFile(bucket, key, opts, callback);
} else {
return this._getConvertedFile(bucket, key, opts, callback);
}
},
getFileSize: (bucket, key, callback) ->
PersistorManager.getFileSize(bucket, key, callback)
getFileSize(bucket, key, callback) {
return PersistorManager.getFileSize(bucket, key, callback);
},
_getStandardFile: (bucket, key, opts, callback)->
PersistorManager.getFileStream bucket, key, opts, (err, fileStream)->
if err? and !(err instanceof Errors.NotFoundError)
logger.err bucket:bucket, key:key, opts:FileHandler._scrubSecrets(opts), "error getting fileStream"
callback err, fileStream
_getStandardFile(bucket, key, opts, callback){
return PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream){
if ((err != null) && !(err instanceof Errors.NotFoundError)) {
logger.err({bucket, key, opts:FileHandler._scrubSecrets(opts)}, "error getting fileStream");
}
return callback(err, fileStream);
});
},
_getConvertedFile: (bucket, key, opts, callback)->
convertedKey = KeyBuilder.addCachingToKey key, opts
PersistorManager.checkIfFileExists bucket, convertedKey, (err, exists)=>
if err?
return callback err
if exists
PersistorManager.getFileStream bucket, convertedKey, opts, callback
else
@_getConvertedFileAndCache bucket, key, convertedKey, opts, callback
_getConvertedFile(bucket, key, opts, callback){
const convertedKey = KeyBuilder.addCachingToKey(key, opts);
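// converted output is cached under a derived key; serve the cached copy when it already exists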
return PersistorManager.checkIfFileExists(bucket, convertedKey, (err, exists)=> {
if (err != null) {
return callback(err);
}
if (exists) {
return PersistorManager.getFileStream(bucket, convertedKey, opts, callback);
} else {
return this._getConvertedFileAndCache(bucket, key, convertedKey, opts, callback);
}
});
},
_getConvertedFileAndCache: (bucket, key, convertedKey, opts, callback)->
convertedFsPath = ""
originalFsPath = ""
async.series [
(cb) =>
@_convertFile bucket, key, opts, (err, fileSystemPath, originalFsPath) ->
convertedFsPath = fileSystemPath
originalFsPath = originalFsPath
cb err
(cb)->
ImageOptimiser.compressPng convertedFsPath, cb
(cb)->
PersistorManager.sendFile bucket, convertedKey, convertedFsPath, cb
], (err)->
if err?
LocalFileWriter.deleteFile convertedFsPath, ->
LocalFileWriter.deleteFile originalFsPath, ->
return callback(err)
# Send back the converted file from the local copy to avoid problems
# with the file not being present in S3 yet. As described in the
# documentation below, we have already made a 'HEAD' request in
# checkIfFileExists so we only have "eventual consistency" if we try
# to stream it from S3 here. This was a cause of many 403 errors.
#
# "Amazon S3 provides read-after-write consistency for PUTS of new
# objects in your S3 bucket in all regions with one caveat. The
# caveat is that if you make a HEAD or GET request to the key name
# (to find if the object exists) before creating the object, Amazon
# S3 provides eventual consistency for read-after-write.""
# https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel
LocalFileWriter.getStream convertedFsPath, (err, readStream) ->
return callback(err) if err?
readStream.on 'end', () ->
logger.log {convertedFsPath: convertedFsPath}, "deleting temporary file"
LocalFileWriter.deleteFile convertedFsPath, ->
callback(null, readStream)
_getConvertedFileAndCache(bucket, key, convertedKey, opts, callback){
let convertedFsPath = "";
let originalFsPath = "";
return async.series([
cb => {
return this._convertFile(bucket, key, opts, function(err, fileSystemPath, sourceFsPath) {
// use a distinct parameter name so the outer originalFsPath is actually updated
convertedFsPath = fileSystemPath;
originalFsPath = sourceFsPath;
return cb(err);
});
},
cb => ImageOptimiser.compressPng(convertedFsPath, cb),
cb => PersistorManager.sendFile(bucket, convertedKey, convertedFsPath, cb)
], function(err){
if (err != null) {
LocalFileWriter.deleteFile(convertedFsPath, function() {});
LocalFileWriter.deleteFile(originalFsPath, function() {});
return callback(err);
}
// Send back the converted file from the local copy to avoid problems
// with the file not being present in S3 yet. As described in the
// documentation below, we have already made a 'HEAD' request in
// checkIfFileExists so we only have "eventual consistency" if we try
// to stream it from S3 here. This was a cause of many 403 errors.
//
// "Amazon S3 provides read-after-write consistency for PUTS of new
// objects in your S3 bucket in all regions with one caveat. The
// caveat is that if you make a HEAD or GET request to the key name
// (to find if the object exists) before creating the object, Amazon
// S3 provides eventual consistency for read-after-write."
// https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel
return LocalFileWriter.getStream(convertedFsPath, function(err, readStream) {
if (err != null) { return callback(err); }
readStream.on('end', function() {
logger.log({convertedFsPath}, "deleting temporary file");
return LocalFileWriter.deleteFile(convertedFsPath, function() {});
});
return callback(null, readStream);
});
});
},
_convertFile: (bucket, originalKey, opts, callback)->
@_writeS3FileToDisk bucket, originalKey, opts, (err, originalFsPath)->
if err?
return callback(err)
done = (err, destPath)->
if err?
logger.err err:err, bucket:bucket, originalKey:originalKey, opts:FileHandler._scrubSecrets(opts), "error converting file"
return callback(err)
LocalFileWriter.deleteFile originalFsPath, ->
callback(err, destPath, originalFsPath)
_convertFile(bucket, originalKey, opts, callback){
return this._writeS3FileToDisk(bucket, originalKey, opts, function(err, originalFsPath){
if (err != null) {
return callback(err);
}
const done = function(err, destPath){
if (err != null) {
logger.err({err, bucket, originalKey, opts:FileHandler._scrubSecrets(opts)}, "error converting file");
return callback(err);
}
LocalFileWriter.deleteFile(originalFsPath, function() {});
return callback(err, destPath, originalFsPath);
};
logger.log opts:opts, "converting file depending on opts"
logger.log({opts}, "converting file depending on opts");
if opts.format?
FileConverter.convert originalFsPath, opts.format, done
else if opts.style == "thumbnail"
FileConverter.thumbnail originalFsPath, done
else if opts.style == "preview"
FileConverter.preview originalFsPath, done
else
return callback(new Error("should have specified opts to convert file with #{JSON.stringify(opts)}"))
if (opts.format != null) {
return FileConverter.convert(originalFsPath, opts.format, done);
} else if (opts.style === "thumbnail") {
return FileConverter.thumbnail(originalFsPath, done);
} else if (opts.style === "preview") {
return FileConverter.preview(originalFsPath, done);
} else {
return callback(new Error(`should have specified opts to convert file with ${JSON.stringify(opts)}`));
}
});
},
_writeS3FileToDisk: (bucket, key, opts, callback)->
PersistorManager.getFileStream bucket, key, opts, (err, fileStream)->
if err?
return callback(err)
LocalFileWriter.writeStream fileStream, key, callback
_writeS3FileToDisk(bucket, key, opts, callback){
return PersistorManager.getFileStream(bucket, key, opts, function(err, fileStream){
if (err != null) {
return callback(err);
}
return LocalFileWriter.writeStream(fileStream, key, callback);
});
},
getDirectorySize: (bucket, project_id, callback)->
logger.log bucket:bucket, project_id:project_id, "getting project size"
PersistorManager.directorySize bucket, project_id, (err, size)->
if err?
logger.err bucket:bucket, project_id:project_id, "error getting size"
callback err, size
getDirectorySize(bucket, project_id, callback){
logger.log({bucket, project_id}, "getting project size");
return PersistorManager.directorySize(bucket, project_id, function(err, size){
if (err != null) {
logger.err({bucket, project_id}, "error getting size");
}
return callback(err, size);
});
},
_scrubSecrets: (opts)->
safe = Object.assign {}, opts
delete safe.credentials
safe
_scrubSecrets(opts){
const safe = Object.assign({}, opts);
delete safe.credentials;
return safe;
}
});

@@ -1,57 +1,74 @@
fs = require("fs-extra")
path = require("path")
async = require("async")
fileConverter = require("./FileConverter")
keyBuilder = require("./KeyBuilder")
fileController = require("./FileController")
logger = require('logger-sharelatex')
settings = require("settings-sharelatex")
streamBuffers = require("stream-buffers")
_ = require('underscore')
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const fs = require("fs-extra");
const path = require("path");
const async = require("async");
const fileConverter = require("./FileConverter");
const keyBuilder = require("./KeyBuilder");
const fileController = require("./FileController");
const logger = require('logger-sharelatex');
const settings = require("settings-sharelatex");
const streamBuffers = require("stream-buffers");
const _ = require('underscore');
checkCanStoreFiles = (callback)->
callback = _.once(callback)
req = {params:{}, query:{}, headers:{}}
req.params.project_id = settings.health_check.project_id
req.params.file_id = settings.health_check.file_id
myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer(initialSize: 100)
res = {
send: (code) ->
if code != 200
callback(new Error("non-200 code from getFile: #{code}"))
const checkCanStoreFiles = function(callback){
callback = _.once(callback);
const req = {params:{}, query:{}, headers:{}};
req.params.project_id = settings.health_check.project_id;
req.params.file_id = settings.health_check.file_id;
const myWritableStreamBuffer = new streamBuffers.WritableStreamBuffer({initialSize: 100});
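// buffer the streamed file in memory so the health check can verify that data actually arrived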
const res = {
send(code) {
if (code !== 200) {
return callback(new Error(`non-200 code from getFile: ${code}`));
}
myWritableStreamBuffer.send = res.send
keyBuilder.userFileKey req, res, ->
fileController.getFile req, myWritableStreamBuffer
myWritableStreamBuffer.on "close", ->
if myWritableStreamBuffer.size() > 0
callback()
else
err = "no data in write stream buffer for health check"
logger.err {err,}, "error performing health check"
callback(err)
}
};
myWritableStreamBuffer.send = res.send;
return keyBuilder.userFileKey(req, res, function() {
fileController.getFile(req, myWritableStreamBuffer);
return myWritableStreamBuffer.on("close", function() {
if (myWritableStreamBuffer.size() > 0) {
return callback();
} else {
const err = "no data in write stream buffer for health check";
logger.err({err,}, "error performing health check");
return callback(err);
}
});
});
};
checkFileConvert = (callback)->
if !settings.enableConversions
return callback()
imgPath = path.join(settings.path.uploadFolder, "/tiny.pdf")
async.waterfall [
(cb)->
fs.copy("./tiny.pdf", imgPath, cb)
(cb)-> fileConverter.thumbnail imgPath, cb
(resultPath, cb)-> fs.unlink resultPath, cb
(cb)-> fs.unlink imgPath, cb
], callback
const checkFileConvert = function(callback){
if (!settings.enableConversions) {
return callback();
}
const imgPath = path.join(settings.path.uploadFolder, "/tiny.pdf");
return async.waterfall([
cb => fs.copy("./tiny.pdf", imgPath, cb),
cb => fileConverter.thumbnail(imgPath, cb),
(resultPath, cb) => fs.unlink(resultPath, cb),
cb => fs.unlink(imgPath, cb)
], callback);
};
module.exports =
module.exports = {
check: (req, res) ->
logger.log {}, "performing health check"
async.parallel [checkFileConvert, checkCanStoreFiles], (err)->
if err?
logger.err err:err, "Health check: error running"
res.send 500
else
res.send 200
check(req, res) {
logger.log({}, "performing health check");
return async.parallel([checkFileConvert, checkCanStoreFiles], function(err){
if (err != null) {
logger.err({err}, "Health check: error running");
return res.send(500);
} else {
return res.send(200);
}
});
}
};

@@ -1,25 +1,39 @@
exec = require('child_process').exec
logger = require("logger-sharelatex")
Settings = require "settings-sharelatex"
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const {
exec
} = require('child_process');
const logger = require("logger-sharelatex");
const Settings = require("settings-sharelatex");
module.exports =
module.exports = {
compressPng: (localPath, callback)->
startTime = new Date()
logger.log localPath:localPath, "optimising png path"
args = "optipng #{localPath}"
opts =
timeout: 30 * 1000
compressPng(localPath, callback){
const startTime = new Date();
logger.log({localPath}, "optimising png path");
const args = `optipng ${localPath}`;
const opts = {
timeout: 30 * 1000,
killSignal: "SIGKILL"
if !Settings.enableConversions
error = new Error("Image conversions are disabled")
return callback(error)
exec args, opts,(err, stdout, stderr)->
if err? and err.signal == 'SIGKILL'
logger.warn {err: err, stderr: stderr, localPath: localPath}, "optimiser timeout reached"
err = null
else if err?
logger.err err:err, stderr:stderr, localPath:localPath, "something went wrong converting compressPng"
else
logger.log localPath:localPath, "finished compressPng file"
callback(err)
};
if (!Settings.enableConversions) {
const error = new Error("Image conversions are disabled");
return callback(error);
}
return exec(args, opts,function(err, stdout, stderr){
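// SIGKILL means the 30-second timeout above fired; optimisation is best-effort, so that error is swallowed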
if ((err != null) && (err.signal === 'SIGKILL')) {
logger.warn({err, stderr, localPath}, "optimiser timeout reached");
err = null;
} else if (err != null) {
logger.err({err, stderr, localPath}, "something went wrong converting compressPng");
} else {
logger.log({localPath}, "finished compressPng file");
}
return callback(err);
});
}
};

@@ -1,50 +1,68 @@
settings = require("settings-sharelatex")
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const settings = require("settings-sharelatex");
module.exports =
module.exports = {
getConvertedFolderKey: (key)->
key = "#{key}-converted-cache/"
getConvertedFolderKey(key){
return key = `${key}-converted-cache/`;
},
addCachingToKey: (key, opts)->
key = @getConvertedFolderKey(key)
if opts.format? and !opts.style?
key = "#{key}format-#{opts.format}"
if opts.style? and !opts.format?
key = "#{key}style-#{opts.style}"
if opts.style? and opts.format?
key = "#{key}format-#{opts.format}-style-#{opts.style}"
return key
addCachingToKey(key, opts){
key = this.getConvertedFolderKey(key);
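// append format/style qualifiers so each conversion variant gets its own cache entry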
if ((opts.format != null) && (opts.style == null)) {
key = `${key}format-${opts.format}`;
}
if ((opts.style != null) && (opts.format == null)) {
key = `${key}style-${opts.style}`;
}
if ((opts.style != null) && (opts.format != null)) {
key = `${key}format-${opts.format}-style-${opts.style}`;
}
return key;
},
userFileKey: (req, res, next)->
{project_id, file_id} = req.params
req.key = "#{project_id}/#{file_id}"
req.bucket = settings.filestore.stores.user_files
next()
userFileKey(req, res, next){
const {project_id, file_id} = req.params;
req.key = `${project_id}/${file_id}`;
req.bucket = settings.filestore.stores.user_files;
return next();
},
publicFileKey: (req, res, next)->
{project_id, public_file_id} = req.params
if not settings.filestore.stores.public_files?
res.status(501).send("public files not available")
else
req.key = "#{project_id}/#{public_file_id}"
req.bucket = settings.filestore.stores.public_files
next()
publicFileKey(req, res, next){
const {project_id, public_file_id} = req.params;
if ((settings.filestore.stores.public_files == null)) {
return res.status(501).send("public files not available");
} else {
req.key = `${project_id}/${public_file_id}`;
req.bucket = settings.filestore.stores.public_files;
return next();
}
},
templateFileKey: (req, res, next)->
{template_id, format, version, sub_type} = req.params
req.key = "#{template_id}/v/#{version}/#{format}"
if sub_type?
req.key = "#{req.key}/#{sub_type}"
req.bucket = settings.filestore.stores.template_files
req.version = version
opts = req.query
next()
templateFileKey(req, res, next){
const {template_id, format, version, sub_type} = req.params;
req.key = `${template_id}/v/${version}/${format}`;
if (sub_type != null) {
req.key = `${req.key}/${sub_type}`;
}
req.bucket = settings.filestore.stores.template_files;
req.version = version;
const opts = req.query;
return next();
},
publicProjectKey: (req, res, next)->
{project_id} = req.params
req.project_id = project_id
req.bucket = settings.filestore.stores.user_files
next()
publicProjectKey(req, res, next){
const {project_id} = req.params;
req.project_id = project_id;
req.bucket = settings.filestore.stores.user_files;
return next();
}
};

@@ -1,56 +1,76 @@
fs = require("fs")
uuid = require('node-uuid')
path = require("path")
_ = require("underscore")
logger = require("logger-sharelatex")
metrics = require("metrics-sharelatex")
Settings = require("settings-sharelatex")
Errors = require "./Errors"
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const fs = require("fs");
const uuid = require('node-uuid');
const path = require("path");
const _ = require("underscore");
const logger = require("logger-sharelatex");
const metrics = require("metrics-sharelatex");
const Settings = require("settings-sharelatex");
const Errors = require("./Errors");
module.exports =
module.exports = {
writeStream: (stream, key, callback)->
timer = new metrics.Timer("writingFile")
callback = _.once callback
fsPath = @_getPath(key)
logger.log fsPath:fsPath, "writing file locally"
writeStream = fs.createWriteStream(fsPath)
writeStream.on "finish", ->
timer.done()
logger.log fsPath:fsPath, "finished writing file locally"
callback(null, fsPath)
writeStream.on "error", (err)->
logger.err err:err, fsPath:fsPath, "problem writing file locally, with write stream"
callback err
stream.on "error", (err)->
logger.log err:err, fsPath:fsPath, "problem writing file locally, with read stream"
callback err
stream.pipe writeStream
writeStream(stream, key, callback){
const timer = new metrics.Timer("writingFile");
callback = _.once(callback);
const fsPath = this._getPath(key);
logger.log({fsPath}, "writing file locally");
const writeStream = fs.createWriteStream(fsPath);
writeStream.on("finish", function() {
timer.done();
logger.log({fsPath}, "finished writing file locally");
return callback(null, fsPath);
});
writeStream.on("error", function(err){
logger.err({err, fsPath}, "problem writing file locally, with write stream");
return callback(err);
});
stream.on("error", function(err){
logger.log({err, fsPath}, "problem writing file locally, with read stream");
return callback(err);
});
return stream.pipe(writeStream);
},
getStream: (fsPath, _callback = (err, res)->) ->
callback = _.once _callback
timer = new metrics.Timer("readingFile")
logger.log fsPath:fsPath, "reading file locally"
readStream = fs.createReadStream(fsPath)
readStream.on "end", ->
timer.done()
logger.log fsPath:fsPath, "finished reading file locally"
readStream.on "error", (err)->
logger.err err:err, fsPath:fsPath, "problem reading file locally, with read stream"
if err.code == 'ENOENT'
callback new Errors.NotFoundError(err.message), null
else
callback err
callback null, readStream
getStream(fsPath, _callback) {
if (_callback == null) { _callback = function(err, res){}; }
const callback = _.once(_callback);
const timer = new metrics.Timer("readingFile");
logger.log({fsPath}, "reading file locally");
const readStream = fs.createReadStream(fsPath);
readStream.on("end", function() {
timer.done();
return logger.log({fsPath}, "finished reading file locally");
});
readStream.on("error", function(err){
logger.err({err, fsPath}, "problem reading file locally, with read stream");
if (err.code === 'ENOENT') {
return callback(new Errors.NotFoundError(err.message), null);
} else {
return callback(err);
}
});
return callback(null, readStream);
},
deleteFile: (fsPath, callback)->
if !fsPath? or fsPath == ""
return callback()
logger.log fsPath:fsPath, "removing local temp file"
fs.unlink fsPath, callback
deleteFile(fsPath, callback){
if ((fsPath == null) || (fsPath === "")) {
return callback();
}
logger.log({fsPath}, "removing local temp file");
return fs.unlink(fsPath, callback);
},
_getPath : (key)->
if !key?
key = uuid.v1()
key = key.replace(/\//g,"-")
path.join(Settings.path.uploadFolder, key)
_getPath(key){
if ((key == null)) {
key = uuid.v1();
}
key = key.replace(/\//g,"-");
return path.join(Settings.path.uploadFolder, key);
}
};
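
Reviewer note: a minimal usage sketch for the converted LocalFileWriter, assuming it is required as ./LocalFileWriter and that Settings.path.uploadFolder exists; the input path is a placeholder. Passing null as the key makes _getPath generate a uuid filename.

const fs = require("fs");
const LocalFileWriter = require("./LocalFileWriter");  // module converted above

const source = fs.createReadStream("/tmp/example-input");  // placeholder input
LocalFileWriter.writeStream(source, null, function(err, fsPath) {
    if (err != null) { return console.error("write failed", err); }
    // fsPath is a uuid-named file under Settings.path.uploadFolder
    LocalFileWriter.deleteFile(fsPath, () => console.log("temp file removed"));
});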

View file

@ -1,16 +1,28 @@
settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
/*
* decaffeinate suggestions:
* DS103: Rewrite code to no longer use __guard__
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const settings = require("settings-sharelatex");
const logger = require("logger-sharelatex");
# assume s3 if none specified
settings?.filestore?.backend ||= "s3"
// assume s3 if none specified
__guard__(settings != null ? settings.filestore : undefined, x => x.backend || (settings.filestore.backend = "s3"));
logger.log backend:settings?.filestore?.backend, "Loading backend"
module.exports = switch settings?.filestore?.backend
when "aws-sdk"
require "./AWSSDKPersistorManager"
when "s3"
require("./S3PersistorManager")
when "fs"
require("./FSPersistorManager")
else
throw new Error( "Unknown filestore backend: #{settings.filestore.backend}" )
logger.log({backend:__guard__(settings != null ? settings.filestore : undefined, x1 => x1.backend)}, "Loading backend");
module.exports = (() => { switch (__guard__(settings != null ? settings.filestore : undefined, x2 => x2.backend)) {
case "aws-sdk":
return require("./AWSSDKPersistorManager");
case "s3":
return require("./S3PersistorManager");
case "fs":
return require("./FSPersistorManager");
default:
throw new Error( `Unknown filestore backend: ${settings.filestore.backend}` );
} })();
function __guard__(value, transform) {
return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
}
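
Reviewer note: the switch above is driven entirely by settings; a hypothetical settings fragment (paths and values are placeholders, only the shape matters) looks like this. Any unknown backend value makes the module throw at require time, which surfaces misconfiguration early.

// Placeholder settings -- illustrative only.
module.exports = {
    filestore: {
        backend: "fs",  // "s3", "aws-sdk" or "fs"; defaults to "s3" when unset
        stores: {
            user_files: "/var/lib/filestore/user_files",
            public_files: "/var/lib/filestore/public_files",
            template_files: "/var/lib/filestore/template_files"
        }
    }
};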

View file

@ -1,238 +1,290 @@
# This module is the one which is used in production. It needs to be migrated
# to use aws-sdk throughout, see the comments in AWSSDKPersistorManager for
# details. The knox library is unmaintained and has bugs.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
// This module is the one which is used in production. It needs to be migrated
// to use aws-sdk throughout; see the comments in AWSSDKPersistorManager for
// details. The knox library is unmaintained and has bugs.
http = require('http')
http.globalAgent.maxSockets = 300
https = require('https')
https.globalAgent.maxSockets = 300
settings = require("settings-sharelatex")
request = require("request")
logger = require("logger-sharelatex")
metrics = require("metrics-sharelatex")
fs = require("fs")
knox = require("knox")
path = require("path")
LocalFileWriter = require("./LocalFileWriter")
Errors = require("./Errors")
_ = require("underscore")
awsS3 = require "aws-sdk/clients/s3"
URL = require('url')
const http = require('http');
http.globalAgent.maxSockets = 300;
const https = require('https');
https.globalAgent.maxSockets = 300;
const settings = require("settings-sharelatex");
const request = require("request");
const logger = require("logger-sharelatex");
const metrics = require("metrics-sharelatex");
const fs = require("fs");
const knox = require("knox");
const path = require("path");
const LocalFileWriter = require("./LocalFileWriter");
const Errors = require("./Errors");
const _ = require("underscore");
const awsS3 = require("aws-sdk/clients/s3");
const URL = require('url');
thirtySeconds = 30 * 1000
const thirtySeconds = 30 * 1000;
buildDefaultOptions = (bucketName, method, key)->
if settings.filestore.s3.endpoint
endpoint = "#{settings.filestore.s3.endpoint}/#{bucketName}"
else
endpoint = "https://#{bucketName}.s3.amazonaws.com"
const buildDefaultOptions = function(bucketName, method, key){
let endpoint;
if (settings.filestore.s3.endpoint) {
endpoint = `${settings.filestore.s3.endpoint}/${bucketName}`;
} else {
endpoint = `https://${bucketName}.s3.amazonaws.com`;
}
return {
aws:
key: settings.filestore.s3.key
secret: settings.filestore.s3.secret
aws: {
key: settings.filestore.s3.key,
secret: settings.filestore.s3.secret,
bucket: bucketName
method: method
timeout: thirtySeconds
uri:"#{endpoint}/#{key}"
},
method,
timeout: thirtySeconds,
uri:`${endpoint}/${key}`
};
};
const getS3Options = function(credentials) {
const options = {
credentials: {
accessKeyId: credentials.auth_key,
secretAccessKey: credentials.auth_secret
}
};
if (settings.filestore.s3.endpoint) {
const endpoint = URL.parse(settings.filestore.s3.endpoint);
options.endpoint = settings.filestore.s3.endpoint;
options.sslEnabled = endpoint.protocol === 'https';
}
getS3Options = (credentials) ->
options =
credentials:
accessKeyId: credentials.auth_key
secretAccessKey: credentials.auth_secret
return options;
};
if settings.filestore.s3.endpoint
endpoint = URL.parse(settings.filestore.s3.endpoint)
options.endpoint = settings.filestore.s3.endpoint
options.sslEnabled = endpoint.protocol == 'https'
return options
defaultS3Client = new awsS3(getS3Options({
const defaultS3Client = new awsS3(getS3Options({
auth_key: settings.filestore.s3.key,
auth_secret: settings.filestore.s3.secret
}))
}));
getS3Client = (credentials) ->
if credentials?
return new awsS3(getS3Options(credentials))
else
return defaultS3Client
getKnoxClient = (bucketName) =>
options =
key: settings.filestore.s3.key
secret: settings.filestore.s3.secret
bucket: bucketName
if settings.filestore.s3.endpoint
endpoint = URL.parse(settings.filestore.s3.endpoint)
options.endpoint = endpoint.hostname
options.port = endpoint.port
return knox.createClient(options)
module.exports =
sendFile: (bucketName, key, fsPath, callback)->
s3Client = getKnoxClient(bucketName)
uploaded = 0
putEventEmiter = s3Client.putFile fsPath, key, (err, res)->
metrics.count 's3.egress', uploaded
if err?
logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath,"something went wrong uploading file to s3"
return callback(err)
if !res?
logger.err err:err, res:res, bucketName:bucketName, key:key, fsPath:fsPath, "no response from s3 put file"
return callback("no response from put file")
if res.statusCode != 200
logger.err bucketName:bucketName, key:key, fsPath:fsPath, "non 200 response from s3 putting file"
return callback("non 200 response from s3 on put file")
logger.log res:res, bucketName:bucketName, key:key, fsPath:fsPath,"file uploaded to s3"
callback(err)
putEventEmiter.on "error", (err)->
logger.err err:err, bucketName:bucketName, key:key, fsPath:fsPath, "error emmited on put of file"
callback err
putEventEmiter.on "progress", (progress)->
uploaded = progress.written
sendStream: (bucketName, key, readStream, callback)->
logger.log bucketName:bucketName, key:key, "sending file to s3"
readStream.on "error", (err)->
logger.err bucketName:bucketName, key:key, "error on stream to send to s3"
LocalFileWriter.writeStream readStream, null, (err, fsPath)=>
if err?
logger.err bucketName:bucketName, key:key, fsPath:fsPath, err:err, "something went wrong writing stream to disk"
return callback(err)
@sendFile bucketName, key, fsPath, (err) ->
# delete the temporary file created above and return the original error
LocalFileWriter.deleteFile fsPath, () ->
callback(err)
# opts may be {start: Number, end: Number}
getFileStream: (bucketName, key, opts, callback = (err, res)->)->
opts = opts || {}
callback = _.once(callback)
logger.log bucketName:bucketName, key:key, "getting file from s3"
s3 = getS3Client(opts.credentials)
s3Params = {
Bucket: bucketName
Key: key
const getS3Client = function(credentials) {
if (credentials != null) {
return new awsS3(getS3Options(credentials));
} else {
return defaultS3Client;
}
if opts.start? and opts.end?
s3Params['Range'] = "bytes=#{opts.start}-#{opts.end}"
s3Request = s3.getObject(s3Params)
};
s3Request.on 'httpHeaders', (statusCode, headers, response, statusMessage) =>
if statusCode in [403, 404]
# S3 returns a 403 instead of a 404 when the user doesn't have
# permission to list the bucket contents.
logger.log({ bucketName: bucketName, key: key }, "file not found in s3")
return callback(new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}"), null)
if statusCode not in [200, 206]
logger.log({bucketName: bucketName, key: key }, "error getting file from s3: #{statusCode}")
return callback(new Error("Got non-200 response from S3: #{statusCode} #{statusMessage}"), null)
stream = response.httpResponse.createUnbufferedStream()
stream.on 'data', (data) ->
metrics.count 's3.ingress', data.byteLength
const getKnoxClient = bucketName => {
const options = {
key: settings.filestore.s3.key,
secret: settings.filestore.s3.secret,
bucket: bucketName
};
if (settings.filestore.s3.endpoint) {
const endpoint = URL.parse(settings.filestore.s3.endpoint);
options.endpoint = endpoint.hostname;
options.port = endpoint.port;
}
return knox.createClient(options);
};
callback(null, stream)
module.exports = {
s3Request.on 'error', (err) =>
logger.err({ err: err, bucketName: bucketName, key: key }, "error getting file stream from s3")
callback(err)
sendFile(bucketName, key, fsPath, callback){
const s3Client = getKnoxClient(bucketName);
let uploaded = 0;
const putEventEmitter = s3Client.putFile(fsPath, key, function(err, res){
metrics.count('s3.egress', uploaded);
if (err != null) {
logger.err({err, bucketName, key, fsPath},"something went wrong uploading file to s3");
return callback(err);
}
if ((res == null)) {
logger.err({err, res, bucketName, key, fsPath}, "no response from s3 put file");
return callback("no response from put file");
}
if (res.statusCode !== 200) {
logger.err({bucketName, key, fsPath}, "non 200 response from s3 putting file");
return callback("non 200 response from s3 on put file");
}
logger.log({res, bucketName, key, fsPath},"file uploaded to s3");
return callback(err);
});
putEventEmitter.on("error", function(err){
logger.err({err, bucketName, key, fsPath}, "error emitted on put of file");
return callback(err);
});
return putEventEmitter.on("progress", progress => uploaded = progress.written);
},
s3Request.send()
sendStream(bucketName, key, readStream, callback){
logger.log({bucketName, key}, "sending file to s3");
readStream.on("error", err => logger.err({err, bucketName, key}, "error on stream to send to s3"));
return LocalFileWriter.writeStream(readStream, null, (err, fsPath)=> {
if (err != null) {
logger.err({bucketName, key, fsPath, err}, "something went wrong writing stream to disk");
return callback(err);
}
return this.sendFile(bucketName, key, fsPath, err => // delete the temporary file created above and return the original error
LocalFileWriter.deleteFile(fsPath, () => callback(err)));
});
},
getFileSize: (bucketName, key, callback) ->
logger.log({ bucketName: bucketName, key: key }, "getting file size from S3")
s3 = getS3Client()
s3.headObject { Bucket: bucketName, Key: key }, (err, data) ->
if err?
if err.statusCode in [403, 404]
# S3 returns a 403 instead of a 404 when the user doesn't have
# permission to list the bucket contents.
// opts may be {start: Number, end: Number}
getFileStream(bucketName, key, opts, callback){
if (callback == null) { callback = function(err, res){}; }
opts = opts || {};
callback = _.once(callback);
logger.log({bucketName, key}, "getting file from s3");
const s3 = getS3Client(opts.credentials);
const s3Params = {
Bucket: bucketName,
Key: key
};
if ((opts.start != null) && (opts.end != null)) {
s3Params['Range'] = `bytes=${opts.start}-${opts.end}`;
}
const s3Request = s3.getObject(s3Params);
s3Request.on('httpHeaders', (statusCode, headers, response, statusMessage) => {
if ([403, 404].includes(statusCode)) {
// S3 returns a 403 instead of a 404 when the user doesn't have
// permission to list the bucket contents.
logger.log({ bucketName, key }, "file not found in s3");
return callback(new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`), null);
}
if (![200, 206].includes(statusCode)) {
logger.log({bucketName, key }, `error getting file from s3: ${statusCode}`);
return callback(new Error(`Got non-200 response from S3: ${statusCode} ${statusMessage}`), null);
}
const stream = response.httpResponse.createUnbufferedStream();
stream.on('data', data => metrics.count('s3.ingress', data.byteLength));
return callback(null, stream);
});
s3Request.on('error', err => {
logger.err({ err, bucketName, key }, "error getting file stream from s3");
return callback(err);
});
return s3Request.send();
},
getFileSize(bucketName, key, callback) {
logger.log({ bucketName, key }, "getting file size from S3");
const s3 = getS3Client();
return s3.headObject({ Bucket: bucketName, Key: key }, function(err, data) {
if (err != null) {
if ([403, 404].includes(err.statusCode)) {
// S3 returns a 403 instead of a 404 when the user doesn't have
// permission to list the bucket contents.
logger.log({
bucketName: bucketName,
key: key
}, "file not found in s3")
bucketName,
key
}, "file not found in s3");
callback(
new Errors.NotFoundError("File not found in S3: #{bucketName}:#{key}")
)
else
new Errors.NotFoundError(`File not found in S3: ${bucketName}:${key}`)
);
} else {
logger.err({
bucketName: bucketName,
key: key,
err: err
}, "error performing S3 HeadObject")
callback(err)
return
callback(null, data.ContentLength)
bucketName,
key,
err
}, "error performing S3 HeadObject");
callback(err);
}
return;
}
return callback(null, data.ContentLength);
});
},
copyFile: (bucketName, sourceKey, destKey, callback)->
logger.log bucketName:bucketName, sourceKey:sourceKey, destKey: destKey, "copying file in s3"
source = bucketName + '/' + sourceKey
# use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114)
s3 = getS3Client()
s3.copyObject {Bucket: bucketName, Key: destKey, CopySource: source}, (err) ->
if err?
if err.code is 'NoSuchKey'
logger.err bucketName:bucketName, sourceKey:sourceKey, "original file not found in s3 when copying"
callback(new Errors.NotFoundError("original file not found in S3 when copying"))
else
logger.err err:err, bucketName:bucketName, sourceKey:sourceKey, destKey:destKey, "something went wrong copying file in aws"
callback(err)
else
callback()
copyFile(bucketName, sourceKey, destKey, callback){
logger.log({bucketName, sourceKey, destKey}, "copying file in s3");
const source = bucketName + '/' + sourceKey;
// use the AWS SDK instead of knox due to problems with error handling (https://github.com/Automattic/knox/issues/114)
const s3 = getS3Client();
return s3.copyObject({Bucket: bucketName, Key: destKey, CopySource: source}, function(err) {
if (err != null) {
if (err.code === 'NoSuchKey') {
logger.err({bucketName, sourceKey}, "original file not found in s3 when copying");
return callback(new Errors.NotFoundError("original file not found in S3 when copying"));
} else {
logger.err({err, bucketName, sourceKey, destKey}, "something went wrong copying file in aws");
return callback(err);
}
} else {
return callback();
}
});
},
deleteFile: (bucketName, key, callback)->
logger.log bucketName:bucketName, key:key, "delete file in s3"
options = buildDefaultOptions(bucketName, "delete", key)
request options, (err, res)->
if err?
logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong deleting file in aws"
callback(err)
deleteFile(bucketName, key, callback){
logger.log({bucketName, key}, "delete file in s3");
const options = buildDefaultOptions(bucketName, "delete", key);
return request(options, function(err, res){
if (err != null) {
logger.err({err, res, bucketName, key}, "something went wrong deleting file in aws");
}
return callback(err);
});
},
deleteDirectory: (bucketName, key, _callback)->
# deleteMultiple can call the callback multiple times so protect against this.
callback = (args...) ->
_callback(args...)
_callback = () ->
deleteDirectory(bucketName, key, _callback){
// deleteMultiple can call the callback multiple times so protect against this.
const callback = function(...args) {
_callback(...Array.from(args || []));
return _callback = function() {};
};
logger.log key: key, bucketName: bucketName, "deleting directory"
s3Client = getKnoxClient(bucketName)
s3Client.list prefix:key, (err, data)->
if err?
logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in aws"
return callback(err)
keys = _.map data.Contents, (entry)->
return entry.Key
s3Client.deleteMultiple keys, callback
logger.log({key, bucketName}, "deleting directory");
const s3Client = getKnoxClient(bucketName);
return s3Client.list({prefix:key}, function(err, data){
if (err != null) {
logger.err({err, bucketName, key}, "something went wrong listing prefix in aws");
return callback(err);
}
const keys = _.map(data.Contents, entry => entry.Key);
return s3Client.deleteMultiple(keys, callback);
});
},
checkIfFileExists:(bucketName, key, callback)->
logger.log bucketName:bucketName, key:key, "checking if file exists in s3"
options = buildDefaultOptions(bucketName, "head", key)
request options, (err, res)->
if err?
logger.err err:err, res:res, bucketName:bucketName, key:key, "something went wrong checking file in aws"
return callback(err)
if !res?
logger.err err:err, res:res, bucketName:bucketName, key:key, "no response object returned when checking if file exists"
err = new Error("no response from s3 #{bucketName} #{key}")
return callback(err)
exists = res.statusCode == 200
logger.log bucketName:bucketName, key:key, exists:exists, "checked if file exsists in s3"
callback(err, exists)
checkIfFileExists(bucketName, key, callback){
logger.log({bucketName, key}, "checking if file exists in s3");
const options = buildDefaultOptions(bucketName, "head", key);
return request(options, function(err, res){
if (err != null) {
logger.err({err, res, bucketName, key}, "something went wrong checking file in aws");
return callback(err);
}
if ((res == null)) {
logger.err({err, res, bucketName, key}, "no response object returned when checking if file exists");
err = new Error(`no response from s3 ${bucketName} ${key}`);
return callback(err);
}
const exists = res.statusCode === 200;
logger.log({bucketName, key, exists}, "checked if file exists in s3");
return callback(err, exists);
});
},
directorySize:(bucketName, key, callback)->
logger.log bucketName:bucketName, key:key, "get project size in s3"
s3Client = getKnoxClient(bucketName)
s3Client.list prefix:key, (err, data)->
if err?
logger.err err:err, bucketName:bucketName, key:key, "something went wrong listing prefix in aws"
return callback(err)
totalSize = 0
_.each data.Contents, (entry)->
totalSize += entry.Size
logger.log totalSize:totalSize, "total size"
callback null, totalSize
directorySize(bucketName, key, callback){
logger.log({bucketName, key}, "get project size in s3");
const s3Client = getKnoxClient(bucketName);
return s3Client.list({prefix:key}, function(err, data){
if (err != null) {
logger.err({err, bucketName, key}, "something went wrong listing prefix in aws");
return callback(err);
}
let totalSize = 0;
_.each(data.Contents, entry => totalSize += entry.Size);
logger.log({totalSize}, "total size");
return callback(null, totalSize);
});
}
};
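
Reviewer note: a minimal sketch of a ranged read through the converted getFileStream; the bucket, key and require path are placeholders. opts.start/opts.end map to an S3 Range header, and 403/404 responses surface as Errors.NotFoundError.

const S3PersistorManager = require("./S3PersistorManager");  // module converted above

// Fetch the first 1024 bytes of an object.
S3PersistorManager.getFileStream("my-bucket", "project_id/file_id", {start: 0, end: 1023}, function(err, stream) {
    if (err != null) { return console.error("fetch failed", err); }
    stream.pipe(process.stdout);
});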

View file

@ -1,48 +1,60 @@
_ = require("underscore")
logger = require("logger-sharelatex")
child_process = require('child_process')
Settings = require "settings-sharelatex"
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const _ = require("underscore");
const logger = require("logger-sharelatex");
const child_process = require('child_process');
const Settings = require("settings-sharelatex");
# execute a command in the same way as 'exec' but with a timeout that
# kills all child processes
#
# we spawn the command with 'detached:true' to make a new process
# group, then we can kill everything in that process group.
// execute a command in the same way as 'exec' but with a timeout that
// kills all child processes
//
// we spawn the command with 'detached:true' to make a new process
// group, then we can kill everything in that process group.
module.exports = (command, options, callback = (err, stdout, stderr) ->) ->
if !Settings.enableConversions
error = new Error("Image conversions are disabled")
return callback(error)
module.exports = function(command, options, callback) {
if (callback == null) { callback = function(err, stdout, stderr) {}; }
if (!Settings.enableConversions) {
const error = new Error("Image conversions are disabled");
return callback(error);
}
# options are {timeout: number-of-milliseconds, killSignal: signal-name}
[cmd, args...] = command
// options are {timeout: number-of-milliseconds, killSignal: signal-name}
const [cmd, ...args] = Array.from(command);
child = child_process.spawn cmd, args, {detached:true}
stdout = ""
stderr = ""
const child = child_process.spawn(cmd, args, {detached:true});
let stdout = "";
let stderr = "";
cleanup = _.once (err) ->
clearTimeout killTimer if killTimer?
callback err, stdout, stderr
const cleanup = _.once(function(err) {
if (killTimer != null) { clearTimeout(killTimer); }
return callback(err, stdout, stderr);
});
if options.timeout?
killTimer = setTimeout () ->
try
# use negative process id to kill process group
process.kill -child.pid, options.killSignal || "SIGTERM"
catch error
logger.log process: child.pid, kill_error: error, "error killing process"
, options.timeout
if (options.timeout != null) {
var killTimer = setTimeout(function() {
try {
// use negative process id to kill process group
return process.kill(-child.pid, options.killSignal || "SIGTERM");
} catch (error) {
return logger.log({process: child.pid, kill_error: error}, "error killing process");
}
}
, options.timeout);
}
child.on 'close', (code, signal) ->
err = if code then new Error("exit status #{code}") else signal
cleanup err
child.on('close', function(code, signal) {
const err = code ? new Error(`exit status ${code}`) : signal;
return cleanup(err);
});
child.on 'error', (err) ->
cleanup err
child.on('error', err => cleanup(err));
child.stdout.on 'data', (chunk) ->
stdout += chunk
child.stdout.on('data', chunk => stdout += chunk);
child.stderr.on 'data', (chunk) ->
stderr += chunk
return child.stderr.on('data', chunk => stderr += chunk);
};
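
Reviewer note: a usage sketch for the command runner above; the require path and the convert invocation are assumptions, and Settings.enableConversions must be truthy or the callback receives an error immediately.

const safeExec = require("./SafeExec");  // assumed filename for the module above

// Kill the whole process group if the conversion takes longer than 30s.
safeExec(["convert", "input.png", "output.pdf"], {timeout: 30 * 1000, killSignal: "SIGTERM"},
    function(err, stdout, stderr) {
        if (err != null) { return console.error("conversion failed", err, stderr); }
        return console.log("conversion finished", stdout);
    });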