mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-21 20:47:08 -05:00

prettier: convert app/js decaffeinated files to Prettier format

Commit 8729acd48c, parent 3af6bdd588
26 changed files with 3881 additions and 2639 deletions
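The added lines throughout this diff use single quotes, no semicolons, and 2-space indentation. The repository's Prettier configuration is not shown in this commit; a config along these lines would produce the style seen in the added lines (an illustrative sketch, not the committed file):

// .prettierrc.js (illustrative only; not part of this commit)
module.exports = {
  semi: false, // no trailing semicolons, as in the added lines
  singleQuote: true, // 'single' rather than "double" quotes
  printWidth: 80 // long calls are broken across lines at 80 columns
}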
@@ -5,16 +5,16 @@
  * DS207: Consider shorter variations of null checks
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-let commandRunnerPath;
-const Settings = require("settings-sharelatex");
-const logger = require("logger-sharelatex");
+let commandRunnerPath
+const Settings = require('settings-sharelatex')
+const logger = require('logger-sharelatex')

 if ((Settings.clsi != null ? Settings.clsi.dockerRunner : undefined) === true) {
-  commandRunnerPath = "./DockerRunner";
-} else {
-  commandRunnerPath = "./LocalCommandRunner";
+  commandRunnerPath = './DockerRunner'
+} else {
+  commandRunnerPath = './LocalCommandRunner'
 }
-logger.info({commandRunnerPath}, "selecting command runner for clsi");
-const CommandRunner = require(commandRunnerPath);
+logger.info({ commandRunnerPath }, 'selecting command runner for clsi')
+const CommandRunner = require(commandRunnerPath)

-module.exports = CommandRunner;
+module.exports = CommandRunner
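For context, the runner is chosen from the CLSI settings at require time, and only an exact true selects the Docker runner. A settings file that does so would look something like this (hypothetical values):

// settings sketch (hypothetical) — CommandRunner picks './DockerRunner'
// only when clsi.dockerRunner is exactly true
module.exports = {
  clsi: {
    dockerRunner: true
  }
}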
@@ -12,159 +12,227 @@
  * DS207: Consider shorter variations of null checks
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-let CompileController;
-const RequestParser = require("./RequestParser");
-const CompileManager = require("./CompileManager");
-const Settings = require("settings-sharelatex");
-const Metrics = require("./Metrics");
-const ProjectPersistenceManager = require("./ProjectPersistenceManager");
-const logger = require("logger-sharelatex");
-const Errors = require("./Errors");
+let CompileController
+const RequestParser = require('./RequestParser')
+const CompileManager = require('./CompileManager')
+const Settings = require('settings-sharelatex')
+const Metrics = require('./Metrics')
+const ProjectPersistenceManager = require('./ProjectPersistenceManager')
+const logger = require('logger-sharelatex')
+const Errors = require('./Errors')

-module.exports = (CompileController = {
-  compile(req, res, next) {
-    if (next == null) { next = function(error) {}; }
-    const timer = new Metrics.Timer("compile-request");
-    return RequestParser.parse(req.body, function(error, request) {
-      if (error != null) { return next(error); }
-      request.project_id = req.params.project_id;
-      if (req.params.user_id != null) { request.user_id = req.params.user_id; }
-      return ProjectPersistenceManager.markProjectAsJustAccessed(request.project_id, function(error) {
-        if (error != null) { return next(error); }
-        return CompileManager.doCompileWithLock(request, function(error, outputFiles) {
-          let code, status;
-          if (outputFiles == null) { outputFiles = []; }
-          if (error instanceof Errors.AlreadyCompilingError) {
-            code = 423; // Http 423 Locked
-            status = "compile-in-progress";
-          } else if (error instanceof Errors.FilesOutOfSyncError) {
-            code = 409; // Http 409 Conflict
-            status = "retry";
-          } else if (error != null ? error.terminated : undefined) {
-            status = "terminated";
-          } else if (error != null ? error.validate : undefined) {
-            status = `validation-${error.validate}`;
-          } else if (error != null ? error.timedout : undefined) {
-            status = "timedout";
-            logger.log({err: error, project_id: request.project_id}, "timeout running compile");
-          } else if (error != null) {
-            status = "error";
-            code = 500;
-            logger.warn({err: error, project_id: request.project_id}, "error running compile");
-          } else {
-            let file;
-            status = "failure";
-            for (file of Array.from(outputFiles)) {
-              if (file.path != null ? file.path.match(/output\.pdf$/) : undefined) {
-                status = "success";
-              }
-            }
+module.exports = CompileController = {
+  compile(req, res, next) {
+    if (next == null) {
+      next = function(error) {}
+    }
+    const timer = new Metrics.Timer('compile-request')
+    return RequestParser.parse(req.body, function(error, request) {
+      if (error != null) {
+        return next(error)
+      }
+      request.project_id = req.params.project_id
+      if (req.params.user_id != null) {
+        request.user_id = req.params.user_id
+      }
+      return ProjectPersistenceManager.markProjectAsJustAccessed(
+        request.project_id,
+        function(error) {
+          if (error != null) {
+            return next(error)
+          }
+          return CompileManager.doCompileWithLock(request, function(
+            error,
+            outputFiles
+          ) {
+            let code, status
+            if (outputFiles == null) {
+              outputFiles = []
+            }
+            if (error instanceof Errors.AlreadyCompilingError) {
+              code = 423 // Http 423 Locked
+              status = 'compile-in-progress'
+            } else if (error instanceof Errors.FilesOutOfSyncError) {
+              code = 409 // Http 409 Conflict
+              status = 'retry'
+            } else if (error != null ? error.terminated : undefined) {
+              status = 'terminated'
+            } else if (error != null ? error.validate : undefined) {
+              status = `validation-${error.validate}`
+            } else if (error != null ? error.timedout : undefined) {
+              status = 'timedout'
+              logger.log(
+                { err: error, project_id: request.project_id },
+                'timeout running compile'
+              )
+            } else if (error != null) {
+              status = 'error'
+              code = 500
+              logger.warn(
+                { err: error, project_id: request.project_id },
+                'error running compile'
+              )
+            } else {
+              let file
+              status = 'failure'
+              for (file of Array.from(outputFiles)) {
+                if (
+                  file.path != null
+                    ? file.path.match(/output\.pdf$/)
+                    : undefined
+                ) {
+                  status = 'success'
+                }
+              }

-            if (status === "failure") {
-              logger.warn({project_id: request.project_id, outputFiles}, "project failed to compile successfully, no output.pdf generated");
-            }
+              if (status === 'failure') {
+                logger.warn(
+                  { project_id: request.project_id, outputFiles },
+                  'project failed to compile successfully, no output.pdf generated'
+                )
+              }

-            // log an error if any core files are found
-            for (file of Array.from(outputFiles)) {
-              if (file.path === "core") {
-                logger.error({project_id:request.project_id, req, outputFiles}, "core file found in output");
-              }
-            }
-          }
+              // log an error if any core files are found
+              for (file of Array.from(outputFiles)) {
+                if (file.path === 'core') {
+                  logger.error(
+                    { project_id: request.project_id, req, outputFiles },
+                    'core file found in output'
+                  )
+                }
+              }
+            }

-          if (error != null) {
-            outputFiles = error.outputFiles || [];
-          }
+            if (error != null) {
+              outputFiles = error.outputFiles || []
+            }

-          timer.done();
-          return res.status(code || 200).send({
-            compile: {
-              status,
-              error: (error != null ? error.message : undefined) || error,
-              outputFiles: outputFiles.map(file =>
-                ({
-                  url:
-                    `${Settings.apis.clsi.url}/project/${request.project_id}` +
-                    ((request.user_id != null) ? `/user/${request.user_id}` : "") +
-                    ((file.build != null) ? `/build/${file.build}` : "") +
-                    `/output/${file.path}`,
-                  path: file.path,
-                  type: file.type,
-                  build: file.build
-                })
-              )
-            }
-          });
-        });
-      });
-    });
-  },
+            timer.done()
+            return res.status(code || 200).send({
+              compile: {
+                status,
+                error: (error != null ? error.message : undefined) || error,
+                outputFiles: outputFiles.map(file => ({
+                  url:
+                    `${Settings.apis.clsi.url}/project/${request.project_id}` +
+                    (request.user_id != null
+                      ? `/user/${request.user_id}`
+                      : '') +
+                    (file.build != null ? `/build/${file.build}` : '') +
+                    `/output/${file.path}`,
+                  path: file.path,
+                  type: file.type,
+                  build: file.build
+                }))
+              }
+            })
+          })
+        }
+      )
+    })
+  },

-  stopCompile(req, res, next) {
-    const {project_id, user_id} = req.params;
-    return CompileManager.stopCompile(project_id, user_id, function(error) {
-      if (error != null) { return next(error); }
-      return res.sendStatus(204);
-    });
-  },
+  stopCompile(req, res, next) {
+    const { project_id, user_id } = req.params
+    return CompileManager.stopCompile(project_id, user_id, function(error) {
+      if (error != null) {
+        return next(error)
+      }
+      return res.sendStatus(204)
+    })
+  },

-  clearCache(req, res, next) {
-    if (next == null) { next = function(error) {}; }
-    return ProjectPersistenceManager.clearProject(req.params.project_id, req.params.user_id, function(error) {
-      if (error != null) { return next(error); }
-      return res.sendStatus(204);
-    });
-  }, // No content
+  clearCache(req, res, next) {
+    if (next == null) {
+      next = function(error) {}
+    }
+    return ProjectPersistenceManager.clearProject(
+      req.params.project_id,
+      req.params.user_id,
+      function(error) {
+        if (error != null) {
+          return next(error)
+        }
+        return res.sendStatus(204)
+      }
+    )
+  }, // No content

-  syncFromCode(req, res, next) {
-    if (next == null) { next = function(error) {}; }
-    const { file } = req.query;
-    const line = parseInt(req.query.line, 10);
-    const column = parseInt(req.query.column, 10);
-    const { project_id } = req.params;
-    const { user_id } = req.params;
-    return CompileManager.syncFromCode(project_id, user_id, file, line, column, function(error, pdfPositions) {
-      if (error != null) { return next(error); }
-      return res.json({
-        pdf: pdfPositions
-      });
-    });
-  },
+  syncFromCode(req, res, next) {
+    if (next == null) {
+      next = function(error) {}
+    }
+    const { file } = req.query
+    const line = parseInt(req.query.line, 10)
+    const column = parseInt(req.query.column, 10)
+    const { project_id } = req.params
+    const { user_id } = req.params
+    return CompileManager.syncFromCode(
+      project_id,
+      user_id,
+      file,
+      line,
+      column,
+      function(error, pdfPositions) {
+        if (error != null) {
+          return next(error)
+        }
+        return res.json({
+          pdf: pdfPositions
+        })
+      }
+    )
+  },

-  syncFromPdf(req, res, next) {
-    if (next == null) { next = function(error) {}; }
-    const page = parseInt(req.query.page, 10);
-    const h = parseFloat(req.query.h);
-    const v = parseFloat(req.query.v);
-    const { project_id } = req.params;
-    const { user_id } = req.params;
-    return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(error, codePositions) {
-      if (error != null) { return next(error); }
-      return res.json({
-        code: codePositions
-      });
-    });
-  },
+  syncFromPdf(req, res, next) {
+    if (next == null) {
+      next = function(error) {}
+    }
+    const page = parseInt(req.query.page, 10)
+    const h = parseFloat(req.query.h)
+    const v = parseFloat(req.query.v)
+    const { project_id } = req.params
+    const { user_id } = req.params
+    return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(
+      error,
+      codePositions
+    ) {
+      if (error != null) {
+        return next(error)
+      }
+      return res.json({
+        code: codePositions
+      })
+    })
+  },

-  wordcount(req, res, next) {
-    if (next == null) { next = function(error) {}; }
-    const file = req.query.file || "main.tex";
-    const { project_id } = req.params;
-    const { user_id } = req.params;
-    const { image } = req.query;
-    logger.log({image, file, project_id}, "word count request");
+  wordcount(req, res, next) {
+    if (next == null) {
+      next = function(error) {}
+    }
+    const file = req.query.file || 'main.tex'
+    const { project_id } = req.params
+    const { user_id } = req.params
+    const { image } = req.query
+    logger.log({ image, file, project_id }, 'word count request')

-    return CompileManager.wordcount(project_id, user_id, file, image, function(error, result) {
-      if (error != null) { return next(error); }
-      return res.json({
-        texcount: result
-      });
-    });
-  },
-
-  status(req, res, next ){
-    if (next == null) { next = function(error){}; }
-    return res.send("OK");
-  }
-});
+    return CompileManager.wordcount(project_id, user_id, file, image, function(
+      error,
+      result
+    ) {
+      if (error != null) {
+        return next(error)
+      }
+      return res.json({
+        texcount: result
+      })
+    })
+  },
+
+  status(req, res, next) {
+    if (next == null) {
+      next = function(error) {}
+    }
+    return res.send('OK')
+  }
+}
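The compile endpoint always answers with a single compile object; an illustrative success response assembled from the url-building code above (host and ids are placeholders, and the error field is omitted when there is no error):

// illustrative response shape (placeholder host and ids)
{
  "compile": {
    "status": "success",
    "outputFiles": [
      {
        "url": "http://clsi.example.com/project/<project_id>/build/<build_id>/output/output.pdf",
        "path": "output.pdf",
        "type": "pdf",
        "build": "<build_id>"
      }
    ]
  }
}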
File diff suppressed because it is too large
@@ -3,31 +3,36 @@
  */
 // TODO: This file was created by bulk-decaffeinate.
 // Fix any style issues and re-enable lint.
-let ContentTypeMapper;
-const Path = require('path');
+let ContentTypeMapper
+const Path = require('path')

 // here we coerce html, css and js to text/plain,
 // otherwise choose correct mime type based on file extension,
 // falling back to octet-stream
-module.exports = (ContentTypeMapper = {
-  map(path) {
-    switch (Path.extname(path)) {
-      case '.txt': case '.html': case '.js': case '.css': case '.svg':
-        return 'text/plain';
-      case '.csv':
-        return 'text/csv';
-      case '.pdf':
-        return 'application/pdf';
-      case '.png':
-        return 'image/png';
-      case '.jpg': case '.jpeg':
-        return 'image/jpeg';
-      case '.tiff':
-        return 'image/tiff';
-      case '.gif':
-        return 'image/gif';
-      default:
-        return 'application/octet-stream';
-    }
-  }
-});
+module.exports = ContentTypeMapper = {
+  map(path) {
+    switch (Path.extname(path)) {
+      case '.txt':
+      case '.html':
+      case '.js':
+      case '.css':
+      case '.svg':
+        return 'text/plain'
+      case '.csv':
+        return 'text/csv'
+      case '.pdf':
+        return 'application/pdf'
+      case '.png':
+        return 'image/png'
+      case '.jpg':
+      case '.jpeg':
+        return 'image/jpeg'
+      case '.tiff':
+        return 'image/tiff'
+      case '.gif':
+        return 'image/gif'
+      default:
+        return 'application/octet-stream'
+    }
+  }
+}
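Behaviour is identical before and after the reformat; illustrative calls:

// illustrative usage of ContentTypeMapper.map
ContentTypeMapper.map('diagram.png') // => 'image/png'
ContentTypeMapper.map('index.html') // => 'text/plain' (html is coerced to text/plain)
ContentTypeMapper.map('data.bin') // => 'application/octet-stream'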
@@ -5,14 +5,14 @@
  * DS102: Remove unnecessary code created because of implicit returns
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-const async = require("async");
-const Settings = require("settings-sharelatex");
-const logger = require("logger-sharelatex");
-const queue = async.queue((task, cb)=> task(cb)
-, Settings.parallelSqlQueryLimit);
+const async = require('async')
+const Settings = require('settings-sharelatex')
+const logger = require('logger-sharelatex')
+const queue = async.queue(
+  (task, cb) => task(cb),
+  Settings.parallelSqlQueryLimit
+)

-queue.drain = ()=> logger.debug('all items have been processed');
-
-module.exports =
-  {queue};
+queue.drain = () => logger.debug('all items have been processed')
+
+module.exports = { queue }
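Because the worker is (task, cb) => task(cb), each queued item is itself a function taking a callback, with at most Settings.parallelSqlQueryLimit in flight. A hypothetical caller (module path and query function are placeholders; this diff omits file names):

// hypothetical usage — someSqlQuery is a placeholder
const { queue } = require('./db') // path assumed
queue.push(cb => someSqlQuery(cb))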
@@ -10,80 +10,104 @@
  * DS207: Consider shorter variations of null checks
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-let LockManager;
-const logger = require("logger-sharelatex");
+let LockManager
+const logger = require('logger-sharelatex')

-const LockState = {}; // locks for docker container operations, by container name
+const LockState = {} // locks for docker container operations, by container name

-module.exports = (LockManager = {
-  MAX_LOCK_HOLD_TIME: 15000, // how long we can keep a lock
-  MAX_LOCK_WAIT_TIME: 10000, // how long we wait for a lock
-  LOCK_TEST_INTERVAL: 1000, // retry time
+module.exports = LockManager = {
+  MAX_LOCK_HOLD_TIME: 15000, // how long we can keep a lock
+  MAX_LOCK_WAIT_TIME: 10000, // how long we wait for a lock
+  LOCK_TEST_INTERVAL: 1000, // retry time

-  tryLock(key, callback) {
-    let lockValue;
-    if (callback == null) { callback = function(err, gotLock) {}; }
-    const existingLock = LockState[key];
-    if (existingLock != null) { // the lock is already taken, check how old it is
-      const lockAge = Date.now() - existingLock.created;
-      if (lockAge < LockManager.MAX_LOCK_HOLD_TIME) {
-        return callback(null, false); // we didn't get the lock, bail out
-      } else {
-        logger.error({key, lock: existingLock, age:lockAge}, "taking old lock by force");
-      }
-    }
-    // take the lock
-    LockState[key] = (lockValue = {created: Date.now()});
-    return callback(null, true, lockValue);
-  },
+  tryLock(key, callback) {
+    let lockValue
+    if (callback == null) {
+      callback = function(err, gotLock) {}
+    }
+    const existingLock = LockState[key]
+    if (existingLock != null) {
+      // the lock is already taken, check how old it is
+      const lockAge = Date.now() - existingLock.created
+      if (lockAge < LockManager.MAX_LOCK_HOLD_TIME) {
+        return callback(null, false) // we didn't get the lock, bail out
+      } else {
+        logger.error(
+          { key, lock: existingLock, age: lockAge },
+          'taking old lock by force'
+        )
+      }
+    }
+    // take the lock
+    LockState[key] = lockValue = { created: Date.now() }
+    return callback(null, true, lockValue)
+  },

-  getLock(key, callback) {
-    let attempt;
-    if (callback == null) { callback = function(error, lockValue) {}; }
-    const startTime = Date.now();
-    return (attempt = () =>
-      LockManager.tryLock(key, function(error, gotLock, lockValue) {
-        if (error != null) { return callback(error); }
-        if (gotLock) {
-          return callback(null, lockValue);
-        } else if ((Date.now() - startTime) > LockManager.MAX_LOCK_WAIT_TIME) {
-          const e = new Error("Lock timeout");
-          e.key = key;
-          return callback(e);
-        } else {
-          return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL);
-        }
-      })
-    )();
-  },
+  getLock(key, callback) {
+    let attempt
+    if (callback == null) {
+      callback = function(error, lockValue) {}
+    }
+    const startTime = Date.now()
+    return (attempt = () =>
+      LockManager.tryLock(key, function(error, gotLock, lockValue) {
+        if (error != null) {
+          return callback(error)
+        }
+        if (gotLock) {
+          return callback(null, lockValue)
+        } else if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) {
+          const e = new Error('Lock timeout')
+          e.key = key
+          return callback(e)
+        } else {
+          return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL)
+        }
+      }))()
+  },

-  releaseLock(key, lockValue, callback) {
-    if (callback == null) { callback = function(error) {}; }
-    const existingLock = LockState[key];
-    if (existingLock === lockValue) { // lockValue is an object, so we can test by reference
-      delete LockState[key]; // our lock, so we can free it
-      return callback();
-    } else if (existingLock != null) { // lock exists but doesn't match ours
-      logger.error({key, lock: existingLock}, "tried to release lock taken by force");
-      return callback();
-    } else {
-      logger.error({key, lock: existingLock}, "tried to release lock that has gone");
-      return callback();
-    }
-  },
+  releaseLock(key, lockValue, callback) {
+    if (callback == null) {
+      callback = function(error) {}
+    }
+    const existingLock = LockState[key]
+    if (existingLock === lockValue) {
+      // lockValue is an object, so we can test by reference
+      delete LockState[key] // our lock, so we can free it
+      return callback()
+    } else if (existingLock != null) {
+      // lock exists but doesn't match ours
+      logger.error(
+        { key, lock: existingLock },
+        'tried to release lock taken by force'
+      )
+      return callback()
+    } else {
+      logger.error(
+        { key, lock: existingLock },
+        'tried to release lock that has gone'
+      )
+      return callback()
+    }
+  },

-  runWithLock(key, runner, callback) {
-    if (callback == null) { callback = function(error) {}; }
-    return LockManager.getLock(key, function(error, lockValue) {
-      if (error != null) { return callback(error); }
-      return runner((error1, ...args) =>
-        LockManager.releaseLock(key, lockValue, function(error2) {
-          error = error1 || error2;
-          if (error != null) { return callback(error); }
-          return callback(null, ...Array.from(args));
-        })
-      );
-    });
-  }
-});
+  runWithLock(key, runner, callback) {
+    if (callback == null) {
+      callback = function(error) {}
+    }
+    return LockManager.getLock(key, function(error, lockValue) {
+      if (error != null) {
+        return callback(error)
+      }
+      return runner((error1, ...args) =>
+        LockManager.releaseLock(key, lockValue, function(error2) {
+          error = error1 || error2
+          if (error != null) {
+            return callback(error)
+          }
+          return callback(null, ...Array.from(args))
+        })
+      )
+    })
+  }
+}
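runWithLock hands the runner a wrapped callback that releases the lock before reporting back to the caller. A minimal usage sketch (the container name is a placeholder):

// minimal sketch — 'mycontainer' is a placeholder key
LockManager.runWithLock(
  'mycontainer',
  releasedCb => {
    // ...perform the docker container operation, then signal completion...
    releasedCb(null, 'result')
  },
  (error, result) => {
    // the lock has already been released by the time this runs
    if (error) {
      // either a 'Lock timeout' or the operation's own error
    }
  }
)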
File diff suppressed because it is too large
@@ -11,34 +11,47 @@
  * DS207: Consider shorter variations of null checks
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-let DraftModeManager;
-const fs = require("fs");
-const logger = require("logger-sharelatex");
+let DraftModeManager
+const fs = require('fs')
+const logger = require('logger-sharelatex')

-module.exports = (DraftModeManager = {
-  injectDraftMode(filename, callback) {
-    if (callback == null) { callback = function(error) {}; }
-    return fs.readFile(filename, "utf8", function(error, content) {
-      if (error != null) { return callback(error); }
-      // avoid adding draft mode more than once
-      if ((content != null ? content.indexOf("\\documentclass\[draft") : undefined) >= 0) {
-        return callback();
-      }
-      const modified_content = DraftModeManager._injectDraftOption(content);
-      logger.log({
-        content: content.slice(0,1024), // \documentclass is normally v near the top
-        modified_content: modified_content.slice(0,1024),
-        filename
-      }, "injected draft class");
-      return fs.writeFile(filename, modified_content, callback);
-    });
-  },
-
-  _injectDraftOption(content) {
-    return content
-      // With existing options (must be first, otherwise both are applied)
-      .replace(/\\documentclass\[/g, "\\documentclass[draft,")
-      // Without existing options
-      .replace(/\\documentclass\{/g, "\\documentclass[draft]{");
-  }
-});
+module.exports = DraftModeManager = {
+  injectDraftMode(filename, callback) {
+    if (callback == null) {
+      callback = function(error) {}
+    }
+    return fs.readFile(filename, 'utf8', function(error, content) {
+      if (error != null) {
+        return callback(error)
+      }
+      // avoid adding draft mode more than once
+      if (
+        (content != null
+          ? content.indexOf('\\documentclass[draft')
+          : undefined) >= 0
+      ) {
+        return callback()
+      }
+      const modified_content = DraftModeManager._injectDraftOption(content)
+      logger.log(
+        {
+          content: content.slice(0, 1024), // \documentclass is normally v near the top
+          modified_content: modified_content.slice(0, 1024),
+          filename
+        },
+        'injected draft class'
+      )
+      return fs.writeFile(filename, modified_content, callback)
+    })
+  },
+
+  _injectDraftOption(content) {
+    return (
+      content
+        // With existing options (must be first, otherwise both are applied)
+        .replace(/\\documentclass\[/g, '\\documentclass[draft,')
+        // Without existing options
+        .replace(/\\documentclass\{/g, '\\documentclass[draft]{')
+    )
+  }
+}
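_injectDraftOption only touches \documentclass; worked examples that follow straight from the two regexes above:

// worked examples (behaviour unchanged by the reformat)
DraftModeManager._injectDraftOption('\\documentclass{article}')
// => '\\documentclass[draft]{article}'
DraftModeManager._injectDraftOption('\\documentclass[12pt]{article}')
// => '\\documentclass[draft,12pt]{article}'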
@@ -4,33 +4,33 @@
  */
 // TODO: This file was created by bulk-decaffeinate.
 // Fix any style issues and re-enable lint.
-let Errors;
+let Errors
 var NotFoundError = function(message) {
-  const error = new Error(message);
-  error.name = "NotFoundError";
-  error.__proto__ = NotFoundError.prototype;
-  return error;
-};
-NotFoundError.prototype.__proto__ = Error.prototype;
+  const error = new Error(message)
+  error.name = 'NotFoundError'
+  error.__proto__ = NotFoundError.prototype
+  return error
+}
+NotFoundError.prototype.__proto__ = Error.prototype

 var FilesOutOfSyncError = function(message) {
-  const error = new Error(message);
-  error.name = "FilesOutOfSyncError";
-  error.__proto__ = FilesOutOfSyncError.prototype;
-  return error;
-};
-FilesOutOfSyncError.prototype.__proto__ = Error.prototype;
+  const error = new Error(message)
+  error.name = 'FilesOutOfSyncError'
+  error.__proto__ = FilesOutOfSyncError.prototype
+  return error
+}
+FilesOutOfSyncError.prototype.__proto__ = Error.prototype

 var AlreadyCompilingError = function(message) {
-  const error = new Error(message);
-  error.name = "AlreadyCompilingError";
-  error.__proto__ = AlreadyCompilingError.prototype;
-  return error;
-};
-AlreadyCompilingError.prototype.__proto__ = Error.prototype;
+  const error = new Error(message)
+  error.name = 'AlreadyCompilingError'
+  error.__proto__ = AlreadyCompilingError.prototype
+  return error
+}
+AlreadyCompilingError.prototype.__proto__ = Error.prototype

-module.exports = (Errors = {
-  NotFoundError,
-  FilesOutOfSyncError,
-  AlreadyCompilingError
-});
+module.exports = Errors = {
+  NotFoundError,
+  FilesOutOfSyncError,
+  AlreadyCompilingError
+}
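These factory-style errors keep instanceof working across the prototype chain, which is what CompileController's 423/409 mapping relies on. Illustrative checks:

// illustrative — the __proto__ wiring above makes both checks true
const e = Errors.NotFoundError('no such file')
e instanceof Errors.NotFoundError // true
e instanceof Error // true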
@@ -13,119 +13,192 @@
  * DS207: Consider shorter variations of null checks
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-let LatexRunner;
-const Path = require("path");
-const Settings = require("settings-sharelatex");
-const logger = require("logger-sharelatex");
-const Metrics = require("./Metrics");
-const CommandRunner = require("./CommandRunner");
+let LatexRunner
+const Path = require('path')
+const Settings = require('settings-sharelatex')
+const logger = require('logger-sharelatex')
+const Metrics = require('./Metrics')
+const CommandRunner = require('./CommandRunner')

-const ProcessTable = {}; // table of currently running jobs (pids or docker container names)
+const ProcessTable = {} // table of currently running jobs (pids or docker container names)

-module.exports = (LatexRunner = {
-  runLatex(project_id, options, callback) {
-    let command;
-    if (callback == null) { callback = function(error) {}; }
-    let {directory, mainFile, compiler, timeout, image, environment, flags} = options;
-    if (!compiler) { compiler = "pdflatex"; }
-    if (!timeout) { timeout = 60000; } // milliseconds
+module.exports = LatexRunner = {
+  runLatex(project_id, options, callback) {
+    let command
+    if (callback == null) {
+      callback = function(error) {}
+    }
+    let {
+      directory,
+      mainFile,
+      compiler,
+      timeout,
+      image,
+      environment,
+      flags
+    } = options
+    if (!compiler) {
+      compiler = 'pdflatex'
+    }
+    if (!timeout) {
+      timeout = 60000
+    } // milliseconds

-    logger.log({directory, compiler, timeout, mainFile, environment, flags}, "starting compile");
+    logger.log(
+      { directory, compiler, timeout, mainFile, environment, flags },
+      'starting compile'
+    )

-    // We want to run latexmk on the tex file which we will automatically
-    // generate from the Rtex/Rmd/md file.
-    mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".tex");
+    // We want to run latexmk on the tex file which we will automatically
+    // generate from the Rtex/Rmd/md file.
+    mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, '.tex')

-    if (compiler === "pdflatex") {
-      command = LatexRunner._pdflatexCommand(mainFile, flags);
-    } else if (compiler === "latex") {
-      command = LatexRunner._latexCommand(mainFile, flags);
-    } else if (compiler === "xelatex") {
-      command = LatexRunner._xelatexCommand(mainFile, flags);
-    } else if (compiler === "lualatex") {
-      command = LatexRunner._lualatexCommand(mainFile, flags);
-    } else {
-      return callback(new Error(`unknown compiler: ${compiler}`));
-    }
+    if (compiler === 'pdflatex') {
+      command = LatexRunner._pdflatexCommand(mainFile, flags)
+    } else if (compiler === 'latex') {
+      command = LatexRunner._latexCommand(mainFile, flags)
+    } else if (compiler === 'xelatex') {
+      command = LatexRunner._xelatexCommand(mainFile, flags)
+    } else if (compiler === 'lualatex') {
+      command = LatexRunner._lualatexCommand(mainFile, flags)
+    } else {
+      return callback(new Error(`unknown compiler: ${compiler}`))
+    }

-    if (Settings.clsi != null ? Settings.clsi.strace : undefined) {
-      command = ["strace", "-o", "strace", "-ff"].concat(command);
-    }
+    if (Settings.clsi != null ? Settings.clsi.strace : undefined) {
+      command = ['strace', '-o', 'strace', '-ff'].concat(command)
+    }

-    const id = `${project_id}`; // record running project under this id
+    const id = `${project_id}` // record running project under this id

-    return ProcessTable[id] = CommandRunner.run(project_id, command, directory, image, timeout, environment, function(error, output) {
-      delete ProcessTable[id];
-      if (error != null) { return callback(error); }
-      const runs = __guard__(__guard__(output != null ? output.stderr : undefined, x1 => x1.match(/^Run number \d+ of .*latex/mg)), x => x.length) || 0;
-      const failed = (__guard__(output != null ? output.stdout : undefined, x2 => x2.match(/^Latexmk: Errors/m)) != null) ? 1 : 0;
-      // counters from latexmk output
-      const stats = {};
-      stats["latexmk-errors"] = failed;
-      stats["latex-runs"] = runs;
-      stats["latex-runs-with-errors"] = failed ? runs : 0;
-      stats[`latex-runs-${runs}`] = 1;
-      stats[`latex-runs-with-errors-${runs}`] = failed ? 1 : 0;
-      // timing information from /usr/bin/time
-      const timings = {};
-      const stderr = output != null ? output.stderr : undefined;
-      timings["cpu-percent"] = __guard__(stderr != null ? stderr.match(/Percent of CPU this job got: (\d+)/m) : undefined, x3 => x3[1]) || 0;
-      timings["cpu-time"] = __guard__(stderr != null ? stderr.match(/User time.*: (\d+.\d+)/m) : undefined, x4 => x4[1]) || 0;
-      timings["sys-time"] = __guard__(stderr != null ? stderr.match(/System time.*: (\d+.\d+)/m) : undefined, x5 => x5[1]) || 0;
-      return callback(error, output, stats, timings);
-    });
-  },
+    return (ProcessTable[id] = CommandRunner.run(
+      project_id,
+      command,
+      directory,
+      image,
+      timeout,
+      environment,
+      function(error, output) {
+        delete ProcessTable[id]
+        if (error != null) {
+          return callback(error)
+        }
+        const runs =
+          __guard__(
+            __guard__(output != null ? output.stderr : undefined, x1 =>
+              x1.match(/^Run number \d+ of .*latex/gm)
+            ),
+            x => x.length
+          ) || 0
+        const failed =
+          __guard__(output != null ? output.stdout : undefined, x2 =>
+            x2.match(/^Latexmk: Errors/m)
+          ) != null
+            ? 1
+            : 0
+        // counters from latexmk output
+        const stats = {}
+        stats['latexmk-errors'] = failed
+        stats['latex-runs'] = runs
+        stats['latex-runs-with-errors'] = failed ? runs : 0
+        stats[`latex-runs-${runs}`] = 1
+        stats[`latex-runs-with-errors-${runs}`] = failed ? 1 : 0
+        // timing information from /usr/bin/time
+        const timings = {}
+        const stderr = output != null ? output.stderr : undefined
+        timings['cpu-percent'] =
+          __guard__(
+            stderr != null
+              ? stderr.match(/Percent of CPU this job got: (\d+)/m)
+              : undefined,
+            x3 => x3[1]
+          ) || 0
+        timings['cpu-time'] =
+          __guard__(
+            stderr != null
+              ? stderr.match(/User time.*: (\d+.\d+)/m)
+              : undefined,
+            x4 => x4[1]
+          ) || 0
+        timings['sys-time'] =
+          __guard__(
+            stderr != null
+              ? stderr.match(/System time.*: (\d+.\d+)/m)
+              : undefined,
+            x5 => x5[1]
+          ) || 0
+        return callback(error, output, stats, timings)
+      }
+    ))
+  },

-  killLatex(project_id, callback) {
-    if (callback == null) { callback = function(error) {}; }
-    const id = `${project_id}`;
-    logger.log({id}, "killing running compile");
-    if ((ProcessTable[id] == null)) {
-      logger.warn({id}, "no such project to kill");
-      return callback(null);
-    } else {
-      return CommandRunner.kill(ProcessTable[id], callback);
-    }
-  },
+  killLatex(project_id, callback) {
+    if (callback == null) {
+      callback = function(error) {}
+    }
+    const id = `${project_id}`
+    logger.log({ id }, 'killing running compile')
+    if (ProcessTable[id] == null) {
+      logger.warn({ id }, 'no such project to kill')
+      return callback(null)
+    } else {
+      return CommandRunner.kill(ProcessTable[id], callback)
+    }
+  },

-  _latexmkBaseCommand(flags) {
-    let args = ["latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR", "-synctex=1","-interaction=batchmode"];
-    if (flags) {
-      args = args.concat(flags);
-    }
-    return (__guard__(Settings != null ? Settings.clsi : undefined, x => x.latexmkCommandPrefix) || []).concat(args);
-  },
+  _latexmkBaseCommand(flags) {
+    let args = [
+      'latexmk',
+      '-cd',
+      '-f',
+      '-jobname=output',
+      '-auxdir=$COMPILE_DIR',
+      '-outdir=$COMPILE_DIR',
+      '-synctex=1',
+      '-interaction=batchmode'
+    ]
+    if (flags) {
+      args = args.concat(flags)
+    }
+    return (
+      __guard__(
+        Settings != null ? Settings.clsi : undefined,
+        x => x.latexmkCommandPrefix
+      ) || []
+    ).concat(args)
+  },

-  _pdflatexCommand(mainFile, flags) {
-    return LatexRunner._latexmkBaseCommand(flags).concat([
-      "-pdf",
-      Path.join("$COMPILE_DIR", mainFile)
-    ]);
-  },
+  _pdflatexCommand(mainFile, flags) {
+    return LatexRunner._latexmkBaseCommand(flags).concat([
+      '-pdf',
+      Path.join('$COMPILE_DIR', mainFile)
+    ])
+  },

-  _latexCommand(mainFile, flags) {
-    return LatexRunner._latexmkBaseCommand(flags).concat([
-      "-pdfdvi",
-      Path.join("$COMPILE_DIR", mainFile)
-    ]);
-  },
+  _latexCommand(mainFile, flags) {
+    return LatexRunner._latexmkBaseCommand(flags).concat([
+      '-pdfdvi',
+      Path.join('$COMPILE_DIR', mainFile)
+    ])
+  },

-  _xelatexCommand(mainFile, flags) {
-    return LatexRunner._latexmkBaseCommand(flags).concat([
-      "-xelatex",
-      Path.join("$COMPILE_DIR", mainFile)
-    ]);
-  },
-
-  _lualatexCommand(mainFile, flags) {
-    return LatexRunner._latexmkBaseCommand(flags).concat([
-      "-lualatex",
-      Path.join("$COMPILE_DIR", mainFile)
-    ]);
-  }
-});
+  _xelatexCommand(mainFile, flags) {
+    return LatexRunner._latexmkBaseCommand(flags).concat([
+      '-xelatex',
+      Path.join('$COMPILE_DIR', mainFile)
+    ])
+  },
+
+  _lualatexCommand(mainFile, flags) {
+    return LatexRunner._latexmkBaseCommand(flags).concat([
+      '-lualatex',
+      Path.join('$COMPILE_DIR', mainFile)
+    ])
+  }
+}

 function __guard__(value, transform) {
-  return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
-}
+  return typeof value !== 'undefined' && value !== null
+    ? transform(value)
+    : undefined
+}
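The stats in runLatex come from pattern-matching latexmk's output through the null-safe __guard__ helper. A constructed example of the run counting (the stderr text is made up to match the regex, not real latexmk output):

// constructed example of the stderr parsing in runLatex
const stderr = 'Run number 1 of latexmk pdflatex\nRun number 2 of latexmk pdflatex'
stderr.match(/^Run number \d+ of .*latex/gm).length // => 2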
@@ -13,62 +13,79 @@
  * DS207: Consider shorter variations of null checks
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-let CommandRunner;
-const { spawn } = require("child_process");
-const logger = require("logger-sharelatex");
+let CommandRunner
+const { spawn } = require('child_process')
+const logger = require('logger-sharelatex')

-logger.info("using standard command runner");
+logger.info('using standard command runner')

-module.exports = (CommandRunner = {
-  run(project_id, command, directory, image, timeout, environment, callback) {
-    let key, value;
-    if (callback == null) { callback = function(error) {}; }
-    command = (Array.from(command).map((arg) => arg.toString().replace('$COMPILE_DIR', directory)));
-    logger.log({project_id, command, directory}, "running command");
-    logger.warn("timeouts and sandboxing are not enabled with CommandRunner");
+module.exports = CommandRunner = {
+  run(project_id, command, directory, image, timeout, environment, callback) {
+    let key, value
+    if (callback == null) {
+      callback = function(error) {}
+    }
+    command = Array.from(command).map(arg =>
+      arg.toString().replace('$COMPILE_DIR', directory)
+    )
+    logger.log({ project_id, command, directory }, 'running command')
+    logger.warn('timeouts and sandboxing are not enabled with CommandRunner')

-    // merge environment settings
-    const env = {};
-    for (key in process.env) { value = process.env[key]; env[key] = value; }
-    for (key in environment) { value = environment[key]; env[key] = value; }
+    // merge environment settings
+    const env = {}
+    for (key in process.env) {
+      value = process.env[key]
+      env[key] = value
+    }
+    for (key in environment) {
+      value = environment[key]
+      env[key] = value
+    }

-    // run command as detached process so it has its own process group (which can be killed if needed)
-    const proc = spawn(command[0], command.slice(1), {cwd: directory, env});
+    // run command as detached process so it has its own process group (which can be killed if needed)
+    const proc = spawn(command[0], command.slice(1), { cwd: directory, env })

-    let stdout = "";
-    proc.stdout.on("data", data=> stdout += data);
+    let stdout = ''
+    proc.stdout.on('data', data => (stdout += data))

-    proc.on("error", function(err){
-      logger.err({err, project_id, command, directory}, "error running command");
-      return callback(err);
-    });
+    proc.on('error', function(err) {
+      logger.err(
+        { err, project_id, command, directory },
+        'error running command'
+      )
+      return callback(err)
+    })

-    proc.on("close", function(code, signal) {
-      let err;
-      logger.info({code, signal, project_id}, "command exited");
-      if (signal === 'SIGTERM') { // signal from kill method below
-        err = new Error("terminated");
-        err.terminated = true;
-        return callback(err);
-      } else if (code === 1) { // exit status from chktex
-        err = new Error("exited");
-        err.code = code;
-        return callback(err);
-      } else {
-        return callback(null, {"stdout": stdout});
-      }
-    });
+    proc.on('close', function(code, signal) {
+      let err
+      logger.info({ code, signal, project_id }, 'command exited')
+      if (signal === 'SIGTERM') {
+        // signal from kill method below
+        err = new Error('terminated')
+        err.terminated = true
+        return callback(err)
+      } else if (code === 1) {
+        // exit status from chktex
+        err = new Error('exited')
+        err.code = code
+        return callback(err)
+      } else {
+        return callback(null, { stdout: stdout })
+      }
+    })

-    return proc.pid;
-  }, // return process id to allow job to be killed if necessary
+    return proc.pid
+  }, // return process id to allow job to be killed if necessary

-  kill(pid, callback) {
-    if (callback == null) { callback = function(error) {}; }
-    try {
-      process.kill(-pid); // kill all processes in group
-    } catch (err) {
-      return callback(err);
-    }
-    return callback();
-  }
-});
+  kill(pid, callback) {
+    if (callback == null) {
+      callback = function(error) {}
+    }
+    try {
+      process.kill(-pid) // kill all processes in group
+    } catch (err) {
+      return callback(err)
+    }
+    return callback()
+  }
+}
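run() returns the child's pid so a later kill(pid) can signal the whole process group. A sketch of a call, with illustrative arguments (as the logger.warn above notes, this local runner does not enforce the timeout, and image is unused):

// illustrative call — $COMPILE_DIR is substituted into each argument
const pid = CommandRunner.run(
  'project-id', // placeholder
  ['latexmk', '-pdf', '$COMPILE_DIR/main.tex'],
  '/path/to/compile/dir', // placeholder
  null, // image: unused by the local runner
  60000, // timeout: not enforced by the local runner
  {},
  (error, output) => console.log(error, output && output.stdout)
)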
@@ -11,46 +11,62 @@
  * DS207: Consider shorter variations of null checks
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-let LockManager;
-const Settings = require('settings-sharelatex');
-const logger = require("logger-sharelatex");
-const Lockfile = require('lockfile'); // from https://github.com/npm/lockfile
-const Errors = require("./Errors");
-const fs = require("fs");
-const Path = require("path");
-module.exports = (LockManager = {
-  LOCK_TEST_INTERVAL: 1000, // 50ms between each test of the lock
-  MAX_LOCK_WAIT_TIME: 15000, // 10s maximum time to spend trying to get the lock
-  LOCK_STALE: 5*60*1000, // 5 mins time until lock auto expires
+let LockManager
+const Settings = require('settings-sharelatex')
+const logger = require('logger-sharelatex')
+const Lockfile = require('lockfile') // from https://github.com/npm/lockfile
+const Errors = require('./Errors')
+const fs = require('fs')
+const Path = require('path')
+module.exports = LockManager = {
+  LOCK_TEST_INTERVAL: 1000, // 50ms between each test of the lock
+  MAX_LOCK_WAIT_TIME: 15000, // 10s maximum time to spend trying to get the lock
+  LOCK_STALE: 5 * 60 * 1000, // 5 mins time until lock auto expires

-  runWithLock(path, runner, callback) {
-    if (callback == null) { callback = function(error) {}; }
-    const lockOpts = {
-      wait: this.MAX_LOCK_WAIT_TIME,
-      pollPeriod: this.LOCK_TEST_INTERVAL,
-      stale: this.LOCK_STALE
-    };
-    return Lockfile.lock(path, lockOpts, function(error) {
-      if ((error != null ? error.code : undefined) === 'EEXIST') {
-        return callback(new Errors.AlreadyCompilingError("compile in progress"));
-      } else if (error != null) {
-        return fs.lstat(path, (statLockErr, statLock)=>
-          fs.lstat(Path.dirname(path), (statDirErr, statDir)=>
-            fs.readdir(Path.dirname(path), function(readdirErr, readdirDir){
-              logger.err({error, path, statLock, statLockErr, statDir, statDirErr, readdirErr, readdirDir}, "unable to get lock");
-              return callback(error);
-            })
-          )
-        );
-      } else {
-        return runner((error1, ...args) =>
-          Lockfile.unlock(path, function(error2) {
-            error = error1 || error2;
-            if (error != null) { return callback(error); }
-            return callback(null, ...Array.from(args));
-          })
-        );
-      }
-    });
-  }
-});
+  runWithLock(path, runner, callback) {
+    if (callback == null) {
+      callback = function(error) {}
+    }
+    const lockOpts = {
+      wait: this.MAX_LOCK_WAIT_TIME,
+      pollPeriod: this.LOCK_TEST_INTERVAL,
+      stale: this.LOCK_STALE
+    }
+    return Lockfile.lock(path, lockOpts, function(error) {
+      if ((error != null ? error.code : undefined) === 'EEXIST') {
+        return callback(new Errors.AlreadyCompilingError('compile in progress'))
+      } else if (error != null) {
+        return fs.lstat(path, (statLockErr, statLock) =>
+          fs.lstat(Path.dirname(path), (statDirErr, statDir) =>
+            fs.readdir(Path.dirname(path), function(readdirErr, readdirDir) {
+              logger.err(
+                {
+                  error,
+                  path,
+                  statLock,
+                  statLockErr,
+                  statDir,
+                  statDirErr,
+                  readdirErr,
+                  readdirDir
+                },
+                'unable to get lock'
+              )
+              return callback(error)
+            })
+          )
+        )
+      } else {
+        return runner((error1, ...args) =>
+          Lockfile.unlock(path, function(error2) {
+            error = error1 || error2
+            if (error != null) {
+              return callback(error)
+            }
+            return callback(null, ...Array.from(args))
+          })
+        )
+      }
+    })
+  }
+}
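The lockfile package reports an already-held lock as error.code === 'EEXIST', which this module translates into AlreadyCompilingError; CompileController above then maps that to HTTP 423. A minimal sketch (the lock path is a placeholder):

// minimal sketch — the lock path is a placeholder
LockManager.runWithLock(
  '/compiles/<project_id>/.project-lock',
  done => {
    // ...run the compile, then signal completion...
    done(null)
  },
  error => {
    if (error instanceof Errors.AlreadyCompilingError) {
      // another compile holds the lockfile (EEXIST); surfaces as HTTP 423
    }
  }
)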
@@ -1,4 +1,3 @@
 // TODO: This file was created by bulk-decaffeinate.
 // Sanity-check the conversion and remove this comment.
-module.exports = require("metrics-sharelatex");
-
+module.exports = require('metrics-sharelatex')
@ -13,263 +13,387 @@
|
|||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let OutputCacheManager;
|
||||
const async = require("async");
|
||||
const fs = require("fs");
|
||||
const fse = require("fs-extra");
|
||||
const Path = require("path");
|
||||
const logger = require("logger-sharelatex");
|
||||
const _ = require("underscore");
|
||||
const Settings = require("settings-sharelatex");
|
||||
const crypto = require("crypto");
|
||||
let OutputCacheManager
|
||||
const async = require('async')
|
||||
const fs = require('fs')
|
||||
const fse = require('fs-extra')
|
||||
const Path = require('path')
|
||||
const logger = require('logger-sharelatex')
|
||||
const _ = require('underscore')
|
||||
const Settings = require('settings-sharelatex')
|
||||
const crypto = require('crypto')
|
||||
|
||||
const OutputFileOptimiser = require("./OutputFileOptimiser");
|
||||
const OutputFileOptimiser = require('./OutputFileOptimiser')
|
||||
|
||||
module.exports = (OutputCacheManager = {
|
||||
CACHE_SUBDIR: '.cache/clsi',
|
||||
ARCHIVE_SUBDIR: '.archive/clsi',
|
||||
// build id is HEXDATE-HEXRANDOM from Date.now()and RandomBytes
|
||||
// for backwards compatibility, make the randombytes part optional
|
||||
BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/,
|
||||
CACHE_LIMIT: 2, // maximum number of cache directories
|
||||
CACHE_AGE: 60*60*1000, // up to one hour old
|
||||
module.exports = OutputCacheManager = {
|
||||
CACHE_SUBDIR: '.cache/clsi',
|
||||
ARCHIVE_SUBDIR: '.archive/clsi',
|
||||
// build id is HEXDATE-HEXRANDOM from Date.now()and RandomBytes
|
||||
// for backwards compatibility, make the randombytes part optional
|
||||
BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/,
|
||||
CACHE_LIMIT: 2, // maximum number of cache directories
|
||||
CACHE_AGE: 60 * 60 * 1000, // up to one hour old
|
||||
|
||||
path(buildId, file) {
|
||||
// used by static server, given build id return '.cache/clsi/buildId'
|
||||
if (buildId.match(OutputCacheManager.BUILD_REGEX)) {
|
||||
return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file);
|
||||
} else {
|
||||
// for invalid build id, return top level
|
||||
return file;
|
||||
}
|
||||
},
|
||||
path(buildId, file) {
|
||||
// used by static server, given build id return '.cache/clsi/buildId'
|
||||
if (buildId.match(OutputCacheManager.BUILD_REGEX)) {
|
||||
return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file)
|
||||
} else {
|
||||
// for invalid build id, return top level
|
||||
return file
|
||||
}
|
||||
},
|
||||
|
||||
generateBuildId(callback) {
|
||||
// generate a secure build id from Date.now() and 8 random bytes in hex
|
||||
if (callback == null) { callback = function(error, buildId) {}; }
|
||||
return crypto.randomBytes(8, function(err, buf) {
|
||||
if (err != null) { return callback(err); }
|
||||
const random = buf.toString('hex');
|
||||
const date = Date.now().toString(16);
|
||||
return callback(err, `${date}-${random}`);
|
||||
});
|
||||
},
|
||||
generateBuildId(callback) {
|
||||
// generate a secure build id from Date.now() and 8 random bytes in hex
|
||||
if (callback == null) {
|
||||
callback = function(error, buildId) {}
|
||||
}
|
||||
return crypto.randomBytes(8, function(err, buf) {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
const random = buf.toString('hex')
|
||||
const date = Date.now().toString(16)
|
||||
return callback(err, `${date}-${random}`)
|
||||
})
|
||||
},
|
||||
|
||||
saveOutputFiles(outputFiles, compileDir, callback) {
|
||||
if (callback == null) { callback = function(error) {}; }
|
||||
return OutputCacheManager.generateBuildId(function(err, buildId) {
|
||||
if (err != null) { return callback(err); }
|
||||
return OutputCacheManager.saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback);
|
||||
});
|
||||
},
|
||||
saveOutputFiles(outputFiles, compileDir, callback) {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
return OutputCacheManager.generateBuildId(function(err, buildId) {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
return OutputCacheManager.saveOutputFilesInBuildDir(
|
||||
outputFiles,
|
||||
compileDir,
|
||||
buildId,
|
||||
callback
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback) {
|
||||
// make a compileDir/CACHE_SUBDIR/build_id directory and
|
||||
// copy all the output files into it
|
||||
if (callback == null) { callback = function(error) {}; }
|
||||
const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR);
|
||||
// Put the files into a new cache subdirectory
|
||||
const cacheDir = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR, buildId);
|
||||
// Is it a per-user compile? check if compile directory is PROJECTID-USERID
|
||||
const perUser = Path.basename(compileDir).match(/^[0-9a-f]{24}-[0-9a-f]{24}$/);
|
||||
saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback) {
|
||||
// make a compileDir/CACHE_SUBDIR/build_id directory and
|
||||
// copy all the output files into it
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
|
||||
// Put the files into a new cache subdirectory
|
||||
const cacheDir = Path.join(
|
||||
compileDir,
|
||||
OutputCacheManager.CACHE_SUBDIR,
|
||||
buildId
|
||||
)
|
||||
// Is it a per-user compile? check if compile directory is PROJECTID-USERID
|
||||
const perUser = Path.basename(compileDir).match(
|
||||
/^[0-9a-f]{24}-[0-9a-f]{24}$/
|
||||
)
|
||||
|
||||
// Archive logs in background
|
||||
if ((Settings.clsi != null ? Settings.clsi.archive_logs : undefined) || (Settings.clsi != null ? Settings.clsi.strace : undefined)) {
|
||||
OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(err) {
|
||||
if (err != null) {
|
||||
return logger.warn({err}, "erroring archiving log files");
|
||||
}
|
||||
});
|
||||
}
|
||||
// Archive logs in background
|
||||
if (
|
||||
(Settings.clsi != null ? Settings.clsi.archive_logs : undefined) ||
|
||||
(Settings.clsi != null ? Settings.clsi.strace : undefined)
|
||||
) {
|
||||
OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(
|
||||
err
|
||||
) {
|
||||
if (err != null) {
|
||||
return logger.warn({ err }, 'erroring archiving log files')
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// make the new cache directory
|
||||
return fse.ensureDir(cacheDir, function(err) {
|
||||
if (err != null) {
|
||||
logger.error({err, directory: cacheDir}, "error creating cache directory");
|
||||
return callback(err, outputFiles);
|
||||
} else {
|
||||
// copy all the output files into the new cache directory
|
||||
const results = [];
|
||||
return async.mapSeries(outputFiles, function(file, cb) {
|
||||
// don't send dot files as output, express doesn't serve them
|
||||
if (OutputCacheManager._fileIsHidden(file.path)) {
|
||||
logger.debug({compileDir, path: file.path}, "ignoring dotfile in output");
|
||||
return cb();
|
||||
}
|
||||
// copy other files into cache directory if valid
|
||||
const newFile = _.clone(file);
|
||||
const [src, dst] = Array.from([Path.join(compileDir, file.path), Path.join(cacheDir, file.path)]);
|
||||
return OutputCacheManager._checkFileIsSafe(src, function(err, isSafe) {
|
||||
if (err != null) { return cb(err); }
|
||||
if (!isSafe) {
|
||||
return cb();
|
||||
}
|
||||
return OutputCacheManager._checkIfShouldCopy(src, function(err, shouldCopy) {
|
||||
if (err != null) { return cb(err); }
|
||||
if (!shouldCopy) {
|
||||
return cb();
|
||||
}
|
||||
return OutputCacheManager._copyFile(src, dst, function(err) {
|
||||
if (err != null) { return cb(err); }
|
||||
newFile.build = buildId; // attach a build id if we cached the file
|
||||
results.push(newFile);
|
||||
return cb();
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
, function(err) {
|
||||
if (err != null) {
|
||||
// pass back the original files if we encountered *any* error
|
||||
callback(err, outputFiles);
|
||||
// clean up the directory we just created
|
||||
return fse.remove(cacheDir, function(err) {
|
||||
if (err != null) {
|
||||
return logger.error({err, dir: cacheDir}, "error removing cache dir after failure");
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// pass back the list of new files in the cache
|
||||
callback(err, results);
|
||||
// let file expiry run in the background, expire all previous files if per-user
|
||||
return OutputCacheManager.expireOutputFiles(cacheRoot, {keep: buildId, limit: perUser ? 1 : null});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
},
// make the new cache directory
return fse.ensureDir(cacheDir, function(err) {
if (err != null) {
logger.error(
{ err, directory: cacheDir },
'error creating cache directory'
)
return callback(err, outputFiles)
} else {
// copy all the output files into the new cache directory
const results = []
return async.mapSeries(
outputFiles,
function(file, cb) {
// don't send dot files as output, express doesn't serve them
if (OutputCacheManager._fileIsHidden(file.path)) {
logger.debug(
{ compileDir, path: file.path },
'ignoring dotfile in output'
)
return cb()
}
// copy other files into cache directory if valid
const newFile = _.clone(file)
const [src, dst] = Array.from([
Path.join(compileDir, file.path),
Path.join(cacheDir, file.path)
])
return OutputCacheManager._checkFileIsSafe(src, function(
err,
isSafe
) {
if (err != null) {
return cb(err)
}
if (!isSafe) {
return cb()
}
return OutputCacheManager._checkIfShouldCopy(src, function(
err,
shouldCopy
) {
if (err != null) {
return cb(err)
}
if (!shouldCopy) {
return cb()
}
return OutputCacheManager._copyFile(src, dst, function(err) {
if (err != null) {
return cb(err)
}
newFile.build = buildId // attach a build id if we cached the file
results.push(newFile)
return cb()
})
})
})
},
function(err) {
if (err != null) {
// pass back the original files if we encountered *any* error
callback(err, outputFiles)
// clean up the directory we just created
return fse.remove(cacheDir, function(err) {
if (err != null) {
return logger.error(
{ err, dir: cacheDir },
'error removing cache dir after failure'
)
}
})
} else {
// pass back the list of new files in the cache
callback(err, results)
// let file expiry run in the background, expire all previous files if per-user
return OutputCacheManager.expireOutputFiles(cacheRoot, {
keep: buildId,
limit: perUser ? 1 : null
})
}
}
)
}
})
},

archiveLogs(outputFiles, compileDir, buildId, callback) {
if (callback == null) { callback = function(error) {}; }
const archiveDir = Path.join(compileDir, OutputCacheManager.ARCHIVE_SUBDIR, buildId);
logger.log({dir: archiveDir}, "archiving log files for project");
return fse.ensureDir(archiveDir, function(err) {
if (err != null) { return callback(err); }
return async.mapSeries(outputFiles, function(file, cb) {
const [src, dst] = Array.from([Path.join(compileDir, file.path), Path.join(archiveDir, file.path)]);
return OutputCacheManager._checkFileIsSafe(src, function(err, isSafe) {
if (err != null) { return cb(err); }
if (!isSafe) { return cb(); }
return OutputCacheManager._checkIfShouldArchive(src, function(err, shouldArchive) {
if (err != null) { return cb(err); }
if (!shouldArchive) { return cb(); }
return OutputCacheManager._copyFile(src, dst, cb);
});
});
}
, callback);
});
},
archiveLogs(outputFiles, compileDir, buildId, callback) {
if (callback == null) {
callback = function(error) {}
}
const archiveDir = Path.join(
compileDir,
OutputCacheManager.ARCHIVE_SUBDIR,
buildId
)
logger.log({ dir: archiveDir }, 'archiving log files for project')
return fse.ensureDir(archiveDir, function(err) {
if (err != null) {
return callback(err)
}
return async.mapSeries(
outputFiles,
function(file, cb) {
const [src, dst] = Array.from([
Path.join(compileDir, file.path),
Path.join(archiveDir, file.path)
])
return OutputCacheManager._checkFileIsSafe(src, function(
err,
isSafe
) {
if (err != null) {
return cb(err)
}
if (!isSafe) {
return cb()
}
return OutputCacheManager._checkIfShouldArchive(src, function(
err,
shouldArchive
) {
if (err != null) {
return cb(err)
}
if (!shouldArchive) {
return cb()
}
return OutputCacheManager._copyFile(src, dst, cb)
})
})
},
callback
)
})
},

expireOutputFiles(cacheRoot, options, callback) {
// look in compileDir for build dirs and delete if > N or age of mod time > T
if (callback == null) { callback = function(error) {}; }
return fs.readdir(cacheRoot, function(err, results) {
if (err != null) {
if (err.code === 'ENOENT') { return callback(null); } // cache directory is empty
logger.error({err, project_id: cacheRoot}, "error clearing cache");
return callback(err);
}
expireOutputFiles(cacheRoot, options, callback) {
// look in compileDir for build dirs and delete if > N or age of mod time > T
if (callback == null) {
callback = function(error) {}
}
return fs.readdir(cacheRoot, function(err, results) {
if (err != null) {
if (err.code === 'ENOENT') {
return callback(null)
} // cache directory is empty
logger.error({ err, project_id: cacheRoot }, 'error clearing cache')
return callback(err)
}

const dirs = results.sort().reverse();
const currentTime = Date.now();
const dirs = results.sort().reverse()
const currentTime = Date.now()

const isExpired = function(dir, index) {
if ((options != null ? options.keep : undefined) === dir) { return false; }
// remove any directories over the requested (non-null) limit
if (((options != null ? options.limit : undefined) != null) && (index > options.limit)) { return true; }
// remove any directories over the hard limit
if (index > OutputCacheManager.CACHE_LIMIT) { return true; }
// we can get the build time from the first part of the directory name DDDD-RRRR
// DDDD is date and RRRR is random bytes
const dirTime = parseInt(__guard__(dir.split('-'), x => x[0]), 16);
const age = currentTime - dirTime;
return age > OutputCacheManager.CACHE_AGE;
};
const isExpired = function(dir, index) {
if ((options != null ? options.keep : undefined) === dir) {
return false
}
// remove any directories over the requested (non-null) limit
if (
(options != null ? options.limit : undefined) != null &&
index > options.limit
) {
return true
}
// remove any directories over the hard limit
if (index > OutputCacheManager.CACHE_LIMIT) {
return true
}
// we can get the build time from the first part of the directory name DDDD-RRRR
// DDDD is date and RRRR is random bytes
const dirTime = parseInt(
__guard__(dir.split('-'), x => x[0]),
16
)
const age = currentTime - dirTime
return age > OutputCacheManager.CACHE_AGE
}

const toRemove = _.filter(dirs, isExpired);
const toRemove = _.filter(dirs, isExpired)

const removeDir = (dir, cb) =>
fse.remove(Path.join(cacheRoot, dir), function(err, result) {
logger.log({cache: cacheRoot, dir}, "removed expired cache dir");
if (err != null) {
logger.error({err, dir}, "cache remove error");
}
return cb(err, result);
})
;
const removeDir = (dir, cb) =>
fse.remove(Path.join(cacheRoot, dir), function(err, result) {
logger.log({ cache: cacheRoot, dir }, 'removed expired cache dir')
if (err != null) {
logger.error({ err, dir }, 'cache remove error')
}
return cb(err, result)
})
return async.eachSeries(
toRemove,
(dir, cb) => removeDir(dir, cb),
callback
)
})
},

return async.eachSeries(toRemove, (dir, cb) => removeDir(dir, cb)
, callback);
});
},
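
The isExpired logic above leans on the build directory naming scheme: the first hex field of DDDD-RRRR is the creation timestamp. A minimal sketch of that round trip, assuming the DDDD part is Date.now() encoded in hex (the actual buildId generator is outside this hunk):

const crypto = require('crypto')

// Hypothetical buildId generator matching the DDDD-RRRR convention
function makeBuildId() {
  return `${Date.now().toString(16)}-${crypto.randomBytes(8).toString('hex')}`
}

// Mirrors the expiry check: recover the creation time from the name
function buildDirAgeMs(dir, now = Date.now()) {
  return now - parseInt(dir.split('-')[0], 16)
}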
_fileIsHidden(path) {
return (path != null ? path.match(/^\.|\/\./) : undefined) != null
},

_fileIsHidden(path) {
return ((path != null ? path.match(/^\.|\/\./) : undefined) != null);
},
_checkFileIsSafe(src, callback) {
// check if we have a valid file to copy into the cache
if (callback == null) {
callback = function(error, isSafe) {}
}
return fs.stat(src, function(err, stats) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
logger.warn(
{ err, file: src },
'file has disappeared before copying to build cache'
)
return callback(err, false)
} else if (err != null) {
// some other problem reading the file
logger.error({ err, file: src }, 'stat error for file in cache')
return callback(err, false)
} else if (!stats.isFile()) {
// other filetype - reject it
logger.warn(
{ src, stat: stats },
'nonfile output - refusing to copy to cache'
)
return callback(null, false)
} else {
// it's a plain file, ok to copy
return callback(null, true)
}
})
},

_checkFileIsSafe(src, callback) {
// check if we have a valid file to copy into the cache
if (callback == null) { callback = function(error, isSafe) {}; }
return fs.stat(src, function(err, stats) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
logger.warn({err, file: src}, "file has disappeared before copying to build cache");
return callback(err, false);
} else if (err != null) {
// some other problem reading the file
logger.error({err, file: src}, "stat error for file in cache");
return callback(err, false);
} else if (!stats.isFile()) {
// other filetype - reject it
logger.warn({src, stat: stats}, "nonfile output - refusing to copy to cache");
return callback(null, false);
} else {
// it's a plain file, ok to copy
return callback(null, true);
}
});
},
_copyFile(src, dst, callback) {
// copy output file into the cache
return fse.copy(src, dst, function(err) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
logger.warn(
{ err, file: src },
'file has disappeared when copying to build cache'
)
return callback(err, false)
} else if (err != null) {
logger.error({ err, src, dst }, 'copy error for file in cache')
return callback(err)
} else {
if (
Settings.clsi != null ? Settings.clsi.optimiseInDocker : undefined
) {
// don't run any optimisations on the pdf when they are done
// in the docker container
return callback()
} else {
// call the optimiser for the file too
return OutputFileOptimiser.optimiseFile(src, dst, callback)
}
}
})
},

_copyFile(src, dst, callback) {
// copy output file into the cache
return fse.copy(src, dst, function(err) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
logger.warn({err, file: src}, "file has disappeared when copying to build cache");
return callback(err, false);
} else if (err != null) {
logger.error({err, src, dst}, "copy error for file in cache");
return callback(err);
} else {
if ((Settings.clsi != null ? Settings.clsi.optimiseInDocker : undefined)) {
// don't run any optimisations on the pdf when they are done
// in the docker container
return callback();
} else {
// call the optimiser for the file too
return OutputFileOptimiser.optimiseFile(src, dst, callback);
}
}
});
},
_checkIfShouldCopy(src, callback) {
if (callback == null) {
callback = function(err, shouldCopy) {}
}
return callback(null, !Path.basename(src).match(/^strace/))
},

_checkIfShouldCopy(src, callback) {
if (callback == null) { callback = function(err, shouldCopy) {}; }
return callback(null, !Path.basename(src).match(/^strace/));
},

_checkIfShouldArchive(src, callback) {
let needle;
if (callback == null) { callback = function(err, shouldCopy) {}; }
if (Path.basename(src).match(/^strace/)) {
return callback(null, true);
}
if ((Settings.clsi != null ? Settings.clsi.archive_logs : undefined) && (needle = Path.basename(src), ["output.log", "output.blg"].includes(needle))) {
return callback(null, true);
}
return callback(null, false);
}
});
_checkIfShouldArchive(src, callback) {
let needle
if (callback == null) {
callback = function(err, shouldCopy) {}
}
if (Path.basename(src).match(/^strace/)) {
return callback(null, true)
}
if (
(Settings.clsi != null ? Settings.clsi.archive_logs : undefined) &&
((needle = Path.basename(src)),
['output.log', 'output.blg'].includes(needle))
) {
return callback(null, true)
}
return callback(null, false)
}
}

function __guard__(value, transform) {
return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
}
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
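
For reference, the Settings.clsi flags consulted in this file, as a sketch with illustrative defaults (not the actual settings module shipped with the service):

// settings-sharelatex sketch (illustrative values only)
module.exports = {
  clsi: {
    strace: false, // when true, archiveLogs runs after each compile
    archive_logs: false, // when true, output.log and output.blg are archived too
    optimiseInDocker: false // when true, _copyFile skips the host-side optimiser
  }
}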

@@ -14,73 +14,102 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let OutputFileFinder;
const async = require("async");
const fs = require("fs");
const Path = require("path");
const { spawn } = require("child_process");
const logger = require("logger-sharelatex");
let OutputFileFinder
const async = require('async')
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')

module.exports = (OutputFileFinder = {
findOutputFiles(resources, directory, callback) {
if (callback == null) { callback = function(error, outputFiles, allFiles) {}; }
const incomingResources = {};
for (const resource of Array.from(resources)) {
incomingResources[resource.path] = true;
}

return OutputFileFinder._getAllFiles(directory, function(error, allFiles) {
if (allFiles == null) { allFiles = []; }
if (error != null) {
logger.err({err:error}, "error finding all output files");
return callback(error);
}
const outputFiles = [];
for (const file of Array.from(allFiles)) {
if (!incomingResources[file]) {
outputFiles.push({
path: file,
type: __guard__(file.match(/\.([^\.]+)$/), x => x[1])
});
}
}
return callback(null, outputFiles, allFiles);
});
},
module.exports = OutputFileFinder = {
findOutputFiles(resources, directory, callback) {
if (callback == null) {
callback = function(error, outputFiles, allFiles) {}
}
const incomingResources = {}
for (const resource of Array.from(resources)) {
incomingResources[resource.path] = true
}

_getAllFiles(directory, _callback) {
if (_callback == null) { _callback = function(error, fileList) {}; }
const callback = function(error, fileList) {
_callback(error, fileList);
return _callback = function() {};
};
return OutputFileFinder._getAllFiles(directory, function(error, allFiles) {
if (allFiles == null) {
allFiles = []
}
if (error != null) {
logger.err({ err: error }, 'error finding all output files')
return callback(error)
}
const outputFiles = []
for (const file of Array.from(allFiles)) {
if (!incomingResources[file]) {
outputFiles.push({
path: file,
type: __guard__(file.match(/\.([^\.]+)$/), x => x[1])
})
}
}
return callback(null, outputFiles, allFiles)
})
},

// don't include clsi-specific files/directories in the output list
const EXCLUDE_DIRS = ["-name", ".cache", "-o", "-name", ".archive","-o", "-name", ".project-*"];
const args = [directory, "(", ...Array.from(EXCLUDE_DIRS), ")", "-prune", "-o", "-type", "f", "-print"];
logger.log({args}, "running find command");
_getAllFiles(directory, _callback) {
if (_callback == null) {
_callback = function(error, fileList) {}
}
const callback = function(error, fileList) {
_callback(error, fileList)
return (_callback = function() {})
}

const proc = spawn("find", args);
let stdout = "";
proc.stdout.on("data", chunk => stdout += chunk.toString());
proc.on("error", callback);
return proc.on("close", function(code) {
if (code !== 0) {
logger.warn({directory, code}, "find returned error, directory likely doesn't exist");
return callback(null, []);
}
let fileList = stdout.trim().split("\n");
fileList = fileList.map(function(file) {
// Strip leading directory
let path;
return path = Path.relative(directory, file);
});
return callback(null, fileList);
});
}
});
// don't include clsi-specific files/directories in the output list
const EXCLUDE_DIRS = [
'-name',
'.cache',
'-o',
'-name',
'.archive',
'-o',
'-name',
'.project-*'
]
const args = [
directory,
'(',
...Array.from(EXCLUDE_DIRS),
')',
'-prune',
'-o',
'-type',
'f',
'-print'
]
logger.log({ args }, 'running find command')

const proc = spawn('find', args)
let stdout = ''
proc.stdout.on('data', chunk => (stdout += chunk.toString()))
proc.on('error', callback)
return proc.on('close', function(code) {
if (code !== 0) {
logger.warn(
{ directory, code },
"find returned error, directory likely doesn't exist"
)
return callback(null, [])
}
let fileList = stdout.trim().split('\n')
fileList = fileList.map(function(file) {
// Strip leading directory
let path
return (path = Path.relative(directory, file))
})
return callback(null, fileList)
})
}
}

function __guard__(value, transform) {
return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
}
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
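
The args array above amounts to the following invocation (the compile directory is illustrative); because spawn passes arguments directly, no shell quoting of the parentheses is needed:

find /compile/<project> ( -name .cache -o -name .archive -o -name .project-* ) -prune -o -type f -print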

@@ -13,74 +13,92 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let OutputFileOptimiser;
const fs = require("fs");
const Path = require("path");
const { spawn } = require("child_process");
const logger = require("logger-sharelatex");
const Metrics = require("./Metrics");
const _ = require("underscore");
let OutputFileOptimiser
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const _ = require('underscore')

module.exports = (OutputFileOptimiser = {
module.exports = OutputFileOptimiser = {
optimiseFile(src, dst, callback) {
// check output file (src) and see if we can optimise it, storing
// the result in the build directory (dst)
if (callback == null) {
callback = function(error) {}
}
if (src.match(/\/output\.pdf$/)) {
return OutputFileOptimiser.checkIfPDFIsOptimised(src, function(
err,
isOptimised
) {
if (err != null || isOptimised) {
return callback(null)
}
return OutputFileOptimiser.optimisePDF(src, dst, callback)
})
} else {
return callback(null)
}
},

optimiseFile(src, dst, callback) {
// check output file (src) and see if we can optimise it, storing
// the result in the build directory (dst)
if (callback == null) { callback = function(error) {}; }
if (src.match(/\/output\.pdf$/)) {
return OutputFileOptimiser.checkIfPDFIsOptimised(src, function(err, isOptimised) {
if ((err != null) || isOptimised) { return callback(null); }
return OutputFileOptimiser.optimisePDF(src, dst, callback);
});
} else {
return callback((null));
}
},
checkIfPDFIsOptimised(file, callback) {
const SIZE = 16 * 1024 // check the header of the pdf
const result = new Buffer(SIZE)
result.fill(0) // prevent leakage of uninitialised buffer
return fs.open(file, 'r', function(err, fd) {
if (err != null) {
return callback(err)
}
return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) =>
fs.close(fd, function(errClose) {
if (errRead != null) {
return callback(errRead)
}
if (errClose != null) {
return callback(errClose)
}
const isOptimised =
buffer.toString('ascii').indexOf('/Linearized 1') >= 0
return callback(null, isOptimised)
})
)
})
},

checkIfPDFIsOptimised(file, callback) {
const SIZE = 16*1024; // check the header of the pdf
const result = new Buffer(SIZE);
result.fill(0); // prevent leakage of uninitialised buffer
return fs.open(file, "r", function(err, fd) {
if (err != null) { return callback(err); }
return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) =>
fs.close(fd, function(errClose) {
if (errRead != null) { return callback(errRead); }
if (errClose != null) { return callback(errClose); }
const isOptimised = buffer.toString('ascii').indexOf("/Linearized 1") >= 0;
return callback(null, isOptimised);
})
);
});
},
optimisePDF(src, dst, callback) {
if (callback == null) {
callback = function(error) {}
}
const tmpOutput = dst + '.opt'
const args = ['--linearize', src, tmpOutput]
logger.log({ args }, 'running qpdf command')

optimisePDF(src, dst, callback) {
if (callback == null) { callback = function(error) {}; }
const tmpOutput = dst + '.opt';
const args = ["--linearize", src, tmpOutput];
logger.log({args}, "running qpdf command");

const timer = new Metrics.Timer("qpdf");
const proc = spawn("qpdf", args);
let stdout = "";
proc.stdout.on("data", chunk => stdout += chunk.toString());
callback = _.once(callback); // avoid double call back for error and close event
proc.on("error", function(err) {
logger.warn({err, args}, "qpdf failed");
return callback(null);
}); // ignore the error
return proc.on("close", function(code) {
timer.done();
if (code !== 0) {
logger.warn({code, args}, "qpdf returned error");
return callback(null); // ignore the error
}
return fs.rename(tmpOutput, dst, function(err) {
if (err != null) {
logger.warn({tmpOutput, dst}, "failed to rename output of qpdf command");
}
return callback(null);
});
});
} // ignore the error
});
const timer = new Metrics.Timer('qpdf')
const proc = spawn('qpdf', args)
let stdout = ''
proc.stdout.on('data', chunk => (stdout += chunk.toString()))
callback = _.once(callback) // avoid double call back for error and close event
proc.on('error', function(err) {
logger.warn({ err, args }, 'qpdf failed')
return callback(null)
}) // ignore the error
return proc.on('close', function(code) {
timer.done()
if (code !== 0) {
logger.warn({ code, args }, 'qpdf returned error')
return callback(null) // ignore the error
}
return fs.rename(tmpOutput, dst, function(err) {
if (err != null) {
logger.warn(
{ tmpOutput, dst },
'failed to rename output of qpdf command'
)
}
return callback(null)
})
})
} // ignore the error
}
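
optimisePDF above is roughly equivalent to the following commands (paths illustrative); the .opt temporary is renamed over the destination only when qpdf exits cleanly, and any qpdf failure is deliberately ignored:

qpdf --linearize output.pdf output.pdf.opt
mv output.pdf.opt output.pdf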

@@ -11,113 +11,153 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ProjectPersistenceManager;
const UrlCache = require("./UrlCache");
const CompileManager = require("./CompileManager");
const db = require("./db");
const dbQueue = require("./DbQueue");
const async = require("async");
const logger = require("logger-sharelatex");
const oneDay = 24 * 60 * 60 * 1000;
const Settings = require("settings-sharelatex");
let ProjectPersistenceManager
const UrlCache = require('./UrlCache')
const CompileManager = require('./CompileManager')
const db = require('./db')
const dbQueue = require('./DbQueue')
const async = require('async')
const logger = require('logger-sharelatex')
const oneDay = 24 * 60 * 60 * 1000
const Settings = require('settings-sharelatex')

module.exports = (ProjectPersistenceManager = {
module.exports = ProjectPersistenceManager = {
EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5,

EXPIRY_TIMEOUT: Settings.project_cache_length_ms || (oneDay * 2.5),
markProjectAsJustAccessed(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
const job = cb =>
db.Project.findOrCreate({ where: { project_id } })
.spread((project, created) =>
project
.updateAttributes({ lastAccessed: new Date() })
.then(() => cb())
.error(cb)
)
.error(cb)
return dbQueue.queue.push(job, callback)
},

markProjectAsJustAccessed(project_id, callback) {
if (callback == null) { callback = function(error) {}; }
const job = cb=>
db.Project.findOrCreate({where: {project_id}})
.spread(
(project, created) =>
project.updateAttributes({lastAccessed: new Date()})
.then(() => cb())
.error(cb)
)
.error(cb)
;
return dbQueue.queue.push(job, callback);
},
clearExpiredProjects(callback) {
if (callback == null) {
callback = function(error) {}
}
return ProjectPersistenceManager._findExpiredProjectIds(function(
error,
project_ids
) {
if (error != null) {
return callback(error)
}
logger.log({ project_ids }, 'clearing expired projects')
const jobs = Array.from(project_ids || []).map(project_id =>
(project_id => callback =>
ProjectPersistenceManager.clearProjectFromCache(project_id, function(
err
) {
if (err != null) {
logger.error({ err, project_id }, 'error clearing project')
}
return callback()
}))(project_id)
)
return async.series(jobs, function(error) {
if (error != null) {
return callback(error)
}
return CompileManager.clearExpiredProjects(
ProjectPersistenceManager.EXPIRY_TIMEOUT,
error => callback()
)
})
})
}, // ignore any errors from deleting directories

clearProject(project_id, user_id, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id, user_id }, 'clearing project for user')
return CompileManager.clearProject(project_id, user_id, function(error) {
if (error != null) {
return callback(error)
}
return ProjectPersistenceManager.clearProjectFromCache(
project_id,
function(error) {
if (error != null) {
return callback(error)
}
return callback()
}
)
})
},

clearExpiredProjects(callback) {
if (callback == null) { callback = function(error) {}; }
return ProjectPersistenceManager._findExpiredProjectIds(function(error, project_ids) {
if (error != null) { return callback(error); }
logger.log({project_ids}, "clearing expired projects");
const jobs = (Array.from(project_ids || [])).map((project_id) =>
(project_id =>
callback =>
ProjectPersistenceManager.clearProjectFromCache(project_id, function(err) {
if (err != null) {
logger.error({err, project_id}, "error clearing project");
}
return callback();
})

)(project_id));
return async.series(jobs, function(error) {
if (error != null) { return callback(error); }
return CompileManager.clearExpiredProjects(ProjectPersistenceManager.EXPIRY_TIMEOUT, error => callback());
});
});
}, // ignore any errors from deleting directories
clearProjectFromCache(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id }, 'clearing project from cache')
return UrlCache.clearProject(project_id, function(error) {
if (error != null) {
logger.err({ error, project_id }, 'error clearing project from cache')
return callback(error)
}
return ProjectPersistenceManager._clearProjectFromDatabase(
project_id,
function(error) {
if (error != null) {
logger.err(
{ error, project_id },
'error clearing project from database'
)
}
return callback(error)
}
)
})
},

clearProject(project_id, user_id, callback) {
if (callback == null) { callback = function(error) {}; }
logger.log({project_id, user_id}, "clearing project for user");
return CompileManager.clearProject(project_id, user_id, function(error) {
if (error != null) { return callback(error); }
return ProjectPersistenceManager.clearProjectFromCache(project_id, function(error) {
if (error != null) { return callback(error); }
return callback();
});
});
},
_clearProjectFromDatabase(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id }, 'clearing project from database')
const job = cb =>
db.Project.destroy({ where: { project_id } })
.then(() => cb())
.error(cb)
return dbQueue.queue.push(job, callback)
},

clearProjectFromCache(project_id, callback) {
if (callback == null) { callback = function(error) {}; }
logger.log({project_id}, "clearing project from cache");
return UrlCache.clearProject(project_id, function(error) {
if (error != null) {
logger.err({error, project_id}, "error clearing project from cache");
return callback(error);
}
return ProjectPersistenceManager._clearProjectFromDatabase(project_id, function(error) {
if (error != null) {
logger.err({error, project_id}, "error clearing project from database");
}
return callback(error);
});
});
},
_findExpiredProjectIds(callback) {
if (callback == null) {
callback = function(error, project_ids) {}
}
const job = function(cb) {
const keepProjectsFrom = new Date(
Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT
)
const q = {}
q[db.op.lt] = keepProjectsFrom
return db.Project.findAll({ where: { lastAccessed: q } })
.then(projects =>
cb(
null,
projects.map(project => project.project_id)
)
)
.error(cb)
}

_clearProjectFromDatabase(project_id, callback) {
if (callback == null) { callback = function(error) {}; }
logger.log({project_id}, "clearing project from database");
const job = cb=>
db.Project.destroy({where: {project_id}})
.then(() => cb())
.error(cb)
;
return dbQueue.queue.push(job, callback);
},
return dbQueue.queue.push(job, callback)
}
}


_findExpiredProjectIds(callback) {
if (callback == null) { callback = function(error, project_ids) {}; }
const job = function(cb){
const keepProjectsFrom = new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT);
const q = {};
q[db.op.lt] = keepProjectsFrom;
return db.Project.findAll({where:{lastAccessed:q}})
.then(projects => cb(null, projects.map(project => project.project_id))).error(cb);
};

return dbQueue.queue.push(job, callback);
}
});


logger.log({EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT}, "project assets kept timeout");
logger.log(
{ EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT },
'project assets kept timeout'
)
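
The default expiry works out to oneDay * 2.5 = 216000000 ms, i.e. 60 hours. A sketch of the cutoff computed by _findExpiredProjectIds, with Op.lt standing in for the module's db.op.lt:

const EXPIRY_TIMEOUT = 2.5 * 24 * 60 * 60 * 1000 // 216000000 ms = 60 hours
const keepProjectsFrom = new Date(Date.now() - EXPIRY_TIMEOUT)
// projects whose lastAccessed predates the cutoff are expired:
// db.Project.findAll({ where: { lastAccessed: { [Op.lt]: keepProjectsFrom } } })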

@@ -17,177 +17,201 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let RequestParser;
const settings = require("settings-sharelatex");
let RequestParser
const settings = require('settings-sharelatex')

module.exports = (RequestParser = {
VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"],
MAX_TIMEOUT: 600,
module.exports = RequestParser = {
VALID_COMPILERS: ['pdflatex', 'latex', 'xelatex', 'lualatex'],
MAX_TIMEOUT: 600,

parse(body, callback) {
let resource;
if (callback == null) { callback = function(error, data) {}; }
const response = {};
parse(body, callback) {
let resource
if (callback == null) {
callback = function(error, data) {}
}
const response = {}

if ((body.compile == null)) {
return callback("top level object should have a compile attribute");
}
if (body.compile == null) {
return callback('top level object should have a compile attribute')
}

const { compile } = body;
if (!compile.options) { compile.options = {}; }
const { compile } = body
if (!compile.options) {
compile.options = {}
}

try {
response.compiler = this._parseAttribute("compiler",
compile.options.compiler, {
validValues: this.VALID_COMPILERS,
default: "pdflatex",
type: "string"
}
);
response.timeout = this._parseAttribute("timeout",
compile.options.timeout, {
default: RequestParser.MAX_TIMEOUT,
type: "number"
}
);
response.imageName = this._parseAttribute("imageName",
compile.options.imageName,
{type: "string"});
response.draft = this._parseAttribute("draft",
compile.options.draft, {
default: false,
type: "boolean"
}
);
response.check = this._parseAttribute("check",
compile.options.check,
{type: "string"});
response.flags = this._parseAttribute("flags",
compile.options.flags, {
default: [],
type: "object"
}
);
try {
response.compiler = this._parseAttribute(
'compiler',
compile.options.compiler,
{
validValues: this.VALID_COMPILERS,
default: 'pdflatex',
type: 'string'
}
)
response.timeout = this._parseAttribute(
'timeout',
compile.options.timeout,
{
default: RequestParser.MAX_TIMEOUT,
type: 'number'
}
)
response.imageName = this._parseAttribute(
'imageName',
compile.options.imageName,
{ type: 'string' }
)
response.draft = this._parseAttribute('draft', compile.options.draft, {
default: false,
type: 'boolean'
})
response.check = this._parseAttribute('check', compile.options.check, {
type: 'string'
})
response.flags = this._parseAttribute('flags', compile.options.flags, {
default: [],
type: 'object'
})

// The syncType specifies whether the request contains all
// resources (full) or only those resources to be updated
// in-place (incremental).
response.syncType = this._parseAttribute("syncType",
compile.options.syncType, {
validValues: ["full", "incremental"],
type: "string"
}
);
// The syncType specifies whether the request contains all
// resources (full) or only those resources to be updated
// in-place (incremental).
response.syncType = this._parseAttribute(
'syncType',
compile.options.syncType,
{
validValues: ['full', 'incremental'],
type: 'string'
}
)

// The syncState is an identifier passed in with the request
// which has the property that it changes when any resource is
// added, deleted, moved or renamed.
//
// on syncType full the syncState identifier is passed in and
// stored
//
// on syncType incremental the syncState identifier must match
// the stored value
response.syncState = this._parseAttribute("syncState",
compile.options.syncState,
{type: "string"});
// The syncState is an identifier passed in with the request
// which has the property that it changes when any resource is
// added, deleted, moved or renamed.
//
// on syncType full the syncState identifier is passed in and
// stored
//
// on syncType incremental the syncState identifier must match
// the stored value
response.syncState = this._parseAttribute(
'syncState',
compile.options.syncState,
{ type: 'string' }
)

if (response.timeout > RequestParser.MAX_TIMEOUT) {
response.timeout = RequestParser.MAX_TIMEOUT;
}
response.timeout = response.timeout * 1000; // milliseconds
if (response.timeout > RequestParser.MAX_TIMEOUT) {
response.timeout = RequestParser.MAX_TIMEOUT
}
response.timeout = response.timeout * 1000 // milliseconds

response.resources = ((() => {
const result = [];
for (resource of Array.from((compile.resources || []))) { result.push(this._parseResource(resource));
}
return result;
})());
response.resources = (() => {
const result = []
for (resource of Array.from(compile.resources || [])) {
result.push(this._parseResource(resource))
}
return result
})()

const rootResourcePath = this._parseAttribute("rootResourcePath",
compile.rootResourcePath, {
default: "main.tex",
type: "string"
}
);
const originalRootResourcePath = rootResourcePath;
const sanitizedRootResourcePath = RequestParser._sanitizePath(rootResourcePath);
response.rootResourcePath = RequestParser._checkPath(sanitizedRootResourcePath);
const rootResourcePath = this._parseAttribute(
'rootResourcePath',
compile.rootResourcePath,
{
default: 'main.tex',
type: 'string'
}
)
const originalRootResourcePath = rootResourcePath
const sanitizedRootResourcePath = RequestParser._sanitizePath(
rootResourcePath
)
response.rootResourcePath = RequestParser._checkPath(
sanitizedRootResourcePath
)

for (resource of Array.from(response.resources)) {
if (resource.path === originalRootResourcePath) {
resource.path = sanitizedRootResourcePath;
}
}
} catch (error1) {
const error = error1;
return callback(error);
}
for (resource of Array.from(response.resources)) {
if (resource.path === originalRootResourcePath) {
resource.path = sanitizedRootResourcePath
}
}
} catch (error1) {
const error = error1
return callback(error)
}

return callback(null, response);
},
return callback(null, response)
},

_parseResource(resource) {
let modified;
if ((resource.path == null) || (typeof resource.path !== "string")) {
throw "all resources should have a path attribute";
}
_parseResource(resource) {
let modified
if (resource.path == null || typeof resource.path !== 'string') {
throw 'all resources should have a path attribute'
}

if (resource.modified != null) {
modified = new Date(resource.modified);
if (isNaN(modified.getTime())) {
throw `resource modified date could not be understood: ${resource.modified}`;
}
}
if (resource.modified != null) {
modified = new Date(resource.modified)
if (isNaN(modified.getTime())) {
throw `resource modified date could not be understood: ${resource.modified}`
}
}

if ((resource.url == null) && (resource.content == null)) {
throw "all resources should have either a url or content attribute";
}
if ((resource.content != null) && (typeof resource.content !== "string")) {
throw "content attribute should be a string";
}
if ((resource.url != null) && (typeof resource.url !== "string")) {
throw "url attribute should be a string";
}
if (resource.url == null && resource.content == null) {
throw 'all resources should have either a url or content attribute'
}
if (resource.content != null && typeof resource.content !== 'string') {
throw 'content attribute should be a string'
}
if (resource.url != null && typeof resource.url !== 'string') {
throw 'url attribute should be a string'
}

return {
path: resource.path,
modified,
url: resource.url,
content: resource.content
};
},
return {
path: resource.path,
modified,
url: resource.url,
content: resource.content
}
},

_parseAttribute(name, attribute, options) {
if (attribute != null) {
if (options.validValues != null) {
if (options.validValues.indexOf(attribute) === -1) {
throw `${name} attribute should be one of: ${options.validValues.join(", ")}`;
}
}
if (options.type != null) {
if (typeof attribute !== options.type) {
throw `${name} attribute should be a ${options.type}`;
}
}
} else {
if (options.default != null) { return options.default; }
}
return attribute;
},
_parseAttribute(name, attribute, options) {
if (attribute != null) {
if (options.validValues != null) {
if (options.validValues.indexOf(attribute) === -1) {
throw `${name} attribute should be one of: ${options.validValues.join(
', '
)}`
}
}
if (options.type != null) {
if (typeof attribute !== options.type) {
throw `${name} attribute should be a ${options.type}`
}
}
} else {
if (options.default != null) {
return options.default
}
}
return attribute
},

_sanitizePath(path) {
// See http://php.net/manual/en/function.escapeshellcmd.php
return path.replace(/[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g, "");
},
_sanitizePath(path) {
// See http://php.net/manual/en/function.escapeshellcmd.php
return path.replace(
/[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g,
''
)
},

_checkPath(path) {
// check that the request does not use a relative path
for (const dir of Array.from(path.split('/'))) {
if (dir === '..') {
throw "relative path in root resource";
}
}
return path;
}
});
_checkPath(path) {
// check that the request does not use a relative path
for (const dir of Array.from(path.split('/'))) {
if (dir === '..') {
throw 'relative path in root resource'
}
}
return path
}
}
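
A request body the parser above accepts might look like this (values are illustrative; attribute names come from the parser itself). Note the timeout is capped at MAX_TIMEOUT seconds and then converted to milliseconds:

RequestParser.parse(
  {
    compile: {
      options: {
        compiler: 'pdflatex', // one of VALID_COMPILERS
        timeout: 60, // seconds; becomes 60000 ms in the response
        syncType: 'full',
        syncState: 'abc123'
      },
      rootResourcePath: 'main.tex',
      resources: [
        { path: 'main.tex', content: '\\documentclass{article}...' },
        { path: 'logo.png', url: 'http://example.com/logo.png', modified: 1500000000000 }
      ]
    }
  },
  function(error, request) {
    // request.timeout === 60000, request.resources is the parsed list
  }
)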

@@ -13,102 +13,142 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ResourceStateManager;
const Path = require("path");
const fs = require("fs");
const logger = require("logger-sharelatex");
const settings = require("settings-sharelatex");
const Errors = require("./Errors");
const SafeReader = require("./SafeReader");
let ResourceStateManager
const Path = require('path')
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const Errors = require('./Errors')
const SafeReader = require('./SafeReader')

module.exports = (ResourceStateManager = {
module.exports = ResourceStateManager = {
// The sync state is an identifier which must match for an
// incremental update to be allowed.
//
// The initial value is passed in and stored on a full
// compile, along with the list of resources..
//
// Subsequent incremental compiles must come with the same value - if
// not they will be rejected with a 409 Conflict response. The
// previous list of resources is returned.
//
// An incremental compile can only update existing files with new
// content. The sync state identifier must change if any docs or
// files are moved, added, deleted or renamed.

// The sync state is an identifier which must match for an
// incremental update to be allowed.
//
// The initial value is passed in and stored on a full
// compile, along with the list of resources..
//
// Subsequent incremental compiles must come with the same value - if
// not they will be rejected with a 409 Conflict response. The
// previous list of resources is returned.
//
// An incremental compile can only update existing files with new
// content. The sync state identifier must change if any docs or
// files are moved, added, deleted or renamed.
SYNC_STATE_FILE: '.project-sync-state',
SYNC_STATE_MAX_SIZE: 128 * 1024,

SYNC_STATE_FILE: ".project-sync-state",
SYNC_STATE_MAX_SIZE: 128*1024,
saveProjectState(state, resources, basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
if (state == null) {
// remove the file if no state passed in
logger.log({ state, basePath }, 'clearing sync state')
return fs.unlink(stateFile, function(err) {
if (err != null && err.code !== 'ENOENT') {
return callback(err)
} else {
return callback()
}
})
} else {
logger.log({ state, basePath }, 'writing sync state')
const resourceList = Array.from(resources).map(resource => resource.path)
return fs.writeFile(
stateFile,
[...Array.from(resourceList), `stateHash:${state}`].join('\n'),
callback
)
}
},

saveProjectState(state, resources, basePath, callback) {
if (callback == null) { callback = function(error) {}; }
const stateFile = Path.join(basePath, this.SYNC_STATE_FILE);
if ((state == null)) { // remove the file if no state passed in
logger.log({state, basePath}, "clearing sync state");
return fs.unlink(stateFile, function(err) {
if ((err != null) && (err.code !== 'ENOENT')) {
return callback(err);
} else {
return callback();
}
});
} else {
logger.log({state, basePath}, "writing sync state");
const resourceList = (Array.from(resources).map((resource) => resource.path));
return fs.writeFile(stateFile, [...Array.from(resourceList), `stateHash:${state}`].join("\n"), callback);
}
},
checkProjectStateMatches(state, basePath, callback) {
if (callback == null) {
callback = function(error, resources) {}
}
const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
const size = this.SYNC_STATE_MAX_SIZE
return SafeReader.readFile(stateFile, size, 'utf8', function(
err,
result,
bytesRead
) {
if (err != null) {
return callback(err)
}
if (bytesRead === size) {
logger.error(
{ file: stateFile, size, bytesRead },
'project state file truncated'
)
}
const array =
__guard__(result != null ? result.toString() : undefined, x =>
x.split('\n')
) || []
const adjustedLength = Math.max(array.length, 1)
const resourceList = array.slice(0, adjustedLength - 1)
const oldState = array[adjustedLength - 1]
const newState = `stateHash:${state}`
logger.log(
{ state, oldState, basePath, stateMatches: newState === oldState },
'checking sync state'
)
if (newState !== oldState) {
return callback(
new Errors.FilesOutOfSyncError('invalid state for incremental update')
)
} else {
const resources = Array.from(resourceList).map(path => ({ path }))
return callback(null, resources)
}
})
},

checkProjectStateMatches(state, basePath, callback) {
if (callback == null) { callback = function(error, resources) {}; }
const stateFile = Path.join(basePath, this.SYNC_STATE_FILE);
const size = this.SYNC_STATE_MAX_SIZE;
return SafeReader.readFile(stateFile, size, 'utf8', function(err, result, bytesRead) {
if (err != null) { return callback(err); }
if (bytesRead === size) {
logger.error({file:stateFile, size, bytesRead}, "project state file truncated");
}
const array = __guard__(result != null ? result.toString() : undefined, x => x.split("\n")) || [];
const adjustedLength = Math.max(array.length, 1);
const resourceList = array.slice(0, adjustedLength - 1);
const oldState = array[adjustedLength - 1];
const newState = `stateHash:${state}`;
logger.log({state, oldState, basePath, stateMatches: (newState === oldState)}, "checking sync state");
if (newState !== oldState) {
return callback(new Errors.FilesOutOfSyncError("invalid state for incremental update"));
} else {
const resources = (Array.from(resourceList).map((path) => ({path})));
return callback(null, resources);
}
});
},

checkResourceFiles(resources, allFiles, basePath, callback) {
// check the paths are all relative to current directory
let file;
if (callback == null) { callback = function(error) {}; }
for (file of Array.from(resources || [])) {
for (const dir of Array.from(__guard__(file != null ? file.path : undefined, x => x.split('/')))) {
if (dir === '..') {
return callback(new Error("relative path in resource file list"));
}
}
}
// check if any of the input files are not present in list of files
const seenFile = {};
for (file of Array.from(allFiles)) {
seenFile[file] = true;
}
const missingFiles = (Array.from(resources).filter((resource) => !seenFile[resource.path]).map((resource) => resource.path));
if ((missingFiles != null ? missingFiles.length : undefined) > 0) {
logger.err({missingFiles, basePath, allFiles, resources}, "missing input files for project");
return callback(new Errors.FilesOutOfSyncError("resource files missing in incremental update"));
} else {
return callback();
}
}
});
checkResourceFiles(resources, allFiles, basePath, callback) {
// check the paths are all relative to current directory
let file
if (callback == null) {
callback = function(error) {}
}
for (file of Array.from(resources || [])) {
for (const dir of Array.from(
__guard__(file != null ? file.path : undefined, x => x.split('/'))
)) {
if (dir === '..') {
return callback(new Error('relative path in resource file list'))
}
}
}
// check if any of the input files are not present in list of files
const seenFile = {}
for (file of Array.from(allFiles)) {
seenFile[file] = true
}
const missingFiles = Array.from(resources)
.filter(resource => !seenFile[resource.path])
.map(resource => resource.path)
if ((missingFiles != null ? missingFiles.length : undefined) > 0) {
logger.err(
{ missingFiles, basePath, allFiles, resources },
'missing input files for project'
)
return callback(
new Errors.FilesOutOfSyncError(
'resource files missing in incremental update'
)
)
} else {
return callback()
}
}
}

function __guard__(value, transform) {
return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
}
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
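
Concretely, saveProjectState writes one resource path per line followed by the stateHash line, so a .project-sync-state file looks like this (paths and hash illustrative):

main.tex
chapters/intro.tex
logo.png
stateHash:84e56b3c

checkProjectStateMatches reads this back, treats the last line as the stored state, and rejects the compile with a FilesOutOfSyncError when it differs.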
|
||||
|
|
|
@ -14,202 +14,339 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ResourceWriter;
const UrlCache = require("./UrlCache");
const Path = require("path");
const fs = require("fs");
const async = require("async");
const mkdirp = require("mkdirp");
const OutputFileFinder = require("./OutputFileFinder");
const ResourceStateManager = require("./ResourceStateManager");
const Metrics = require("./Metrics");
const logger = require("logger-sharelatex");
const settings = require("settings-sharelatex");
let ResourceWriter
const UrlCache = require('./UrlCache')
const Path = require('path')
const fs = require('fs')
const async = require('async')
const mkdirp = require('mkdirp')
const OutputFileFinder = require('./OutputFileFinder')
const ResourceStateManager = require('./ResourceStateManager')
const Metrics = require('./Metrics')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')

const parallelFileDownloads = settings.parallelFileDownloads || 1;
const parallelFileDownloads = settings.parallelFileDownloads || 1

module.exports = (ResourceWriter = {
module.exports = ResourceWriter = {
syncResourcesToDisk(request, basePath, callback) {
if (callback == null) {
callback = function(error, resourceList) {}
}
if (request.syncType === 'incremental') {
logger.log(
{ project_id: request.project_id, user_id: request.user_id },
'incremental sync'
)
return ResourceStateManager.checkProjectStateMatches(
request.syncState,
basePath,
function(error, resourceList) {
if (error != null) {
return callback(error)
}
return ResourceWriter._removeExtraneousFiles(
resourceList,
basePath,
function(error, outputFiles, allFiles) {
if (error != null) {
return callback(error)
}
return ResourceStateManager.checkResourceFiles(
resourceList,
allFiles,
basePath,
function(error) {
if (error != null) {
return callback(error)
}
return ResourceWriter.saveIncrementalResourcesToDisk(
request.project_id,
request.resources,
basePath,
function(error) {
if (error != null) {
return callback(error)
}
return callback(null, resourceList)
}
)
}
)
}
)
}
)
} else {
logger.log(
{ project_id: request.project_id, user_id: request.user_id },
'full sync'
)
return this.saveAllResourcesToDisk(
request.project_id,
request.resources,
basePath,
function(error) {
if (error != null) {
return callback(error)
}
return ResourceStateManager.saveProjectState(
request.syncState,
request.resources,
basePath,
function(error) {
if (error != null) {
return callback(error)
}
return callback(null, request.resources)
}
)
}
)
}
},

syncResourcesToDisk(request, basePath, callback) {
if (callback == null) { callback = function(error, resourceList) {}; }
if (request.syncType === "incremental") {
logger.log({project_id: request.project_id, user_id: request.user_id}, "incremental sync");
return ResourceStateManager.checkProjectStateMatches(request.syncState, basePath, function(error, resourceList) {
if (error != null) { return callback(error); }
return ResourceWriter._removeExtraneousFiles(resourceList, basePath, function(error, outputFiles, allFiles) {
if (error != null) { return callback(error); }
return ResourceStateManager.checkResourceFiles(resourceList, allFiles, basePath, function(error) {
if (error != null) { return callback(error); }
return ResourceWriter.saveIncrementalResourcesToDisk(request.project_id, request.resources, basePath, function(error) {
if (error != null) { return callback(error); }
return callback(null, resourceList);
});
});
});
});
} else {
logger.log({project_id: request.project_id, user_id: request.user_id}, "full sync");
return this.saveAllResourcesToDisk(request.project_id, request.resources, basePath, function(error) {
if (error != null) { return callback(error); }
return ResourceStateManager.saveProjectState(request.syncState, request.resources, basePath, function(error) {
if (error != null) { return callback(error); }
return callback(null, request.resources);
});
});
}
},
saveIncrementalResourcesToDisk(project_id, resources, basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
return this._createDirectory(basePath, error => {
if (error != null) {
return callback(error)
}
const jobs = Array.from(resources).map(resource =>
(resource => {
return callback =>
this._writeResourceToDisk(project_id, resource, basePath, callback)
})(resource)
)
return async.parallelLimit(jobs, parallelFileDownloads, callback)
})
},

saveIncrementalResourcesToDisk(project_id, resources, basePath, callback) {
if (callback == null) { callback = function(error) {}; }
return this._createDirectory(basePath, error => {
if (error != null) { return callback(error); }
const jobs = Array.from(resources).map((resource) =>
(resource => {
return callback => this._writeResourceToDisk(project_id, resource, basePath, callback);
})(resource));
return async.parallelLimit(jobs, parallelFileDownloads, callback);
});
},
saveAllResourcesToDisk(project_id, resources, basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
return this._createDirectory(basePath, error => {
if (error != null) {
return callback(error)
}
return this._removeExtraneousFiles(resources, basePath, error => {
if (error != null) {
return callback(error)
}
const jobs = Array.from(resources).map(resource =>
(resource => {
return callback =>
this._writeResourceToDisk(
project_id,
resource,
basePath,
callback
)
})(resource)
)
return async.parallelLimit(jobs, parallelFileDownloads, callback)
})
})
},

saveAllResourcesToDisk(project_id, resources, basePath, callback) {
if (callback == null) { callback = function(error) {}; }
return this._createDirectory(basePath, error => {
if (error != null) { return callback(error); }
return this._removeExtraneousFiles(resources, basePath, error => {
if (error != null) { return callback(error); }
const jobs = Array.from(resources).map((resource) =>
(resource => {
return callback => this._writeResourceToDisk(project_id, resource, basePath, callback);
})(resource));
return async.parallelLimit(jobs, parallelFileDownloads, callback);
});
});
},
_createDirectory(basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
return fs.mkdir(basePath, function(err) {
if (err != null) {
if (err.code === 'EEXIST') {
return callback()
} else {
logger.log({ err, dir: basePath }, 'error creating directory')
return callback(err)
}
} else {
return callback()
}
})
},

_createDirectory(basePath, callback) {
if (callback == null) { callback = function(error) {}; }
return fs.mkdir(basePath, function(err) {
if (err != null) {
if (err.code === 'EEXIST') {
return callback();
} else {
logger.log({err, dir:basePath}, "error creating directory");
return callback(err);
}
} else {
return callback();
}
});
},
_removeExtraneousFiles(resources, basePath, _callback) {
if (_callback == null) {
_callback = function(error, outputFiles, allFiles) {}
}
const timer = new Metrics.Timer('unlink-output-files')
const callback = function(error, ...result) {
timer.done()
return _callback(error, ...Array.from(result))
}

_removeExtraneousFiles(resources, basePath, _callback) {
if (_callback == null) { _callback = function(error, outputFiles, allFiles) {}; }
const timer = new Metrics.Timer("unlink-output-files");
const callback = function(error, ...result) {
timer.done();
return _callback(error, ...Array.from(result));
};
return OutputFileFinder.findOutputFiles(resources, basePath, function(
error,
outputFiles,
allFiles
) {
if (error != null) {
return callback(error)
}

return OutputFileFinder.findOutputFiles(resources, basePath, function(error, outputFiles, allFiles) {
if (error != null) { return callback(error); }
const jobs = []
for (const file of Array.from(outputFiles || [])) {
;(function(file) {
const { path } = file
let should_delete = true
if (
path.match(/^output\./) ||
path.match(/\.aux$/) ||
path.match(/^cache\//)
) {
// knitr cache
should_delete = false
}
if (path.match(/^output-.*/)) {
// Tikz cached figures (default case)
should_delete = false
}
if (path.match(/\.(pdf|dpth|md5)$/)) {
// Tikz cached figures (by extension)
should_delete = false
}
if (
path.match(/\.(pygtex|pygstyle)$/) ||
path.match(/(^|\/)_minted-[^\/]+\//)
) {
// minted files/directory
should_delete = false
}
if (
path.match(/\.md\.tex$/) ||
path.match(/(^|\/)_markdown_[^\/]+\//)
) {
// markdown files/directory
should_delete = false
}
if (path.match(/-eps-converted-to\.pdf$/)) {
// Epstopdf generated files
should_delete = false
}
if (
path === 'output.pdf' ||
path === 'output.dvi' ||
path === 'output.log' ||
path === 'output.xdv'
) {
should_delete = true
}
if (path === 'output.tex') {
// created by TikzManager if present in output files
should_delete = true
}
if (should_delete) {
return jobs.push(callback =>
ResourceWriter._deleteFileIfNotDirectory(
Path.join(basePath, path),
callback
)
)
}
})(file)
}

const jobs = [];
for (const file of Array.from(outputFiles || [])) {
(function(file) {
const { path } = file;
let should_delete = true;
if (path.match(/^output\./) || path.match(/\.aux$/) || path.match(/^cache\//)) { // knitr cache
should_delete = false;
}
if (path.match(/^output-.*/)) { // Tikz cached figures (default case)
should_delete = false;
}
if (path.match(/\.(pdf|dpth|md5)$/)) { // Tikz cached figures (by extension)
should_delete = false;
}
if (path.match(/\.(pygtex|pygstyle)$/) || path.match(/(^|\/)_minted-[^\/]+\//)) { // minted files/directory
should_delete = false;
}
if (path.match(/\.md\.tex$/) || path.match(/(^|\/)_markdown_[^\/]+\//)) { // markdown files/directory
should_delete = false;
}
if (path.match(/-eps-converted-to\.pdf$/)) { // Epstopdf generated files
should_delete = false;
}
if ((path === "output.pdf") || (path === "output.dvi") || (path === "output.log") || (path === "output.xdv")) {
should_delete = true;
}
if (path === "output.tex") { // created by TikzManager if present in output files
should_delete = true;
}
if (should_delete) {
return jobs.push(callback => ResourceWriter._deleteFileIfNotDirectory(Path.join(basePath, path), callback));
}
})(file);
}
return async.series(jobs, function(error) {
if (error != null) {
return callback(error)
}
return callback(null, outputFiles, allFiles)
})
})
},

return async.series(jobs, function(error) {
if (error != null) { return callback(error); }
return callback(null, outputFiles, allFiles);
});
});
},
_deleteFileIfNotDirectory(path, callback) {
if (callback == null) {
callback = function(error) {}
}
return fs.stat(path, function(error, stat) {
if (error != null && error.code === 'ENOENT') {
return callback()
} else if (error != null) {
logger.err(
{ err: error, path },
'error stating file in deleteFileIfNotDirectory'
)
return callback(error)
} else if (stat.isFile()) {
return fs.unlink(path, function(error) {
if (error != null) {
logger.err(
{ err: error, path },
'error removing file in deleteFileIfNotDirectory'
)
return callback(error)
} else {
return callback()
}
})
} else {
return callback()
}
})
},

_deleteFileIfNotDirectory(path, callback) {
if (callback == null) { callback = function(error) {}; }
return fs.stat(path, function(error, stat) {
if ((error != null) && (error.code === 'ENOENT')) {
return callback();
} else if (error != null) {
logger.err({err: error, path}, "error stating file in deleteFileIfNotDirectory");
return callback(error);
} else if (stat.isFile()) {
return fs.unlink(path, function(error) {
if (error != null) {
logger.err({err: error, path}, "error removing file in deleteFileIfNotDirectory");
return callback(error);
} else {
return callback();
}
});
} else {
return callback();
}
});
},
_writeResourceToDisk(project_id, resource, basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
return ResourceWriter.checkPath(basePath, resource.path, function(
error,
path
) {
if (error != null) {
return callback(error)
}
return mkdirp(Path.dirname(path), function(error) {
if (error != null) {
return callback(error)
}
// TODO: Don't overwrite file if it hasn't been modified
if (resource.url != null) {
return UrlCache.downloadUrlToFile(
project_id,
resource.url,
path,
resource.modified,
function(err) {
if (err != null) {
logger.err(
{
err,
project_id,
path,
resource_url: resource.url,
modified: resource.modified
},
'error downloading file for resources'
)
}
return callback()
}
) // try and continue compiling even if http resource can not be downloaded at this time
} else {
const process = require('process')
fs.writeFile(path, resource.content, callback)
try {
let result
return (result = fs.lstatSync(path))
} catch (e) {}
}
})
})
},

_writeResourceToDisk(project_id, resource, basePath, callback) {
if (callback == null) { callback = function(error) {}; }
return ResourceWriter.checkPath(basePath, resource.path, function(error, path) {
if (error != null) { return callback(error); }
return mkdirp(Path.dirname(path), function(error) {
if (error != null) { return callback(error); }
// TODO: Don't overwrite file if it hasn't been modified
if (resource.url != null) {
return UrlCache.downloadUrlToFile(project_id, resource.url, path, resource.modified, function(err){
if (err != null) {
logger.err({err, project_id, path, resource_url:resource.url, modified:resource.modified}, "error downloading file for resources");
}
return callback();
}); // try and continue compiling even if http resource can not be downloaded at this time
} else {
const process = require("process");
fs.writeFile(path, resource.content, callback);
try {
let result;
return result = fs.lstatSync(path);
} catch (e) {}
}
});
});
},

checkPath(basePath, resourcePath, callback) {
const path = Path.normalize(Path.join(basePath, resourcePath));
if (path.slice(0, basePath.length + 1) !== (basePath + "/")) {
return callback(new Error("resource path is outside root directory"));
} else {
return callback(null, path);
}
}
});
checkPath(basePath, resourcePath, callback) {
const path = Path.normalize(Path.join(basePath, resourcePath))
if (path.slice(0, basePath.length + 1) !== basePath + '/') {
return callback(new Error('resource path is outside root directory'))
} else {
return callback(null, path)
}
}
}
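Both versions of checkPath above do the real security work in this file: join the paths, normalize, then require the result to still start with the base directory. A standalone sketch of the same idea, throwing instead of taking a callback for brevity:

// Sketch of the checkPath prefix test used above.
const Path = require('path')

function checkPath(basePath, resourcePath) {
  const path = Path.normalize(Path.join(basePath, resourcePath))
  // normalize() resolves '../' segments, so a traversal attempt
  // no longer starts with `${basePath}/` afterwards.
  if (path.slice(0, basePath.length + 1) !== basePath + '/') {
    throw new Error('resource path is outside root directory')
  }
  return path
}

console.log(checkPath('/compile/project', 'figures/plot.pdf')) // ok
// checkPath('/compile/project', '../other/file.tex') // throws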
@ -12,36 +12,49 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let SafeReader;
const fs = require("fs");
const logger = require("logger-sharelatex");
let SafeReader
const fs = require('fs')
const logger = require('logger-sharelatex')

module.exports = (SafeReader = {
module.exports = SafeReader = {
// safely read up to size bytes from a file and return result as a
// string

// safely read up to size bytes from a file and return result as a
// string
readFile(file, size, encoding, callback) {
if (callback == null) {
callback = function(error, result) {}
}
return fs.open(file, 'r', function(err, fd) {
if (err != null && err.code === 'ENOENT') {
return callback()
}
if (err != null) {
return callback(err)
}

readFile(file, size, encoding, callback) {
if (callback == null) { callback = function(error, result) {}; }
return fs.open(file, 'r', function(err, fd) {
if ((err != null) && (err.code === 'ENOENT')) { return callback(); }
if (err != null) { return callback(err); }

// safely return always closing the file
const callbackWithClose = (err, ...result) =>
fs.close(fd, function(err1) {
if (err != null) { return callback(err); }
if (err1 != null) { return callback(err1); }
return callback(null, ...Array.from(result));
})
;

const buff = new Buffer(size, 0); // fill with zeros
return fs.read(fd, buff, 0, buff.length, 0, function(err, bytesRead, buffer) {
if (err != null) { return callbackWithClose(err); }
const result = buffer.toString(encoding, 0, bytesRead);
return callbackWithClose(null, result, bytesRead);
});
});
}
});
// safely return always closing the file
const callbackWithClose = (err, ...result) =>
fs.close(fd, function(err1) {
if (err != null) {
return callback(err)
}
if (err1 != null) {
return callback(err1)
}
return callback(null, ...Array.from(result))
})
const buff = new Buffer(size, 0) // fill with zeros
return fs.read(fd, buff, 0, buff.length, 0, function(
err,
bytesRead,
buffer
) {
if (err != null) {
return callbackWithClose(err)
}
const result = buffer.toString(encoding, 0, bytesRead)
return callbackWithClose(null, result, bytesRead)
})
})
}
}
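The point of SafeReader is to read at most size bytes through fs.read rather than pulling a whole (possibly huge) file into memory with fs.readFile. A promise-based sketch of the same bounded read; it assumes fs.promises (a newer Node than this commit targeted) and uses Buffer.alloc in place of the deprecated new Buffer:

// Bounded read: at most `size` bytes, always closing the handle.
const fsPromises = require('fs').promises

async function readFileCapped(file, size, encoding) {
  const handle = await fsPromises.open(file, 'r')
  try {
    const buff = Buffer.alloc(size) // zero-filled
    const { bytesRead } = await handle.read(buff, 0, size, 0)
    return buff.toString(encoding, 0, bytesRead)
  } finally {
    await handle.close()
  }
}

readFileCapped('output.log', 65536, 'utf8').then(console.log)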
@ -14,59 +14,81 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ForbidSymlinks;
const Path = require("path");
const fs = require("fs");
const Settings = require("settings-sharelatex");
const logger = require("logger-sharelatex");
const url = require("url");
let ForbidSymlinks
const Path = require('path')
const fs = require('fs')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const url = require('url')

module.exports = (ForbidSymlinks = function(staticFn, root, options) {
const expressStatic = staticFn(root, options);
const basePath = Path.resolve(root);
return function(req, res, next) {
let file, project_id, result;
const path = __guard__(url.parse(req.url), x => x.pathname);
// check that the path is of the form /project_id_or_name/path/to/file.log
if (result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/)) {
project_id = result[1];
file = result[2];
} else {
logger.warn({path}, "unrecognized file request");
return res.sendStatus(404);
}
// check that the file does not use a relative path
for (const dir of Array.from(file.split('/'))) {
if (dir === '..') {
logger.warn({path}, "attempt to use a relative path");
return res.sendStatus(404);
}
}
// check that the requested path is normalized
const requestedFsPath = `${basePath}/${project_id}/${file}`;
if (requestedFsPath !== Path.normalize(requestedFsPath)) {
logger.error({path: requestedFsPath}, "requestedFsPath is not normalized");
return res.sendStatus(404);
}
// check that the requested path is not a symlink
return fs.realpath(requestedFsPath, function(err, realFsPath){
if (err != null) {
if (err.code === 'ENOENT') {
return res.sendStatus(404);
} else {
logger.error({err, requestedFsPath, realFsPath, path: req.params[0], project_id: req.params.project_id}, "error checking file access");
return res.sendStatus(500);
}
} else if (requestedFsPath !== realFsPath) {
logger.warn({requestedFsPath, realFsPath, path: req.params[0], project_id: req.params.project_id}, "trying to access a different file (symlink), aborting");
return res.sendStatus(404);
} else {
return expressStatic(req, res, next);
}
});
};
});
module.exports = ForbidSymlinks = function(staticFn, root, options) {
const expressStatic = staticFn(root, options)
const basePath = Path.resolve(root)
return function(req, res, next) {
let file, project_id, result
const path = __guard__(url.parse(req.url), x => x.pathname)
// check that the path is of the form /project_id_or_name/path/to/file.log
if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) {
project_id = result[1]
file = result[2]
} else {
logger.warn({ path }, 'unrecognized file request')
return res.sendStatus(404)
}
// check that the file does not use a relative path
for (const dir of Array.from(file.split('/'))) {
if (dir === '..') {
logger.warn({ path }, 'attempt to use a relative path')
return res.sendStatus(404)
}
}
// check that the requested path is normalized
const requestedFsPath = `${basePath}/${project_id}/${file}`
if (requestedFsPath !== Path.normalize(requestedFsPath)) {
logger.error(
{ path: requestedFsPath },
'requestedFsPath is not normalized'
)
return res.sendStatus(404)
}
// check that the requested path is not a symlink
return fs.realpath(requestedFsPath, function(err, realFsPath) {
if (err != null) {
if (err.code === 'ENOENT') {
return res.sendStatus(404)
} else {
logger.error(
{
err,
requestedFsPath,
realFsPath,
path: req.params[0],
project_id: req.params.project_id
},
'error checking file access'
)
return res.sendStatus(500)
}
} else if (requestedFsPath !== realFsPath) {
logger.warn(
{
requestedFsPath,
realFsPath,
path: req.params[0],
project_id: req.params.project_id
},
'trying to access a different file (symlink), aborting'
)
return res.sendStatus(404)
} else {
return expressStatic(req, res, next)
}
})
}
}

function __guard__(value, transform) {
return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined;
}
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
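The symlink defence above comes down to one comparison: fs.realpath resolves every link in the path, so if the resolved path differs from the literal one, a symlink was involved and the request is refused. A condensed sketch of that check:

// Sketch of the realpath comparison used above.
const fs = require('fs')

function isSafeToServe(requestedFsPath, callback) {
  fs.realpath(requestedFsPath, function(err, realFsPath) {
    if (err != null) {
      // ENOENT means "missing file" (a 404 above); anything else is a 500.
      return callback(err.code === 'ENOENT' ? null : err, false)
    }
    // Any symlink in the path makes realpath differ from the request.
    return callback(null, requestedFsPath === realFsPath)
  })
}

isSafeToServe('/compiles/abc123/output.pdf', (err, ok) =>
  console.log(err || (ok ? 'serve' : 'reject'))
)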
@ -11,52 +11,84 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let TikzManager;
const fs = require("fs");
const Path = require("path");
const ResourceWriter = require("./ResourceWriter");
const SafeReader = require("./SafeReader");
const logger = require("logger-sharelatex");
let TikzManager
const fs = require('fs')
const Path = require('path')
const ResourceWriter = require('./ResourceWriter')
const SafeReader = require('./SafeReader')
const logger = require('logger-sharelatex')

// for \tikzexternalize or pstool to work the main file needs to match the
// jobname. Since we set the -jobname to output, we have to create a
// copy of the main file as 'output.tex'.

module.exports = (TikzManager = {
module.exports = TikzManager = {
checkMainFile(compileDir, mainFile, resources, callback) {
// if there's already an output.tex file, we don't want to touch it
if (callback == null) {
callback = function(error, needsMainFile) {}
}
for (const resource of Array.from(resources)) {
if (resource.path === 'output.tex') {
logger.log({ compileDir, mainFile }, 'output.tex already in resources')
return callback(null, false)
}
}
// if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
return ResourceWriter.checkPath(compileDir, mainFile, function(
error,
path
) {
if (error != null) {
return callback(error)
}
return SafeReader.readFile(path, 65536, 'utf8', function(error, content) {
if (error != null) {
return callback(error)
}
const usesTikzExternalize =
(content != null
? content.indexOf('\\tikzexternalize')
: undefined) >= 0
const usesPsTool =
(content != null ? content.indexOf('{pstool}') : undefined) >= 0
logger.log(
{ compileDir, mainFile, usesTikzExternalize, usesPsTool },
'checked for packages needing main file as output.tex'
)
const needsMainFile = usesTikzExternalize || usesPsTool
return callback(null, needsMainFile)
})
})
},

checkMainFile(compileDir, mainFile, resources, callback) {
// if there's already an output.tex file, we don't want to touch it
if (callback == null) { callback = function(error, needsMainFile) {}; }
for (const resource of Array.from(resources)) {
if (resource.path === "output.tex") {
logger.log({compileDir, mainFile}, "output.tex already in resources");
return callback(null, false);
}
}
// if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
return ResourceWriter.checkPath(compileDir, mainFile, function(error, path) {
if (error != null) { return callback(error); }
return SafeReader.readFile(path, 65536, "utf8", function(error, content) {
if (error != null) { return callback(error); }
const usesTikzExternalize = (content != null ? content.indexOf("\\tikzexternalize") : undefined) >= 0;
const usesPsTool = (content != null ? content.indexOf("{pstool}") : undefined) >= 0;
logger.log({compileDir, mainFile, usesTikzExternalize, usesPsTool}, "checked for packages needing main file as output.tex");
const needsMainFile = (usesTikzExternalize || usesPsTool);
return callback(null, needsMainFile);
});
});
},

injectOutputFile(compileDir, mainFile, callback) {
if (callback == null) { callback = function(error) {}; }
return ResourceWriter.checkPath(compileDir, mainFile, function(error, path) {
if (error != null) { return callback(error); }
return fs.readFile(path, "utf8", function(error, content) {
if (error != null) { return callback(error); }
logger.log({compileDir, mainFile}, "copied file to output.tex as project uses packages which require it");
// use wx flag to ensure that output file does not already exist
return fs.writeFile(Path.join(compileDir, "output.tex"), content, {flag:'wx'}, callback);
});
});
}
});
injectOutputFile(compileDir, mainFile, callback) {
if (callback == null) {
callback = function(error) {}
}
return ResourceWriter.checkPath(compileDir, mainFile, function(
error,
path
) {
if (error != null) {
return callback(error)
}
return fs.readFile(path, 'utf8', function(error, content) {
if (error != null) {
return callback(error)
}
logger.log(
{ compileDir, mainFile },
'copied file to output.tex as project uses packages which require it'
)
// use wx flag to ensure that output file does not already exist
return fs.writeFile(
Path.join(compileDir, 'output.tex'),
content,
{ flag: 'wx' },
callback
)
})
})
}
}
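Both injectOutputFile variants above lean on the 'wx' open flag: the write fails with EEXIST instead of clobbering an output.tex that already exists. A minimal sketch of that step on its own (directory and file names are illustrative):

// Copy the main file to output.tex without overwriting an existing one.
const fs = require('fs')
const Path = require('path')

function copyAsOutputTex(compileDir, mainFile, callback) {
  fs.readFile(Path.join(compileDir, mainFile), 'utf8', function(error, content) {
    if (error != null) return callback(error)
    // 'wx' = write, but fail if the target path already exists
    fs.writeFile(Path.join(compileDir, 'output.tex'), content, { flag: 'wx' }, callback)
  })
}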
@ -12,185 +12,267 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let UrlCache;
const db = require("./db");
const dbQueue = require("./DbQueue");
const UrlFetcher = require("./UrlFetcher");
const Settings = require("settings-sharelatex");
const crypto = require("crypto");
const fs = require("fs");
const logger = require("logger-sharelatex");
const async = require("async");
let UrlCache
const db = require('./db')
const dbQueue = require('./DbQueue')
const UrlFetcher = require('./UrlFetcher')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')
const fs = require('fs')
const logger = require('logger-sharelatex')
const async = require('async')

module.exports = (UrlCache = {
downloadUrlToFile(project_id, url, destPath, lastModified, callback) {
if (callback == null) { callback = function(error) {}; }
return UrlCache._ensureUrlIsInCache(project_id, url, lastModified, (error, pathToCachedUrl) => {
if (error != null) { return callback(error); }
return UrlCache._copyFile(pathToCachedUrl, destPath, function(error) {
if (error != null) {
return UrlCache._clearUrlDetails(project_id, url, () => callback(error));
} else {
return callback(error);
}
});
});
},
module.exports = UrlCache = {
downloadUrlToFile(project_id, url, destPath, lastModified, callback) {
if (callback == null) {
callback = function(error) {}
}
return UrlCache._ensureUrlIsInCache(
project_id,
url,
lastModified,
(error, pathToCachedUrl) => {
if (error != null) {
return callback(error)
}
return UrlCache._copyFile(pathToCachedUrl, destPath, function(error) {
if (error != null) {
return UrlCache._clearUrlDetails(project_id, url, () =>
callback(error)
)
} else {
return callback(error)
}
})
}
)
},

clearProject(project_id, callback) {
if (callback == null) { callback = function(error) {}; }
return UrlCache._findAllUrlsInProject(project_id, function(error, urls) {
logger.log({project_id, url_count: urls.length}, "clearing project URLs");
if (error != null) { return callback(error); }
const jobs = (Array.from(urls || [])).map((url) =>
(url =>
callback =>
UrlCache._clearUrlFromCache(project_id, url, function(error) {
if (error != null) {
logger.error({err: error, project_id, url}, "error clearing project URL");
}
return callback();
})

)(url));
return async.series(jobs, callback);
});
},
clearProject(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
return UrlCache._findAllUrlsInProject(project_id, function(error, urls) {
logger.log(
{ project_id, url_count: urls.length },
'clearing project URLs'
)
if (error != null) {
return callback(error)
}
const jobs = Array.from(urls || []).map(url =>
(url => callback =>
UrlCache._clearUrlFromCache(project_id, url, function(error) {
if (error != null) {
logger.error(
{ err: error, project_id, url },
'error clearing project URL'
)
}
return callback()
}))(url)
)
return async.series(jobs, callback)
})
},

_ensureUrlIsInCache(project_id, url, lastModified, callback) {
if (callback == null) { callback = function(error, pathOnDisk) {}; }
if (lastModified != null) {
// MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds.
// So round down to seconds
lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000);
}
return UrlCache._doesUrlNeedDownloading(project_id, url, lastModified, (error, needsDownloading) => {
if (error != null) { return callback(error); }
if (needsDownloading) {
logger.log({url, lastModified}, "downloading URL");
return UrlFetcher.pipeUrlToFile(url, UrlCache._cacheFilePathForUrl(project_id, url), error => {
if (error != null) { return callback(error); }
return UrlCache._updateOrCreateUrlDetails(project_id, url, lastModified, error => {
if (error != null) { return callback(error); }
return callback(null, UrlCache._cacheFilePathForUrl(project_id, url));
});
});
} else {
logger.log({url, lastModified}, "URL is up to date in cache");
return callback(null, UrlCache._cacheFilePathForUrl(project_id, url));
}
});
},

_doesUrlNeedDownloading(project_id, url, lastModified, callback) {
if (callback == null) { callback = function(error, needsDownloading) {}; }
if ((lastModified == null)) {
return callback(null, true);
}
return UrlCache._findUrlDetails(project_id, url, function(error, urlDetails) {
if (error != null) { return callback(error); }
if ((urlDetails == null) || (urlDetails.lastModified == null) || (urlDetails.lastModified.getTime() < lastModified.getTime())) {
return callback(null, true);
} else {
return callback(null, false);
}
});
},
_ensureUrlIsInCache(project_id, url, lastModified, callback) {
if (callback == null) {
callback = function(error, pathOnDisk) {}
}
if (lastModified != null) {
// MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds.
// So round down to seconds
lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
}
return UrlCache._doesUrlNeedDownloading(
project_id,
url,
lastModified,
(error, needsDownloading) => {
if (error != null) {
return callback(error)
}
if (needsDownloading) {
logger.log({ url, lastModified }, 'downloading URL')
return UrlFetcher.pipeUrlToFile(
url,
UrlCache._cacheFilePathForUrl(project_id, url),
error => {
if (error != null) {
return callback(error)
}
return UrlCache._updateOrCreateUrlDetails(
project_id,
url,
lastModified,
error => {
if (error != null) {
return callback(error)
}
return callback(
null,
UrlCache._cacheFilePathForUrl(project_id, url)
)
}
)
}
)
} else {
logger.log({ url, lastModified }, 'URL is up to date in cache')
return callback(null, UrlCache._cacheFilePathForUrl(project_id, url))
}
}
)
},

_cacheFileNameForUrl(project_id, url) {
return project_id + ":" + crypto.createHash("md5").update(url).digest("hex");
},
_doesUrlNeedDownloading(project_id, url, lastModified, callback) {
if (callback == null) {
callback = function(error, needsDownloading) {}
}
if (lastModified == null) {
return callback(null, true)
}
return UrlCache._findUrlDetails(project_id, url, function(
error,
urlDetails
) {
if (error != null) {
return callback(error)
}
if (
urlDetails == null ||
urlDetails.lastModified == null ||
urlDetails.lastModified.getTime() < lastModified.getTime()
) {
return callback(null, true)
} else {
return callback(null, false)
}
})
},

_cacheFilePathForUrl(project_id, url) {
return `${Settings.path.clsiCacheDir}/${UrlCache._cacheFileNameForUrl(project_id, url)}`;
},
_cacheFileNameForUrl(project_id, url) {
return (
project_id +
':' +
crypto
.createHash('md5')
.update(url)
.digest('hex')
)
},

_copyFile(from, to, _callback) {
if (_callback == null) { _callback = function(error) {}; }
const callbackOnce = function(error) {
if (error != null) {
logger.error({err: error, from, to}, "error copying file from cache");
}
_callback(error);
return _callback = function() {};
};
const writeStream = fs.createWriteStream(to);
const readStream = fs.createReadStream(from);
writeStream.on("error", callbackOnce);
readStream.on("error", callbackOnce);
writeStream.on("close", callbackOnce);
return writeStream.on("open", () => readStream.pipe(writeStream));
},
_cacheFilePathForUrl(project_id, url) {
return `${Settings.path.clsiCacheDir}/${UrlCache._cacheFileNameForUrl(
project_id,
url
)}`
},

_clearUrlFromCache(project_id, url, callback) {
if (callback == null) { callback = function(error) {}; }
return UrlCache._clearUrlDetails(project_id, url, function(error) {
if (error != null) { return callback(error); }
return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) {
if (error != null) { return callback(error); }
return callback(null);
});
});
},
_copyFile(from, to, _callback) {
if (_callback == null) {
_callback = function(error) {}
}
const callbackOnce = function(error) {
if (error != null) {
logger.error({ err: error, from, to }, 'error copying file from cache')
}
_callback(error)
return (_callback = function() {})
}
const writeStream = fs.createWriteStream(to)
const readStream = fs.createReadStream(from)
writeStream.on('error', callbackOnce)
readStream.on('error', callbackOnce)
writeStream.on('close', callbackOnce)
return writeStream.on('open', () => readStream.pipe(writeStream))
},

_deleteUrlCacheFromDisk(project_id, url, callback) {
if (callback == null) { callback = function(error) {}; }
return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function(error) {
if ((error != null) && (error.code !== 'ENOENT')) { // no error if the file isn't present
return callback(error);
} else {
return callback();
}
});
},
_clearUrlFromCache(project_id, url, callback) {
if (callback == null) {
callback = function(error) {}
}
return UrlCache._clearUrlDetails(project_id, url, function(error) {
if (error != null) {
return callback(error)
}
return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) {
if (error != null) {
return callback(error)
}
return callback(null)
})
})
},

_findUrlDetails(project_id, url, callback) {
if (callback == null) { callback = function(error, urlDetails) {}; }
const job = cb=>
db.UrlCache.find({where: { url, project_id }})
.then(urlDetails => cb(null, urlDetails))
.error(cb)
;
return dbQueue.queue.push(job, callback);
},
_deleteUrlCacheFromDisk(project_id, url, callback) {
if (callback == null) {
callback = function(error) {}
}
return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function(
error
) {
if (error != null && error.code !== 'ENOENT') {
// no error if the file isn't present
return callback(error)
} else {
return callback()
}
})
},

_updateOrCreateUrlDetails(project_id, url, lastModified, callback) {
if (callback == null) { callback = function(error) {}; }
const job = cb=>
db.UrlCache.findOrCreate({where: {url, project_id}})
.spread(
(urlDetails, created) =>
urlDetails.updateAttributes({lastModified})
.then(() => cb())
.error(cb)
)
.error(cb)
;
return dbQueue.queue.push(job, callback);
},
_findUrlDetails(project_id, url, callback) {
if (callback == null) {
callback = function(error, urlDetails) {}
}
const job = cb =>
db.UrlCache.find({ where: { url, project_id } })
.then(urlDetails => cb(null, urlDetails))
.error(cb)
return dbQueue.queue.push(job, callback)
},

_clearUrlDetails(project_id, url, callback) {
if (callback == null) { callback = function(error) {}; }
const job = cb=>
db.UrlCache.destroy({where: {url, project_id}})
.then(() => cb(null))
.error(cb)
;
return dbQueue.queue.push(job, callback);
},
_updateOrCreateUrlDetails(project_id, url, lastModified, callback) {
if (callback == null) {
callback = function(error) {}
}
const job = cb =>
db.UrlCache.findOrCreate({ where: { url, project_id } })
.spread((urlDetails, created) =>
urlDetails
.updateAttributes({ lastModified })
.then(() => cb())
.error(cb)
)
.error(cb)
return dbQueue.queue.push(job, callback)
},

_clearUrlDetails(project_id, url, callback) {
if (callback == null) {
callback = function(error) {}
}
const job = cb =>
db.UrlCache.destroy({ where: { url, project_id } })
.then(() => cb(null))
.error(cb)
return dbQueue.queue.push(job, callback)
},

_findAllUrlsInProject(project_id, callback) {
if (callback == null) { callback = function(error, urls) {}; }
const job = cb=>
db.UrlCache.findAll({where: { project_id }})
.then(
urlEntries => cb(null, urlEntries.map(entry => entry.url)))
.error(cb)
;
return dbQueue.queue.push(job, callback);
}
});

_findAllUrlsInProject(project_id, callback) {
if (callback == null) {
callback = function(error, urls) {}
}
const job = cb =>
db.UrlCache.findAll({ where: { project_id } })
.then(urlEntries =>
cb(
null,
urlEntries.map(entry => entry.url)
)
)
.error(cb)
return dbQueue.queue.push(job, callback)
}
}
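The rounding comment inside _ensureUrlIsInCache is the subtle part of this file: MySQL stores DATETIMEs to whole-second precision, so an incoming millisecond timestamp would always compare newer than the stored copy and force a pointless re-download. A small sketch of the comparison once the rounding is applied:

// Why lastModified is rounded down to whole seconds before comparing.
function needsDownloading(storedLastModified, incomingLastModified) {
  if (incomingLastModified == null) return true
  const rounded = new Date(
    Math.floor(incomingLastModified.getTime() / 1000) * 1000
  )
  return (
    storedLastModified == null ||
    storedLastModified.getTime() < rounded.getTime()
  )
}

const stored = new Date('2019-01-01T00:00:00Z')
// false: the 750 ms difference is dropped by the rounding
console.log(needsDownloading(stored, new Date('2019-01-01T00:00:00.750Z')))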
@ -12,85 +12,109 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let UrlFetcher;
const request = require("request").defaults({jar: false});
const fs = require("fs");
const logger = require("logger-sharelatex");
const settings = require("settings-sharelatex");
const URL = require('url');
let UrlFetcher
const request = require('request').defaults({ jar: false })
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const URL = require('url')

const oneMinute = 60 * 1000;
const oneMinute = 60 * 1000

module.exports = (UrlFetcher = {
pipeUrlToFile(url, filePath, _callback) {
if (_callback == null) { _callback = function(error) {}; }
const callbackOnce = function(error) {
if (timeoutHandler != null) { clearTimeout(timeoutHandler); }
_callback(error);
return _callback = function() {};
};
module.exports = UrlFetcher = {
pipeUrlToFile(url, filePath, _callback) {
if (_callback == null) {
_callback = function(error) {}
}
const callbackOnce = function(error) {
if (timeoutHandler != null) {
clearTimeout(timeoutHandler)
}
_callback(error)
return (_callback = function() {})
}

if (settings.filestoreDomainOveride != null) {
const p = URL.parse(url).path;
url = `${settings.filestoreDomainOveride}${p}`;
}
var timeoutHandler = setTimeout(function() {
timeoutHandler = null;
logger.error({url, filePath}, "Timed out downloading file to cache");
return callbackOnce(new Error(`Timed out downloading file to cache ${url}`));
}
// FIXME: maybe need to close fileStream here
, 3 * oneMinute);
if (settings.filestoreDomainOveride != null) {
const p = URL.parse(url).path
url = `${settings.filestoreDomainOveride}${p}`
}
var timeoutHandler = setTimeout(
function() {
timeoutHandler = null
logger.error({ url, filePath }, 'Timed out downloading file to cache')
return callbackOnce(
new Error(`Timed out downloading file to cache ${url}`)
)
},
// FIXME: maybe need to close fileStream here
3 * oneMinute
)

logger.log({url, filePath}, "started downloading url to cache");
const urlStream = request.get({url, timeout: oneMinute});
urlStream.pause(); // stop data flowing until we are ready
logger.log({ url, filePath }, 'started downloading url to cache')
const urlStream = request.get({ url, timeout: oneMinute })
urlStream.pause() // stop data flowing until we are ready

// attach handlers before setting up pipes
urlStream.on("error", function(error) {
logger.error({err: error, url, filePath}, "error downloading url");
return callbackOnce(error || new Error(`Something went wrong downloading the URL ${url}`));
});
// attach handlers before setting up pipes
urlStream.on('error', function(error) {
logger.error({ err: error, url, filePath }, 'error downloading url')
return callbackOnce(
error || new Error(`Something went wrong downloading the URL ${url}`)
)
})

urlStream.on("end", () => logger.log({url, filePath}, "finished downloading file into cache"));
urlStream.on('end', () =>
logger.log({ url, filePath }, 'finished downloading file into cache')
)

return urlStream.on("response", function(res) {
if ((res.statusCode >= 200) && (res.statusCode < 300)) {
const fileStream = fs.createWriteStream(filePath);
return urlStream.on('response', function(res) {
if (res.statusCode >= 200 && res.statusCode < 300) {
const fileStream = fs.createWriteStream(filePath)

// attach handlers before setting up pipes
fileStream.on('error', function(error) {
logger.error({err: error, url, filePath}, "error writing file into cache");
return fs.unlink(filePath, function(err) {
if (err != null) {
logger.err({err, filePath}, "error deleting file from cache");
}
return callbackOnce(error);
});
});
// attach handlers before setting up pipes
fileStream.on('error', function(error) {
logger.error(
{ err: error, url, filePath },
'error writing file into cache'
)
return fs.unlink(filePath, function(err) {
if (err != null) {
logger.err({ err, filePath }, 'error deleting file from cache')
}
return callbackOnce(error)
})
})

fileStream.on('finish', function() {
logger.log({url, filePath}, "finished writing file into cache");
return callbackOnce();
});
fileStream.on('finish', function() {
logger.log({ url, filePath }, 'finished writing file into cache')
return callbackOnce()
})

fileStream.on('pipe', () => logger.log({url, filePath}, "piping into filestream"));
fileStream.on('pipe', () =>
logger.log({ url, filePath }, 'piping into filestream')
)

urlStream.pipe(fileStream);
return urlStream.resume(); // now we are ready to handle the data
} else {
logger.error({statusCode: res.statusCode, url, filePath}, "unexpected status code downloading url to cache");
// https://nodejs.org/api/http.html#http_class_http_clientrequest
// If you add a 'response' event handler, then you must consume
// the data from the response object, either by calling
// response.read() whenever there is a 'readable' event, or by
// adding a 'data' handler, or by calling the .resume()
// method. Until the data is consumed, the 'end' event will not
// fire. Also, until the data is read it will consume memory
// that can eventually lead to a 'process out of memory' error.
urlStream.resume(); // discard the data
return callbackOnce(new Error(`URL returned non-success status code: ${res.statusCode} ${url}`));
}
});
}
});
urlStream.pipe(fileStream)
return urlStream.resume() // now we are ready to handle the data
} else {
logger.error(
{ statusCode: res.statusCode, url, filePath },
'unexpected status code downloading url to cache'
)
// https://nodejs.org/api/http.html#http_class_http_clientrequest
// If you add a 'response' event handler, then you must consume
// the data from the response object, either by calling
// response.read() whenever there is a 'readable' event, or by
// adding a 'data' handler, or by calling the .resume()
// method. Until the data is consumed, the 'end' event will not
// fire. Also, until the data is read it will consume memory
// that can eventually lead to a 'process out of memory' error.
urlStream.resume() // discard the data
return callbackOnce(
new Error(
`URL returned non-success status code: ${res.statusCode} ${url}`
)
)
}
})
}
}
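Two patterns carry UrlFetcher: resume() on a non-2xx response so its body is consumed (see the quoted Node docs above), and a run-once callback so the several stream error paths can never fire the callback twice. A sketch of the run-once wrapper in isolation:

// Run-once wrapper as used above: later calls become no-ops.
function once(callback) {
  let called = false
  return function(error) {
    if (called) return
    called = true
    callback(error)
  }
}

const done = once(err => console.log('finished:', err ? err.message : 'ok'))
done() // logs once
done(new Error('late error')) // silently ignored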
@ -8,57 +8,60 @@
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Sequelize = require("sequelize");
const Settings = require("settings-sharelatex");
const _ = require("underscore");
const logger = require("logger-sharelatex");
const Sequelize = require('sequelize')
const Settings = require('settings-sharelatex')
const _ = require('underscore')
const logger = require('logger-sharelatex')

const options = _.extend({logging:false}, Settings.mysql.clsi);
const options = _.extend({ logging: false }, Settings.mysql.clsi)

logger.log({dbPath:Settings.mysql.clsi.storage}, "connecting to db");
logger.log({ dbPath: Settings.mysql.clsi.storage }, 'connecting to db')

const sequelize = new Sequelize(
Settings.mysql.clsi.database,
Settings.mysql.clsi.username,
Settings.mysql.clsi.password,
options
);
Settings.mysql.clsi.database,
Settings.mysql.clsi.username,
Settings.mysql.clsi.password,
options
)

if (Settings.mysql.clsi.dialect === "sqlite") {
logger.log("running PRAGMA journal_mode=WAL;");
sequelize.query("PRAGMA journal_mode=WAL;");
sequelize.query("PRAGMA synchronous=OFF;");
sequelize.query("PRAGMA read_uncommitted = true;");
if (Settings.mysql.clsi.dialect === 'sqlite') {
logger.log('running PRAGMA journal_mode=WAL;')
sequelize.query('PRAGMA journal_mode=WAL;')
sequelize.query('PRAGMA synchronous=OFF;')
sequelize.query('PRAGMA read_uncommitted = true;')
}

module.exports = {
UrlCache: sequelize.define("UrlCache", {
url: Sequelize.STRING,
project_id: Sequelize.STRING,
lastModified: Sequelize.DATE
}, {
indexes: [
{fields: ['url', 'project_id']},
{fields: ['project_id']}
]
}),
UrlCache: sequelize.define(
'UrlCache',
{
url: Sequelize.STRING,
project_id: Sequelize.STRING,
lastModified: Sequelize.DATE
},
{
indexes: [{ fields: ['url', 'project_id'] }, { fields: ['project_id'] }]
}
),

Project: sequelize.define("Project", {
project_id: {type: Sequelize.STRING, primaryKey: true},
lastAccessed: Sequelize.DATE
}, {
indexes: [
{fields: ['lastAccessed']}
]
}),
Project: sequelize.define(
'Project',
{
project_id: { type: Sequelize.STRING, primaryKey: true },
lastAccessed: Sequelize.DATE
},
{
indexes: [{ fields: ['lastAccessed'] }]
}
),

op: Sequelize.Op,

sync() {
logger.log({dbPath:Settings.mysql.clsi.storage}, "syncing db schema");
return sequelize.sync()
.then(() => logger.log("db sync complete")).catch(err=> console.log(err, "error syncing"));
}
};
op: Sequelize.Op,

sync() {
logger.log({ dbPath: Settings.mysql.clsi.storage }, 'syncing db schema')
return sequelize
.sync()
.then(() => logger.log('db sync complete'))
.catch(err => console.log(err, 'error syncing'))
}
}
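For completeness, the models defined above are consumed elsewhere in the service roughly like this; a hypothetical usage sketch in the Sequelize-4-era API the diff itself uses (.spread, updateAttributes), with an invented project id:

// Hypothetical usage of the models defined above.
const db = require('./db')

db.sync() // create the tables on boot
db.Project.findOrCreate({ where: { project_id: 'abc123' } })
  .spread((project, created) =>
    project.updateAttributes({ lastAccessed: new Date() })
  )
  .then(() => console.log('project access recorded'))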