Merge pull request #260 from overleaf/jpa-bulk-dependency-upgrades
[misc] bulk dependency upgrades

Commit: 3de07dbef2
77 changed files with 2562 additions and 4133 deletions

The changes captured below cover the CLSI service: the Node base image and version file move from 12.21.0 to 12.22.3, the `settings-sharelatex` package is replaced by `@overleaf/settings` throughout, the ESLint and Prettier configurations are updated, and the new Prettier options (ES5 trailing commas, no parentheses around single arrow-function parameters) reformat most of the JavaScript files.
ESLint configuration (file path not shown in this capture):

@@ -3,9 +3,9 @@
 // https://github.com/sharelatex/sharelatex-dev-environment
 {
   "extends": [
+    "eslint:recommended",
     "standard",
-    "prettier",
-    "prettier/standard"
+    "prettier"
   ],
   "parserOptions": {
     "ecmaVersion": 2018

@@ -20,6 +20,19 @@
     "mocha": true
   },
   "rules": {
+    // TODO(das7pad): remove overrides after fixing all the violations manually (https://github.com/overleaf/issues/issues/3882#issuecomment-878999671)
+    // START of temporary overrides
+    "array-callback-return": "off",
+    "no-dupe-else-if": "off",
+    "no-var": "off",
+    "no-empty": "off",
+    "node/handle-callback-err": "off",
+    "no-loss-of-precision": "off",
+    "node/no-callback-literal": "off",
+    "node/no-path-concat": "off",
+    "prefer-regex-literals": "off",
+    // END of temporary overrides
+
     // Swap the no-unused-expressions rule with a more chai-friendly one
     "no-unused-expressions": 0,
     "chai-friendly/no-unused-expressions": "error",
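The overrides above are explicitly temporary. As a hedged illustration (not taken from the PR), this is the kind of legacy pattern that the no-var and node/handle-callback-err overrides keep passing lint until the violations are fixed manually:

// Illustrative only: patterns tolerated by the temporary overrides above.
const fs = require('fs')

// `no-var` violation: legacy `var` instead of `const`/`let`.
var legacyCounter = 0

fs.readFile('/etc/hostname', 'utf8', function (err, data) {
  // `node/handle-callback-err` violation: the `err` argument is never checked.
  legacyCounter += 1
  console.log(data, legacyCounter)
})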
services/clsi/.github/dependabot.yml (vendored, 2 lines changed):

@@ -20,4 +20,4 @@ updates:
   # future if we reorganise teams
   labels:
     - "dependencies"
-    - "Team-Magma"
+    - "type:maintenance"
Node version file (path not captured):

@@ -1 +1 @@
-12.21.0
+12.22.3
Prettier configuration:

@@ -2,6 +2,10 @@
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
 {
+  "arrowParens": "avoid",
   "semi": false,
-  "singleQuote": true
+  "singleQuote": true,
+  "trailingComma": "es5",
+  "tabWidth": 2,
+  "useTabs": false
 }
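As a rough illustration (not part of the diff), these options produce code in the style seen throughout the rest of this PR: no semicolons, single quotes, ES5 trailing commas, and arrow functions without parentheses around a single parameter.

// Illustrative sample formatted under the new options above.
const labels = [
  'dependencies',
  'type:maintenance', // trailing comma allowed by "trailingComma": "es5"
]

// "arrowParens": "avoid" drops the parentheses around a single parameter.
const upperCased = labels.map(label => label.toUpperCase())

console.log(upperCased)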
Dockerfile:

@@ -2,7 +2,7 @@
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment

-FROM node:12.21.0 as base
+FROM node:12.22.3 as base

 WORKDIR /app
 COPY install_deps.sh /app
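A minimal sketch (not from the PR) of how a service could assert at startup that it runs on the Node 12.22.x line targeted by this upgrade; the check itself is an assumption, only the version number comes from the diff.

// Sketch: fail fast if the runtime is older than the version targeted by this upgrade.
const [major, minor] = process.versions.node.split('.').map(Number)

if (major < 12 || (major === 12 && minor < 22)) {
  throw new Error(`Node 12.22.3 or newer expected, got ${process.versions.node}`)
}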
app.js (CLSI service entry point):

@@ -11,7 +11,7 @@ Metrics.initialize('clsi')
 const CompileController = require('./app/js/CompileController')
 const ContentController = require('./app/js/ContentController')
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
 const logger = require('logger-sharelatex')
 logger.initialize('clsi')
 if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {

@@ -157,7 +157,7 @@ const staticCompileServer = ForbidSymlinks(
       res.set('Etag', etag(path, stat))
     }
     return res.set('Content-Type', ContentTypeMapper.map(path))
-    }
+    },
   }
 )

@@ -177,7 +177,7 @@ const staticOutputServer = ForbidSymlinks(
       res.set('Etag', etag(path, stat))
     }
     return res.set('Content-Type', ContentTypeMapper.map(path))
-    }
+    },
   }
 )

@@ -201,28 +201,29 @@ app.get(
   ContentController.getPdfRange
 )

-app.get('/project/:project_id/build/:build_id/output/*', function (
-  req,
-  res,
-  next
-) {
-  // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
-  req.url =
-    `/${req.params.project_id}/` +
-    OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
-  return staticOutputServer(req, res, next)
-})
+app.get(
+  '/project/:project_id/build/:build_id/output/*',
+  function (req, res, next) {
+    // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
+    req.url =
+      `/${req.params.project_id}/` +
+      OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
+    return staticOutputServer(req, res, next)
+  }
+)

-app.get('/project/:project_id/user/:user_id/output/*', function (
-  req,
-  res,
-  next
-) {
-  // for specific user get the path to the top level file
-  logger.warn({ url: req.url }, 'direct request for file in compile directory')
-  req.url = `/${req.params.project_id}-${req.params.user_id}/${req.params[0]}`
-  return staticCompileServer(req, res, next)
-})
+app.get(
+  '/project/:project_id/user/:user_id/output/*',
+  function (req, res, next) {
+    // for specific user get the path to the top level file
+    logger.warn(
+      { url: req.url },
+      'direct request for file in compile directory'
+    )
+    req.url = `/${req.params.project_id}-${req.params.user_id}/${req.params[0]}`
+    return staticCompileServer(req, res, next)
+  }
+)

 app.get('/project/:project_id/output/*', function (req, res, next) {
   logger.warn({ url: req.url }, 'direct request for file in compile directory')

@@ -271,7 +272,7 @@ if (Settings.processLifespanLimitMs) {
 function runSmokeTest() {
   if (Settings.processTooOld) return
   logger.log('running smoke tests')
-  smokeTest.triggerRun((err) => {
+  smokeTest.triggerRun(err => {
     if (err) logger.error({ err }, 'smoke tests failed')
     setTimeout(runSmokeTest, 30 * 1000)
   })

@@ -364,12 +365,12 @@ loadHttpServer.post('/state/maint', function (req, res, next) {
 const port =
   __guard__(
     Settings.internal != null ? Settings.internal.clsi : undefined,
-    (x) => x.port
+    x => x.port
   ) || 3013
 const host =
   __guard__(
     Settings.internal != null ? Settings.internal.clsi : undefined,
-    (x1) => x1.host
+    x1 => x1.host
   ) || 'localhost'

 const loadTcpPort = Settings.internal.load_balancer_agent.load_port

@@ -381,12 +382,12 @@ if (!module.parent) {
   // handle uncaught exceptions when running in production
   if (Settings.catchErrors) {
     process.removeAllListeners('uncaughtException')
-    process.on('uncaughtException', (error) =>
+    process.on('uncaughtException', error =>
       logger.error({ err: error }, 'uncaughtException')
     )
   }

-  app.listen(port, host, (error) => {
+  app.listen(port, host, error => {
     if (error) {
       logger.fatal({ error }, `Error starting CLSI on ${host}:${port}`)
     } else {
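The recurring change across the JavaScript files in this PR is the move from the `settings-sharelatex` package to `@overleaf/settings`. A minimal sketch of the swap, assuming the new package is available on the require path; the port fallback mirrors the value seen in the diff above.

// Before the upgrade:
// const Settings = require('settings-sharelatex')

// After the upgrade, same usage, new package name:
const Settings = require('@overleaf/settings')

const clsiPort =
  Settings.internal != null && Settings.internal.clsi != null
    ? Settings.internal.clsi.port
    : 3013

console.log('CLSI would listen on port', clsiPort)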
CommandRunner:

@@ -6,7 +6,7 @@
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
 let commandRunnerPath
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
 const logger = require('logger-sharelatex')

 if ((Settings.clsi != null ? Settings.clsi.dockerRunner : undefined) === true) {
CompileController:

@@ -15,7 +15,7 @@
 let CompileController
 const RequestParser = require('./RequestParser')
 const CompileManager = require('./CompileManager')
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
 const Metrics = require('./Metrics')
 const ProjectPersistenceManager = require('./ProjectPersistenceManager')
 const logger = require('logger-sharelatex')

@@ -47,96 +47,94 @@ module.exports = CompileController = {
     if (error != null) {
       return next(error)
     }
-    return CompileManager.doCompileWithLock(request, function (
-      error,
-      outputFiles,
-      stats,
-      timings
-    ) {
+    return CompileManager.doCompileWithLock(
+      request,
+      function (error, outputFiles, stats, timings) {

[The callback body is unchanged in substance: it still maps errors to a status and HTTP code (423 "compile-in-progress" for AlreadyCompilingError, 409 "retry" for FilesOutOfSyncError, 503 "unavailable" for EPIPE, "terminated" / "validation-*" / "timedout", and 500 "error" for other failures), checks for a non-empty output.pdf to decide "success" vs "failure", logs 'project failed to compile successfully, no output.pdf generated' and any 'core' file found in the output, and finally sends res.status(code || 200).send({ compile: { status, error, stats, timings, outputFiles } }) with each output file augmented with its download url. The diff only re-indents this body one level inside the new argument list, removes parentheses around single arrow-function parameters such as `(file) =>`, and adds ES5 trailing commas such as `...file,`.]

@@ -195,7 +193,7 @@ module.exports = CompileController = {
       return next(error)
     }
     return res.json({
-      pdf: pdfPositions
+      pdf: pdfPositions,
     })
   }
 )

@@ -227,7 +225,7 @@ module.exports = CompileController = {
       return next(error)
     }
     return res.json({
-      code: codePositions
+      code: codePositions,
     })
   }
 )

@@ -246,17 +244,20 @@ module.exports = CompileController = {
     }
     logger.log({ image, file, project_id }, 'word count request')

-    return CompileManager.wordcount(project_id, user_id, file, image, function (
-      error,
-      result
-    ) {
-      if (error != null) {
-        return next(error)
-      }
-      return res.json({
-        texcount: result
-      })
-    })
+    return CompileManager.wordcount(
+      project_id,
+      user_id,
+      file,
+      image,
+      function (error, result) {
+        if (error != null) {
+          return next(error)
+        }
+        return res.json({
+          texcount: result,
+        })
+      }
+    )
   },

   status(req, res, next) {

@@ -264,5 +265,5 @@ module.exports = CompileController = {
       next = function (error) {}
     }
     return res.send('OK')
-  }
+  },
 }
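For readers skimming the -47 hunk above, a sketch of the response payload that the compile endpoint builds. The field names are taken from the diff; the concrete values and the host name are invented for illustration.

// Illustrative payload only; the shape matches the res.status(code || 200).send(...) call above.
const exampleCompileResponse = {
  compile: {
    status: 'success', // or 'failure', 'retry', 'compile-in-progress', 'timedout', ...
    error: null,
    stats: { 'latex-runs': 1 },
    timings: { compile: 1234, sync: 56, output: 78 },
    outputFiles: [
      {
        url: 'https://clsi.example.com/project/<project_id>/build/<build_id>/output/output.pdf',
        path: 'output.pdf',
        size: 102400,
        build: '<build_id>',
      },
    ],
  },
}

console.log(JSON.stringify(exampleCompileResponse, null, 2))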
CompileManager:

@@ -20,7 +20,7 @@ const ResourceWriter = require('./ResourceWriter')
 const LatexRunner = require('./LatexRunner')
 const OutputFileFinder = require('./OutputFileFinder')
 const OutputCacheManager = require('./OutputCacheManager')
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
 const Path = require('path')
 const logger = require('logger-sharelatex')
 const Metrics = require('./Metrics')

@@ -65,7 +65,7 @@ module.exports = CompileManager = {
   return LockManager.runWithLock(
     lockFile,
-    (releaseLock) => CompileManager.doCompile(request, releaseLock),
+    releaseLock => CompileManager.doCompile(request, releaseLock),
     callback
   )
 })

@@ -84,264 +84,266 @@ module.exports = CompileManager = {
     { project_id: request.project_id, user_id: request.user_id },
     'syncing resources to disk'
   )
-  return ResourceWriter.syncResourcesToDisk(request, compileDir, function (
-    error,
-    resourceList
-  ) {
+  return ResourceWriter.syncResourcesToDisk(
+    request,
+    compileDir,
+    function (error, resourceList) {
      // NOTE: resourceList is insecure, it should only be used to exclude files from the output list

[The rest of this long callback is unchanged in substance: it handles FilesOutOfSyncError ("files out of sync, please retry") and other write errors ("error writing resources to disk"), logs "written files to disk", applies draft-mode and TikZ file modifications via async.series (injectDraftModeIfRequired, createTikzFileIfRequired), sets up the chktex environment variables (CHKTEX_OPTIONS, CHKTEX_ULIMIT_OPTIONS, CHKTEX_EXIT_ON_ERROR, CHKTEX_VALIDATE), records the compiles and compiles-with-image.<tag> metrics, runs LatexRunner.runLatex, maps validation and termination errors, records per-pass metrics such as Metrics.timing('run-compile-per-pass', ts / stats['latex-runs']) and 'run-compile-cpu-time-per-pass', then saves output through OutputFileFinder.findOutputFiles and OutputCacheManager.saveOutputFiles, emits PDF stats, and calls back with (null, newOutputFiles, stats, timings). The diff only re-indents this body one level, removes parentheses around single arrow-function parameters, for example `(callback) =>` becomes `callback =>` and `(x) =>` becomes `x =>`, re-wraps a few long lines such as the Metrics.timing call and `const { project_id: projectId, user_id: userId } = request`, and adds ES5 trailing commas such as `compileGroup: request.compileGroup,` and `loadavg,`.]

@@ -377,13 +379,13 @@ module.exports = CompileManager = {
     '-f',
     '--',
     compileDir,
-    outputDir
+    outputDir,
   ])

   proc.on('error', callback)

   let stderr = ''
-  proc.stderr.setEncoding('utf8').on('data', (chunk) => (stderr += chunk))
+  proc.stderr.setEncoding('utf8').on('data', chunk => (stderr += chunk))

   return proc.on('close', function (code) {
     if (code === 0) {

@@ -406,7 +408,7 @@ module.exports = CompileManager = {
     if (err != null) {
       return callback(err)
     }
-    const allDirs = Array.from(files).map((file) => Path.join(root, file))
+    const allDirs = Array.from(files).map(file => Path.join(root, file))
     return callback(null, allDirs)
   })
 },

@@ -575,7 +577,7 @@ module.exports = CompileManager = {
   const timeout = 60 * 1000 // increased to allow for large projects
   const compileName = getCompileName(project_id, user_id)
   const compileGroup = 'synctex'
-  CompileManager._checkFileExists(directory, 'output.synctex.gz', (error) => {
+  CompileManager._checkFileExists(directory, 'output.synctex.gz', error => {
     if (error) {
       return callback(error)
     }

@@ -614,7 +616,7 @@ module.exports = CompileManager = {
       h: parseFloat(h),
       v: parseFloat(v),
       height: parseFloat(height),
-      width: parseFloat(width)
+      width: parseFloat(width),
     })

@@ -631,7 +633,7 @@ module.exports = CompileManager = {
     results.push({
       file,
       line: parseInt(line, 10),
-      column: parseInt(column, 10)
+      column: parseInt(column, 10),
     })

@@ -649,7 +651,7 @@ module.exports = CompileManager = {
     '-nocol',
     '-inc',
     file_path,
-    `-out=${file_path}.wc`
+    `-out=${file_path}.wc`,
   ]
   const compileDir = getCompileDir(project_id, user_id)
   const timeout = 60 * 1000

@@ -711,7 +713,7 @@ module.exports = CompileManager = {
     mathInline: 0,
     mathDisplay: 0,
     errors: 0,
-    messages: ''
+    messages: '',
   }
   for (const line of Array.from(output.split('\n'))) {
     const [data, info] = Array.from(line.split(':'))

@@ -749,7 +751,7 @@ module.exports = CompileManager = {
     return results
-  }
+  },
 }

 function __guard__(value, transform) {
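A small worked sketch (values invented) of the per-pass timing metrics computed at the end of the compile in the hunk above, using the stats and timings keys that appear in the diff:

// Illustrative numbers only; the metric names and formulas come from the diff above.
const stats = { 'latex-runs': 3 }
const timings = { 'cpu-time': 9.6 } // seconds of user CPU time reported by the runner
const ts = 12000 // wall-clock milliseconds for the whole run-compile stage

if (stats['latex-runs'] > 0) {
  const perPass = ts / stats['latex-runs'] // -> 4000 ms per latexmk pass
  console.log('run-compile-per-pass', perPass)
}

if (stats['latex-runs'] > 0 && timings['cpu-time'] > 0) {
  const cpuPerPass = timings['cpu-time'] / stats['latex-runs'] // -> 3.2 per pass
  console.log('run-compile-cpu-time-per-pass', cpuPerPass)
}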
PDF caching content manager (update / HashFileTracker):

@@ -6,7 +6,7 @@ const { callbackify } = require('util')
 const fs = require('fs')
 const crypto = require('crypto')
 const Path = require('path')
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
 const OError = require('@overleaf/o-error')
 const pLimit = require('p-limit')
 const { parseXrefTable } = require('../lib/pdfjs/parseXrefTable')

@@ -76,14 +76,14 @@ async function update(contentDir, filePath, size, compileTime) {
   if (bytesRead !== object.size) {
     throw new OError('could not read full chunk', {
       object,
-      bytesRead
+      bytesRead,
     })
   }
   const idxObj = buffer.indexOf('obj')
   if (idxObj > 100) {
     throw new OError('objectId is too large', {
       object,
-      idxObj
+      idxObj,
     })
   }
   const objectIdRaw = buffer.subarray(0, idxObj)

@@ -95,7 +95,7 @@ async function update(contentDir, filePath, size, compileTime) {
     objectId: objectIdRaw.toString(),
     start: object.offset + objectIdRaw.byteLength,
     end: object.endOffset,
-    hash
+    hash,
   }
   ranges.push(range)

@@ -168,7 +168,7 @@ class HashFileTracker {
   const statePath = getStatePath(this.contentDir)
   const blob = JSON.stringify({
     hashAge: Array.from(this.hashAge.entries()),
-    hashSize: Array.from(this.hashSize.entries())
+    hashSize: Array.from(this.hashSize.entries()),
   })
   const atomicWrite = statePath + '~'
   try {

@@ -198,7 +198,7 @@ class HashFileTracker {
     return reclaimedSpace
   }

-  await promiseMapWithLimit(10, hashes, async (hash) => {
+  await promiseMapWithLimit(10, hashes, async hash => {
     await fs.promises.unlink(Path.join(this.contentDir, hash))
     this.hashAge.delete(hash)
     reclaimedSpace += this.hashSize.get(hash)

@@ -251,7 +251,7 @@ function getDeadlineChecker(compileTime) {
   throw new TimedOutError(stage, {
     completedStages,
     lastStage: lastStage.stage,
-    diffToLastStage: now - lastStage.now
+    diffToLastStage: now - lastStage.now,
   })
 }
 completedStages++

@@ -261,13 +261,13 @@ function getDeadlineChecker(compileTime) {
 function promiseMapWithLimit(concurrency, array, fn) {
   const limit = pLimit(concurrency)
-  return Promise.all(array.map((x) => limit(() => fn(x))))
+  return Promise.all(array.map(x => limit(() => fn(x))))
 }

 module.exports = {
   HASH_REGEX: /^[0-9a-f]{64}$/,
   update: callbackify(update),
   promises: {
-    update
-  }
+    update,
+  },
 }
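The promiseMapWithLimit helper in the hunk above wraps the p-limit package. A minimal usage sketch, assuming a CommonJS-compatible version of p-limit as required in the diff:

const pLimit = require('p-limit')

// Same pattern as promiseMapWithLimit above: run fn over array with bounded concurrency.
function promiseMapWithLimit(concurrency, array, fn) {
  const limit = pLimit(concurrency)
  return Promise.all(array.map(x => limit(() => fn(x))))
}

// Illustrative use: items processed at most two at a time.
promiseMapWithLimit(2, ['a', 'b', 'c', 'd'], async item => {
  await new Promise(resolve => setTimeout(resolve, 10))
  return item.toUpperCase()
}).then(results => console.log(results)) // -> ['A', 'B', 'C', 'D']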
PDF caching metrics helper (getSystemLoad / emitPdfCachingStats):

@@ -4,13 +4,13 @@ const os = require('os')
 let CACHED_LOAD = {
   expires: -1,
-  load: [0, 0, 0]
+  load: [0, 0, 0],
 }
 function getSystemLoad() {
   if (CACHED_LOAD.expires < Date.now()) {
     CACHED_LOAD = {
       expires: Date.now() + 10 * 1000,
-      load: os.loadavg()
+      load: os.loadavg(),
     }
   }
   return CACHED_LOAD.load

@@ -47,7 +47,7 @@ function emitPdfCachingStats(stats, timings) {
   {
     stats,
     timings,
-    load: getSystemLoad()
+    load: getSystemLoad(),
   },
   'slow pdf caching'
 )

@@ -111,5 +111,5 @@ function emitPdfCachingStats(stats, timings) {
 module.exports = {
-  emitPdfStats
+  emitPdfStats,
 }
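The getSystemLoad helper above memoises os.loadavg() so that logging hot paths do not query it on every call. A self-contained sketch of the same pattern; the ten-second refresh interval is the one from the diff.

const os = require('os')

// Cache the 1/5/15-minute load averages and refresh them at most every 10 seconds,
// mirroring the CACHED_LOAD pattern in the hunk above.
let cached = { expires: -1, load: [0, 0, 0] }

function getSystemLoad() {
  if (cached.expires < Date.now()) {
    cached = { expires: Date.now() + 10 * 1000, load: os.loadavg() }
  }
  return cached.load
}

console.log('current load averages:', getSystemLoad())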
Static output serving module (uses send and OutputCacheManager):

@@ -1,6 +1,6 @@
 const Path = require('path')
 const send = require('send')
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
 const OutputCacheManager = require('./OutputCacheManager')

 const ONE_DAY_S = 24 * 60 * 60
ContentTypeMapper:

@@ -34,5 +34,5 @@ module.exports = ContentTypeMapper = {
       default:
         return 'application/octet-stream'
     }
-  }
+  },
 }
LockManager:

@@ -6,7 +6,7 @@
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
 const async = require('async')
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
 const logger = require('logger-sharelatex')
 const queue = async.queue(
   (task, cb) => task(cb),

@@ -109,5 +109,5 @@ module.exports = LockManager = {
       })
     )
   })
-  }
+  },
 }
DockerRunner:

@@ -1,4 +1,4 @@
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
 const logger = require('logger-sharelatex')
 const Docker = require('dockerode')
 const dockerode = new Docker()

@@ -52,7 +52,7 @@ const DockerRunner = {
   const volumes = { [directory]: '/compile' }

-  command = command.map((arg) =>
+  command = command.map(arg =>
     arg.toString().replace('$COMPILE_DIR', '/compile')
   )
   if (image == null) {

@@ -96,7 +96,7 @@ const DockerRunner = {
     { err: error, projectId },
     'error running container so destroying and retrying'
   )
-  DockerRunner.destroyContainer(name, null, true, (error) => {
+  DockerRunner.destroyContainer(name, null, true, error => {
     if (error != null) {
       return callback(error)
     }

@@ -120,7 +120,7 @@ const DockerRunner = {
 kill(containerId, callback) {
   logger.log({ containerId }, 'sending kill signal to container')
   const container = dockerode.getContainer(containerId)
-  container.kill((error) => {
+  container.kill(error => {
     if (
       error != null &&
       error.message != null &&

@@ -250,12 +250,12 @@ const DockerRunner = {
     {
       Name: 'cpu',
       Soft: timeoutInSeconds + 5,
-      Hard: timeoutInSeconds + 10
-    }
+      Hard: timeoutInSeconds + 10,
+    },
   ],
   CapDrop: 'ALL',
-  SecurityOpt: ['no-new-privileges']
-  }
+  SecurityOpt: ['no-new-privileges'],
+  },
 }

 if (Settings.path != null && Settings.path.synctexBinHostPath != null) {

@@ -303,12 +303,12 @@ const DockerRunner = {
 startContainer(options, volumes, attachStreamHandler, callback) {
   LockManager.runWithLock(
     options.name,
-    (releaseLock) =>
+    releaseLock =>
       // Check that volumes exist before starting the container.
       // When a container is started with volume pointing to a
       // non-existent directory then docker creates the directory but
       // with root ownership.
-      DockerRunner._checkVolumes(options, volumes, (err) => {
+      DockerRunner._checkVolumes(options, volumes, err => {
         if (err != null) {
           return releaseLock(err)
         }

@@ -343,7 +343,7 @@ const DockerRunner = {
   const jobs = []
   for (const vol in volumes) {
-    jobs.push((cb) => checkVolume(vol, cb))
+    jobs.push(cb => checkVolume(vol, cb))
   }
   async.series(jobs, callback)
 },

@@ -368,11 +368,11 @@ const DockerRunner = {
   DockerRunner.attachToContainer(
     options.name,
     attachStreamHandler,
-    (error) => {
+    error => {
       if (error != null) {
         return callback(error)
       }
-      container.start((error) => {
+      container.start(error => {
         if (error != null && error.statusCode !== 304) {
           callback(error)
         } else {

@@ -430,14 +430,14 @@ const DockerRunner = {
     {
       containerId,
       length: this.data.length,
-      maxLen: MAX_OUTPUT
+      maxLen: MAX_OUTPUT,
     },
     `${name} exceeds max size`
   )
   this.data += `(...truncated at ${MAX_OUTPUT} chars...)`
   this.overflowed = true
   }
-  }
+  },
   // kill container if too much output
   // docker.containers.kill(containerId, () ->)
 }

@@ -448,7 +448,7 @@ const DockerRunner = {
   container.modem.demuxStream(stream, stdout, stderr)

-  stream.on('error', (err) =>
+  stream.on('error', err =>
     logger.error(
       { err, containerId },
       'error reading from container stream'

@@ -470,7 +470,7 @@ const DockerRunner = {
   const timeoutId = setTimeout(() => {
     timedOut = true
     logger.log({ containerId }, 'timeout reached, killing container')
-    container.kill((err) => {
+    container.kill(err => {
       logger.warn({ err, containerId }, 'failed to kill container')
     })
   }, timeout)

@@ -507,7 +507,7 @@ const DockerRunner = {
   // supplied.
   LockManager.runWithLock(
     containerName,
-    (releaseLock) =>
+    releaseLock =>
       DockerRunner._destroyContainer(
         containerId || containerName,
         shouldForce,

@@ -520,7 +520,7 @@ const DockerRunner = {
 _destroyContainer(containerId, shouldForce, callback) {
   logger.log({ containerId }, 'destroying docker container')
   const container = dockerode.getContainer(containerId)
-  container.remove({ force: shouldForce === true, v: true }, (error) => {
+  container.remove({ force: shouldForce === true, v: true }, error => {
     if (error != null && error.statusCode === 404) {
       logger.warn(
         { err: error, containerId },

@@ -567,7 +567,7 @@ const DockerRunner = {
   // strip the / prefix
   // the LockManager uses the plain container name
   const plainName = name.slice(1)
-  jobs.push((cb) =>
+  jobs.push(cb =>
     DockerRunner.destroyContainer(plainName, id, false, () => cb())
   )
 }

@@ -592,7 +592,7 @@ const DockerRunner = {
   containerMonitorTimeout = setTimeout(() => {
     containerMonitorInterval = setInterval(
       () =>
-        DockerRunner.destroyOldContainers((err) => {
+        DockerRunner.destroyOldContainers(err => {
           if (err) {
             logger.error({ err }, 'failed to destroy old containers')
           }

@@ -611,7 +611,7 @@ const DockerRunner = {
     clearInterval(containerMonitorInterval)
     containerMonitorInterval = undefined
   }
-  }
+  },
 }

 DockerRunner.startContainerMonitor()
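A hedged sketch (the helper function and timeout value are invented) of the CPU ulimit block that the -250 hunk above reformats; it caps a compile container's CPU time slightly above the requested compile timeout.

// Sketch only: shows how the Ulimits entry from the diff above is derived from a timeout.
function buildCpuUlimits(timeoutInSeconds) {
  return [
    {
      Name: 'cpu',
      Soft: timeoutInSeconds + 5, // soft limit a little above the compile timeout
      Hard: timeoutInSeconds + 10, // hard limit as the final backstop
    },
  ]
}

console.log(buildCpuUlimits(60))
// -> [ { Name: 'cpu', Soft: 65, Hard: 70 } ]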
DraftModeManager:

@@ -37,7 +37,7 @@ module.exports = DraftModeManager = {
   {
     content: content.slice(0, 1024), // \documentclass is normally v near the top
     modified_content: modified_content.slice(0, 1024),
-    filename
+    filename,
   },
   'injected draft class'
 )

@@ -53,5 +53,5 @@ module.exports = DraftModeManager = {
       // Without existing options
       .replace(/\\documentclass\{/g, '\\documentclass[draft]{')
   )
-  }
+  },
 }
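For reference, a runnable sketch of the draft-mode injection performed by the replacement shown above on a document whose \documentclass has no existing options; the sample LaTeX source is invented.

// Illustrative input; the regex is the one shown in the DraftModeManager hunk above.
const content = '\\documentclass{article}\n\\begin{document}\nHello\n\\end{document}\n'

const modifiedContent = content.replace(
  /\\documentclass\{/g,
  '\\documentclass[draft]{'
)

console.log(modifiedContent.split('\n')[0])
// -> \documentclass[draft]{article}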
Errors:

@@ -37,5 +37,5 @@ module.exports = Errors = {
   TimedOutError,
   NotFoundError,
   FilesOutOfSyncError,
-  AlreadyCompilingError
+  AlreadyCompilingError,
 }
@@ -15,7 +15,7 @@
*/
let LatexRunner
const Path = require('path')
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const CommandRunner = require('./CommandRunner')
@@ -26,7 +26,7 @@ const ProcessTable = {} // table of currently running jobs (pids or docker conta
const TIME_V_METRICS = Object.entries({
'cpu-percent': /Percent of CPU this job got: (\d+)/m,
'cpu-time': /User time.*: (\d+.\d+)/m,
-'sys-time': /System time.*: (\d+.\d+)/m
+'sys-time': /System time.*: (\d+.\d+)/m,
})

module.exports = LatexRunner = {
@@ -43,7 +43,7 @@ module.exports = LatexRunner = {
image,
environment,
flags,
-compileGroup
+compileGroup,
} = options
if (!compiler) {
compiler = 'pdflatex'
@@ -60,7 +60,7 @@ module.exports = LatexRunner = {
mainFile,
environment,
flags,
-compileGroup
+compileGroup,
},
'starting compile'
)
@@ -102,13 +102,13 @@ module.exports = LatexRunner = {
}
const runs =
__guard__(
-__guard__(output != null ? output.stderr : undefined, (x1) =>
+__guard__(output != null ? output.stderr : undefined, x1 =>
x1.match(/^Run number \d+ of .*latex/gm)
),
-(x) => x.length
+x => x.length
) || 0
const failed =
-__guard__(output != null ? output.stdout : undefined, (x2) =>
+__guard__(output != null ? output.stdout : undefined, x2 =>
x2.match(/^Latexmk: Errors/m)
) != null
? 1
@@ -147,7 +147,7 @@ module.exports = LatexRunner = {
// internal method for writing non-empty log files
function _writeFile(file, content, cb) {
if (content && content.length > 0) {
-fs.writeFile(file, content, (err) => {
+fs.writeFile(file, content, err => {
if (err) {
logger.error({ project_id, file }, 'error writing log file') // don't fail on error
}
@@ -188,7 +188,7 @@ module.exports = LatexRunner = {
'-auxdir=$COMPILE_DIR',
'-outdir=$COMPILE_DIR',
'-synctex=1',
-'-interaction=batchmode'
+'-interaction=batchmode',
]
if (flags) {
args = args.concat(flags)
@@ -196,7 +196,7 @@ module.exports = LatexRunner = {
return (
__guard__(
Settings != null ? Settings.clsi : undefined,
-(x) => x.latexmkCommandPrefix
+x => x.latexmkCommandPrefix
) || []
).concat(args)
},
@@ -204,30 +204,30 @@ module.exports = LatexRunner = {
_pdflatexCommand(mainFile, flags) {
return LatexRunner._latexmkBaseCommand(flags).concat([
'-pdf',
-Path.join('$COMPILE_DIR', mainFile)
+Path.join('$COMPILE_DIR', mainFile),
])
},

_latexCommand(mainFile, flags) {
return LatexRunner._latexmkBaseCommand(flags).concat([
'-pdfdvi',
-Path.join('$COMPILE_DIR', mainFile)
+Path.join('$COMPILE_DIR', mainFile),
])
},

_xelatexCommand(mainFile, flags) {
return LatexRunner._latexmkBaseCommand(flags).concat([
'-xelatex',
-Path.join('$COMPILE_DIR', mainFile)
+Path.join('$COMPILE_DIR', mainFile),
])
},

_lualatexCommand(mainFile, flags) {
return LatexRunner._latexmkBaseCommand(flags).concat([
'-lualatex',
-Path.join('$COMPILE_DIR', mainFile)
+Path.join('$COMPILE_DIR', mainFile),
])
},

-}
+},
}

function __guard__(value, transform) {

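Alongside the reformatting, every service file swaps `require('settings-sharelatex')` for the scoped `@overleaf/settings` package; the object it returns is used exactly as before, so call sites do not change. A small sketch of the pattern (the `latexmkCommandPrefix` read is one optional setting visible in the hunks above):

```js
// Sketch of the rename: only the package specifier changes.
// Old: const Settings = require('settings-sharelatex')
const Settings = require('@overleaf/settings')

// Call sites stay the same, e.g. reading an optional CLSI option with a fallback:
const prefix = (Settings.clsi && Settings.clsi.latexmkCommandPrefix) || []
console.log(prefix)
```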
@@ -37,7 +37,7 @@ module.exports = CommandRunner = {
} else {
callback = _.once(callback)
}
-command = Array.from(command).map((arg) =>
+command = Array.from(command).map(arg =>
arg.toString().replace('$COMPILE_DIR', directory)
)
logger.log({ project_id, command, directory }, 'running command')
@@ -58,7 +58,7 @@ module.exports = CommandRunner = {
const proc = spawn(command[0], command.slice(1), { cwd: directory, env })

let stdout = ''
-proc.stdout.setEncoding('utf8').on('data', (data) => (stdout += data))
+proc.stdout.setEncoding('utf8').on('data', data => (stdout += data))

proc.on('error', function (err) {
logger.err(
@@ -99,5 +99,5 @@ module.exports = CommandRunner = {
return callback(err)
}
return callback()
-}
+},
}

@@ -12,7 +12,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let LockManager
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const Lockfile = require('lockfile') // from https://github.com/npm/lockfile
const Errors = require('./Errors')
@@ -30,7 +30,7 @@ module.exports = LockManager = {
const lockOpts = {
wait: this.MAX_LOCK_WAIT_TIME,
pollPeriod: this.LOCK_TEST_INTERVAL,
-stale: this.LOCK_STALE
+stale: this.LOCK_STALE,
}
return Lockfile.lock(path, lockOpts, function (error) {
if ((error != null ? error.code : undefined) === 'EEXIST') {
@@ -48,7 +48,7 @@ module.exports = LockManager = {
statDir,
statDirErr,
readdirErr,
-readdirDir
+readdirDir,
},
'unable to get lock'
)
@@ -68,5 +68,5 @@ module.exports = LockManager = {
)
}
})
-}
+},
}

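The compile lock seen above is taken with the `lockfile` npm package, passing wait, poll and staleness options and treating an `EEXIST` error as "someone else holds the lock". A minimal standalone sketch of that pattern, with made-up timeouts and lock path:

```js
// Minimal sketch of the locking pattern, assuming invented timeout values.
// Lockfile.lock() polls until the lock file can be created or `wait` expires;
// an EEXIST error means another process still holds the lock.
const Lockfile = require('lockfile')

const lockOpts = {
  wait: 10 * 1000, // give up after 10s
  pollPeriod: 100, // re-check every 100ms
  stale: 5 * 60 * 1000, // treat locks older than 5 minutes as abandoned
}

Lockfile.lock('/tmp/example.lock', lockOpts, error => {
  if (error && error.code === 'EEXIST') {
    console.error('already locked, try again later')
    return
  }
  if (error) throw error
  // ... do the work that needs exclusive access ...
  Lockfile.unlock('/tmp/example.lock', err => {
    if (err) console.error('failed to release lock', err)
  })
})
```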
@@ -20,7 +20,7 @@ const fse = require('fs-extra')
const Path = require('path')
const logger = require('logger-sharelatex')
const _ = require('lodash')
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
const crypto = require('crypto')
const Metrics = require('./Metrics')

@@ -180,38 +180,42 @@ module.exports = OutputCacheManager = {
const newFile = _.clone(file)
const [src, dst] = Array.from([
Path.join(compileDir, file.path),
-Path.join(cacheDir, file.path)
+Path.join(cacheDir, file.path),
])
-return OutputCacheManager._checkFileIsSafe(src, function (
-err,
-isSafe
-) {
+return OutputCacheManager._checkFileIsSafe(
+src,
+function (err, isSafe) {
if (err != null) {
return cb(err)
}
if (!isSafe) {
return cb()
}
-return OutputCacheManager._checkIfShouldCopy(src, function (
-err,
-shouldCopy
-) {
+return OutputCacheManager._checkIfShouldCopy(
+src,
+function (err, shouldCopy) {
if (err != null) {
return cb(err)
}
if (!shouldCopy) {
return cb()
}
-return OutputCacheManager._copyFile(src, dst, function (err) {
+return OutputCacheManager._copyFile(
+src,
+dst,
+function (err) {
if (err != null) {
return cb(err)
}
newFile.build = buildId // attach a build id if we cached the file
results.push(newFile)
return cb()
-})
-})
-})
+}
+)
+}
+)
+}
+)
},
function (err) {
if (err != null) {
@@ -232,7 +236,7 @@ module.exports = OutputCacheManager = {
// let file expiry run in the background, expire all previous files if per-user
return OutputCacheManager.expireOutputFiles(cacheRoot, {
keep: buildId,
-limit: perUser ? 1 : null
+limit: perUser ? 1 : null,
})
}
}
@@ -242,7 +246,7 @@ module.exports = OutputCacheManager = {
},

collectOutputPdfSize(outputFiles, outputDir, stats, callback) {
-const outputFile = outputFiles.find((x) => x.path === 'output.pdf')
+const outputFile = outputFiles.find(x => x.path === 'output.pdf')
if (!outputFile) return callback(null, outputFiles)
const outputFilePath = Path.join(
outputDir,
@@ -269,7 +273,7 @@ module.exports = OutputCacheManager = {
OutputCacheManager.ensureContentDir(cacheRoot, function (err, contentDir) {
if (err) return callback(err, outputFiles)

-const outputFile = outputFiles.find((x) => x.path === 'output.pdf')
+const outputFile = outputFiles.find(x => x.path === 'output.pdf')
if (outputFile) {
// possibly we should copy the file from the build dir here
const outputFilePath = Path.join(
@@ -331,7 +335,7 @@ module.exports = OutputCacheManager = {
}
fs.readdir(contentRoot, function (err, results) {
const dirs = results.sort()
-const contentId = dirs.find((dir) =>
+const contentId = dirs.find(dir =>
OutputCacheManager.BUILD_REGEX.test(dir)
)
if (contentId) {
@@ -374,31 +378,31 @@ module.exports = OutputCacheManager = {
function (file, cb) {
const [src, dst] = Array.from([
Path.join(compileDir, file.path),
-Path.join(archiveDir, file.path)
+Path.join(archiveDir, file.path),
])
-return OutputCacheManager._checkFileIsSafe(src, function (
-err,
-isSafe
-) {
+return OutputCacheManager._checkFileIsSafe(
+src,
+function (err, isSafe) {
if (err != null) {
return cb(err)
}
if (!isSafe) {
return cb()
}
-return OutputCacheManager._checkIfShouldArchive(src, function (
-err,
-shouldArchive
-) {
+return OutputCacheManager._checkIfShouldArchive(
+src,
+function (err, shouldArchive) {
if (err != null) {
return cb(err)
}
if (!shouldArchive) {
return cb()
}
return OutputCacheManager._copyFile(src, dst, cb)
-})
-})
+}
+)
+}
+)
},
callback
)
@@ -440,7 +444,7 @@ module.exports = OutputCacheManager = {
// we can get the build time from the first part of the directory name DDDD-RRRR
// DDDD is date and RRRR is random bytes
const dirTime = parseInt(
-__guard__(dir.split('-'), (x) => x[0]),
+__guard__(dir.split('-'), x => x[0]),
16
)
const age = currentTime - dirTime
@@ -549,7 +553,7 @@ module.exports = OutputCacheManager = {
return callback(null, true)
}
return callback(null, false)
-}
+},
}

function __guard__(value, transform) {

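The larger hunks above are also purely layout changes: when a call with a long callback no longer fits on one line, the new formatting puts each argument on its own line and keeps the callback signature intact, instead of splitting the callback's parameter list. A toy before/after, with an invented helper standing in for the real safety check:

```js
// Hypothetical illustration of the callback reflow seen above.
function checkFileIsSafe(src, cb) {
  cb(null, !src.includes('..')) // toy stand-in for the real check
}

// Old layout: the callback's parameter list was split across lines.
checkFileIsSafe('output/output.pdf', function (
  err,
  isSafe
) {
  console.log('old style', err, isSafe)
})

// New layout: the call's arguments split instead, one per line.
checkFileIsSafe(
  'output/output.pdf',
  function (err, isSafe) {
    console.log('new style', err, isSafe)
  }
)
```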
@@ -6,9 +6,7 @@ const logger = require('logger-sharelatex')

module.exports = OutputFileFinder = {
findOutputFiles(resources, directory, callback) {
-const incomingResources = new Set(
-resources.map((resource) => resource.path)
-)
+const incomingResources = new Set(resources.map(resource => resource.path))

OutputFileFinder._getAllFiles(directory, function (error, allFiles) {
if (allFiles == null) {
@@ -23,7 +21,7 @@ module.exports = OutputFileFinder = {
if (!incomingResources.has(file)) {
outputFiles.push({
path: file,
-type: Path.extname(file).replace(/^\./, '') || undefined
+type: Path.extname(file).replace(/^\./, '') || undefined,
})
}
}
@@ -42,7 +40,7 @@ module.exports = OutputFileFinder = {
'.archive',
'-o',
'-name',
-'.project-*'
+'.project-*',
]
const args = [
directory,
@@ -53,13 +51,13 @@ module.exports = OutputFileFinder = {
'-o',
'-type',
'f',
-'-print'
+'-print',
]
logger.log({ args }, 'running find command')

const proc = spawn('find', args)
let stdout = ''
-proc.stdout.setEncoding('utf8').on('data', (chunk) => (stdout += chunk))
+proc.stdout.setEncoding('utf8').on('data', chunk => (stdout += chunk))
proc.on('error', callback)
proc.on('close', function (code) {
if (code !== 0) {
@@ -76,5 +74,5 @@ module.exports = OutputFileFinder = {
})
callback(null, fileList)
})
-}
+},
}

@@ -29,15 +29,15 @@ module.exports = OutputFileOptimiser = {
callback = function (error) {}
}
if (src.match(/\/output\.pdf$/)) {
-return OutputFileOptimiser.checkIfPDFIsOptimised(src, function (
-err,
-isOptimised
-) {
+return OutputFileOptimiser.checkIfPDFIsOptimised(
+src,
+function (err, isOptimised) {
if (err != null || isOptimised) {
return callback(null)
}
return OutputFileOptimiser.optimisePDF(src, dst, callback)
-})
+}
+)
} else {
return callback(null)
}
@@ -77,7 +77,7 @@ module.exports = OutputFileOptimiser = {
const timer = new Metrics.Timer('qpdf')
const proc = spawn('qpdf', args)
let stdout = ''
-proc.stdout.setEncoding('utf8').on('data', (chunk) => (stdout += chunk))
+proc.stdout.setEncoding('utf8').on('data', chunk => (stdout += chunk))
callback = _.once(callback) // avoid double call back for error and close event
proc.on('error', function (err) {
logger.warn({ err, args }, 'qpdf failed')
@@ -99,5 +99,5 @@ module.exports = OutputFileOptimiser = {
return callback(null)
})
})
-} // ignore the error
+}, // ignore the error
}

@@ -20,7 +20,7 @@ const dbQueue = require('./DbQueue')
const async = require('async')
const logger = require('logger-sharelatex')
const oneDay = 24 * 60 * 60 * 1000
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
const diskusage = require('diskusage')
const { callbackify } = require('util')

@@ -28,7 +28,7 @@ async function refreshExpiryTimeout() {
const paths = [
Settings.path.compilesDir,
Settings.path.outputDir,
-Settings.path.clsiCacheDir
+Settings.path.clsiCacheDir,
]
for (const path of paths) {
try {
@@ -40,7 +40,7 @@ async function refreshExpiryTimeout() {
logger.warn(
{
stats,
-newExpiryTimeoutInDays: (lowerExpiry / oneDay).toFixed(2)
+newExpiryTimeoutInDays: (lowerExpiry / oneDay).toFixed(2),
},
'disk running low on space, modifying EXPIRY_TIMEOUT'
)
@@ -57,7 +57,7 @@ module.exports = ProjectPersistenceManager = {
EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5,

promises: {
-refreshExpiryTimeout
+refreshExpiryTimeout,
},

refreshExpiryTimeout: callbackify(refreshExpiryTimeout),
@@ -66,7 +66,7 @@ module.exports = ProjectPersistenceManager = {
callback = function (error) {}
}
const timer = new Metrics.Timer('db-bump-last-accessed')
-const job = (cb) =>
+const job = cb =>
db.Project.findOrCreate({ where: { project_id } })
.spread((project, created) =>
project
@@ -75,7 +75,7 @@ module.exports = ProjectPersistenceManager = {
.error(cb)
)
.error(cb)
-dbQueue.queue.push(job, (error) => {
+dbQueue.queue.push(job, error => {
timer.done()
callback(error)
})
@@ -93,16 +93,19 @@ module.exports = ProjectPersistenceManager = {
return callback(error)
}
logger.log({ project_ids }, 'clearing expired projects')
-const jobs = Array.from(project_ids || []).map((project_id) =>
-((project_id) => (callback) =>
-ProjectPersistenceManager.clearProjectFromCache(project_id, function (
-err
-) {
+const jobs = Array.from(project_ids || []).map(project_id =>
+(
+project_id => callback =>
+ProjectPersistenceManager.clearProjectFromCache(
+project_id,
+function (err) {
if (err != null) {
logger.error({ err, project_id }, 'error clearing project')
}
return callback()
-}))(project_id)
+}
+)
+)(project_id)
)
return async.series(jobs, function (error) {
if (error != null) {
@@ -110,7 +113,7 @@ module.exports = ProjectPersistenceManager = {
}
return CompileManager.clearExpiredProjects(
ProjectPersistenceManager.EXPIRY_TIMEOUT,
-(error) => callback()
+error => callback()
)
})
})
@@ -167,7 +170,7 @@ module.exports = ProjectPersistenceManager = {
callback = function (error) {}
}
logger.log({ project_id }, 'clearing project from database')
-const job = (cb) =>
+const job = cb =>
db.Project.destroy({ where: { project_id } })
.then(() => cb())
.error(cb)
@@ -185,17 +188,17 @@ module.exports = ProjectPersistenceManager = {
const q = {}
q[db.op.lt] = keepProjectsFrom
return db.Project.findAll({ where: { lastAccessed: q } })
-.then((projects) =>
+.then(projects =>
cb(
null,
-projects.map((project) => project.project_id)
+projects.map(project => project.project_id)
)
)
.error(cb)
}

return dbQueue.queue.push(job, callback)
-}
+},
}

logger.log(

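The expired-project clean-up above maps each id to a small callback-taking job and runs them sequentially with `async.series`. A stripped-down sketch of that pattern, with invented ids and a stand-in for the real clearing function:

```js
// Minimal sketch of the job pattern: wrap each id in a function that takes a
// callback, then let async.series run them one after another.
const async = require('async')

const projectIds = ['p1', 'p2', 'p3'] // made-up ids for illustration
const jobs = projectIds.map(projectId => callback => {
  // stand-in for ProjectPersistenceManager.clearProjectFromCache(projectId, ...)
  console.log('clearing', projectId)
  callback()
})

async.series(jobs, error => {
  if (error) console.error('failed to clear projects', error)
})
```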
@@ -4,7 +4,6 @@
no-throw-literal,
no-unused-vars,
no-useless-escape,
-standard/no-callback-literal,
valid-typeof,
*/
// TODO: This file was created by bulk-decaffeinate.
@@ -18,7 +17,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let RequestParser
-const settings = require('settings-sharelatex')
+const settings = require('@overleaf/settings')

module.exports = RequestParser = {
VALID_COMPILERS: ['pdflatex', 'latex', 'xelatex', 'lualatex'],
@@ -47,7 +46,7 @@ module.exports = RequestParser = {
{
validValues: this.VALID_COMPILERS,
default: 'pdflatex',
-type: 'string'
+type: 'string',
}
)
response.enablePdfCaching = this._parseAttribute(
@@ -55,7 +54,7 @@ module.exports = RequestParser = {
compile.options.enablePdfCaching,
{
default: false,
-type: 'boolean'
+type: 'boolean',
}
)
response.timeout = this._parseAttribute(
@@ -63,7 +62,7 @@ module.exports = RequestParser = {
compile.options.timeout,
{
default: RequestParser.MAX_TIMEOUT,
-type: 'number'
+type: 'number',
}
)
response.imageName = this._parseAttribute(
@@ -74,19 +73,19 @@ module.exports = RequestParser = {
validValues:
settings.clsi &&
settings.clsi.docker &&
-settings.clsi.docker.allowedImages
+settings.clsi.docker.allowedImages,
}
)
response.draft = this._parseAttribute('draft', compile.options.draft, {
default: false,
-type: 'boolean'
+type: 'boolean',
})
response.check = this._parseAttribute('check', compile.options.check, {
-type: 'string'
+type: 'string',
})
response.flags = this._parseAttribute('flags', compile.options.flags, {
default: [],
-type: 'object'
+type: 'object',
})
if (settings.allowedCompileGroups) {
response.compileGroup = this._parseAttribute(
@@ -95,7 +94,7 @@ module.exports = RequestParser = {
{
validValues: settings.allowedCompileGroups,
default: '',
-type: 'string'
+type: 'string',
}
)
}
@@ -107,7 +106,7 @@ module.exports = RequestParser = {
compile.options.syncType,
{
validValues: ['full', 'incremental'],
-type: 'string'
+type: 'string',
}
)

@@ -144,13 +143,12 @@ module.exports = RequestParser = {
compile.rootResourcePath,
{
default: 'main.tex',
-type: 'string'
+type: 'string',
}
)
const originalRootResourcePath = rootResourcePath
-const sanitizedRootResourcePath = RequestParser._sanitizePath(
-rootResourcePath
-)
+const sanitizedRootResourcePath =
+RequestParser._sanitizePath(rootResourcePath)
response.rootResourcePath = RequestParser._checkPath(
sanitizedRootResourcePath
)
@@ -195,7 +193,7 @@ module.exports = RequestParser = {
path: resource.path,
modified,
url: resource.url,
-content: resource.content
+content: resource.content,
}
},

@@ -237,5 +235,5 @@ module.exports = RequestParser = {
}
}
return path
-}
+},
}

@@ -36,7 +36,7 @@ module.exports = {
})
} else {
logger.log({ state, basePath }, 'writing sync state')
-const resourceList = resources.map((resource) => resource.path)
+const resourceList = resources.map(resource => resource.path)
fs.writeFile(
stateFile,
[...resourceList, `stateHash:${state}`].join('\n'),
@@ -48,43 +48,46 @@ module.exports = {
checkProjectStateMatches(state, basePath, callback) {
const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
const size = this.SYNC_STATE_MAX_SIZE
-SafeReader.readFile(stateFile, size, 'utf8', function (
-err,
-result,
-bytesRead
-) {
+SafeReader.readFile(
+stateFile,
+size,
+'utf8',
+function (err, result, bytesRead) {
if (err) {
return callback(err)
}
if (bytesRead === size) {
logger.error(
{ file: stateFile, size, bytesRead },
'project state file truncated'
)
}
const array = result ? result.toString().split('\n') : []
const adjustedLength = Math.max(array.length, 1)
const resourceList = array.slice(0, adjustedLength - 1)
const oldState = array[adjustedLength - 1]
const newState = `stateHash:${state}`
logger.log(
{ state, oldState, basePath, stateMatches: newState === oldState },
'checking sync state'
)
if (newState !== oldState) {
return callback(
-new Errors.FilesOutOfSyncError('invalid state for incremental update')
+new Errors.FilesOutOfSyncError(
+'invalid state for incremental update'
+)
)
} else {
-const resources = resourceList.map((path) => ({ path }))
+const resources = resourceList.map(path => ({ path }))
callback(null, resources)
}
-})
+}
+)
},

checkResourceFiles(resources, allFiles, basePath, callback) {
// check the paths are all relative to current directory
-const containsRelativePath = (resource) => {
+const containsRelativePath = resource => {
const dirs = resource.path.split('/')
return dirs.indexOf('..') !== -1
}
@@ -94,8 +97,8 @@ module.exports = {
// check if any of the input files are not present in list of files
const seenFiles = new Set(allFiles)
const missingFiles = resources
-.map((resource) => resource.path)
-.filter((path) => !seenFiles.has(path))
+.map(resource => resource.path)
+.filter(path => !seenFiles.has(path))
if (missingFiles.length > 0) {
logger.err(
{ missingFiles, basePath, allFiles, resources },
@@ -109,5 +112,5 @@ module.exports = {
} else {
callback()
}
-}
+},
}

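The sync-state file handled in the hunks above is plain text: one resource path per line, with the state hash on the final line. A small sketch of writing and reading that format (the temp file name and hash value are invented for illustration):

```js
// Sketch of the sync-state file format: paths first, 'stateHash:<state>' last.
const fs = require('fs')

const resources = [{ path: 'main.tex' }, { path: 'chapters/intro.tex' }]
const state = 'abc123' // made-up hash

// writing
fs.writeFileSync(
  '/tmp/.project-sync-state-example',
  [...resources.map(r => r.path), `stateHash:${state}`].join('\n')
)

// reading back and splitting into the resource list and the stored hash
const lines = fs
  .readFileSync('/tmp/.project-sync-state-example', 'utf8')
  .split('\n')
const storedHash = lines[lines.length - 1]
console.log(lines.slice(0, -1), storedHash) // paths, 'stateHash:abc123'
```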
@@ -23,7 +23,7 @@ const OutputFileFinder = require('./OutputFileFinder')
const ResourceStateManager = require('./ResourceStateManager')
const Metrics = require('./Metrics')
const logger = require('logger-sharelatex')
-const settings = require('settings-sharelatex')
+const settings = require('@overleaf/settings')

const parallelFileDownloads = settings.parallelFileDownloads || 1

@@ -109,13 +109,13 @@ module.exports = ResourceWriter = {
if (callback == null) {
callback = function (error) {}
}
-return this._createDirectory(basePath, (error) => {
+return this._createDirectory(basePath, error => {
if (error != null) {
return callback(error)
}
-const jobs = Array.from(resources).map((resource) =>
-((resource) => {
-return (callback) =>
+const jobs = Array.from(resources).map(resource =>
+(resource => {
+return callback =>
this._writeResourceToDisk(project_id, resource, basePath, callback)
})(resource)
)
@@ -127,17 +127,17 @@ module.exports = ResourceWriter = {
if (callback == null) {
callback = function (error) {}
}
-return this._createDirectory(basePath, (error) => {
+return this._createDirectory(basePath, error => {
if (error != null) {
return callback(error)
}
-return this._removeExtraneousFiles(resources, basePath, (error) => {
+return this._removeExtraneousFiles(resources, basePath, error => {
if (error != null) {
return callback(error)
}
-const jobs = Array.from(resources).map((resource) =>
-((resource) => {
-return (callback) =>
+const jobs = Array.from(resources).map(resource =>
+(resource => {
+return callback =>
this._writeResourceToDisk(
project_id,
resource,
@@ -179,86 +179,86 @@ module.exports = ResourceWriter = {
return _callback(error, ...Array.from(result))
}

-return OutputFileFinder.findOutputFiles(resources, basePath, function (
-error,
-outputFiles,
-allFiles
-) {
+return OutputFileFinder.findOutputFiles(
+resources,
+basePath,
+function (error, outputFiles, allFiles) {
if (error != null) {
return callback(error)
}

const jobs = []
for (const file of Array.from(outputFiles || [])) {
;(function (file) {
const { path } = file
let should_delete = true
if (
path.match(/^output\./) ||
path.match(/\.aux$/) ||
path.match(/^cache\//)
) {
// knitr cache
should_delete = false
}
if (path.match(/^output-.*/)) {
// Tikz cached figures (default case)
should_delete = false
}
if (path.match(/\.(pdf|dpth|md5)$/)) {
// Tikz cached figures (by extension)
should_delete = false
}
if (
path.match(/\.(pygtex|pygstyle)$/) ||
path.match(/(^|\/)_minted-[^\/]+\//)
) {
// minted files/directory
should_delete = false
}
if (
path.match(/\.md\.tex$/) ||
path.match(/(^|\/)_markdown_[^\/]+\//)
) {
// markdown files/directory
should_delete = false
}
if (path.match(/-eps-converted-to\.pdf$/)) {
// Epstopdf generated files
should_delete = false
}
if (
path === 'output.pdf' ||
path === 'output.dvi' ||
path === 'output.log' ||
path === 'output.xdv' ||
path === 'output.stdout' ||
path === 'output.stderr'
) {
should_delete = true
}
if (path === 'output.tex') {
// created by TikzManager if present in output files
should_delete = true
}
if (should_delete) {
-return jobs.push((callback) =>
+return jobs.push(callback =>
ResourceWriter._deleteFileIfNotDirectory(
Path.join(basePath, path),
callback
)
)
}
})(file)
}

return async.series(jobs, function (error) {
if (error != null) {
return callback(error)
}
return callback(null, outputFiles, allFiles)
})
-})
-})
+}
+)
},

_deleteFileIfNotDirectory(path, callback) {
@@ -296,48 +296,51 @@ module.exports = ResourceWriter = {
if (callback == null) {
callback = function (error) {}
}
-return ResourceWriter.checkPath(basePath, resource.path, function (
-error,
-path
-) {
+return ResourceWriter.checkPath(
+basePath,
+resource.path,
+function (error, path) {
if (error != null) {
return callback(error)
}
-return fs.mkdir(Path.dirname(path), { recursive: true }, function (
-error
-) {
+return fs.mkdir(
+Path.dirname(path),
+{ recursive: true },
+function (error) {
if (error != null) {
return callback(error)
}
// TODO: Don't overwrite file if it hasn't been modified
if (resource.url != null) {
return UrlCache.downloadUrlToFile(
project_id,
resource.url,
path,
resource.modified,
function (err) {
if (err != null) {
logger.err(
{
err,
project_id,
path,
resource_url: resource.url,
-modified: resource.modified
+modified: resource.modified,
},
'error downloading file for resources'
)
Metrics.inc('download-failed')
}
return callback()
}
) // try and continue compiling even if http resource can not be downloaded at this time
} else {
fs.writeFile(path, resource.content, callback)
}
-})
-})
+}
+)
+}
+)
},

checkPath(basePath, resourcePath, callback) {
@@ -347,5 +350,5 @@ module.exports = ResourceWriter = {
} else {
return callback(null, path)
}
-}
+},
}

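The long hunk above also documents which generated files survive an incremental compile: caches from knitr, TikZ externalisation, minted, markdown and epstopdf are kept, while the primary outputs are always removed and regenerated. A condensed, standalone predicate capturing those rules (a simplified sketch, not the code from the commit):

```js
// Simplified sketch of the clean-up rules: should a generated file be deleted
// before an incremental compile?
function shouldDelete(path) {
  let should = true
  // caches that are kept between compiles
  if (/^output\./.test(path) || /\.aux$/.test(path) || /^cache\//.test(path)) should = false
  if (/^output-/.test(path) || /\.(pdf|dpth|md5)$/.test(path)) should = false // TikZ figures
  if (/\.(pygtex|pygstyle)$/.test(path) || /(^|\/)_minted-[^/]+\//.test(path)) should = false // minted
  if (/\.md\.tex$/.test(path) || /(^|\/)_markdown_[^/]+\//.test(path)) should = false // markdown
  if (/-eps-converted-to\.pdf$/.test(path)) should = false // epstopdf
  // primary outputs are always regenerated, so always delete them
  const primaryOutputs = [
    'output.pdf', 'output.dvi', 'output.log', 'output.xdv',
    'output.stdout', 'output.stderr', 'output.tex',
  ]
  if (primaryOutputs.includes(path)) should = true
  return should
}

console.log(shouldDelete('output.pdf')) // true
console.log(shouldDelete('figures/plot-eps-converted-to.pdf')) // false
```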
@@ -44,17 +44,20 @@ module.exports = SafeReader = {
return callback(null, ...Array.from(result))
})
const buff = Buffer.alloc(size) // fills with zeroes by default
-return fs.read(fd, buff, 0, buff.length, 0, function (
-err,
-bytesRead,
-buffer
-) {
+return fs.read(
+fd,
+buff,
+0,
+buff.length,
+0,
+function (err, bytesRead, buffer) {
if (err != null) {
return callbackWithClose(err)
}
const result = buffer.toString(encoding, 0, bytesRead)
return callbackWithClose(null, result, bytesRead)
-})
+}
+)
})
-}
+},
}

@@ -17,7 +17,7 @@
let ForbidSymlinks
const Path = require('path')
const fs = require('fs')
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const url = require('url')

@@ -26,7 +26,7 @@ module.exports = ForbidSymlinks = function (staticFn, root, options) {
const basePath = Path.resolve(root)
return function (req, res, next) {
let file, project_id, result
-const path = __guard__(url.parse(req.url), (x) => x.pathname)
+const path = __guard__(url.parse(req.url), x => x.pathname)
// check that the path is of the form /project_id_or_name/path/to/file.log
if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) {
project_id = result[1]
@@ -63,7 +63,7 @@ module.exports = ForbidSymlinks = function (staticFn, root, options) {
requestedFsPath,
realFsPath,
path: req.params[0],
-project_id: req.params.project_id
+project_id: req.params.project_id,
},
'error checking file access'
)
@@ -75,7 +75,7 @@ module.exports = ForbidSymlinks = function (staticFn, root, options) {
requestedFsPath,
realFsPath,
path: req.params[0],
-project_id: req.params.project_id
+project_id: req.params.project_id,
},
'trying to access a different file (symlink), aborting'
)

@@ -35,63 +35,67 @@ module.exports = TikzManager = {
}
}
// if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
-return ResourceWriter.checkPath(compileDir, mainFile, function (
-error,
-path
-) {
+return ResourceWriter.checkPath(
+compileDir,
+mainFile,
+function (error, path) {
if (error != null) {
return callback(error)
}
-return SafeReader.readFile(path, 65536, 'utf8', function (
-error,
-content
-) {
+return SafeReader.readFile(
+path,
+65536,
+'utf8',
+function (error, content) {
if (error != null) {
return callback(error)
}
const usesTikzExternalize =
(content != null
? content.indexOf('\\tikzexternalize')
: undefined) >= 0
const usesPsTool =
(content != null ? content.indexOf('{pstool}') : undefined) >= 0
logger.log(
{ compileDir, mainFile, usesTikzExternalize, usesPsTool },
'checked for packages needing main file as output.tex'
)
const needsMainFile = usesTikzExternalize || usesPsTool
return callback(null, needsMainFile)
-})
-})
+}
+)
+}
+)
},

injectOutputFile(compileDir, mainFile, callback) {
if (callback == null) {
callback = function (error) {}
}
-return ResourceWriter.checkPath(compileDir, mainFile, function (
-error,
-path
-) {
+return ResourceWriter.checkPath(
+compileDir,
+mainFile,
+function (error, path) {
if (error != null) {
return callback(error)
}
return fs.readFile(path, 'utf8', function (error, content) {
if (error != null) {
return callback(error)
}
logger.log(
{ compileDir, mainFile },
'copied file to output.tex as project uses packages which require it'
)
// use wx flag to ensure that output file does not already exist
return fs.writeFile(
Path.join(compileDir, 'output.tex'),
content,
{ flag: 'wx' },
callback
)
})
-})
-}
+}
+)
+},
}

@@ -16,7 +16,7 @@ let UrlCache
const db = require('./db')
const dbQueue = require('./DbQueue')
const UrlFetcher = require('./UrlFetcher')
-const Settings = require('settings-sharelatex')
+const Settings = require('@overleaf/settings')
const crypto = require('crypto')
const fs = require('fs')
const logger = require('logger-sharelatex')
@@ -65,17 +65,19 @@ module.exports = UrlCache = {
if (error != null) {
return callback(error)
}
-const jobs = Array.from(urls || []).map((url) =>
-((url) => (callback) =>
+const jobs = Array.from(urls || []).map(url =>
+(
+url => callback =>
UrlCache._clearUrlFromCache(project_id, url, function (error) {
if (error != null) {
logger.error(
{ err: error, project_id, url },
'error clearing project URL'
)
}
return callback()
-}))(url)
+})
+)(url)
)
return async.series(jobs, callback)
})
@@ -103,7 +105,7 @@ module.exports = UrlCache = {
return UrlFetcher.pipeUrlToFileWithRetry(
url,
UrlCache._cacheFilePathForUrl(project_id, url),
-(error) => {
+error => {
if (error != null) {
return callback(error)
}
@@ -111,7 +113,7 @@ module.exports = UrlCache = {
project_id,
url,
lastModified,
-(error) => {
+error => {
if (error != null) {
return callback(error)
}
@@ -138,23 +140,24 @@ module.exports = UrlCache = {
if (lastModified == null) {
return callback(null, true)
}
-return UrlCache._findUrlDetails(project_id, url, function (
-error,
-urlDetails
-) {
+return UrlCache._findUrlDetails(
+project_id,
+url,
+function (error, urlDetails) {
if (error != null) {
return callback(error)
}
if (
urlDetails == null ||
urlDetails.lastModified == null ||
urlDetails.lastModified.getTime() < lastModified.getTime()
) {
return callback(null, true)
} else {
return callback(null, false)
}
-})
+}
+)
},

_cacheFileNameForUrl(project_id, url) {
@@ -176,14 +179,16 @@ module.exports = UrlCache = {
if (error != null) {
return callback(error)
}
-return UrlCache._deleteUrlCacheFromDisk(project_id, url, function (
-error
-) {
+return UrlCache._deleteUrlCacheFromDisk(
+project_id,
+url,
+function (error) {
if (error != null) {
return callback(error)
}
return callback(null)
-})
+}
+)
})
},

@@ -191,16 +196,17 @@ module.exports = UrlCache = {
if (callback == null) {
callback = function (error) {}
}
-return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function (
-error
-) {
+return fs.unlink(
+UrlCache._cacheFilePathForUrl(project_id, url),
+function (error) {
if (error != null && error.code !== 'ENOENT') {
// no error if the file isn't present
return callback(error)
} else {
return callback()
}
-})
+}
+)
},

_findUrlDetails(project_id, url, callback) {
@@ -208,9 +214,9 @@ module.exports = UrlCache = {
callback = function (error, urlDetails) {}
}
const timer = new Metrics.Timer('db-find-url-details')
-const job = (cb) =>
+const job = cb =>
db.UrlCache.findOne({ where: { url, project_id } })
-.then((urlDetails) => cb(null, urlDetails))
+.then(urlDetails => cb(null, urlDetails))
.error(cb)
dbQueue.queue.push(job, (error, urlDetails) => {
timer.done()
@@ -223,7 +229,7 @@ module.exports = UrlCache = {
callback = function (error) {}
}
const timer = new Metrics.Timer('db-update-or-create-url-details')
-const job = (cb) =>
+const job = cb =>
db.UrlCache.findOrCreate({ where: { url, project_id } })
.spread((urlDetails, created) =>
urlDetails
@@ -232,7 +238,7 @@ module.exports = UrlCache = {
.error(cb)
)
.error(cb)
-dbQueue.queue.push(job, (error) => {
+dbQueue.queue.push(job, error => {
timer.done()
callback(error)
})
@@ -243,11 +249,11 @@ module.exports = UrlCache = {
callback = function (error) {}
}
const timer = new Metrics.Timer('db-clear-url-details')
-const job = (cb) =>
+const job = cb =>
db.UrlCache.destroy({ where: { url, project_id } })
.then(() => cb(null))
.error(cb)
-dbQueue.queue.push(job, (error) => {
+dbQueue.queue.push(job, error => {
timer.done()
callback(error)
})
@@ -258,12 +264,12 @@ module.exports = UrlCache = {
callback = function (error, urls) {}
}
const timer = new Metrics.Timer('db-find-urls-in-project')
|
const timer = new Metrics.Timer('db-find-urls-in-project')
|
||||||
const job = (cb) =>
|
const job = cb =>
|
||||||
db.UrlCache.findAll({ where: { project_id } })
|
db.UrlCache.findAll({ where: { project_id } })
|
||||||
.then((urlEntries) =>
|
.then(urlEntries =>
|
||||||
cb(
|
cb(
|
||||||
null,
|
null,
|
||||||
urlEntries.map((entry) => entry.url)
|
urlEntries.map(entry => entry.url)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.error(cb)
|
.error(cb)
|
||||||
|
@ -271,5 +277,5 @@ module.exports = UrlCache = {
|
||||||
timer.done()
|
timer.done()
|
||||||
callback(err, urls)
|
callback(err, urls)
|
||||||
})
|
})
|
||||||
}
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@ -16,7 +16,7 @@ let UrlFetcher
|
||||||
const request = require('request').defaults({ jar: false })
|
const request = require('request').defaults({ jar: false })
|
||||||
const fs = require('fs')
|
const fs = require('fs')
|
||||||
const logger = require('logger-sharelatex')
|
const logger = require('logger-sharelatex')
|
||||||
const settings = require('settings-sharelatex')
|
const settings = require('@overleaf/settings')
|
||||||
const URL = require('url')
|
const URL = require('url')
|
||||||
const async = require('async')
|
const async = require('async')
|
||||||
|
|
||||||
|
@ -127,5 +127,5 @@ module.exports = UrlFetcher = {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@ -9,7 +9,7 @@
|
||||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
*/
|
*/
|
||||||
const Sequelize = require('sequelize')
|
const Sequelize = require('sequelize')
|
||||||
const Settings = require('settings-sharelatex')
|
const Settings = require('@overleaf/settings')
|
||||||
const _ = require('lodash')
|
const _ = require('lodash')
|
||||||
const logger = require('logger-sharelatex')
|
const logger = require('logger-sharelatex')
|
||||||
|
|
||||||
|
@ -37,10 +37,10 @@ module.exports = {
|
||||||
{
|
{
|
||||||
url: Sequelize.STRING,
|
url: Sequelize.STRING,
|
||||||
project_id: Sequelize.STRING,
|
project_id: Sequelize.STRING,
|
||||||
lastModified: Sequelize.DATE
|
lastModified: Sequelize.DATE,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
indexes: [{ fields: ['url', 'project_id'] }, { fields: ['project_id'] }]
|
indexes: [{ fields: ['url', 'project_id'] }, { fields: ['project_id'] }],
|
||||||
}
|
}
|
||||||
),
|
),
|
||||||
|
|
||||||
|
@ -48,10 +48,10 @@ module.exports = {
|
||||||
'Project',
|
'Project',
|
||||||
{
|
{
|
||||||
project_id: { type: Sequelize.STRING, primaryKey: true },
|
project_id: { type: Sequelize.STRING, primaryKey: true },
|
||||||
lastAccessed: Sequelize.DATE
|
lastAccessed: Sequelize.DATE,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
indexes: [{ fields: ['lastAccessed'] }]
|
indexes: [{ fields: ['lastAccessed'] }],
|
||||||
}
|
}
|
||||||
),
|
),
|
||||||
|
|
||||||
|
@ -62,6 +62,6 @@ module.exports = {
|
||||||
return sequelize
|
return sequelize
|
||||||
.sync()
|
.sync()
|
||||||
.then(() => logger.log('db sync complete'))
|
.then(() => logger.log('db sync complete'))
|
||||||
.catch((err) => console.log(err, 'error syncing'))
|
.catch(err => console.log(err, 'error syncing'))
|
||||||
}
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@ -39,5 +39,5 @@ class FSPdfManager extends LocalPdfManager {
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
FSPdfManager
|
FSPdfManager,
|
||||||
}
|
}
|
||||||
|
|
|
@ -34,14 +34,14 @@ class FSStream extends Stream {
|
||||||
const result = {
|
const result = {
|
||||||
begin: begin,
|
begin: begin,
|
||||||
end: end,
|
end: end,
|
||||||
buffer: Buffer.alloc(end - begin, 0)
|
buffer: Buffer.alloc(end - begin, 0),
|
||||||
}
|
}
|
||||||
this.cachedBytes.push(result)
|
this.cachedBytes.push(result)
|
||||||
return this.fh.read(result.buffer, 0, end - begin, begin)
|
return this.fh.read(result.buffer, 0, end - begin, begin)
|
||||||
}
|
}
|
||||||
|
|
||||||
_ensureGetPos(pos) {
|
_ensureGetPos(pos) {
|
||||||
const found = this.cachedBytes.find((x) => {
|
const found = this.cachedBytes.find(x => {
|
||||||
return x.begin <= pos && pos < x.end
|
return x.begin <= pos && pos < x.end
|
||||||
})
|
})
|
||||||
if (!found) {
|
if (!found) {
|
||||||
|
@ -52,7 +52,7 @@ class FSStream extends Stream {
|
||||||
|
|
||||||
_ensureGetRange(begin, end) {
|
_ensureGetRange(begin, end) {
|
||||||
end = Math.min(end, this.length) // BG: handle overflow case
|
end = Math.min(end, this.length) // BG: handle overflow case
|
||||||
const found = this.cachedBytes.find((x) => {
|
const found = this.cachedBytes.find(x => {
|
||||||
return x.begin <= begin && end <= x.end
|
return x.begin <= begin && end <= x.end
|
||||||
})
|
})
|
||||||
if (!found) {
|
if (!found) {
|
||||||
|
|
|
@ -23,5 +23,5 @@ async function parseXrefTable(path, size, checkDeadline) {
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
parseXrefTable
|
parseXrefTable,
|
||||||
}
|
}
|
||||||
|
|
|
@ -4,6 +4,6 @@ clsi
|
||||||
--docker-repos=gcr.io/overleaf-ops
|
--docker-repos=gcr.io/overleaf-ops
|
||||||
--env-add=
|
--env-add=
|
||||||
--env-pass-through=TEXLIVE_IMAGE
|
--env-pass-through=TEXLIVE_IMAGE
|
||||||
--node-version=12.21.0
|
--node-version=12.22.3
|
||||||
--public-repo=True
|
--public-repo=True
|
||||||
--script-version=3.8.0
|
--script-version=3.11.0
|
||||||
|
|
|
@ -12,12 +12,12 @@ module.exports = {
|
||||||
process.env.SQLITE_PATH || Path.resolve(__dirname, '../db/db.sqlite'),
|
process.env.SQLITE_PATH || Path.resolve(__dirname, '../db/db.sqlite'),
|
||||||
pool: {
|
pool: {
|
||||||
max: 1,
|
max: 1,
|
||||||
min: 1
|
min: 1,
|
||||||
},
|
},
|
||||||
retry: {
|
retry: {
|
||||||
max: 10
|
max: 10,
|
||||||
}
|
},
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
compileSizeLimit: process.env.COMPILE_SIZE_LIMIT || '7mb',
|
compileSizeLimit: process.env.COMPILE_SIZE_LIMIT || '7mb',
|
||||||
|
@ -33,30 +33,30 @@ module.exports = {
|
||||||
clsiCacheDir: Path.resolve(__dirname, '../cache'),
|
clsiCacheDir: Path.resolve(__dirname, '../cache'),
|
||||||
synctexBaseDir(projectId) {
|
synctexBaseDir(projectId) {
|
||||||
return Path.join(this.compilesDir, projectId)
|
return Path.join(this.compilesDir, projectId)
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
internal: {
|
internal: {
|
||||||
clsi: {
|
clsi: {
|
||||||
port: 3013,
|
port: 3013,
|
||||||
host: process.env.LISTEN_ADDRESS || 'localhost'
|
host: process.env.LISTEN_ADDRESS || 'localhost',
|
||||||
},
|
},
|
||||||
|
|
||||||
load_balancer_agent: {
|
load_balancer_agent: {
|
||||||
report_load: true,
|
report_load: true,
|
||||||
load_port: 3048,
|
load_port: 3048,
|
||||||
local_port: 3049
|
local_port: 3049,
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
apis: {
|
apis: {
|
||||||
clsi: {
|
clsi: {
|
||||||
url: `http://${process.env.CLSI_HOST || 'localhost'}:3013`
|
url: `http://${process.env.CLSI_HOST || 'localhost'}:3013`,
|
||||||
},
|
},
|
||||||
clsiPerf: {
|
clsiPerf: {
|
||||||
host: `${process.env.CLSI_PERF_HOST || 'localhost'}:${
|
host: `${process.env.CLSI_PERF_HOST || 'localhost'}:${
|
||||||
process.env.CLSI_PERF_PORT || '3043'
|
process.env.CLSI_PERF_PORT || '3043'
|
||||||
}`
|
}`,
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
smokeTest: process.env.SMOKE_TEST || false,
|
smokeTest: process.env.SMOKE_TEST || false,
|
||||||
|
@ -67,7 +67,7 @@ module.exports = {
|
||||||
texliveImageNameOveride: process.env.TEX_LIVE_IMAGE_NAME_OVERRIDE,
|
texliveImageNameOveride: process.env.TEX_LIVE_IMAGE_NAME_OVERRIDE,
|
||||||
texliveOpenoutAny: process.env.TEXLIVE_OPENOUT_ANY,
|
texliveOpenoutAny: process.env.TEXLIVE_OPENOUT_ANY,
|
||||||
sentry: {
|
sentry: {
|
||||||
dsn: process.env.SENTRY_DSN
|
dsn: process.env.SENTRY_DSN,
|
||||||
},
|
},
|
||||||
|
|
||||||
enablePdfCaching: process.env.ENABLE_PDF_CACHING === 'true',
|
enablePdfCaching: process.env.ENABLE_PDF_CACHING === 'true',
|
||||||
|
@ -75,14 +75,13 @@ module.exports = {
|
||||||
pdfCachingMinChunkSize:
|
pdfCachingMinChunkSize:
|
||||||
parseInt(process.env.PDF_CACHING_MIN_CHUNK_SIZE, 10) || 1024,
|
parseInt(process.env.PDF_CACHING_MIN_CHUNK_SIZE, 10) || 1024,
|
||||||
pdfCachingMaxProcessingTime:
|
pdfCachingMaxProcessingTime:
|
||||||
parseInt(process.env.PDF_CACHING_MAX_PROCESSING_TIME, 10) || 10 * 1000
|
parseInt(process.env.PDF_CACHING_MAX_PROCESSING_TIME, 10) || 10 * 1000,
|
||||||
}
|
}
|
||||||
|
|
||||||
if (process.env.ALLOWED_COMPILE_GROUPS) {
|
if (process.env.ALLOWED_COMPILE_GROUPS) {
|
||||||
try {
|
try {
|
||||||
module.exports.allowedCompileGroups = process.env.ALLOWED_COMPILE_GROUPS.split(
|
module.exports.allowedCompileGroups =
|
||||||
' '
|
process.env.ALLOWED_COMPILE_GROUPS.split(' ')
|
||||||
)
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(error, 'could not apply allowed compile group setting')
|
console.error(error, 'could not apply allowed compile group setting')
|
||||||
process.exit(1)
|
process.exit(1)
|
||||||
|
@ -98,14 +97,14 @@ if (process.env.DOCKER_RUNNER) {
|
||||||
image:
|
image:
|
||||||
process.env.TEXLIVE_IMAGE || 'quay.io/sharelatex/texlive-full:2017.1',
|
process.env.TEXLIVE_IMAGE || 'quay.io/sharelatex/texlive-full:2017.1',
|
||||||
env: {
|
env: {
|
||||||
HOME: '/tmp'
|
HOME: '/tmp',
|
||||||
},
|
},
|
||||||
socketPath: '/var/run/docker.sock',
|
socketPath: '/var/run/docker.sock',
|
||||||
user: process.env.TEXLIVE_IMAGE_USER || 'tex'
|
user: process.env.TEXLIVE_IMAGE_USER || 'tex',
|
||||||
},
|
},
|
||||||
optimiseInDocker: true,
|
optimiseInDocker: true,
|
||||||
expireProjectAfterIdleMs: 24 * 60 * 60 * 1000,
|
expireProjectAfterIdleMs: 24 * 60 * 60 * 1000,
|
||||||
checkProjectsIntervalMs: 10 * 60 * 1000
|
checkProjectsIntervalMs: 10 * 60 * 1000,
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
@ -120,7 +119,7 @@ if (process.env.DOCKER_RUNNER) {
|
||||||
// Automatically clean up wordcount and synctex containers
|
// Automatically clean up wordcount and synctex containers
|
||||||
const defaultCompileGroupConfig = {
|
const defaultCompileGroupConfig = {
|
||||||
wordcount: { 'HostConfig.AutoRemove': true },
|
wordcount: { 'HostConfig.AutoRemove': true },
|
||||||
synctex: { 'HostConfig.AutoRemove': true }
|
synctex: { 'HostConfig.AutoRemove': true },
|
||||||
}
|
}
|
||||||
module.exports.clsi.docker.compileGroupConfig = Object.assign(
|
module.exports.clsi.docker.compileGroupConfig = Object.assign(
|
||||||
defaultCompileGroupConfig,
|
defaultCompileGroupConfig,
|
||||||
|
@ -146,9 +145,8 @@ if (process.env.DOCKER_RUNNER) {
|
||||||
|
|
||||||
if (process.env.ALLOWED_IMAGES) {
|
if (process.env.ALLOWED_IMAGES) {
|
||||||
try {
|
try {
|
||||||
module.exports.clsi.docker.allowedImages = process.env.ALLOWED_IMAGES.split(
|
module.exports.clsi.docker.allowedImages =
|
||||||
' '
|
process.env.ALLOWED_IMAGES.split(' ')
|
||||||
)
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(error, 'could not apply allowed images setting')
|
console.error(error, 'could not apply allowed images setting')
|
||||||
process.exit(1)
|
process.exit(1)
|
||||||
|
|
3934 services/clsi/package-lock.json generated
File diff suppressed because it is too large
|
@ -13,16 +13,19 @@
|
||||||
"test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js test/unit/lib",
|
"test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js test/unit/lib",
|
||||||
"test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
|
"test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
|
||||||
"nodemon": "nodemon --config nodemon.json",
|
"nodemon": "nodemon --config nodemon.json",
|
||||||
"lint": "node_modules/.bin/eslint --max-warnings 0 .",
|
"lint": "eslint --max-warnings 0 --format unix .",
|
||||||
"format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different",
|
"format": "prettier --list-different $PWD/'**/*.js'",
|
||||||
"format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write"
|
"format:fix": "prettier --write $PWD/'**/*.js'",
|
||||||
|
"lint:fix": "eslint --fix ."
|
||||||
},
|
},
|
||||||
"author": "James Allen <james@sharelatex.com>",
|
"author": "James Allen <james@sharelatex.com>",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@overleaf/metrics": "^3.5.1",
|
"@overleaf/metrics": "^3.5.1",
|
||||||
"@overleaf/o-error": "^3.3.1",
|
"@overleaf/o-error": "^3.3.1",
|
||||||
|
"@overleaf/settings": "^2.1.1",
|
||||||
"async": "3.2.0",
|
"async": "3.2.0",
|
||||||
"body-parser": "^1.19.0",
|
"body-parser": "^1.19.0",
|
||||||
|
"bunyan": "^1.8.15",
|
||||||
"diskusage": "^1.1.3",
|
"diskusage": "^1.1.3",
|
||||||
"dockerode": "^3.1.0",
|
"dockerode": "^3.1.0",
|
||||||
"express": "^4.17.1",
|
"express": "^4.17.1",
|
||||||
|
@ -37,34 +40,26 @@
|
||||||
"request": "^2.88.2",
|
"request": "^2.88.2",
|
||||||
"send": "^0.17.1",
|
"send": "^0.17.1",
|
||||||
"sequelize": "^5.21.5",
|
"sequelize": "^5.21.5",
|
||||||
"settings-sharelatex": "^1.1.0",
|
|
||||||
"sqlite3": "^4.1.1",
|
"sqlite3": "^4.1.1",
|
||||||
"v8-profiler-node8": "^6.1.1",
|
"v8-profiler-node8": "^6.1.1",
|
||||||
"wrench": "~1.5.9"
|
"wrench": "~1.5.9"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"babel-eslint": "^10.1.0",
|
"chai": "^4.2.0",
|
||||||
"bunyan": "^1.8.12",
|
"chai-as-promised": "^7.1.1",
|
||||||
"chai": "~4.2.0",
|
"eslint": "^7.21.0",
|
||||||
"eslint": "^6.8.0",
|
"eslint-config-prettier": "^8.1.0",
|
||||||
"eslint-config-prettier": "^6.10.0",
|
"eslint-config-standard": "^16.0.2",
|
||||||
"eslint-config-standard": "^14.1.0",
|
"eslint-plugin-chai-expect": "^2.2.0",
|
||||||
"eslint-config-standard-jsx": "^8.1.0",
|
"eslint-plugin-chai-friendly": "^0.6.0",
|
||||||
"eslint-config-standard-react": "^9.2.0",
|
"eslint-plugin-import": "^2.22.1",
|
||||||
"eslint-plugin-chai-expect": "^2.1.0",
|
"eslint-plugin-mocha": "^8.0.0",
|
||||||
"eslint-plugin-chai-friendly": "^0.5.0",
|
"eslint-plugin-node": "^11.1.0",
|
||||||
"eslint-plugin-import": "^2.20.1",
|
|
||||||
"eslint-plugin-jsx-a11y": "^6.2.3",
|
|
||||||
"eslint-plugin-mocha": "^6.3.0",
|
|
||||||
"eslint-plugin-node": "^11.0.0",
|
|
||||||
"eslint-plugin-prettier": "^3.1.2",
|
"eslint-plugin-prettier": "^3.1.2",
|
||||||
"eslint-plugin-promise": "^4.2.1",
|
"eslint-plugin-promise": "^4.2.1",
|
||||||
"eslint-plugin-react": "^7.19.0",
|
"mocha": "^8.3.2",
|
||||||
"eslint-plugin-standard": "^4.0.1",
|
|
||||||
"mocha": "^7.1.0",
|
|
||||||
"nodemon": "^2.0.7",
|
"nodemon": "^2.0.7",
|
||||||
"prettier": "^2.0.0",
|
"prettier": "^2.2.1",
|
||||||
"prettier-eslint-cli": "^5.0.0",
|
|
||||||
"sandboxed-module": "^2.0.3",
|
"sandboxed-module": "^2.0.3",
|
||||||
"sinon": "~9.0.1",
|
"sinon": "~9.0.1",
|
||||||
"timekeeper": "2.2.0"
|
"timekeeper": "2.2.0"
|
||||||
|
|
|
@ -7,7 +7,7 @@ describe('AllowedImageNames', function () {
|
||||||
this.project_id = Client.randomId()
|
this.project_id = Client.randomId()
|
||||||
this.request = {
|
this.request = {
|
||||||
options: {
|
options: {
|
||||||
imageName: undefined
|
imageName: undefined,
|
||||||
},
|
},
|
||||||
resources: [
|
resources: [
|
||||||
{
|
{
|
||||||
|
@ -17,9 +17,9 @@ describe('AllowedImageNames', function () {
|
||||||
\\begin{document}
|
\\begin{document}
|
||||||
Hello world
|
Hello world
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
ClsiApp.ensureRunning(done)
|
ClsiApp.ensureRunning(done)
|
||||||
})
|
})
|
||||||
|
@ -101,8 +101,8 @@ Hello world
|
||||||
expect(error).to.not.exist
|
expect(error).to.not.exist
|
||||||
expect(result).to.deep.equal({
|
expect(result).to.deep.equal({
|
||||||
pdf: [
|
pdf: [
|
||||||
{ page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 }
|
{ page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 },
|
||||||
]
|
],
|
||||||
})
|
})
|
||||||
done()
|
done()
|
||||||
}
|
}
|
||||||
|
@ -139,7 +139,7 @@ Hello world
|
||||||
(error, result) => {
|
(error, result) => {
|
||||||
expect(error).to.not.exist
|
expect(error).to.not.exist
|
||||||
expect(result).to.deep.equal({
|
expect(result).to.deep.equal({
|
||||||
code: [{ file: 'main.tex', line: 3, column: -1 }]
|
code: [{ file: 'main.tex', line: 3, column: -1 }],
|
||||||
})
|
})
|
||||||
done()
|
done()
|
||||||
}
|
}
|
||||||
|
|
|
@ -23,9 +23,9 @@ describe('Broken LaTeX file', function () {
|
||||||
\\begin{documen % :(
|
\\begin{documen % :(
|
||||||
Broken
|
Broken
|
||||||
\\end{documen % :(\
|
\\end{documen % :(\
|
||||||
`
|
`,
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
this.correct_request = {
|
this.correct_request = {
|
||||||
resources: [
|
resources: [
|
||||||
|
@ -36,9 +36,9 @@ Broken
|
||||||
\\begin{document}
|
\\begin{document}
|
||||||
Hello world
|
Hello world
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
return ClsiApp.ensureRunning(done)
|
return ClsiApp.ensureRunning(done)
|
||||||
})
|
})
|
||||||
|
|
|
@ -23,9 +23,9 @@ describe('Deleting Old Files', function () {
|
||||||
\\begin{document}
|
\\begin{document}
|
||||||
Hello world
|
Hello world
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
return ClsiApp.ensureRunning(done)
|
return ClsiApp.ensureRunning(done)
|
||||||
})
|
})
|
||||||
|
|
|
@ -23,7 +23,7 @@ const ChildProcess = require('child_process')
|
||||||
const ClsiApp = require('./helpers/ClsiApp')
|
const ClsiApp = require('./helpers/ClsiApp')
|
||||||
const logger = require('logger-sharelatex')
|
const logger = require('logger-sharelatex')
|
||||||
const Path = require('path')
|
const Path = require('path')
|
||||||
const fixturePath = (path) => {
|
const fixturePath = path => {
|
||||||
if (path.slice(0, 3) === 'tmp') {
|
if (path.slice(0, 3) === 'tmp') {
|
||||||
return '/tmp/clsi_acceptance_tests' + path.slice(3)
|
return '/tmp/clsi_acceptance_tests' + path.slice(3)
|
||||||
}
|
}
|
||||||
|
@ -49,8 +49,8 @@ const convertToPng = function (pdfPath, pngPath, callback) {
|
||||||
console.log(command)
|
console.log(command)
|
||||||
const convert = ChildProcess.exec(command)
|
const convert = ChildProcess.exec(command)
|
||||||
const stdout = ''
|
const stdout = ''
|
||||||
convert.stdout.on('data', (chunk) => console.log('STDOUT', chunk.toString()))
|
convert.stdout.on('data', chunk => console.log('STDOUT', chunk.toString()))
|
||||||
convert.stderr.on('data', (chunk) => console.log('STDERR', chunk.toString()))
|
convert.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
|
||||||
return convert.on('exit', () => callback())
|
return convert.on('exit', () => callback())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -65,11 +65,11 @@ const compare = function (originalPath, generatedPath, callback) {
|
||||||
)} ${diff_file}`
|
)} ${diff_file}`
|
||||||
)
|
)
|
||||||
let stderr = ''
|
let stderr = ''
|
||||||
proc.stderr.on('data', (chunk) => (stderr += chunk))
|
proc.stderr.on('data', chunk => (stderr += chunk))
|
||||||
return proc.on('exit', () => {
|
return proc.on('exit', () => {
|
||||||
if (stderr.trim() === '0 (0)') {
|
if (stderr.trim() === '0 (0)') {
|
||||||
// remove output diff if test matches expected image
|
// remove output diff if test matches expected image
|
||||||
fs.unlink(diff_file, (err) => {
|
fs.unlink(diff_file, err => {
|
||||||
if (err) {
|
if (err) {
|
||||||
throw err
|
throw err
|
||||||
}
|
}
|
||||||
|
@ -88,8 +88,8 @@ const checkPdfInfo = function (pdfPath, callback) {
|
||||||
}
|
}
|
||||||
const proc = ChildProcess.exec(`pdfinfo ${fixturePath(pdfPath)}`)
|
const proc = ChildProcess.exec(`pdfinfo ${fixturePath(pdfPath)}`)
|
||||||
let stdout = ''
|
let stdout = ''
|
||||||
proc.stdout.on('data', (chunk) => (stdout += chunk))
|
proc.stdout.on('data', chunk => (stdout += chunk))
|
||||||
proc.stderr.on('data', (chunk) => console.log('STDERR', chunk.toString()))
|
proc.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
|
||||||
return proc.on('exit', () => {
|
return proc.on('exit', () => {
|
||||||
if (stdout.match(/Optimized:\s+yes/)) {
|
if (stdout.match(/Optimized:\s+yes/)) {
|
||||||
return callback(null, true)
|
return callback(null, true)
|
||||||
|
@ -135,14 +135,14 @@ const comparePdf = function (project_id, example_dir, callback) {
|
||||||
return convertToPng(
|
return convertToPng(
|
||||||
`tmp/${project_id}.pdf`,
|
`tmp/${project_id}.pdf`,
|
||||||
`tmp/${project_id}-generated.png`,
|
`tmp/${project_id}-generated.png`,
|
||||||
(error) => {
|
error => {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
throw error
|
throw error
|
||||||
}
|
}
|
||||||
return convertToPng(
|
return convertToPng(
|
||||||
`examples/${example_dir}/output.pdf`,
|
`examples/${example_dir}/output.pdf`,
|
||||||
`tmp/${project_id}-source.png`,
|
`tmp/${project_id}-source.png`,
|
||||||
(error) => {
|
error => {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
throw error
|
throw error
|
||||||
}
|
}
|
||||||
|
@ -162,7 +162,7 @@ const comparePdf = function (project_id, example_dir, callback) {
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
return compareMultiplePages(project_id, (error) => {
|
return compareMultiplePages(project_id, error => {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
throw error
|
throw error
|
||||||
}
|
}
|
||||||
|
@ -216,82 +216,71 @@ describe('Example Documents', function () {
|
||||||
fsExtra.remove(fixturePath('tmp'), done)
|
fsExtra.remove(fixturePath('tmp'), done)
|
||||||
})
|
})
|
||||||
|
|
||||||
return Array.from(fs.readdirSync(fixturePath('examples'))).map(
|
return Array.from(fs.readdirSync(fixturePath('examples'))).map(example_dir =>
|
||||||
(example_dir) =>
|
(example_dir =>
|
||||||
((example_dir) =>
|
describe(example_dir, function () {
|
||||||
describe(example_dir, function () {
|
before(function () {
|
||||||
before(function () {
|
return (this.project_id = Client.randomId() + '_' + example_dir)
|
||||||
return (this.project_id = Client.randomId() + '_' + example_dir)
|
})
|
||||||
})
|
|
||||||
|
|
||||||
it('should generate the correct pdf', function (done) {
|
it('should generate the correct pdf', function (done) {
|
||||||
this.timeout(MOCHA_LATEX_TIMEOUT)
|
this.timeout(MOCHA_LATEX_TIMEOUT)
|
||||||
return Client.compileDirectory(
|
return Client.compileDirectory(
|
||||||
this.project_id,
|
this.project_id,
|
||||||
fixturePath('examples'),
|
fixturePath('examples'),
|
||||||
example_dir,
|
example_dir,
|
||||||
4242,
|
4242,
|
||||||
(error, res, body) => {
|
(error, res, body) => {
|
||||||
if (
|
if (
|
||||||
error ||
|
error ||
|
||||||
__guard__(
|
__guard__(
|
||||||
body != null ? body.compile : undefined,
|
body != null ? body.compile : undefined,
|
||||||
(x) => x.status
|
x => x.status
|
||||||
) === 'failure'
|
) === 'failure'
|
||||||
) {
|
) {
|
||||||
console.log(
|
console.log('DEBUG: error', error, 'body', JSON.stringify(body))
|
||||||
'DEBUG: error',
|
return done(new Error('Compile failed'))
|
||||||
error,
|
|
||||||
'body',
|
|
||||||
JSON.stringify(body)
|
|
||||||
)
|
|
||||||
return done(new Error('Compile failed'))
|
|
||||||
}
|
|
||||||
const pdf = Client.getOutputFile(body, 'pdf')
|
|
||||||
return downloadAndComparePdf(
|
|
||||||
this.project_id,
|
|
||||||
example_dir,
|
|
||||||
pdf.url,
|
|
||||||
done
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
)
|
const pdf = Client.getOutputFile(body, 'pdf')
|
||||||
})
|
return downloadAndComparePdf(
|
||||||
|
this.project_id,
|
||||||
|
example_dir,
|
||||||
|
pdf.url,
|
||||||
|
done
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
return it('should generate the correct pdf on the second run as well', function (done) {
|
return it('should generate the correct pdf on the second run as well', function (done) {
|
||||||
this.timeout(MOCHA_LATEX_TIMEOUT)
|
this.timeout(MOCHA_LATEX_TIMEOUT)
|
||||||
return Client.compileDirectory(
|
return Client.compileDirectory(
|
||||||
this.project_id,
|
this.project_id,
|
||||||
fixturePath('examples'),
|
fixturePath('examples'),
|
||||||
example_dir,
|
example_dir,
|
||||||
4242,
|
4242,
|
||||||
(error, res, body) => {
|
(error, res, body) => {
|
||||||
if (
|
if (
|
||||||
error ||
|
error ||
|
||||||
__guard__(
|
__guard__(
|
||||||
body != null ? body.compile : undefined,
|
body != null ? body.compile : undefined,
|
||||||
(x) => x.status
|
x => x.status
|
||||||
) === 'failure'
|
) === 'failure'
|
||||||
) {
|
) {
|
||||||
console.log(
|
console.log('DEBUG: error', error, 'body', JSON.stringify(body))
|
||||||
'DEBUG: error',
|
return done(new Error('Compile failed'))
|
||||||
error,
|
|
||||||
'body',
|
|
||||||
JSON.stringify(body)
|
|
||||||
)
|
|
||||||
return done(new Error('Compile failed'))
|
|
||||||
}
|
|
||||||
const pdf = Client.getOutputFile(body, 'pdf')
|
|
||||||
return downloadAndComparePdf(
|
|
||||||
this.project_id,
|
|
||||||
example_dir,
|
|
||||||
pdf.url,
|
|
||||||
done
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
)
|
const pdf = Client.getOutputFile(body, 'pdf')
|
||||||
})
|
return downloadAndComparePdf(
|
||||||
}))(example_dir)
|
this.project_id,
|
||||||
|
example_dir,
|
||||||
|
pdf.url,
|
||||||
|
done
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}))(example_dir)
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
|
@ -24,9 +24,9 @@ describe('Simple LaTeX file', function () {
|
||||||
\\begin{document}
|
\\begin{document}
|
||||||
Hello world
|
Hello world
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
return ClsiApp.ensureRunning(() => {
|
return ClsiApp.ensureRunning(() => {
|
||||||
return Client.compile(
|
return Client.compile(
|
||||||
|
|
|
@ -1,9 +1,9 @@
|
||||||
const request = require('request')
|
const request = require('request')
|
||||||
const Settings = require('settings-sharelatex')
|
const Settings = require('@overleaf/settings')
|
||||||
after(function (done) {
|
after(function (done) {
|
||||||
request(
|
request(
|
||||||
{
|
{
|
||||||
url: `${Settings.apis.clsi.url}/metrics`
|
url: `${Settings.apis.clsi.url}/metrics`,
|
||||||
},
|
},
|
||||||
(err, response, body) => {
|
(err, response, body) => {
|
||||||
if (err) return done(err)
|
if (err) return done(err)
|
||||||
|
|
|
@ -27,9 +27,9 @@ Hello world
|
||||||
resources: [
|
resources: [
|
||||||
{
|
{
|
||||||
path: 'main.tex',
|
path: 'main.tex',
|
||||||
content
|
content,
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
this.project_id = Client.randomId()
|
this.project_id = Client.randomId()
|
||||||
return ClsiApp.ensureRunning(() => {
|
return ClsiApp.ensureRunning(() => {
|
||||||
|
@ -59,8 +59,8 @@ Hello world
|
||||||
}
|
}
|
||||||
expect(pdfPositions).to.deep.equal({
|
expect(pdfPositions).to.deep.equal({
|
||||||
pdf: [
|
pdf: [
|
||||||
{ page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 }
|
{ page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 },
|
||||||
]
|
],
|
||||||
})
|
})
|
||||||
return done()
|
return done()
|
||||||
}
|
}
|
||||||
|
@ -80,7 +80,7 @@ Hello world
|
||||||
throw error
|
throw error
|
||||||
}
|
}
|
||||||
expect(codePositions).to.deep.equal({
|
expect(codePositions).to.deep.equal({
|
||||||
code: [{ file: 'main.tex', line: 3, column: -1 }]
|
code: [{ file: 'main.tex', line: 3, column: -1 }],
|
||||||
})
|
})
|
||||||
return done()
|
return done()
|
||||||
}
|
}
|
||||||
|
@ -132,9 +132,9 @@ Hello world
|
||||||
resources: [
|
resources: [
|
||||||
{
|
{
|
||||||
path: 'main.tex',
|
path: 'main.tex',
|
||||||
content
|
content,
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
Client.compile(
|
Client.compile(
|
||||||
this.broken_project_id,
|
this.broken_project_id,
|
||||||
|
|
|
@ -16,7 +16,7 @@ describe('Timed out compile', function () {
|
||||||
before(function (done) {
|
before(function (done) {
|
||||||
this.request = {
|
this.request = {
|
||||||
options: {
|
options: {
|
||||||
timeout: 10
|
timeout: 10,
|
||||||
}, // seconds
|
}, // seconds
|
||||||
resources: [
|
resources: [
|
||||||
{
|
{
|
||||||
|
@ -27,9 +27,9 @@ describe('Timed out compile', function () {
|
||||||
\\def\\x{Hello!\\par\\x}
|
\\def\\x{Hello!\\par\\x}
|
||||||
\\x
|
\\x
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
this.project_id = Client.randomId()
|
this.project_id = Client.randomId()
|
||||||
return ClsiApp.ensureRunning(() => {
|
return ClsiApp.ensureRunning(() => {
|
||||||
|
@ -55,7 +55,7 @@ describe('Timed out compile', function () {
|
||||||
})
|
})
|
||||||
|
|
||||||
return it('should return the log output file name', function () {
|
return it('should return the log output file name', function () {
|
||||||
const outputFilePaths = this.body.compile.outputFiles.map((x) => x.path)
|
const outputFilePaths = this.body.compile.outputFiles.map(x => x.path)
|
||||||
return outputFilePaths.should.include('output.log')
|
return outputFilePaths.should.include('output.log')
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
|
@ -35,7 +35,7 @@ const Server = {
|
||||||
|
|
||||||
randomId() {
|
randomId() {
|
||||||
return Math.random().toString(16).slice(2)
|
return Math.random().toString(16).slice(2)
|
||||||
}
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
Server.run()
|
Server.run()
|
||||||
|
@ -55,13 +55,13 @@ describe('Url Caching', function () {
|
||||||
\\begin{document}
|
\\begin{document}
|
||||||
\\includegraphics{lion.png}
|
\\includegraphics{lion.png}
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
path: 'lion.png',
|
path: 'lion.png',
|
||||||
url: `http://${host}:31415/${this.file}`
|
url: `http://${host}:31415/${this.file}`,
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
sinon.spy(Server, 'getFile')
|
sinon.spy(Server, 'getFile')
|
||||||
|
@ -102,14 +102,14 @@ describe('Url Caching', function () {
|
||||||
\\begin{document}
|
\\begin{document}
|
||||||
\\includegraphics{lion.png}
|
\\includegraphics{lion.png}
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
},
|
},
|
||||||
(this.image_resource = {
|
(this.image_resource = {
|
||||||
path: 'lion.png',
|
path: 'lion.png',
|
||||||
url: `http://${host}:31415/${this.file}`,
|
url: `http://${host}:31415/${this.file}`,
|
||||||
modified: Date.now()
|
modified: Date.now(),
|
||||||
})
|
}),
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
return Client.compile(
|
return Client.compile(
|
||||||
|
@ -157,14 +157,14 @@ describe('Url Caching', function () {
|
||||||
\\begin{document}
|
\\begin{document}
|
||||||
\\includegraphics{lion.png}
|
\\includegraphics{lion.png}
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
},
|
},
|
||||||
(this.image_resource = {
|
(this.image_resource = {
|
||||||
path: 'lion.png',
|
path: 'lion.png',
|
||||||
url: `http://${host}:31415/${this.file}`,
|
url: `http://${host}:31415/${this.file}`,
|
||||||
modified: (this.last_modified = Date.now())
|
modified: (this.last_modified = Date.now()),
|
||||||
})
|
}),
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
return Client.compile(
|
return Client.compile(
|
||||||
|
@ -213,14 +213,14 @@ describe('Url Caching', function () {
|
||||||
\\begin{document}
|
\\begin{document}
|
||||||
\\includegraphics{lion.png}
|
\\includegraphics{lion.png}
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
},
|
},
|
||||||
(this.image_resource = {
|
(this.image_resource = {
|
||||||
path: 'lion.png',
|
path: 'lion.png',
|
||||||
url: `http://${host}:31415/${this.file}`,
|
url: `http://${host}:31415/${this.file}`,
|
||||||
modified: (this.last_modified = Date.now())
|
modified: (this.last_modified = Date.now()),
|
||||||
})
|
}),
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
return Client.compile(
|
return Client.compile(
|
||||||
|
@ -269,14 +269,14 @@ describe('Url Caching', function () {
|
||||||
\\begin{document}
|
\\begin{document}
|
||||||
\\includegraphics{lion.png}
|
\\includegraphics{lion.png}
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
},
|
},
|
||||||
(this.image_resource = {
|
(this.image_resource = {
|
||||||
path: 'lion.png',
|
path: 'lion.png',
|
||||||
url: `http://${host}:31415/${this.file}`,
|
url: `http://${host}:31415/${this.file}`,
|
||||||
modified: (this.last_modified = Date.now())
|
modified: (this.last_modified = Date.now()),
|
||||||
})
|
}),
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
return Client.compile(
|
return Client.compile(
|
||||||
|
@ -325,17 +325,17 @@ describe('Url Caching', function () {
|
||||||
\\begin{document}
|
\\begin{document}
|
||||||
\\includegraphics{lion.png}
|
\\includegraphics{lion.png}
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
},
|
},
|
||||||
(this.image_resource = {
|
(this.image_resource = {
|
||||||
path: 'lion.png',
|
path: 'lion.png',
|
||||||
url: `http://${host}:31415/${this.file}`,
|
url: `http://${host}:31415/${this.file}`,
|
||||||
modified: (this.last_modified = Date.now())
|
modified: (this.last_modified = Date.now()),
|
||||||
})
|
}),
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
return Client.compile(this.project_id, this.request, (error) => {
|
return Client.compile(this.project_id, this.request, error => {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
throw error
|
throw error
|
||||||
}
|
}
|
||||||
|
|
|
@ -25,9 +25,9 @@ describe('Syncing', function () {
|
||||||
content: fs.readFileSync(
|
content: fs.readFileSync(
|
||||||
path.join(__dirname, '../fixtures/naugty_strings.txt'),
|
path.join(__dirname, '../fixtures/naugty_strings.txt'),
|
||||||
'utf-8'
|
'utf-8'
|
||||||
)
|
),
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
this.project_id = Client.randomId()
|
this.project_id = Client.randomId()
|
||||||
return ClsiApp.ensureRunning(() => {
|
return ClsiApp.ensureRunning(() => {
|
||||||
|
@ -61,8 +61,8 @@ describe('Syncing', function () {
|
||||||
mathInline: 6,
|
mathInline: 6,
|
||||||
mathDisplay: 0,
|
mathDisplay: 0,
|
||||||
errors: 0,
|
errors: 0,
|
||||||
messages: ''
|
messages: '',
|
||||||
}
|
},
|
||||||
})
|
})
|
||||||
return done()
|
return done()
|
||||||
})
|
})
|
||||||
|
|
|
@ -15,7 +15,7 @@
|
||||||
let Client
|
let Client
|
||||||
const request = require('request')
|
const request = require('request')
|
||||||
const fs = require('fs')
|
const fs = require('fs')
|
||||||
const Settings = require('settings-sharelatex')
|
const Settings = require('@overleaf/settings')
|
||||||
|
|
||||||
const host = 'localhost'
|
const host = 'localhost'
|
||||||
|
|
||||||
|
@ -38,8 +38,8 @@ module.exports = Client = {
|
||||||
{
|
{
|
||||||
url: `${this.host}/project/${project_id}/compile`,
|
url: `${this.host}/project/${project_id}/compile`,
|
||||||
json: {
|
json: {
|
||||||
compile: data
|
compile: data,
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
callback
|
callback
|
||||||
)
|
)
|
||||||
|
@ -66,7 +66,7 @@ module.exports = Client = {
|
||||||
const app = express()
|
const app = express()
|
||||||
app.use(express.static(directory))
|
app.use(express.static(directory))
|
||||||
console.log('starting test server on', port, host)
|
console.log('starting test server on', port, host)
|
||||||
return app.listen(port, host).on('error', (error) => {
|
return app.listen(port, host).on('error', error => {
|
||||||
console.error('error starting server:', error.message)
|
console.error('error starting server:', error.message)
|
||||||
return process.exit(1)
|
return process.exit(1)
|
||||||
})
|
})
|
||||||
|
@ -87,9 +87,9 @@ module.exports = Client = {
|
||||||
imageName,
|
imageName,
|
||||||
file,
|
file,
|
||||||
line,
|
line,
|
||||||
column
|
column,
|
||||||
},
|
},
|
||||||
json: true
|
json: true,
|
||||||
},
|
},
|
||||||
(error, response, body) => {
|
(error, response, body) => {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
|
@ -118,9 +118,9 @@ module.exports = Client = {
|
||||||
imageName,
|
imageName,
|
||||||
page,
|
page,
|
||||||
h,
|
h,
|
||||||
v
|
v,
|
||||||
},
|
},
|
||||||
json: true
|
json: true,
|
||||||
},
|
},
|
||||||
(error, response, body) => {
|
(error, response, body) => {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
|
@ -148,7 +148,7 @@ module.exports = Client = {
|
||||||
entities = entities.concat(
|
entities = entities.concat(
|
||||||
fs
|
fs
|
||||||
.readdirSync(`${baseDirectory}/${directory}/${entity}`)
|
.readdirSync(`${baseDirectory}/${directory}/${entity}`)
|
||||||
.map((subEntity) => {
|
.map(subEntity => {
|
||||||
if (subEntity === 'main.tex') {
|
if (subEntity === 'main.tex') {
|
||||||
rootResourcePath = `${entity}/${subEntity}`
|
rootResourcePath = `${entity}/${subEntity}`
|
||||||
}
|
}
|
||||||
|
@ -167,14 +167,14 @@ module.exports = Client = {
|
||||||
'Rtex',
|
'Rtex',
|
||||||
'ist',
|
'ist',
|
||||||
'md',
|
'md',
|
||||||
'Rmd'
|
'Rmd',
|
||||||
].indexOf(extension) > -1
|
].indexOf(extension) > -1
|
||||||
) {
|
) {
|
||||||
resources.push({
|
resources.push({
|
||||||
path: entity,
|
path: entity,
|
||||||
content: fs
|
content: fs
|
||||||
.readFileSync(`${baseDirectory}/${directory}/${entity}`)
|
.readFileSync(`${baseDirectory}/${directory}/${entity}`)
|
||||||
.toString()
|
.toString(),
|
||||||
})
|
})
|
||||||
} else if (
|
} else if (
|
||||||
['eps', 'ttf', 'png', 'jpg', 'pdf', 'jpeg'].indexOf(extension) > -1
|
['eps', 'ttf', 'png', 'jpg', 'pdf', 'jpeg'].indexOf(extension) > -1
|
||||||
|
@ -182,7 +182,7 @@ module.exports = Client = {
|
||||||
resources.push({
|
resources.push({
|
||||||
path: entity,
|
path: entity,
|
||||||
url: `http://${host}:${serverPort}/${directory}/${entity}`,
|
url: `http://${host}:${serverPort}/${directory}/${entity}`,
|
||||||
modified: stat.mtime
|
modified: stat.mtime,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -193,7 +193,7 @@ module.exports = Client = {
|
||||||
(error, body) => {
|
(error, body) => {
|
||||||
const req = {
|
const req = {
|
||||||
resources,
|
resources,
|
||||||
rootResourcePath
|
rootResourcePath,
|
||||||
}
|
}
|
||||||
|
|
||||||
if (error == null) {
|
if (error == null) {
|
||||||
|
@ -220,8 +220,8 @@ module.exports = Client = {
|
||||||
url: `${this.host}/project/${project_id}/wordcount`,
|
url: `${this.host}/project/${project_id}/wordcount`,
|
||||||
qs: {
|
qs: {
|
||||||
image,
|
image,
|
||||||
file
|
file,
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
(error, response, body) => {
|
(error, response, body) => {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
|
@ -233,5 +233,5 @@ module.exports = Client = {
|
||||||
return callback(null, JSON.parse(body))
|
return callback(null, JSON.parse(body))
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
}
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@ -15,7 +15,7 @@
|
||||||
const app = require('../../../../app')
|
const app = require('../../../../app')
|
||||||
require('logger-sharelatex').logger.level('info')
|
require('logger-sharelatex').logger.level('info')
|
||||||
const logger = require('logger-sharelatex')
|
const logger = require('logger-sharelatex')
|
||||||
const Settings = require('settings-sharelatex')
|
const Settings = require('@overleaf/settings')
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
running: false,
|
running: false,
|
||||||
|
@ -35,10 +35,10 @@ module.exports = {
|
||||||
return app.listen(
|
return app.listen(
|
||||||
__guard__(
|
__guard__(
|
||||||
Settings.internal != null ? Settings.internal.clsi : undefined,
|
Settings.internal != null ? Settings.internal.clsi : undefined,
|
||||||
(x) => x.port
|
x => x.port
|
||||||
),
|
),
|
||||||
'localhost',
|
'localhost',
|
||||||
(error) => {
|
error => {
|
||||||
if (error != null) {
|
if (error != null) {
|
||||||
throw error
|
throw error
|
||||||
}
|
}
|
||||||
|
@ -55,7 +55,7 @@ module.exports = {
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
},
|
||||||
}
|
}
|
||||||
function __guard__(value, transform) {
|
function __guard__(value, transform) {
|
||||||
return typeof value !== 'undefined' && value !== null
|
return typeof value !== 'undefined' && value !== null
|
||||||
|
|
|
@ -9,8 +9,8 @@ module.exports = {
|
||||||
username: 'clsi',
|
username: 'clsi',
|
||||||
password: null,
|
password: null,
|
||||||
dialect: 'sqlite',
|
dialect: 'sqlite',
|
||||||
storage: Path.resolve('db.sqlite')
|
storage: Path.resolve('db.sqlite'),
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
path: {
|
path: {
|
||||||
|
@ -22,7 +22,7 @@ module.exports = {
|
||||||
synctexBaseDir() {
|
synctexBaseDir() {
|
||||||
return '/compile'
|
return '/compile'
|
||||||
},
|
},
|
||||||
sandboxedCompilesHostDir: process.env.SANDBOXED_COMPILES_HOST_DIR
|
sandboxedCompilesHostDir: process.env.SANDBOXED_COMPILES_HOST_DIR,
|
||||||
},
|
},
|
||||||
|
|
||||||
clsi: {
|
clsi: {
|
||||||
|
@ -33,32 +33,31 @@ module.exports = {
|
||||||
docker: {
|
docker: {
|
||||||
image: process.env.TEXLIVE_IMAGE || 'texlive-full:2017.1-opt',
|
image: process.env.TEXLIVE_IMAGE || 'texlive-full:2017.1-opt',
|
||||||
env: {
|
env: {
|
||||||
PATH:
|
PATH: '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/2017/bin/x86_64-linux/',
|
||||||
'/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/2017/bin/x86_64-linux/',
|
HOME: '/tmp',
|
||||||
HOME: '/tmp'
|
|
||||||
},
|
},
|
||||||
modem: {
|
modem: {
|
||||||
socketPath: false
|
socketPath: false,
|
||||||
},
|
},
|
||||||
user: process.env.SIBLING_CONTAINER_USER || '111'
|
user: process.env.SIBLING_CONTAINER_USER || '111',
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
internal: {
|
internal: {
|
||||||
clsi: {
|
clsi: {
|
||||||
port: 3013,
|
port: 3013,
|
||||||
load_port: 3044,
|
load_port: 3044,
|
||||||
host: 'localhost'
|
host: 'localhost',
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
apis: {
|
apis: {
|
||||||
clsi: {
|
clsi: {
|
||||||
url: 'http://localhost:3013'
|
url: 'http://localhost:3013',
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
smokeTest: false,
|
smokeTest: false,
|
||||||
project_cache_length_ms: 1000 * 60 * 60 * 24,
|
project_cache_length_ms: 1000 * 60 * 60 * 24,
|
||||||
parallelFileDownloads: 1
|
parallelFileDownloads: 1,
|
||||||
}
|
}
|
||||||
|
|
|
@ -22,13 +22,13 @@ function test(hashType, filePath, callback) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
const t0 = process.hrtime.bigint()
|
const t0 = process.hrtime.bigint()
|
||||||
ContentCacheManager.update(dir, filePath, (x) => {
|
ContentCacheManager.update(dir, filePath, x => {
|
||||||
const t1 = process.hrtime.bigint()
|
const t1 = process.hrtime.bigint()
|
||||||
const cold = Number(t1 - t0) / 1e6
|
const cold = Number(t1 - t0) / 1e6
|
||||||
ContentCacheManager.update(dir, filePath, (x) => {
|
ContentCacheManager.update(dir, filePath, x => {
|
||||||
const t2 = process.hrtime.bigint()
|
const t2 = process.hrtime.bigint()
|
||||||
const warm = Number(t2 - t1) / 1e6
|
const warm = Number(t2 - t1) / 1e6
|
||||||
fs.rmdir(dir, { recursive: true }, (err) => {
|
fs.rmdir(dir, { recursive: true }, err => {
|
||||||
if (err) {
|
if (err) {
|
||||||
return callback(err)
|
return callback(err)
|
||||||
}
|
}
|
||||||
|
@ -52,18 +52,18 @@ function test(hashType, filePath, callback) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
var jobs = []
|
const jobs = []
|
||||||
files.forEach((file) => {
|
files.forEach(file => {
|
||||||
jobs.push((cb) => {
|
jobs.push(cb => {
|
||||||
test('md5', file, cb)
|
test('md5', file, cb)
|
||||||
})
|
})
|
||||||
jobs.push((cb) => {
|
jobs.push(cb => {
|
||||||
test('sha1', file, cb)
|
test('sha1', file, cb)
|
||||||
})
|
})
|
||||||
jobs.push((cb) => {
|
jobs.push(cb => {
|
||||||
test('hmac-sha1', file, cb)
|
test('hmac-sha1', file, cb)
|
||||||
})
|
})
|
||||||
jobs.push((cb) => {
|
jobs.push(cb => {
|
||||||
test('sha256', file, cb)
|
test('sha256', file, cb)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
|
@ -1,6 +1,3 @@
|
||||||
/* eslint-disable
|
|
||||||
standard/no-callback-literal,
|
|
||||||
*/
|
|
||||||
// TODO: This file was created by bulk-decaffeinate.
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
// Fix any style issues and re-enable lint.
|
// Fix any style issues and re-enable lint.
|
||||||
/*
|
/*
|
||||||
|
@ -10,14 +7,14 @@
|
||||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
*/
|
*/
|
||||||
const request = require('request')
|
const request = require('request')
|
||||||
const Settings = require('settings-sharelatex')
|
const Settings = require('@overleaf/settings')
|
||||||
const async = require('async')
|
const async = require('async')
|
||||||
const fs = require('fs')
|
const fs = require('fs')
|
||||||
const _ = require('lodash')
|
const _ = require('lodash')
|
||||||
const concurentCompiles = 5
|
const concurentCompiles = 5
|
||||||
const totalCompiles = 50
|
const totalCompiles = 50
|
||||||
|
|
||||||
const buildUrl = (path) =>
|
const buildUrl = path =>
|
||||||
`http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
|
`http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
|
||||||
|
|
||||||
const mainTexContent = fs.readFileSync('./bulk.tex', 'utf-8')
|
const mainTexContent = fs.readFileSync('./bulk.tex', 'utf-8')
|
||||||
|
@ -51,11 +48,11 @@ const makeRequest = function (compileNumber, callback) {
|
||||||
\\begin{document}
|
\\begin{document}
|
||||||
${bodyContent}
|
${bodyContent}
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
},
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
(err, response, body) => {
|
(err, response, body) => {
|
||||||
if (response.statusCode !== 200) {
|
if (response.statusCode !== 200) {
|
||||||
|
@ -74,12 +71,13 @@ ${bodyContent}
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const jobs = _.map(__range__(1, totalCompiles, true), (i) => (cb) =>
|
const jobs = _.map(
|
||||||
makeRequest(i, cb)
|
__range__(1, totalCompiles, true),
|
||||||
|
i => cb => makeRequest(i, cb)
|
||||||
)
|
)
|
||||||
|
|
||||||
const startTime = new Date()
|
const startTime = new Date()
|
||||||
async.parallelLimit(jobs, concurentCompiles, (err) => {
|
async.parallelLimit(jobs, concurentCompiles, err => {
|
||||||
if (err != null) {
|
if (err != null) {
|
||||||
console.error(err)
|
console.error(err)
|
||||||
}
|
}
|
||||||
|
|
|
@ -12,8 +12,8 @@ SandboxedModule.configure({
|
||||||
info() {},
|
info() {},
|
||||||
warn() {},
|
warn() {},
|
||||||
error() {},
|
error() {},
|
||||||
err() {}
|
err() {},
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
globals: { Buffer, console, process }
|
globals: { Buffer, console, process },
|
||||||
})
|
})
|
||||||
|
|
|
@ -1,20 +1,20 @@
|
||||||
const request = require('request')
|
const request = require('request')
|
||||||
const Settings = require('settings-sharelatex')
|
const Settings = require('@overleaf/settings')
|
||||||
|
|
||||||
const buildUrl = (path) =>
|
const buildUrl = path =>
|
||||||
`http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
|
`http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
|
||||||
|
|
||||||
const url = buildUrl(`project/smoketest-${process.pid}/compile`)
|
const url = buildUrl(`project/smoketest-${process.pid}/compile`)
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
sendNewResult(res) {
|
sendNewResult(res) {
|
||||||
this._run((error) => this._sendResponse(res, error))
|
this._run(error => this._sendResponse(res, error))
|
||||||
},
|
},
|
||||||
sendLastResult(res) {
|
sendLastResult(res) {
|
||||||
this._sendResponse(res, this._lastError)
|
this._sendResponse(res, this._lastError)
|
||||||
},
|
},
|
||||||
triggerRun(cb) {
|
triggerRun(cb) {
|
||||||
this._run((error) => {
|
this._run(error => {
|
||||||
this._lastError = error
|
this._lastError = error
|
||||||
cb(error)
|
cb(error)
|
||||||
})
|
})
|
||||||
|
@ -74,11 +74,11 @@ module.exports = {
|
||||||
}
|
}
|
||||||
\\end{tikzpicture}
|
\\end{tikzpicture}
|
||||||
\\end{document}\
|
\\end{document}\
|
||||||
`
|
`,
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
},
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
(error, response, body) => {
|
(error, response, body) => {
|
||||||
if (error) return done(error)
|
if (error) return done(error)
|
||||||
|
@ -98,5 +98,5 @@ module.exports = {
|
||||||
done()
|
done()
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
}
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@ -24,7 +24,7 @@ function tryImageNameValidation(method, imageNameField) {
|
||||||
this.Settings.clsi = { docker: {} }
|
this.Settings.clsi = { docker: {} }
|
||||||
this.Settings.clsi.docker.allowedImages = [
|
this.Settings.clsi.docker.allowedImages = [
|
||||||
'repo/image:tag1',
|
'repo/image:tag1',
|
||||||
'repo/image:tag2'
|
'repo/image:tag2',
|
||||||
]
|
]
|
||||||
this.res.send = sinon.stub()
|
this.res.send = sinon.stub()
|
||||||
this.res.status = sinon.stub().returns({ send: this.res.send })
|
this.res.status = sinon.stub().returns({ send: this.res.send })
|
||||||
|
@ -66,15 +66,15 @@ describe('CompileController', function () {
|
||||||
requires: {
|
requires: {
|
||||||
'./CompileManager': (this.CompileManager = {}),
|
'./CompileManager': (this.CompileManager = {}),
|
||||||
'./RequestParser': (this.RequestParser = {}),
|
-      'settings-sharelatex': (this.Settings = {
+      '@overleaf/settings': (this.Settings = {
-            url: 'http://clsi.example.com'
+            url: 'http://clsi.example.com',
-          }
+          },
-        }
+        },
-      './ProjectPersistenceManager': (this.ProjectPersistenceManager = {})
+      './ProjectPersistenceManager': (this.ProjectPersistenceManager = {}),
-    }
+    },
@@ -85,28 +85,28 @@ describe('CompileController', function () {
-        compile: 'mock-body'
+        compile: 'mock-body',
-        compile: 'mock-parsed-request'
+        compile: 'mock-parsed-request',
-        project_id: this.project_id
+        project_id: this.project_id,
-          build: 1234
+          build: 1234,
-          build: 1234
+          build: 1234,
-        }
+        },
@@ -155,13 +155,13 @@ describe('CompileController', function () {
-          outputFiles: this.output_files.map((file) => {
+          outputFiles: this.output_files.map(file => {
-              ...file
+              ...file,
-          })
+          }),
-        }
+        },
@@ -173,13 +173,13 @@ describe('CompileController', function () {
-          build: 1234
+          build: 1234,
-          build: 1234
+          build: 1234,
-        }
+        },
@@ -196,13 +196,13 @@ describe('CompileController', function () {
-          outputFiles: this.output_files.map((file) => {
+          outputFiles: this.output_files.map(file => {
-              ...file
+              ...file,
-          })
+          }),
-        }
+        },
@@ -215,13 +215,13 @@ describe('CompileController', function () {
-          build: 1234
+          build: 1234,
-          build: 1234
+          build: 1234,
-        }
+        },
@@ -238,13 +238,13 @@ describe('CompileController', function () {
-          outputFiles: this.output_files.map((file) => {
+          outputFiles: this.output_files.map(file => {
-              ...file
+              ...file,
-          })
+          }),
-        }
+        },
@@ -268,8 +268,8 @@ describe('CompileController', function () {
-            timings: undefined
+            timings: undefined,
-          }
+          },
@@ -295,8 +295,8 @@ describe('CompileController', function () {
-            timings: undefined
+            timings: undefined,
-          }
+          },
@@ -320,8 +320,8 @@ describe('CompileController', function () {
-            timings: undefined
+            timings: undefined,
-          }
+          },
@@ -338,7 +338,7 @@ describe('CompileController', function () {
-        column: this.column.toString()
+        column: this.column.toString(),
@@ -363,7 +363,7 @@ describe('CompileController', function () {
-          pdf: this.pdfPositions
+          pdf: this.pdfPositions,
@@ -381,7 +381,7 @@ describe('CompileController', function () {
-        v: this.v.toString()
+        v: this.v.toString(),
@@ -400,7 +400,7 @@ describe('CompileController', function () {
-          code: this.codePositions
+          code: this.codePositions,
@@ -415,7 +415,7 @@ describe('CompileController', function () {
-        image: (this.image = 'example.com/image')
+        image: (this.image = 'example.com/image'),
@@ -435,7 +435,7 @@ describe('CompileController', function () {
-          texcount: this.texcount
+          texcount: this.texcount,
@@ -31,19 +31,19 @@ describe('CompileManager', function () {
-        'settings-sharelatex': (this.Settings = {
+        '@overleaf/settings': (this.Settings = {
-            outputDir: '/output/dir'
+            outputDir: '/output/dir',
-              image: 'SOMEIMAGE'
+              image: 'SOMEIMAGE',
-            }
+            },
-          }
+          },
@@ -52,8 +52,8 @@ describe('CompileManager', function () {
-        'fs-extra': (this.fse = { ensureDir: sinon.stub().callsArg(1) })
+        'fs-extra': (this.fse = { ensureDir: sinon.stub().callsArg(1) }),
-      }
+      },
@@ -64,7 +64,7 @@ describe('CompileManager', function () {
-        user_id: this.user_id
+        user_id: this.user_id,
@@ -132,24 +132,24 @@ describe('CompileManager', function () {
-          type: 'log'
+          type: 'log',
-          type: 'pdf'
+          type: 'pdf',
-        }
+        },
-          build: 1234
+          build: 1234,
-          build: 1234
+          build: 1234,
-        }
+        },
@@ -160,7 +160,7 @@ describe('CompileManager', function () {
-        compileGroup: (this.compileGroup = 'compile-group')
+        compileGroup: (this.compileGroup = 'compile-group'),
@@ -201,7 +201,7 @@ describe('CompileManager', function () {
-          compileGroup: this.compileGroup
+          compileGroup: this.compileGroup,
@@ -254,9 +254,9 @@ describe('CompileManager', function () {
-            CHKTEX_ULIMIT_OPTIONS: '-t 5 -v 64000'
+            CHKTEX_ULIMIT_OPTIONS: '-t 5 -v 64000',
-          compileGroup: this.compileGroup
+          compileGroup: this.compileGroup,
@@ -279,7 +279,7 @@ describe('CompileManager', function () {
-          compileGroup: this.compileGroup
+          compileGroup: this.compileGroup,
@@ -293,7 +293,7 @@ describe('CompileManager', function () {
-        }
+        },
@@ -315,7 +315,7 @@ describe('CompileManager', function () {
-          `${this.Settings.path.outputDir}/${this.project_id}-${this.user_id}`
+          `${this.Settings.path.outputDir}/${this.project_id}-${this.user_id}`,
@@ -331,7 +331,7 @@ describe('CompileManager', function () {
-        }
+        },
@@ -354,7 +354,7 @@ describe('CompileManager', function () {
-          `${this.Settings.path.outputDir}/${this.project_id}-${this.user_id}`
+          `${this.Settings.path.outputDir}/${this.project_id}-${this.user_id}`,
@@ -380,7 +380,7 @@ describe('CompileManager', function () {
-      return (this.Settings.path.synctexBaseDir = (project_id) =>
+      return (this.Settings.path.synctexBaseDir = project_id =>
@@ -389,7 +389,7 @@ describe('CompileManager', function () {
-        }
+        },
@@ -419,7 +419,7 @@ describe('CompileManager', function () {
-            this.column
+            this.column,
@@ -437,8 +437,8 @@ describe('CompileManager', function () {
-            width: this.width
+            width: this.width,
-          }
+          },
@@ -470,7 +470,7 @@ describe('CompileManager', function () {
-            this.column
+            this.column,
@@ -487,7 +487,7 @@ describe('CompileManager', function () {
-        }
+        },
@@ -525,8 +525,8 @@ describe('CompileManager', function () {
-            column: this.column
+            column: this.column,
-          }
+          },
@@ -598,7 +598,7 @@ describe('CompileManager', function () {
-        `-out=${this.file_path}.wc`
+        `-out=${this.file_path}.wc`,
@@ -625,7 +625,7 @@ describe('CompileManager', function () {
-          messages: ''
+          messages: '',
@@ -8,7 +8,7 @@ describe('ContentCacheManager', function () {
-    Settings = require('settings-sharelatex')
+    Settings = require('@overleaf/settings')
@@ -91,14 +91,14 @@ describe('ContentCacheManager', function () {
-          hash: h1
+          hash: h1,
-          hash: h2
+          hash: h2,
-        }
+        },
@@ -110,14 +110,14 @@ describe('ContentCacheManager', function () {
-              [h2, 0]
+              [h2, 0],
-              [h2, RANGE_2.byteLength]
+              [h2, RANGE_2.byteLength],
-            ]
+            ],
-          )
+          ),
@@ -144,8 +144,8 @@ describe('ContentCacheManager', function () {
-          hash: h1
+          hash: h1,
-        }
+        },
@@ -157,14 +157,14 @@ describe('ContentCacheManager', function () {
-              [h2, 1]
+              [h2, 1],
-              [h2, RANGE_2.byteLength]
+              [h2, RANGE_2.byteLength],
-            ]
+            ],
-          )
+          ),
@@ -189,8 +189,8 @@ describe('ContentCacheManager', function () {
-          hash: h1
+          hash: h1,
-        }
+        },
@@ -200,9 +200,9 @@ describe('ContentCacheManager', function () {
-            hashSize: [[h1, RANGE_1.byteLength]]
+            hashSize: [[h1, RANGE_1.byteLength]],
-          )
+          ),
@@ -20,8 +20,8 @@ describe('LockManager', function () {
-        'settings-sharelatex': (this.Settings = { clsi: { docker: {} } })
+        '@overleaf/settings': (this.Settings = { clsi: { docker: {} } }),
-      }
+      },
@@ -31,7 +31,7 @@ describe('LockManager', function () {
-      (releaseLock) =>
+      releaseLock =>
@@ -54,7 +54,7 @@ describe('LockManager', function () {
-      (releaseLock) =>
+      releaseLock =>
@@ -63,7 +63,7 @@ describe('LockManager', function () {
-      (releaseLock) =>
+      releaseLock =>
@@ -95,7 +95,7 @@ describe('LockManager', function () {
-      (releaseLock) =>
+      releaseLock =>
@@ -104,7 +104,7 @@ describe('LockManager', function () {
-      (releaseLock) =>
+      releaseLock =>
@@ -149,7 +149,7 @@ describe('LockManager', function () {
-      (releaseLock) =>
+      releaseLock =>
@@ -162,7 +162,7 @@ describe('LockManager', function () {
-      (releaseLock) =>
+      releaseLock =>
@@ -206,7 +206,7 @@ describe('LockManager', function () {
-      (releaseLock) =>
+      releaseLock =>
@@ -219,7 +219,7 @@ describe('LockManager', function () {
-      (releaseLock) =>
+      releaseLock =>
@@ -28,9 +28,9 @@ describe('DockerRunner', function () {
-        'settings-sharelatex': (this.Settings = {
+        '@overleaf/settings': (this.Settings = {
-          path: {}
+          path: {},
@@ -49,21 +49,21 @@ describe('DockerRunner', function () {
-            }
+            },
-          })
+          }),
-          })
+          }),
-          }
+          },
-        }
+        },
-      globals: { Math } // used by lodash
+      globals: { Math }, // used by lodash
@@ -172,10 +172,10 @@ describe('DockerRunner', function () {
-      const volumes = this.DockerRunner._runAndWaitForContainer.lastCall
-        .args[1]
+      const volumes =
+        this.DockerRunner._runAndWaitForContainer.lastCall.args[1]
-        '/some/host/dir/compiles/xyz': '/compile'
+        '/some/host/dir/compiles/xyz': '/compile',
@@ -294,7 +294,7 @@ describe('DockerRunner', function () {
-        'repo/image:tag2'
+        'repo/image:tag2',
@@ -368,9 +368,9 @@ describe('DockerRunner', function () {
-          'HostConfig.newProperty': 'new-property'
+          'HostConfig.newProperty': 'new-property',
-        'other-group': { otherProperty: 'other-property' }
+        'other-group': { otherProperty: 'other-property' },
@@ -388,14 +388,14 @@ describe('DockerRunner', function () {
-      const options = this.DockerRunner._runAndWaitForContainer.lastCall
-        .args[0]
+      const options =
+        this.DockerRunner._runAndWaitForContainer.lastCall.args[0]
-        newProperty: 'new-property'
+        newProperty: 'new-property',
@@ -588,7 +588,7 @@ describe('DockerRunner', function () {
-        }
+        },
@@ -715,23 +715,23 @@ describe('DockerRunner', function () {
-          Created: nowInSeconds - oneHourInSeconds - 100
+          Created: nowInSeconds - oneHourInSeconds - 100,
-          Created: nowInSeconds - oneHourInSeconds + 100
+          Created: nowInSeconds - oneHourInSeconds + 100,
-          Created: nowInSeconds - 2 * oneHourInSeconds
+          Created: nowInSeconds - 2 * oneHourInSeconds,
-        }
+        },
-      return this.DockerRunner.destroyOldContainers((error) => {
+      return this.DockerRunner.destroyOldContainers(error => {
@@ -778,7 +778,7 @@ describe('DockerRunner', function () {
-        (err) => {
+        err => {
@@ -792,7 +792,7 @@ describe('DockerRunner', function () {
-        (err) => {
+        err => {
@@ -806,7 +806,7 @@ describe('DockerRunner', function () {
-        (err) => {
+        err => {
@@ -820,7 +820,7 @@ describe('DockerRunner', function () {
-        (err) => {
+        err => {
@@ -832,7 +832,7 @@ describe('DockerRunner', function () {
-        remove: sinon.stub().callsArgWith(1, this.fakeError)
+        remove: sinon.stub().callsArgWith(1, this.fakeError),
@@ -843,7 +843,7 @@ describe('DockerRunner', function () {
-        (err) => {
+        err => {
@@ -856,7 +856,7 @@ describe('DockerRunner', function () {
-        remove: sinon.stub().callsArgWith(1, this.fakeError)
+        remove: sinon.stub().callsArgWith(1, this.fakeError),
@@ -867,7 +867,7 @@ describe('DockerRunner', function () {
-        (err) => {
+        err => {
@@ -887,7 +887,7 @@ describe('DockerRunner', function () {
-      return this.DockerRunner.kill(this.containerId, (err) => {
+      return this.DockerRunner.kill(this.containerId, err => {
@@ -897,14 +897,14 @@ describe('DockerRunner', function () {
-      return this.DockerRunner.kill(this.containerId, (err) => {
+      return this.DockerRunner.kill(this.containerId, err => {
-      return this.DockerRunner.kill(this.containerId, (err) => {
+      return this.DockerRunner.kill(this.containerId, err => {
@@ -917,7 +917,7 @@ describe('DockerRunner', function () {
-        kill: sinon.stub().callsArgWith(0, this.fakeError)
+        kill: sinon.stub().callsArgWith(0, this.fakeError),
@@ -925,7 +925,7 @@ describe('DockerRunner', function () {
-      return this.DockerRunner.kill(this.containerId, (err) => {
+      return this.DockerRunner.kill(this.containerId, err => {
@@ -938,7 +938,7 @@ describe('DockerRunner', function () {
-        kill: sinon.stub().callsArgWith(0, this.fakeError)
+        kill: sinon.stub().callsArgWith(0, this.fakeError),
@@ -946,7 +946,7 @@ describe('DockerRunner', function () {
-      return this.DockerRunner.kill(this.containerId, (err) => {
+      return this.DockerRunner.kill(this.containerId, err => {
@@ -19,8 +19,8 @@ describe('DraftModeManager', function () {
-        fs: (this.fs = {})
+        fs: (this.fs = {}),
-      }
+      },
@@ -23,21 +23,21 @@ describe('LatexRunner', function () {
-        'settings-sharelatex': (this.Settings = {
+        '@overleaf/settings': (this.Settings = {
-            socketPath: '/var/run/docker.sock'
+            socketPath: '/var/run/docker.sock',
-          }
+          },
-          })
+          }),
-          writeFile: sinon.stub().callsArg(2)
+          writeFile: sinon.stub().callsArg(2),
-        })
+        }),
-      }
+      },
@@ -54,7 +54,7 @@ describe('LatexRunner', function () {
-      stderr: 'this is stderr'
+      stderr: 'this is stderr',
@@ -69,7 +69,7 @@ describe('LatexRunner', function () {
-          compileGroup: this.compileGroup
+          compileGroup: this.compileGroup,
@@ -116,7 +116,7 @@ describe('LatexRunner', function () {
-          '\tPercent of CPU this job got: 98%\n'
+          '\tPercent of CPU this job got: 98%\n',
@@ -127,7 +127,7 @@ describe('LatexRunner', function () {
-          compileGroup: this.compileGroup
+          compileGroup: this.compileGroup,
@@ -152,7 +152,7 @@ describe('LatexRunner', function () {
-          timeout: (this.timeout = 42000)
+          timeout: (this.timeout = 42000),
@@ -175,7 +175,7 @@ describe('LatexRunner', function () {
-          flags: ['-file-line-error', '-halt-on-error']
+          flags: ['-file-line-error', '-halt-on-error'],
@@ -184,7 +184,7 @@ describe('LatexRunner', function () {
-        (arg) => arg === '-file-line-error' || arg === '-halt-on-error'
+        arg => arg === '-file-line-error' || arg === '-halt-on-error'
@@ -22,13 +22,13 @@ describe('DockerLockManager', function () {
-        'settings-sharelatex': {},
+        '@overleaf/settings': {},
-          readdir: sinon.stub().callsArgWith(1)
+          readdir: sinon.stub().callsArgWith(1),
-        lockfile: (this.Lockfile = {})
+        lockfile: (this.Lockfile = {}),
-      }
+      },
@@ -25,11 +25,11 @@ describe('OutputFileFinder', function () {
-        child_process: { spawn: (this.spawn = sinon.stub()) }
+        child_process: { spawn: (this.spawn = sinon.stub()) },
-        Math // used by lodash
+        Math, // used by lodash
-      }
+      },
@@ -57,12 +57,12 @@ describe('OutputFileFinder', function () {
-        type: 'pdf'
+        type: 'pdf',
-        type: 'tex'
+        type: 'tex',
-      }
+      },
@@ -27,9 +27,9 @@ describe('OutputFileOptimiser', function () {
-        './Metrics': {}
+        './Metrics': {},
-      globals: { Math } // used by lodash
+      globals: { Math }, // used by lodash
@@ -27,16 +27,16 @@ describe('ProjectPersistenceManager', function () {
-        'settings-sharelatex': (this.settings = {
+        '@overleaf/settings': (this.settings = {
-            clsiCacheDir: '/cache'
+            clsiCacheDir: '/cache',
-          }
+          },
-        './db': (this.db = {})
+        './db': (this.db = {}),
-      }
+      },
@@ -47,7 +47,7 @@ describe('ProjectPersistenceManager', function () {
-        total: 100
+        total: 100,
@@ -61,7 +61,7 @@ describe('ProjectPersistenceManager', function () {
-        total: 100
+        total: 100,
@@ -73,7 +73,7 @@ describe('ProjectPersistenceManager', function () {
-        total: 100
+        total: 100,
@@ -105,7 +105,7 @@ describe('ProjectPersistenceManager', function () {
-      return Array.from(this.project_ids).map((project_id) =>
+      return Array.from(this.project_ids).map(project_id =>
@@ -25,7 +25,7 @@ describe('RequestParser', function () {
-      content: 'Hello world'
+      content: 'Hello world',
@@ -33,15 +33,15 @@ describe('RequestParser', function () {
-          timeout: 42
+          timeout: 42,
-        resources: []
+        resources: [],
-      }
+      },
-        'settings-sharelatex': (this.settings = {})
+        '@overleaf/settings': (this.settings = {}),
-      }
+      },
@@ -118,7 +118,7 @@ describe('RequestParser', function () {
-      'repo/name:tag2'
+      'repo/name:tag2',
@@ -402,7 +402,7 @@ describe('RequestParser', function () {
-        content: 'Hello world'
+        content: 'Hello world',
@@ -25,14 +25,14 @@ describe('ResourceStateManager', function () {
-        './SafeReader': (this.SafeReader = {})
+        './SafeReader': (this.SafeReader = {}),
-      }
+      },
-      { path: 'resource-3-mock' }
+      { path: 'resource-3-mock' },
@@ -175,7 +175,7 @@ describe('ResourceStateManager', function () {
-      this.resources[2].path
+      this.resources[2].path,
@@ -220,7 +220,7 @@ describe('ResourceStateManager', function () {
-      this.resources[2].path
+      this.resources[2].path,
@ -27,7 +27,7 @@ describe('ResourceWriter', function () {
|
||||||
requires: {
|
requires: {
|
||||||
fs: (this.fs = {
|
fs: (this.fs = {
|
||||||
mkdir: sinon.stub().callsArg(1),
|
mkdir: sinon.stub().callsArg(1),
|
||||||
unlink: sinon.stub().callsArg(1)
|
unlink: sinon.stub().callsArg(1),
|
||||||
}),
|
}),
|
||||||
'./ResourceStateManager': (this.ResourceStateManager = {}),
|
'./ResourceStateManager': (this.ResourceStateManager = {}),
|
||||||
wrench: (this.wrench = {}),
|
wrench: (this.wrench = {}),
|
||||||
|
@ -43,9 +43,9 @@ describe('ResourceWriter', function () {
|
||||||
}
|
}
|
||||||
Timer.initClass()
|
Timer.initClass()
|
||||||
return Timer
|
return Timer
|
||||||
})())
|
})()),
|
||||||
})
|
}),
|
||||||
}
|
},
|
||||||
})
|
})
|
||||||
this.project_id = 'project-id-123'
|
this.project_id = 'project-id-123'
|
||||||
this.basePath = '/path/to/write/files/to'
|
this.basePath = '/path/to/write/files/to'
|
||||||
|
@ -62,7 +62,7 @@ describe('ResourceWriter', function () {
|
||||||
{
|
{
|
||||||
project_id: this.project_id,
|
project_id: this.project_id,
|
||||||
syncState: (this.syncState = '0123456789abcdef'),
|
syncState: (this.syncState = '0123456789abcdef'),
|
||||||
resources: this.resources
|
resources: this.resources,
|
||||||
},
|
},
|
||||||
this.basePath,
|
this.basePath,
|
||||||
this.callback
|
this.callback
|
||||||
|
@ -76,7 +76,7 @@ describe('ResourceWriter', function () {
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should write each resource to disk', function () {
|
it('should write each resource to disk', function () {
|
||||||
return Array.from(this.resources).map((resource) =>
|
return Array.from(this.resources).map(resource =>
|
||||||
this.ResourceWriter._writeResourceToDisk
|
this.ResourceWriter._writeResourceToDisk
|
||||||
.calledWith(this.project_id, resource, this.basePath)
|
.calledWith(this.project_id, resource, this.basePath)
|
||||||
.should.equal(true)
|
.should.equal(true)
|
||||||
|
@ -111,7 +111,7 @@ describe('ResourceWriter', function () {
|
||||||
project_id: this.project_id,
|
project_id: this.project_id,
|
||||||
syncType: 'incremental',
|
syncType: 'incremental',
|
||||||
syncState: (this.syncState = '1234567890abcdef'),
|
syncState: (this.syncState = '1234567890abcdef'),
|
||||||
resources: this.resources
|
resources: this.resources,
|
||||||
},
|
},
|
||||||
this.basePath,
|
this.basePath,
|
||||||
this.callback
|
this.callback
|
||||||
|
@ -137,7 +137,7 @@ describe('ResourceWriter', function () {
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should write each resource to disk', function () {
|
it('should write each resource to disk', function () {
|
||||||
return Array.from(this.resources).map((resource) =>
|
return Array.from(this.resources).map(resource =>
|
||||||
this.ResourceWriter._writeResourceToDisk
|
this.ResourceWriter._writeResourceToDisk
|
||||||
.calledWith(this.project_id, resource, this.basePath)
|
.calledWith(this.project_id, resource, this.basePath)
|
||||||
.should.equal(true)
|
.should.equal(true)
|
||||||
|
@ -160,7 +160,7 @@ describe('ResourceWriter', function () {
|
||||||
project_id: this.project_id,
|
project_id: this.project_id,
|
||||||
syncType: 'incremental',
|
syncType: 'incremental',
|
||||||
syncState: (this.syncState = '1234567890abcdef'),
|
syncState: (this.syncState = '1234567890abcdef'),
|
||||||
resources: this.resources
|
resources: this.resources,
|
||||||
},
|
},
|
||||||
this.basePath,
|
this.basePath,
|
||||||
this.callback
|
this.callback
|
||||||
|
@@ -183,58 +183,57 @@ describe('ResourceWriter', function () {
       this.output_files = [
         {
           path: 'output.pdf',
-          type: 'pdf'
+          type: 'pdf',
         },
         {
           path: 'extra/file.tex',
-          type: 'tex'
+          type: 'tex',
         },
         {
           path: 'extra.aux',
-          type: 'aux'
+          type: 'aux',
         },
         {
-          path: 'cache/_chunk1'
+          path: 'cache/_chunk1',
         },
         {
           path: 'figures/image-eps-converted-to.pdf',
-          type: 'pdf'
+          type: 'pdf',
         },
         {
           path: 'foo/main-figure0.md5',
-          type: 'md5'
+          type: 'md5',
         },
         {
           path: 'foo/main-figure0.dpth',
-          type: 'dpth'
+          type: 'dpth',
         },
         {
           path: 'foo/main-figure0.pdf',
-          type: 'pdf'
+          type: 'pdf',
         },
         {
           path: '_minted-main/default-pyg-prefix.pygstyle',
-          type: 'pygstyle'
+          type: 'pygstyle',
         },
         {
           path: '_minted-main/default.pygstyle',
-          type: 'pygstyle'
+          type: 'pygstyle',
         },
         {
-          path:
-            '_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex',
-          type: 'pygtex'
+          path: '_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex',
+          type: 'pygtex',
         },
         {
           path: '_markdown_main/30893013dec5d869a415610079774c2f.md.tex',
-          type: 'tex'
+          type: 'tex',
         },
         {
-          path: 'output.stdout'
+          path: 'output.stdout',
         },
         {
-          path: 'output.stderr'
-        }
+          path: 'output.stderr',
+        },
       ]
       this.resources = 'mock-resources'
       this.OutputFileFinder.findOutputFiles = sinon

@@ -368,7 +367,7 @@ describe('ResourceWriter', function () {
       this.resource = {
         path: 'main.tex',
         url: 'http://www.example.com/main.tex',
-        modified: Date.now()
+        modified: Date.now(),
       }
       this.UrlCache.downloadUrlToFile = sinon
         .stub()

@@ -413,7 +412,7 @@ describe('ResourceWriter', function () {
     beforeEach(function () {
       this.resource = {
         path: 'main.tex',
-        content: 'Hello world'
+        content: 'Hello world',
       }
       this.fs.writeFile = sinon.stub().callsArg(2)
       this.fs.mkdir = sinon.stub().callsArg(2)

@@ -451,7 +450,7 @@ describe('ResourceWriter', function () {
     beforeEach(function () {
       this.resource = {
         path: '../../main.tex',
-        content: 'Hello world'
+        content: 'Hello world',
       }
       this.fs.writeFile = sinon.stub().callsArg(2)
       return this.ResourceWriter._writeResourceToDisk(
@@ -23,16 +23,16 @@ describe('StaticServerForbidSymlinks', function () {
   beforeEach(function () {
     this.settings = {
       path: {
-        compilesDir: '/compiles/here'
-      }
+        compilesDir: '/compiles/here',
+      },
     }

     this.fs = {}
     this.ForbidSymlinks = SandboxedModule.require(modulePath, {
       requires: {
-        'settings-sharelatex': this.settings,
-        fs: this.fs
-      }
+        '@overleaf/settings': this.settings,
+        fs: this.fs,
+      },
     })

     this.dummyStatic = (rootDir, options) => (req, res, next) =>

@@ -46,8 +46,8 @@ describe('StaticServerForbidSymlinks', function () {
     )
     this.req = {
       params: {
-        project_id: '12345'
-      }
+        project_id: '12345',
+      },
     }

     this.res = {}
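Alongside the comma and arrow-paren changes, the stub key for the settings module switches from 'settings-sharelatex' to '@overleaf/settings' here and in several specs below. The key handed to SandboxedModule must match the exact module id that the code under test requires, so the rename has to land in the application code and in the test stubs together. A minimal sketch of the test side; the module path and stub contents are assumptions for illustration, not lines from this diff:

// spec (illustrative): SandboxedModule serves a fake settings object under the new id,
// matching a "require('@overleaf/settings')" call in the module under test
const Path = require('path')
const SandboxedModule = require('sandboxed-module')

// assumed location of the module under test, not taken from this diff
const modulePath = Path.join(__dirname, '../../../app/js/StaticServerForbidSymlinks.js')

const ForbidSymlinks = SandboxedModule.require(modulePath, {
  requires: {
    '@overleaf/settings': { path: { compilesDir: '/compiles/here' } },
    fs: {},
  },
})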
@@ -21,8 +21,8 @@ describe('TikzManager', function () {
       requires: {
         './ResourceWriter': (this.ResourceWriter = {}),
         './SafeReader': (this.SafeReader = {}),
-        fs: (this.fs = {})
-      }
+        fs: (this.fs = {}),
+      },
     }))
   })

@@ -24,11 +24,11 @@ describe('UrlCache', function () {
       requires: {
         './db': {},
         './UrlFetcher': (this.UrlFetcher = {}),
-        'settings-sharelatex': (this.Settings = {
-          path: { clsiCacheDir: '/cache/dir' }
+        '@overleaf/settings': (this.Settings = {
+          path: { clsiCacheDir: '/cache/dir' },
         }),
-        fs: (this.fs = { copyFile: sinon.stub().yields() })
-      }
+        fs: (this.fs = { copyFile: sinon.stub().yields() }),
+      },
     }))
   })

@@ -339,7 +339,7 @@ describe('UrlCache', function () {
     })

     it('should clear the cache for each url in the project', function () {
-      return Array.from(this.urls).map((url) =>
+      return Array.from(this.urls).map(url =>
         this.UrlCache._clearUrlFromCache
           .calledWith(this.project_id, url)
           .should.equal(true)
@@ -21,17 +21,17 @@ describe('UrlFetcher', function () {
     return (this.UrlFetcher = SandboxedModule.require(modulePath, {
       requires: {
         request: {
-          defaults: (this.defaults = sinon.stub().returns((this.request = {})))
+          defaults: (this.defaults = sinon.stub().returns((this.request = {}))),
         },
         fs: (this.fs = {}),
-        'settings-sharelatex': (this.settings = {
+        '@overleaf/settings': (this.settings = {
           apis: {
             clsiPerf: {
-              host: 'localhost:3043'
-            }
-          }
-        })
-      }
+              host: 'localhost:3043',
+            },
+          },
+        }),
+      },
     }))
   })
   describe('pipeUrlToFileWithRetry', function () {

@@ -41,7 +41,7 @@ describe('UrlFetcher', function () {

     it('should call pipeUrlToFile', function (done) {
       this.UrlFetcher.pipeUrlToFile.callsArgWith(2)
-      this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, (err) => {
+      this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, err => {
         expect(err).to.equal(undefined)
         this.UrlFetcher.pipeUrlToFile.called.should.equal(true)
         done()

@@ -51,7 +51,7 @@ describe('UrlFetcher', function () {
     it('should call pipeUrlToFile multiple times on error', function (done) {
       const error = new Error("couldn't download file")
       this.UrlFetcher.pipeUrlToFile.callsArgWith(2, error)
-      this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, (err) => {
+      this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, err => {
         expect(err).to.equal(error)
         this.UrlFetcher.pipeUrlToFile.callCount.should.equal(3)
         done()

@@ -61,7 +61,7 @@ describe('UrlFetcher', function () {
     it('should call pipeUrlToFile twice if only 1 error', function (done) {
       this.UrlFetcher.pipeUrlToFile.onCall(0).callsArgWith(2, 'error')
       this.UrlFetcher.pipeUrlToFile.onCall(1).callsArgWith(2)
-      this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, (err) => {
+      this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, err => {
         expect(err).to.equal(undefined)
         this.UrlFetcher.pipeUrlToFile.callCount.should.equal(2)
         done()
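The three specs above pin down the retry behaviour of pipeUrlToFileWithRetry: at most three attempts, stop at the first success, and report the last error if every attempt fails. The following is a sketch that is merely consistent with those assertions, assuming pipeUrlToFile is the single-attempt download defined in the same module; it is not the actual UrlFetcher.js implementation:

// Retry wrapper consistent with the specs above: up to 3 attempts in total.
// pipeUrlToFile(url, filePath, cb) is assumed to be defined alongside this function.
function pipeUrlToFileWithRetry(url, filePath, callback) {
  let attemptsLeft = 3
  const attempt = lastError => {
    if (attemptsLeft === 0) {
      // every attempt failed: surface the most recent error
      return callback(lastError)
    }
    attemptsLeft--
    pipeUrlToFile(url, filePath, err => {
      if (err) {
        return attempt(err)
      }
      // success: stop retrying and report no error
      callback()
    })
  }
  attempt()
}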
@@ -181,7 +181,7 @@ describe('UrlFetcher', function () {

     describe('with non success status code', function () {
       beforeEach(function (done) {
-        this.UrlFetcher.pipeUrlToFile(this.url, this.path, (err) => {
+        this.UrlFetcher.pipeUrlToFile(this.url, this.path, err => {
           this.callback(err)
           return done()
         })

@@ -202,7 +202,7 @@ describe('UrlFetcher', function () {

     return describe('with error', function () {
       beforeEach(function (done) {
-        this.UrlFetcher.pipeUrlToFile(this.url, this.path, (err) => {
+        this.UrlFetcher.pipeUrlToFile(this.url, this.path, err => {
           this.callback(err)
           return done()
         })
@@ -28,14 +28,14 @@ async function loadContext(example) {
   const snapshot = blob ? JSON.parse(blob) : null
   return {
     size,
-    snapshot
+    snapshot,
   }
 }

 async function backFillSnapshot(example, size) {
   const table = await parseXrefTable(pdfPath(example), size, () => {})
   await fs.promises.mkdir(Path.dirname(snapshotPath(example)), {
-    recursive: true
+    recursive: true,
   })
   await fs.promises.writeFile(
     snapshotPath(example),
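In the helpers above, backFillSnapshot parses the example PDF's xref table and stores the result as a JSON snapshot next to the fixture, creating the directory first with a recursive mkdir, while loadContext reads that snapshot back (or null when it does not exist yet). A hedged sketch of how a spec might combine the two; the surrounding test body is assumed and is not part of this hunk:

// assumed usage inside an async spec, not shown in the diff above;
// "example" names one of the fixture PDFs
const { size, snapshot } = await loadContext(example)
if (!snapshot) {
  // first run for this fixture: parse the PDF and persist the result
  await backFillSnapshot(example, size)
} else {
  // later runs: re-parsing must reproduce the stored snapshot exactly
  const table = await parseXrefTable(pdfPath(example), size, () => {})
  expect(table).to.deep.equal(snapshot)
}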