Mirror of https://github.com/overleaf/overleaf.git (synced 2024-11-21 20:47:08 -05:00)

Merge pull request #8714 from overleaf/em-promisify-compile-manager

Promisify CompileManager

GitOrigin-RevId: 644ed061ae139d6196b24f8ead38579de6b844a3
This commit is contained in: commit 77aa2baa9d (parent fceeef5b31)

17 changed files with 973 additions and 992 deletions
package-lock.json (generated, 2 changes)
@@ -31888,6 +31888,7 @@
 "chai": "^4.3.6",
 "chai-as-promised": "^7.1.1",
 "mocha": "^8.4.0",
+"mock-fs": "^5.1.2",
 "sandboxed-module": "^2.0.4",
 "sinon": "~9.0.1",
 "sinon-chai": "^3.7.0",
@@ -39708,6 +39709,7 @@
 "lockfile": "^1.0.4",
 "lodash": "^4.17.21",
 "mocha": "^8.4.0",
+"mock-fs": "^5.1.2",
 "p-limit": "^3.1.0",
 "pdfjs-dist": "~2.7.570",
 "request": "^2.88.2",
@@ -29,9 +29,8 @@ function compile(req, res, next) {
 if (error) {
 return next(error)
 }
-CompileManager.doCompileWithLock(
-  request,
-  function (error, outputFiles, stats, timings) {
+CompileManager.doCompileWithLock(request, (error, result) => {
+  let { outputFiles, stats, timings } = result || {}
 let code, status
 if (outputFiles == null) {
 outputFiles = []

@@ -42,6 +41,13 @@ function compile(req, res, next) {
 } else if (error instanceof Errors.FilesOutOfSyncError) {
 code = 409 // Http 409 Conflict
 status = 'retry'
+logger.warn(
+  {
+    projectId: request.project_id,
+    userId: request.user_id,
+  },
+  'files out of sync, please retry'
+)
 } else if (error?.code === 'EPIPE') {
 // docker returns EPIPE when shutting down
 code = 503 // send 503 Unavailable response

@@ -53,14 +59,14 @@ function compile(req, res, next) {
 } else if (error?.timedout) {
 status = 'timedout'
 logger.debug(
-  { err: error, project_id: request.project_id },
+  { err: error, projectId: request.project_id },
 'timeout running compile'
 )
 } else if (error) {
 status = 'error'
 code = 500
-logger.warn(
-  { err: error, project_id: request.project_id },
+logger.error(
+  { err: error, projectId: request.project_id },
 'error running compile'
 )
 } else {

@@ -76,7 +82,7 @@ function compile(req, res, next) {
 } else {
 status = 'failure'
 logger.warn(
-  { project_id: request.project_id, outputFiles },
+  { projectId: request.project_id, outputFiles },
 'project failed to compile successfully, no output.pdf generated'
 )
 }

@@ -84,7 +90,7 @@ function compile(req, res, next) {
 // log an error if any core files are found
 if (outputFiles.some(file => file.path === 'core')) {
 logger.error(
-  { project_id: request.project_id, req, outputFiles },
+  { projectId: request.project_id, req, outputFiles },
 'core file found in output'
 )
 }

@@ -105,17 +111,14 @@ function compile(req, res, next) {
 outputFiles: outputFiles.map(file => ({
 url:
 `${Settings.apis.clsi.url}/project/${request.project_id}` +
-(request.user_id != null
-  ? `/user/${request.user_id}`
-  : '') +
+(request.user_id != null ? `/user/${request.user_id}` : '') +
 (file.build != null ? `/build/${file.build}` : '') +
 `/output/${file.path}`,
 ...file,
 })),
 },
 })
-}
-)
+})
 }
 )
 })
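Note: with this change the controller receives the compile outcome as a single result object rather than positional arguments. The sketch below is illustrative only; it uses a plain name check where the real code uses instanceof Errors.FilesOutOfSyncError, and the 'unavailable' status label is hypothetical. It shows how the `result || {}` destructuring and the status mapping from the hunks above fit together.

// Illustrative only: mirrors the branches in the diff above.
function statusForCompile(error, result) {
  const { outputFiles = [], stats = {}, timings = {} } = result || {}
  let code = 200
  let status = 'success'
  if (error && error.name === 'FilesOutOfSyncError') {
    code = 409 // HTTP 409 Conflict: client should resync files and retry
    status = 'retry'
  } else if (error && error.code === 'EPIPE') {
    code = 503 // docker returns EPIPE when shutting down
    status = 'unavailable' // hypothetical label, not taken from the diff
  } else if (error && error.timedout) {
    status = 'timedout'
  } else if (error) {
    code = 500
    status = 'error'
  }
  return { code, status, outputFiles, stats, timings }
}

console.log(statusForCompile(null, { outputFiles: [{ path: 'output.pdf' }] }))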
@@ -1,24 +1,29 @@
+const childProcess = require('child_process')
+const fsPromises = require('fs/promises')
+const fse = require('fs-extra')
+const os = require('os')
+const Path = require('path')
+const { callbackify, promisify } = require('util')
+
+const Settings = require('@overleaf/settings')
+const logger = require('@overleaf/logger')
+const OError = require('@overleaf/o-error')
+
 const ResourceWriter = require('./ResourceWriter')
 const LatexRunner = require('./LatexRunner')
 const OutputFileFinder = require('./OutputFileFinder')
 const OutputCacheManager = require('./OutputCacheManager')
-const Settings = require('@overleaf/settings')
-const Path = require('path')
-const logger = require('@overleaf/logger')
 const Metrics = require('./Metrics')
-const childProcess = require('child_process')
 const DraftModeManager = require('./DraftModeManager')
 const TikzManager = require('./TikzManager')
 const LockManager = require('./LockManager')
-const fs = require('fs')
-const fse = require('fs-extra')
-const os = require('os')
-const async = require('async')
 const Errors = require('./Errors')
 const CommandRunner = require('./CommandRunner')
 const { emitPdfStats } = require('./ContentCacheMetrics')
 const SynctexOutputParser = require('./SynctexOutputParser')

+const execFile = promisify(childProcess.execFile)
+
 const COMPILE_TIME_BUCKETS = [
 // NOTE: These buckets are locked in per metric name.
 // If you want to change them, you will need to rename metrics.
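The new execFile helper is util.promisify applied to child_process.execFile. A brief hedged sketch of how the promisified call behaves: it resolves with { stdout, stderr } and rejects with an error carrying the exit code and captured output, which is what the rm invocation later in this diff relies on. The directory path here is made up.

const { promisify } = require('util')
const childProcess = require('child_process')

const execFile = promisify(childProcess.execFile)

async function removeDir(dir) {
  try {
    // Resolves with { stdout, stderr } when the command exits with code 0.
    await execFile('rm', ['-r', '-f', '--', dir])
  } catch (err) {
    // A non-zero exit rejects; err.code and err.stderr describe the failure.
    console.error('rm failed', err.code, err.stderr)
    throw err
  }
}

removeDir('/tmp/example-compile-dir').catch(() => {})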
@@ -41,24 +46,21 @@ function getOutputDir(projectId, userId) {
 return Path.join(Settings.path.outputDir, getCompileName(projectId, userId))
 }

-function doCompileWithLock(request, callback) {
+async function doCompileWithLock(request) {
 const compileDir = getCompileDir(request.project_id, request.user_id)
-const lockFile = Path.join(compileDir, '.project-lock')
 // use a .project-lock file in the compile directory to prevent
 // simultaneous compiles
-fse.ensureDir(compileDir, error => {
-  if (error) {
-    return callback(error)
-  }
-  LockManager.runWithLock(
-    lockFile,
-    releaseLock => doCompile(request, releaseLock),
-    callback
-  )
-})
+const lockFile = Path.join(compileDir, '.project-lock')
+await fse.ensureDir(compileDir)
+const lock = await LockManager.acquire(lockFile)
+try {
+  return await doCompile(request)
+} finally {
+  await lock.release()
+}
 }

-function doCompile(request, callback) {
+async function doCompile(request) {
 const compileDir = getCompileDir(request.project_id, request.user_id)
 const outputDir = getOutputDir(request.project_id, request.user_id)
@@ -68,74 +70,47 @@ function doCompile(request, callback) {
 request.metricsOpts,
 COMPILE_TIME_BUCKETS
 )
-const timer = new Metrics.Timer('write-to-disk', 1, request.metricsOpts)
+const writeToDiskTimer = new Metrics.Timer(
+  'write-to-disk',
+  1,
+  request.metricsOpts
+)
 logger.debug(
 { projectId: request.project_id, userId: request.user_id },
 'syncing resources to disk'
 )
-ResourceWriter.syncResourcesToDisk(
-  request,
-  compileDir,
-  (error, resourceList) => {
-    // NOTE: resourceList is insecure, it should only be used to exclude files from the output list
-    if (error && error instanceof Errors.FilesOutOfSyncError) {
-      logger.warn(
-        { projectId: request.project_id, userId: request.user_id },
-        'files out of sync, please retry'
-      )
-      return callback(error)
-    } else if (error) {
-      logger.err(
-        {
-          err: error,
-          projectId: request.project_id,
-          userId: request.user_id,
-        },
-        'error writing resources to disk'
-      )
-      return callback(error)
-    }
+let resourceList
+try {
+  // NOTE: resourceList is insecure, it should only be used to exclude files from the output list
+  resourceList = await ResourceWriter.promises.syncResourcesToDisk(
+    request,
+    compileDir
+  )
+} catch (error) {
+  if (error instanceof Errors.FilesOutOfSyncError) {
+    OError.tag(error, 'files out of sync, please retry', {
+      projectId: request.project_id,
+      userId: request.user_id,
+    })
+  } else {
+    OError.tag(error, 'error writing resources to disk', {
+      projectId: request.project_id,
+      userId: request.user_id,
+    })
+  }
+  throw error
+}
 logger.debug(
 {
 projectId: request.project_id,
 userId: request.user_id,
-time_taken: Date.now() - timer.start,
+time_taken: Date.now() - writeToDiskTimer.start,
 },
 'written files to disk'
 )
-const syncStage = timer.done()
+const syncStage = writeToDiskTimer.done()

-function injectDraftModeIfRequired(callback) {
-  if (request.draft) {
-    DraftModeManager.injectDraftMode(
-      Path.join(compileDir, request.rootResourcePath),
-      callback
-    )
-  } else {
-    callback()
-  }
-}
-
-const createTikzFileIfRequired = callback =>
-  TikzManager.checkMainFile(
-    compileDir,
-    request.rootResourcePath,
-    resourceList,
-    (error, needsMainFile) => {
-      if (error) {
-        return callback(error)
-      }
-      if (needsMainFile) {
-        TikzManager.injectOutputFile(
-          compileDir,
-          request.rootResourcePath,
-          callback
-        )
-      } else {
-        callback()
-      }
-    }
-  )
 // set up environment variables for chktex
 const env = {}
 if (Settings.texliveOpenoutAny && Settings.texliveOpenoutAny !== '') {
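Where the callback version logged and returned the error, the async version tags it with OError and rethrows. A minimal sketch of that pattern, assuming the @overleaf/o-error package and its getFullInfo helper; the readConfig function and the path are made up for illustration.

const OError = require('@overleaf/o-error')
const fsPromises = require('fs/promises')

async function readConfig(path) {
  try {
    return await fsPromises.readFile(path, 'utf-8')
  } catch (error) {
    // tag() attaches a message and an info object to the existing error and
    // returns it, so the original stack trace survives the rethrow.
    throw OError.tag(error, 'failed to read config', { path })
  }
}

readConfig('/tmp/does-not-exist.json').catch(err => {
  console.error(err.message, OError.getFullInfo(err))
})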
@@ -147,10 +122,7 @@ function doCompile(request, callback) {
 env.max_print_line = Settings.texliveMaxPrintLine
 }
 // only run chktex on LaTeX files (not knitr .Rtex files or any others)
-const isLaTeXFile =
-  request.rootResourcePath != null
-    ? request.rootResourcePath.match(/\.tex$/i)
-    : undefined
+const isLaTeXFile = request.rootResourcePath?.match(/\.tex$/i)
 if (request.check != null && isLaTeXFile) {
 env.CHKTEX_OPTIONS = '-nall -e9 -e10 -w15 -w16'
 env.CHKTEX_ULIMIT_OPTIONS = '-t 5 -v 64000'
@@ -163,13 +135,25 @@ function doCompile(request, callback) {
 }

 // apply a series of file modifications/creations for draft mode and tikz
-async.series(
-  [injectDraftModeIfRequired, createTikzFileIfRequired],
-  error => {
-    if (error) {
-      return callback(error)
-    }
-    const timer = new Metrics.Timer('run-compile', 1, request.metricsOpts)
+if (request.draft) {
+  await DraftModeManager.promises.injectDraftMode(
+    Path.join(compileDir, request.rootResourcePath)
+  )
+}
+
+const needsMainFile = await TikzManager.promises.checkMainFile(
+  compileDir,
+  request.rootResourcePath,
+  resourceList
+)
+if (needsMainFile) {
+  await TikzManager.promises.injectOutputFile(
+    compileDir,
+    request.rootResourcePath
+  )
+}
+
+const compileTimer = new Metrics.Timer('run-compile', 1, request.metricsOpts)
 // find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite)
 let tag = 'default'
 if (request.imageName != null) {
@@ -178,18 +162,17 @@ function doCompile(request, callback) {
 tag = match[1].replace(/\./g, '-')
 }
 }
+// exclude smoke test
 if (!request.project_id.match(/^[0-9a-f]{24}$/)) {
 tag = 'other'
-} // exclude smoke test
+}
 Metrics.inc('compiles', 1, request.metricsOpts)
 Metrics.inc(`compiles-with-image.${tag}`, 1, request.metricsOpts)
-const compileName = getCompileName(
-  request.project_id,
-  request.user_id
-)
-LatexRunner.runLatex(
-  compileName,
-  {
+const compileName = getCompileName(request.project_id, request.user_id)
+
+let compileResult
+try {
+  compileResult = await LatexRunner.promises.runLatex(compileName, {
 directory: compileDir,
 mainFile: request.rootResourcePath,
 compiler: request.compiler,
@@ -199,65 +182,63 @@ function doCompile(request, callback) {
 environment: env,
 compileGroup: request.compileGroup,
 stopOnFirstError: request.stopOnFirstError,
-},
-(error, output, stats, timings) => {
-  // request was for validation only
-  if (request.check === 'validate') {
-    const result = error && error.code ? 'fail' : 'pass'
-    error = new Error('validation')
-    error.validate = result
-  }
+})
+// We use errors to return the validation state. It would be nice to use a
+// more appropriate mechanism.
+if (request.check === 'validate') {
+  const validationError = new Error('validation')
+  validationError.validate = 'pass'
+  throw validationError
+}
+} catch (originalError) {
+let error = originalError
+// request was for validation only
+if (request.check === 'validate' && !error.validate) {
+  error = new Error('validation')
+  error.validate = originalError.code ? 'fail' : 'pass'
+}
+
 // request was for compile, and failed on validation
-if (
-  request.check === 'error' &&
-  error &&
-  error.message === 'exited'
-) {
+if (request.check === 'error' && originalError.message === 'exited') {
 error = new Error('compilation')
 error.validate = 'fail'
 }
+
+// compile was killed by user, was a validation, or a compile which failed validation
+if (error.terminated || error.validate || error.timedout) {
 // record timeout errors as a separate counter, success is recorded later
-if (error && error.timedout) {
+if (error.timedout) {
 Metrics.inc('compiles-timeout', 1, request.metricsOpts)
 }
-// compile was killed by user, was a validation, or a compile which failed validation
-if (
-  error &&
-  (error.terminated || error.validate || error.timedout)
-) {
-  return OutputFileFinder.findOutputFiles(
-    resourceList,
-    compileDir,
-    (err, outputFiles) => {
-      if (err) {
-        return callback(err)
-      }
-      error.outputFiles = outputFiles // return output files so user can check logs
-      callback(error)
-    }
-  )
+const { outputFiles } = await OutputFileFinder.promises.findOutputFiles(
+  resourceList,
+  compileDir
+)
+error.outputFiles = outputFiles // return output files so user can check logs
 }
+throw error
+}
+
 // compile completed normally
-if (error) {
-  return callback(error)
-}
-Metrics.inc('compiles-succeeded', 1, request.metricsOpts)
+let { stats, timings } = compileResult
 stats = stats || {}
+timings = timings || {}
+Metrics.inc('compiles-succeeded', 1, request.metricsOpts)
 for (const metricKey in stats) {
 const metricValue = stats[metricKey]
 Metrics.count(metricKey, metricValue, 1, request.metricsOpts)
 }
-timings = timings || {}
 for (const metricKey in timings) {
 const metricValue = timings[metricKey]
 Metrics.timing(metricKey, metricValue, 1, request.metricsOpts)
 }
-const loadavg =
-  typeof os.loadavg === 'function' ? os.loadavg() : undefined
+const loadavg = typeof os.loadavg === 'function' ? os.loadavg() : undefined
 if (loadavg != null) {
 Metrics.gauge('load-avg', loadavg[0])
 }
-const ts = timer.done()
+const ts = compileTimer.done()
 logger.debug(
 {
 projectId: request.project_id,
@@ -300,26 +281,21 @@ function doCompile(request, callback) {
 request.metricsOpts
 )

-OutputFileFinder.findOutputFiles(
+let { outputFiles } = await OutputFileFinder.promises.findOutputFiles(
 resourceList,
-compileDir,
-(error, outputFiles) => {
-  if (error) {
-    return callback(error)
-  }
-  OutputCacheManager.saveOutputFiles(
+compileDir
+)
+
+try {
+  outputFiles = await OutputCacheManager.promises.saveOutputFiles(
 { request, stats, timings },
 outputFiles,
 compileDir,
-outputDir,
-(err, newOutputFiles) => {
-  if (err) {
-    const { project_id: projectId, user_id: userId } =
-      request
-    logger.err(
-      { projectId, userId, err },
-      'failed to save output files'
-    )
+outputDir
 )
+} catch (err) {
+  const { project_id: projectId, user_id: userId } = request
+  logger.err({ projectId, userId, err }, 'failed to save output files')
 }
+
 const outputStage = outputStageTimer.done()
@@ -328,135 +304,94 @@ function doCompile(request, callback) {

 // Emit e2e compile time.
 timings.compileE2E = timerE2E.done()
-Metrics.timing(
-  'compile-e2e-v2',
-  timings.compileE2E,
-  1,
-  request.metricsOpts
-)
+Metrics.timing('compile-e2e-v2', timings.compileE2E, 1, request.metricsOpts)

 if (stats['pdf-size']) {
 emitPdfStats(stats, timings, request)
 }

-callback(null, newOutputFiles, stats, timings)
-}
-)
-}
-)
-}
-)
-}
-)
-}
-)
+return { outputFiles, stats, timings }
 }

-function stopCompile(projectId, userId, callback) {
+async function stopCompile(projectId, userId) {
 const compileName = getCompileName(projectId, userId)
-LatexRunner.killLatex(compileName, callback)
-}
+await LatexRunner.promises.killLatex(compileName)
+}

-function clearProject(projectId, userId, _callback) {
-  function callback(error) {
-    _callback(error)
-    _callback = function () {}
-  }
-
+async function clearProject(projectId, userId) {
 const compileDir = getCompileDir(projectId, userId)

-_checkDirectory(compileDir, (err, exists) => {
-  if (err) {
-    return callback(err)
-  }
+const exists = await _checkDirectory(compileDir)
 if (!exists) {
-  return callback()
-} // skip removal if no directory present
+  // skip removal if no directory present
+  return
+}

-const proc = childProcess.spawn('rm', ['-r', '-f', '--', compileDir])
-
-proc.on('error', callback)
-
-let stderr = ''
-proc.stderr.setEncoding('utf8').on('data', chunk => (stderr += chunk))
-
-proc.on('close', code => {
-  if (code === 0) {
-    callback(null)
-  } else {
-    callback(new Error(`rm -r ${compileDir} failed: ${stderr}`))
-  }
-})
-})
-}
-
-function _findAllDirs(callback) {
+try {
+  await execFile('rm', ['-r', '-f', '--', compileDir])
+} catch (err) {
+  OError.tag(err, `rm -r failed`, { compileDir, stderr: err.stderr })
+  throw err
+}
+}
+
+async function _findAllDirs() {
 const root = Settings.path.compilesDir
-fs.readdir(root, (err, files) => {
-  if (err) {
-    return callback(err)
-  }
-  const allDirs = files.map(file => Path.join(root, file))
-  callback(null, allDirs)
-})
+const files = await fsPromises.readdir(root)
+const allDirs = files.map(file => Path.join(root, file))
+return allDirs
 }

-function clearExpiredProjects(maxCacheAgeMs, callback) {
+async function clearExpiredProjects(maxCacheAgeMs) {
 const now = Date.now()
-// action for each directory
-const expireIfNeeded = (checkDir, cb) =>
-  fs.stat(checkDir, (err, stats) => {
-    if (err) {
-      return cb()
-    } // ignore errors checking directory
+const dirs = await _findAllDirs()
+for (const dir of dirs) {
+  let stats
+  try {
+    stats = await fsPromises.stat(dir)
+  } catch (err) {
+    // ignore errors checking directory
+    continue
+  }
+
   const age = now - stats.mtime
   const hasExpired = age > maxCacheAgeMs
   if (hasExpired) {
-    fse.remove(checkDir, cb)
-  } else {
-    cb()
+    await fse.remove(dir)
   }
-})
-// iterate over all project directories
-_findAllDirs((error, allDirs) => {
-  if (error) {
-    return callback()
-  }
-  async.eachSeries(allDirs, expireIfNeeded, callback)
-})
+}
 }

-function _checkDirectory(compileDir, callback) {
-  fs.lstat(compileDir, (err, stats) => {
-    if (err && err.code === 'ENOENT') {
-      callback(null, false) // directory does not exist
-    } else if (err) {
-      logger.err(
-        { dir: compileDir, err },
-        'error on stat of project directory for removal'
-      )
-      callback(err)
-    } else if (!stats.isDirectory()) {
-      logger.err(
-        { dir: compileDir, stats },
-        'bad project directory for removal'
-      )
-      callback(new Error('project directory is not directory'))
-    } else {
-      // directory exists
-      callback(null, true)
-    }
-  })
+async function _checkDirectory(compileDir) {
+  let stats
+  try {
+    stats = await fsPromises.lstat(compileDir)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      // directory does not exist
+      return false
+    }
+    OError.tag(err, 'error on stat of project directory for removal', {
+      dir: compileDir,
+    })
+    throw err
+  }
+  if (!stats.isDirectory()) {
+    throw new OError('project directory is not directory', {
+      dir: compileDir,
+      stats,
+    })
+  }
+  return true
 }

-function syncFromCode(
+async function syncFromCode(
 projectId,
 userId,
 filename,
 line,
 column,
-imageName,
-callback
+imageName
 ) {
 // If LaTeX was run in a virtual environment, the file path that synctex expects
 // might not match the file path on the host. The .synctex.gz file however, will be accessed
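The clearExpiredProjects rewrite above replaces async.eachSeries with a plain loop. A small hedged sketch of why the two are equivalent for sequential work; items and the two worker functions are placeholders.

const async = require('async')

// Callback style: worker(item, cb) is run for one item at a time.
function processAllCb(items, worker, callback) {
  async.eachSeries(items, worker, callback)
}

// Promise style: worker(item) returns a promise; a for...of loop with await
// keeps the same one-at-a-time ordering.
async function processAll(items, worker) {
  for (const item of items) {
    await worker(item)
  }
}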
@@ -473,19 +408,15 @@ function syncFromCode(
 '-o',
 outputFilePath,
 ]
-_runSynctex(projectId, userId, command, imageName, (error, stdout) => {
-  if (error) {
-    return callback(error)
-  }
+const stdout = await _runSynctex(projectId, userId, command, imageName)
 logger.debug(
 { projectId, userId, filename, line, column, command, stdout },
 'synctex code output'
 )
-callback(null, SynctexOutputParser.parseViewOutput(stdout))
-})
+return SynctexOutputParser.parseViewOutput(stdout)
 }

-function syncFromPdf(projectId, userId, page, h, v, imageName, callback) {
+async function syncFromPdf(projectId, userId, page, h, v, imageName) {
 const compileName = getCompileName(projectId, userId)
 const baseDir = Settings.path.synctexBaseDir(compileName)
 const outputFilePath = `${baseDir}/output.pdf`
@@ -495,76 +426,64 @@ function syncFromPdf(projectId, userId, page, h, v, imageName, callback) {
 '-o',
 `${page}:${h}:${v}:${outputFilePath}`,
 ]
-_runSynctex(projectId, userId, command, imageName, (error, stdout) => {
-  if (error != null) {
-    return callback(error)
-  }
-  logger.debug(
-    { projectId, userId, page, h, v, stdout },
-    'synctex pdf output'
-  )
-  callback(null, SynctexOutputParser.parseEditOutput(stdout, baseDir))
-})
+const stdout = await _runSynctex(projectId, userId, command, imageName)
+logger.debug({ projectId, userId, page, h, v, stdout }, 'synctex pdf output')
+return SynctexOutputParser.parseEditOutput(stdout, baseDir)
 }

-function _checkFileExists(dir, filename, callback) {
+async function _checkFileExists(dir, filename) {
+  try {
+    await fsPromises.stat(dir)
+  } catch (error) {
+    if (error.code === 'ENOENT') {
+      throw new Errors.NotFoundError('no output directory')
+    }
+    throw error
+  }
+
 const file = Path.join(dir, filename)
-fs.stat(dir, (error, stats) => {
-  if (error && error.code === 'ENOENT') {
-    return callback(new Errors.NotFoundError('no output directory'))
-  }
-  if (error) {
-    return callback(error)
-  }
-  fs.stat(file, (error, stats) => {
-    if (error && error.code === 'ENOENT') {
-      return callback(new Errors.NotFoundError('no output file'))
-    }
-    if (error) {
-      return callback(error)
-    }
+let stats
+try {
+  stats = await fsPromises.stat(file)
+} catch (error) {
+  if (error.code === 'ENOENT') {
+    throw new Errors.NotFoundError('no output file')
+  }
+}
 if (!stats.isFile()) {
-  return callback(new Error('not a file'))
+  throw new Error('not a file')
 }
-callback()
-})
-})
 }

-function _runSynctex(projectId, userId, command, imageName, callback) {
+async function _runSynctex(projectId, userId, command, imageName) {
 const directory = getCompileDir(projectId, userId)
 const timeout = 60 * 1000 // increased to allow for large projects
 const compileName = getCompileName(projectId, userId)
 const compileGroup = 'synctex'
 const defaultImageName =
 Settings.clsi && Settings.clsi.docker && Settings.clsi.docker.image
-_checkFileExists(directory, 'output.synctex.gz', error => {
-  if (error) {
-    return callback(error)
-  }
-  CommandRunner.run(
+await _checkFileExists(directory, 'output.synctex.gz')
+try {
+  const output = await CommandRunner.promises.run(
 compileName,
 command,
 directory,
 imageName || defaultImageName,
 timeout,
 {},
-compileGroup,
-(error, output) => {
-  if (error) {
-    logger.err(
-      { err: error, command, projectId, userId },
-      'error running synctex'
-    )
-    return callback(error)
-  }
-  callback(null, output.stdout)
-}
+compileGroup
 )
+  return output.stdout
+} catch (error) {
+  throw OError.tag(error, 'error running synctex', {
+    command,
+    projectId,
+    userId,
+  })
 }
+}

-function wordcount(projectId, userId, filename, image, callback) {
+async function wordcount(projectId, userId, filename, image) {
 logger.debug({ projectId, userId, filename, image }, 'running wordcount')
 const filePath = `$COMPILE_DIR/${filename}`
 const command = [
@@ -578,51 +497,45 @@ function wordcount(projectId, userId, filename, image, callback) {
 const timeout = 60 * 1000
 const compileName = getCompileName(projectId, userId)
 const compileGroup = 'wordcount'
-fse.ensureDir(compileDir, error => {
-  if (error) {
-    logger.err(
-      { error, projectId, userId, filename },
-      'error ensuring dir for sync from code'
-    )
-    return callback(error)
-  }
-  CommandRunner.run(
+try {
+  await fse.ensureDir(compileDir)
+} catch (err) {
+  throw OError.tag(err, 'error ensuring dir for wordcount', {
+    projectId,
+    userId,
+    filename,
+  })
+}
+await CommandRunner.promises.run(
 compileName,
 command,
 compileDir,
 image,
 timeout,
 {},
-compileGroup,
-error => {
-  if (error) {
-    return callback(error)
-  }
-  fs.readFile(
+compileGroup
+)
+
+let stdout
+try {
+  stdout = await fsPromises.readFile(
 compileDir + '/' + filename + '.wc',
-'utf-8',
-(err, stdout) => {
-  if (err) {
-    // call it node_err so sentry doesn't use random path error as unique id so it can't be ignored
-    logger.err(
-      { node_err: err, command, compileDir, projectId, userId },
-      'error reading word count output'
-    )
-    return callback(err)
-  }
-  const results = _parseWordcountFromOutput(stdout)
-  logger.debug(
-    { projectId, userId, wordcount: results },
-    'word count results'
-  )
-  callback(null, results)
-}
-)
-}
+'utf-8'
 )
+} catch (err) {
+  throw OError.tag(err, 'error reading word count output', {
+    command,
+    compileDir,
+    projectId,
+    userId,
+  })
+}
+
+const results = _parseWordcountFromOutput(stdout)
+logger.debug({ projectId, userId, wordcount: results }, 'word count results')
+return results
+}
+
 function _parseWordcountFromOutput(output) {
 const results = {
 encode: '',
@@ -675,6 +588,14 @@ function _parseWordcountFromOutput(output) {
 }

 module.exports = {
+  doCompileWithLock: callbackify(doCompileWithLock),
+  stopCompile: callbackify(stopCompile),
+  clearProject: callbackify(clearProject),
+  clearExpiredProjects: callbackify(clearExpiredProjects),
+  syncFromCode: callbackify(syncFromCode),
+  syncFromPdf: callbackify(syncFromPdf),
+  wordcount: callbackify(wordcount),
+  promises: {
   doCompileWithLock,
   stopCompile,
   clearProject,
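The export block above gives the module a dual interface: the top-level names keep the old error-first callback signature via util.callbackify, while the same async implementations are exposed unchanged under .promises. A hedged sketch of that shape with a made-up function:

const { callbackify } = require('util')

// Made-up async implementation standing in for one of the real functions.
async function wordcount(projectId, userId, filename, image) {
  return { textWords: 0 }
}

const api = {
  wordcount: callbackify(wordcount), // legacy (err, results) callers keep working
  promises: { wordcount }, // new callers can await directly
}

api.wordcount('p', 'u', 'main.tex', 'img', (err, results) => {
  console.log('callback style', err, results)
})

api.promises.wordcount('p', 'u', 'main.tex', 'img').then(results => {
  console.log('promise style', results)
})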
@@ -682,4 +603,5 @@ module.exports = {
   syncFromCode,
   syncFromPdf,
   wordcount,
+  },
 }
@@ -1,3 +1,4 @@
+const { promisify } = require('util')
 const Settings = require('@overleaf/settings')
 const logger = require('@overleaf/logger')
 const Docker = require('dockerode')

@@ -617,3 +618,7 @@ const DockerRunner = {
 DockerRunner.startContainerMonitor()

 module.exports = DockerRunner
+module.exports.promises = {
+  run: promisify(DockerRunner.run),
+  kill: promisify(DockerRunner.kill),
+}
@@ -12,6 +12,7 @@
 */
 let DraftModeManager
 const fs = require('fs')
+const { promisify } = require('util')
 const logger = require('@overleaf/logger')

 module.exports = DraftModeManager = {

@@ -54,3 +55,7 @@ module.exports = DraftModeManager = {
 )
 },
 }
+
+module.exports.promises = {
+  injectDraftMode: promisify(DraftModeManager.injectDraftMode),
+}
@@ -1,4 +1,5 @@
 const Path = require('path')
+const { promisify } = require('util')
 const Settings = require('@overleaf/settings')
 const logger = require('@overleaf/logger')
 const CommandRunner = require('./CommandRunner')

@@ -192,4 +193,17 @@ function _buildLatexCommand(mainFile, opts = {}) {
 module.exports = {
   runLatex,
   killLatex,
+  promises: {
+    runLatex: (projectId, options) =>
+      new Promise((resolve, reject) => {
+        runLatex(projectId, options, (err, output, stats, timing) => {
+          if (err) {
+            reject(err)
+          } else {
+            resolve({ output, stats, timing })
+          }
+        })
+      }),
+    killLatex: promisify(killLatex),
+  },
 }
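killLatex can go through util.promisify because it reports a single result, but runLatex passes several values (output, stats, timing) to its callback and promisify would keep only the first. That is presumably why the diff hand-rolls the Promise wrapper; a short illustration with a made-up callback API:

const { promisify } = require('util')

// A callback API that reports more than one result value.
function measure(input, callback) {
  callback(null, input.length, Date.now())
}

// promisify resolves with only the first non-error argument...
promisify(measure)('abc').then(length => console.log(length)) // 3; the timestamp is dropped

// ...so multi-value callbacks need a manual wrapper that bundles the values.
function measurePromise(input) {
  return new Promise((resolve, reject) => {
    measure(input, (err, length, timestamp) => {
      if (err) return reject(err)
      resolve({ length, timestamp })
    })
  })
}

measurePromise('abc').then(({ length, timestamp }) => console.log(length, timestamp))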
@@ -14,6 +14,7 @@
 */
 let CommandRunner
 const { spawn } = require('child_process')
+const { promisify } = require('util')
 const _ = require('lodash')
 const logger = require('@overleaf/logger')

@@ -100,3 +101,8 @@ module.exports = CommandRunner = {
 return callback()
 },
 }
+
+module.exports.promises = {
+  run: promisify(CommandRunner.run),
+  kill: promisify(CommandRunner.kill),
+}
@@ -1,71 +1,61 @@
-/* eslint-disable
-  no-unused-vars,
-*/
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS101: Remove unnecessary use of Array.from
- * DS102: Remove unnecessary code created because of implicit returns
- * DS207: Consider shorter variations of null checks
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
-let LockManager
-const Settings = require('@overleaf/settings')
-const logger = require('@overleaf/logger')
-const Lockfile = require('lockfile') // from https://github.com/npm/lockfile
+const { promisify } = require('util')
+const OError = require('@overleaf/o-error')
+const Lockfile = require('lockfile')
 const Errors = require('./Errors')
-const fs = require('fs')
+const fsPromises = require('fs/promises')
 const Path = require('path')
-module.exports = LockManager = {
-  LOCK_TEST_INTERVAL: 1000, // 50ms between each test of the lock
-  MAX_LOCK_WAIT_TIME: 15000, // 10s maximum time to spend trying to get the lock
-  LOCK_STALE: 5 * 60 * 1000, // 5 mins time until lock auto expires
-
-  runWithLock(path, runner, callback) {
-    if (callback == null) {
-      callback = function () {}
-    }
-    const lockOpts = {
-      wait: this.MAX_LOCK_WAIT_TIME,
-      pollPeriod: this.LOCK_TEST_INTERVAL,
-      stale: this.LOCK_STALE,
-    }
-    return Lockfile.lock(path, lockOpts, function (error) {
-      if ((error != null ? error.code : undefined) === 'EEXIST') {
-        return callback(new Errors.AlreadyCompilingError('compile in progress'))
-      } else if (error != null) {
-        return fs.lstat(path, (statLockErr, statLock) =>
-          fs.lstat(Path.dirname(path), (statDirErr, statDir) =>
-            fs.readdir(Path.dirname(path), function (readdirErr, readdirDir) {
-              logger.err(
-                {
-                  error,
-                  path,
-                  statLock,
-                  statLockErr,
-                  statDir,
-                  statDirErr,
-                  readdirErr,
-                  readdirDir,
-                },
-                'unable to get lock'
-              )
-              return callback(error)
-            })
-          )
-        )
-      } else {
-        return runner((error1, ...args) =>
-          Lockfile.unlock(path, function (error2) {
-            error = error1 || error2
-            if (error != null) {
-              return callback(error)
-            }
-            return callback(null, ...Array.from(args))
-          })
-        )
-      }
-    })
-  },
-}
+
+const LOCK_OPTS = {
+  pollPeriod: 1000, // 1s between each test of the lock
+  wait: 15000, // 15s maximum time to spend trying to get the lock
+  stale: 5 * 60 * 1000, // 5 mins time until lock auto expires
+}
+
+const PromisifiedLockfile = {
+  lock: promisify(Lockfile.lock),
+  unlock: promisify(Lockfile.unlock),
+}
+
+async function acquire(path) {
+  try {
+    await PromisifiedLockfile.lock(path, LOCK_OPTS)
+  } catch (err) {
+    if (err.code === 'EEXIST') {
+      throw new Errors.AlreadyCompilingError('compile in progress')
+    } else {
+      const dir = Path.dirname(path)
+      const [statLock, statDir, readdirDir] = await Promise.allSettled([
+        fsPromises.lstat(path),
+        fsPromises.lstat(dir),
+        fsPromises.readdir(dir),
+      ])
+      OError.tag(err, 'unable to get lock', {
+        statLock: unwrapPromiseResult(statLock),
+        statDir: unwrapPromiseResult(statDir),
+        readdirDir: unwrapPromiseResult(readdirDir),
+      })
+      throw err
+    }
+  }
+  return new Lock(path)
+}
+
+class Lock {
+  constructor(path) {
+    this._path = path
+  }
+
+  async release() {
+    await PromisifiedLockfile.unlock(this._path)
+  }
+}
+
+function unwrapPromiseResult(result) {
+  if (result.status === 'fulfilled') {
+    return result.value
+  } else {
+    return result.reason
+  }
+}
+
+module.exports = { acquire }
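Usage sketch for the rewritten LockManager: acquire() resolves to a Lock object, and a try/finally block replaces runWithLock's callback bookkeeping. The withProjectLock helper below is illustrative, not part of the diff.

const LockManager = require('./LockManager')

async function withProjectLock(lockFilePath, work) {
  const lock = await LockManager.acquire(lockFilePath)
  try {
    // The lock is held for the duration of the critical section...
    return await work()
  } finally {
    // ...and released even if work() throws.
    await lock.release()
  }
}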
@@ -695,6 +695,7 @@ function __guard__(value, transform) {

 OutputCacheManager.promises = {
   expireOutputFiles: promisify(OutputCacheManager.expireOutputFiles),
+  saveOutputFiles: promisify(OutputCacheManager.saveOutputFiles),
   saveOutputFilesInBuildDir: promisify(
     OutputCacheManager.saveOutputFilesInBuildDir
   ),
@@ -76,3 +76,20 @@ module.exports = OutputFileFinder = {
 })
 },
 }
+
+module.exports.promises = {
+  findOutputFiles: (resources, directory) =>
+    new Promise((resolve, reject) => {
+      OutputFileFinder.findOutputFiles(
+        resources,
+        directory,
+        (err, outputFiles, allFiles) => {
+          if (err) {
+            reject(err)
+          } else {
+            resolve({ outputFiles, allFiles })
+          }
+        }
+      )
+    }),
+}
@@ -14,6 +14,7 @@
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
 let ResourceWriter
+const { promisify } = require('util')
 const UrlCache = require('./UrlCache')
 const Path = require('path')
 const fs = require('fs')

@@ -85,7 +86,10 @@ module.exports = ResourceWriter = {
 if (error != null) {
 return callback(error)
 }
-this.saveAllResourcesToDisk(request, basePath, function (error) {
+ResourceWriter.saveAllResourcesToDisk(
+  request,
+  basePath,
+  function (error) {
 if (error != null) {
 return callback(error)
 }

@@ -100,7 +104,8 @@ module.exports = ResourceWriter = {
 return callback(null, request.resources)
 }
 )
-})
+}
+)
 })
 },

@@ -108,14 +113,19 @@ module.exports = ResourceWriter = {
 if (callback == null) {
 callback = function () {}
 }
-return this._createDirectory(basePath, error => {
+return ResourceWriter._createDirectory(basePath, error => {
 if (error != null) {
 return callback(error)
 }
 const jobs = Array.from(resources).map(resource =>
 (resource => {
 return callback =>
-this._writeResourceToDisk(project_id, resource, basePath, callback)
+ResourceWriter._writeResourceToDisk(
+  project_id,
+  resource,
+  basePath,
+  callback
+)
 })(resource)
 )
 return async.parallelLimit(jobs, parallelFileDownloads, callback)

@@ -126,19 +136,23 @@ module.exports = ResourceWriter = {
 if (callback == null) {
 callback = function () {}
 }
-return this._createDirectory(basePath, error => {
+return ResourceWriter._createDirectory(basePath, error => {
 if (error != null) {
 return callback(error)
 }
 const { project_id, resources } = request
-this._removeExtraneousFiles(request, resources, basePath, error => {
+ResourceWriter._removeExtraneousFiles(
+  request,
+  resources,
+  basePath,
+  error => {
 if (error != null) {
 return callback(error)
 }
 const jobs = Array.from(resources).map(resource =>
 (resource => {
 return callback =>
-this._writeResourceToDisk(
+ResourceWriter._writeResourceToDisk(
 project_id,
 resource,
 basePath,

@@ -147,7 +161,8 @@ module.exports = ResourceWriter = {
 })(resource)
 )
 return async.parallelLimit(jobs, parallelFileDownloads, callback)
-})
+}
+)
 })
 },

@@ -356,3 +371,12 @@ module.exports = ResourceWriter = {
 }
 },
 }
+
+module.exports.promises = {
+  syncResourcesToDisk: promisify(ResourceWriter.syncResourcesToDisk),
+  saveIncrementalResourcesToDisk: promisify(
+    ResourceWriter.saveIncrementalResourcesToDisk
+  ),
+  saveAllResourcesToDisk: promisify(ResourceWriter.saveAllResourcesToDisk),
+  checkPath: promisify(ResourceWriter.checkPath),
+}
@@ -13,6 +13,7 @@
 let TikzManager
 const fs = require('fs')
 const Path = require('path')
+const { promisify } = require('util')
 const ResourceWriter = require('./ResourceWriter')
 const SafeReader = require('./SafeReader')
 const logger = require('@overleaf/logger')

@@ -101,3 +102,8 @@ module.exports = TikzManager = {
 )
 },
 }
+
+module.exports.promises = {
+  checkMainFile: promisify(TikzManager.checkMainFile),
+  injectOutputFile: promisify(TikzManager.injectOutputFile),
+}
@@ -39,6 +39,7 @@
 "chai": "^4.3.6",
 "chai-as-promised": "^7.1.1",
 "mocha": "^8.4.0",
+"mock-fs": "^5.1.2",
 "sandboxed-module": "^2.0.4",
 "sinon": "~9.0.1",
 "sinon-chai": "^3.7.0",
@@ -1,10 +1,12 @@
 const chai = require('chai')
 const sinonChai = require('sinon-chai')
+const chaiAsPromised = require('chai-as-promised')
 const SandboxedModule = require('sandboxed-module')

 // Setup chai
 chai.should()
 chai.use(sinonChai)
+chai.use(chaiAsPromised)

 // Global SandboxedModule settings
 SandboxedModule.configure({
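Registering chai-as-promised lets the test suites assert on promises directly instead of wiring up callbacks. A small hedged example of the assertion style this enables:

const chai = require('chai')
const chaiAsPromised = require('chai-as-promised')
chai.use(chaiAsPromised)
const { expect } = chai

describe('a promisified function', function () {
  it('resolves with a value', async function () {
    await expect(Promise.resolve(42)).to.eventually.equal(42)
  })

  it('rejects with an error', async function () {
    await expect(Promise.reject(new Error('boom'))).to.be.rejectedWith('boom')
  })
})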
@@ -111,9 +111,11 @@ describe('CompileController', function () {

 describe('successfully', function () {
 beforeEach(function () {
-this.CompileManager.doCompileWithLock = sinon
-  .stub()
-  .yields(null, this.output_files, this.stats, this.timings)
+this.CompileManager.doCompileWithLock = sinon.stub().yields(null, {
+  outputFiles: this.output_files,
+  stats: this.stats,
+  timings: this.timings,
+})
 this.CompileController.compile(this.req, this.res)
 })

@@ -156,9 +158,11 @@ describe('CompileController', function () {
 describe('without a outputUrlPrefix', function () {
 beforeEach(function () {
 this.Settings.apis.clsi.outputUrlPrefix = ''
-this.CompileManager.doCompileWithLock = sinon
-  .stub()
-  .yields(null, this.output_files, this.stats, this.timings)
+this.CompileManager.doCompileWithLock = sinon.stub().yields(null, {
+  outputFiles: this.output_files,
+  stats: this.stats,
+  timings: this.timings,
+})
 this.CompileController.compile(this.req, this.res)
 })

@@ -196,9 +200,11 @@ describe('CompileController', function () {
 build: 1234,
 },
 ]
-this.CompileManager.doCompileWithLock = sinon
-  .stub()
-  .yields(null, this.output_files, this.stats, this.timings)
+this.CompileManager.doCompileWithLock = sinon.stub().yields(null, {
+  outputFiles: this.output_files,
+  stats: this.stats,
+  timings: this.timings,
+})
 this.CompileController.compile(this.req, this.res)
 })

@@ -237,9 +243,11 @@ describe('CompileController', function () {
 build: 1234,
 },
 ]
-this.CompileManager.doCompileWithLock = sinon
-  .stub()
-  .yields(null, this.output_files, this.stats, this.timings)
+this.CompileManager.doCompileWithLock = sinon.stub().yields(null, {
+  outputFiles: this.output_files,
+  stats: this.stats,
+  timings: this.timings,
+})
 this.CompileController.compile(this.req, this.res)
 })
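The controller tests above keep stubbing the callback interface with sinon.stub().yields(...), while the CompileManager tests that follow switch to .resolves(...) for the promise-based collaborators. A short illustration of what .yields does; the values are made up:

const sinon = require('sinon')

const doCompileWithLock = sinon.stub().yields(null, {
  outputFiles: [{ path: 'output.pdf' }],
  stats: {},
  timings: {},
})

// yields(...) makes the stub invoke the first function argument it receives
// with the configured arguments, mimicking a callback-style API.
doCompileWithLock({ project_id: 'p' }, (error, result) => {
  console.log(error, result.outputFiles)
})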
@@ -1,14 +1,14 @@
 const SandboxedModule = require('sandboxed-module')
+const { expect } = require('chai')
 const sinon = require('sinon')
-const modulePath = require('path').join(
+const MODULE_PATH = require('path').join(
   __dirname,
   '../../../app/js/CompileManager'
 )
-const { EventEmitter } = require('events')
 
 describe('CompileManager', function () {
   beforeEach(function () {
-    this.callback = sinon.stub()
     this.projectId = 'project-id-123'
     this.userId = '1234'
     this.resources = 'mock-resources'
@@ -40,22 +40,25 @@ describe('CompileManager', function () {
     this.compileDir = `${this.compileBaseDir}/${this.projectId}-${this.userId}`
     this.outputDir = `${this.outputBaseDir}/${this.projectId}-${this.userId}`
 
-    this.proc = new EventEmitter()
-    this.proc.stdout = new EventEmitter()
-    this.proc.stderr = new EventEmitter()
-    this.proc.stderr.setEncoding = sinon.stub().returns(this.proc.stderr)
-
     this.LatexRunner = {
-      runLatex: sinon.stub().yields(),
+      promises: {
+        runLatex: sinon.stub().resolves({}),
+      },
     }
     this.ResourceWriter = {
-      syncResourcesToDisk: sinon.stub().yields(null, this.resources),
+      promises: {
+        syncResourcesToDisk: sinon.stub().resolves(this.resources),
+      },
     }
     this.OutputFileFinder = {
-      findOutputFiles: sinon.stub().yields(null, this.outputFiles),
+      promises: {
+        findOutputFiles: sinon.stub().resolves(this.outputFiles),
+      },
     }
     this.OutputCacheManager = {
-      saveOutputFiles: sinon.stub().yields(null, this.buildFiles),
+      promises: {
+        saveOutputFiles: sinon.stub().resolves(this.buildFiles),
+      },
     }
     this.Settings = {
       path: {
@@ -74,37 +77,44 @@ describe('CompileManager', function () {
       .returns(this.compileDir)
     this.child_process = {
       exec: sinon.stub(),
-      spawn: sinon.stub().returns(this.proc),
+      execFile: sinon.stub().yields(),
     }
     this.CommandRunner = {
-      run: sinon.stub().yields(null, { stdout: this.commandOutput }),
+      promises: {
+        run: sinon.stub().resolves({ stdout: this.commandOutput }),
+      },
     }
     this.DraftModeManager = {
-      injectDraftMode: sinon.stub().yields(),
+      promises: {
+        injectDraftMode: sinon.stub().resolves(),
+      },
     }
     this.TikzManager = {
-      checkMainFile: sinon.stub().yields(null, false),
+      promises: {
+        checkMainFile: sinon.stub().resolves(false),
+      },
+    }
+    this.lock = {
+      release: sinon.stub().resolves(),
     }
     this.LockManager = {
-      runWithLock: sinon.stub().callsFake((lockFile, runner, callback) => {
-        runner((err, ...result) => callback(err, ...result))
-      }),
+      acquire: sinon.stub().resolves(this.lock),
     }
     this.SynctexOutputParser = {
       parseViewOutput: sinon.stub(),
       parseEditOutput: sinon.stub(),
     }
 
-    this.fs = {
+    this.fsPromises = {
       lstat: sinon.stub(),
       stat: sinon.stub(),
       readFile: sinon.stub(),
     }
     this.fse = {
-      ensureDir: sinon.stub().yields(),
+      ensureDir: sinon.stub().resolves(),
     }
 
-    this.CompileManager = SandboxedModule.require(modulePath, {
+    this.CompileManager = SandboxedModule.require(MODULE_PATH, {
       requires: {
         './LatexRunner': this.LatexRunner,
         './ResourceWriter': this.ResourceWriter,
@@ -117,7 +127,7 @@ describe('CompileManager', function () {
         './TikzManager': this.TikzManager,
         './LockManager': this.LockManager,
         './SynctexOutputParser': this.SynctexOutputParser,
-        fs: this.fs,
+        'fs/promises': this.fsPromises,
         'fs-extra': this.fse,
       },
     })
@@ -141,45 +151,44 @@ describe('CompileManager', function () {
     })
 
     describe('when the project is locked', function () {
-      beforeEach(function () {
-        this.error = new Error('locked')
-        this.LockManager.runWithLock.callsFake((lockFile, runner, callback) => {
-          callback(this.error)
-        })
-        this.CompileManager.doCompileWithLock(this.request, this.callback)
+      beforeEach(async function () {
+        const error = new Error('locked')
+        this.LockManager.acquire.rejects(error)
+        await expect(
+          this.CompileManager.promises.doCompileWithLock(this.request)
+        ).to.be.rejectedWith(error)
       })
 
       it('should ensure that the compile directory exists', function () {
-        this.fse.ensureDir.calledWith(this.compileDir).should.equal(true)
+        expect(this.fse.ensureDir).to.have.been.calledWith(this.compileDir)
       })
 
       it('should not run LaTeX', function () {
-        this.LatexRunner.runLatex.called.should.equal(false)
-      })
-
-      it('should call the callback with the error', function () {
-        this.callback.calledWithExactly(this.error).should.equal(true)
+        expect(this.LatexRunner.promises.runLatex).not.to.have.been.called
       })
     })
 
     describe('normally', function () {
-      beforeEach(function () {
-        this.CompileManager.doCompileWithLock(this.request, this.callback)
+      beforeEach(async function () {
+        this.result = await this.CompileManager.promises.doCompileWithLock(
+          this.request
+        )
       })
 
       it('should ensure that the compile directory exists', function () {
-        this.fse.ensureDir.calledWith(this.compileDir).should.equal(true)
+        expect(this.fse.ensureDir).to.have.been.calledWith(this.compileDir)
      })
 
      it('should write the resources to disk', function () {
-        this.ResourceWriter.syncResourcesToDisk
-          .calledWith(this.request, this.compileDir)
-          .should.equal(true)
+        expect(
+          this.ResourceWriter.promises.syncResourcesToDisk
+        ).to.have.been.calledWith(this.request, this.compileDir)
      })
 
      it('should run LaTeX', function () {
-        this.LatexRunner.runLatex
-          .calledWith(`${this.projectId}-${this.userId}`, {
+        expect(this.LatexRunner.promises.runLatex).to.have.been.calledWith(
+          `${this.projectId}-${this.userId}`,
+          {
            directory: this.compileDir,
            mainFile: this.rootResourcePath,
            compiler: this.compiler,
@@ -189,47 +198,49 @@ describe('CompileManager', function () {
            environment: this.env,
            compileGroup: this.compileGroup,
            stopOnFirstError: this.request.stopOnFirstError,
-          })
-          .should.equal(true)
+          }
+        )
      })
 
      it('should find the output files', function () {
-        this.OutputFileFinder.findOutputFiles
-          .calledWith(this.resources, this.compileDir)
-          .should.equal(true)
+        expect(
+          this.OutputFileFinder.promises.findOutputFiles
+        ).to.have.been.calledWith(this.resources, this.compileDir)
      })
 
      it('should return the output files', function () {
-        this.callback.calledWith(null, this.buildFiles).should.equal(true)
+        expect(this.result.outputFiles).to.equal(this.buildFiles)
      })
 
      it('should not inject draft mode by default', function () {
-        this.DraftModeManager.injectDraftMode.called.should.equal(false)
+        expect(this.DraftModeManager.promises.injectDraftMode).not.to.have.been
+          .called
      })
    })
 
    describe('with draft mode', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
        this.request.draft = true
-        this.CompileManager.doCompileWithLock(this.request, this.callback)
+        await this.CompileManager.promises.doCompileWithLock(this.request)
      })
 
      it('should inject the draft mode header', function () {
-        this.DraftModeManager.injectDraftMode
-          .calledWith(this.compileDir + '/' + this.rootResourcePath)
-          .should.equal(true)
+        expect(
+          this.DraftModeManager.promises.injectDraftMode
+        ).to.have.been.calledWith(this.compileDir + '/' + this.rootResourcePath)
      })
    })
 
    describe('with a check option', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
        this.request.check = 'error'
-        this.CompileManager.doCompileWithLock(this.request, this.callback)
+        await this.CompileManager.promises.doCompileWithLock(this.request)
      })
 
      it('should run chktex', function () {
-        this.LatexRunner.runLatex
-          .calledWith(`${this.projectId}-${this.userId}`, {
+        expect(this.LatexRunner.promises.runLatex).to.have.been.calledWith(
+          `${this.projectId}-${this.userId}`,
+          {
            directory: this.compileDir,
            mainFile: this.rootResourcePath,
            compiler: this.compiler,
@@ -243,21 +254,22 @@ describe('CompileManager', function () {
            },
            compileGroup: this.compileGroup,
            stopOnFirstError: this.request.stopOnFirstError,
-          })
-          .should.equal(true)
+          }
+        )
      })
    })
 
    describe('with a knitr file and check options', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
        this.request.rootResourcePath = 'main.Rtex'
        this.request.check = 'error'
-        this.CompileManager.doCompileWithLock(this.request, this.callback)
+        await this.CompileManager.promises.doCompileWithLock(this.request)
      })
 
      it('should not run chktex', function () {
-        this.LatexRunner.runLatex
-          .calledWith(`${this.projectId}-${this.userId}`, {
+        expect(this.LatexRunner.promises.runLatex).to.have.been.calledWith(
+          `${this.projectId}-${this.userId}`,
+          {
            directory: this.compileDir,
            mainFile: 'main.Rtex',
            compiler: this.compiler,
@@ -267,69 +279,58 @@ describe('CompileManager', function () {
            environment: this.env,
            compileGroup: this.compileGroup,
            stopOnFirstError: this.request.stopOnFirstError,
-          })
-          .should.equal(true)
+          }
+        )
      })
    })
  })
 
  describe('clearProject', function () {
    describe('succesfully', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
        this.Settings.compileDir = 'compiles'
-        this.fs.lstat.yields(null, {
+        this.fsPromises.lstat.resolves({
          isDirectory() {
            return true
          },
        })
-        this.CompileManager.clearProject(
+        await this.CompileManager.promises.clearProject(
          this.projectId,
-          this.userId,
-          this.callback
+          this.userId
        )
-        this.proc.emit('close', 0)
      })
 
      it('should remove the project directory', function () {
-        this.child_process.spawn
-          .calledWith('rm', ['-r', '-f', '--', this.compileDir])
-          .should.equal(true)
-      })
-
-      it('should call the callback', function () {
-        this.callback.called.should.equal(true)
+        expect(this.child_process.execFile).to.have.been.calledWith('rm', [
+          '-r',
+          '-f',
+          '--',
+          this.compileDir,
+        ])
      })
    })
 
    describe('with a non-success status code', function () {
-      beforeEach(function () {
+      beforeEach(async function () {
        this.Settings.compileDir = 'compiles'
-        this.fs.lstat.yields(null, {
+        this.fsPromises.lstat.resolves({
          isDirectory() {
            return true
          },
        })
-        this.CompileManager.clearProject(
-          this.projectId,
-          this.userId,
-          this.callback
-        )
-        this.proc.stderr.emit('data', (this.error = 'oops'))
-        this.proc.emit('close', 1)
+        this.child_process.execFile.yields(new Error('oops'))
+        await expect(
+          this.CompileManager.promises.clearProject(this.projectId, this.userId)
+        ).to.be.rejected
      })
 
      it('should remove the project directory', function () {
-        this.child_process.spawn
-          .calledWith('rm', ['-r', '-f', '--', this.compileDir])
-          .should.equal(true)
-      })
-
-      it('should call the callback with an error from the stderr', function () {
-        this.callback.calledWithExactly(sinon.match(Error)).should.equal(true)
-
-        this.callback.args[0][0].message.should.equal(
-          `rm -r ${this.compileDir} failed: ${this.error}`
-        )
+        expect(this.child_process.execFile).to.have.been.calledWith('rm', [
+          '-r',
+          '-f',
+          '--',
+          this.compileDir,
+        ])
      })
    })
  })
@@ -348,7 +349,7 @@ describe('CompileManager', function () {
 
    describe('syncFromCode', function () {
      beforeEach(function () {
-        this.fs.stat.yields(null, {
+        this.fsPromises.stat.resolves({
          isFile() {
            return true
          },
@@ -357,21 +358,24 @@ describe('CompileManager', function () {
        this.SynctexOutputParser.parseViewOutput
          .withArgs(this.commandOutput)
          .returns(this.records)
-        this.CompileManager.syncFromCode(
+      })
+
+      describe('normal case', function () {
+        beforeEach(async function () {
+          this.result = await this.CompileManager.promises.syncFromCode(
            this.projectId,
            this.userId,
            this.filename,
            this.line,
            this.column,
-          '',
-          this.callback
+            ''
          )
        })
 
        it('should execute the synctex binary', function () {
          const outputFilePath = `${this.compileDir}/output.pdf`
          const inputFilePath = `${this.compileDir}/${this.filename}`
-        this.CommandRunner.run.should.have.been.calledWith(
+          expect(this.CommandRunner.promises.run).to.have.been.calledWith(
            `${this.projectId}-${this.userId}`,
            [
              'synctex',
@@ -388,32 +392,28 @@ describe('CompileManager', function () {
          )
        })
 
-      it('should call the callback with the parsed output', function () {
-        this.callback.should.have.been.calledWith(
-          null,
-          sinon.match.array.deepEquals(this.records)
-        )
+        it('should return the parsed output', function () {
+          expect(this.result).to.deep.equal(this.records)
+        })
      })
 
      describe('with a custom imageName', function () {
        const customImageName = 'foo/bar:tag-0'
-        beforeEach(function () {
-          this.CommandRunner.run.reset()
-          this.CompileManager.syncFromCode(
+        beforeEach(async function () {
+          await this.CompileManager.promises.syncFromCode(
            this.projectId,
            this.userId,
            this.filename,
            this.line,
            this.column,
-            customImageName,
-            this.callback
+            customImageName
          )
        })
 
        it('should execute the synctex binary in a custom docker image', function () {
          const outputFilePath = `${this.compileDir}/output.pdf`
          const inputFilePath = `${this.compileDir}/${this.filename}`
-          this.CommandRunner.run.should.have.been.calledWith(
+          expect(this.CommandRunner.promises.run).to.have.been.calledWith(
            `${this.projectId}-${this.userId}`,
            [
              'synctex',
@@ -434,7 +434,7 @@ describe('CompileManager', function () {
 
    describe('syncFromPdf', function () {
      beforeEach(function () {
-        this.fs.stat.yields(null, {
+        this.fsPromises.stat.resolves({
          isFile() {
            return true
          },
@@ -443,20 +443,23 @@ describe('CompileManager', function () {
        this.SynctexOutputParser.parseEditOutput
          .withArgs(this.commandOutput, this.compileDir)
          .returns(this.records)
-        this.CompileManager.syncFromPdf(
+      })
+
+      describe('normal case', function () {
+        beforeEach(async function () {
+          this.result = await this.CompileManager.promises.syncFromPdf(
            this.projectId,
            this.userId,
            this.page,
            this.h,
            this.v,
-          '',
-          this.callback
+            ''
          )
        })
 
        it('should execute the synctex binary', function () {
          const outputFilePath = `${this.compileDir}/output.pdf`
-        this.CommandRunner.run.should.have.been.calledWith(
+          expect(this.CommandRunner.promises.run).to.have.been.calledWith(
            `${this.projectId}-${this.userId}`,
            [
              'synctex',
@@ -471,32 +474,27 @@ describe('CompileManager', function () {
          )
        })
 
-      it('should call the callback with the parsed output', function () {
-        this.callback.should.have.been.calledWith(
-          null,
-          sinon.match.array.deepEquals(this.records)
-        )
+        it('should return the parsed output', function () {
+          expect(this.result).to.deep.equal(this.records)
+        })
      })
 
      describe('with a custom imageName', function () {
        const customImageName = 'foo/bar:tag-1'
-        beforeEach(function () {
-          this.CommandRunner.run.reset()
-          this.CompileManager.syncFromPdf(
+        beforeEach(async function () {
+          await this.CompileManager.promises.syncFromPdf(
            this.projectId,
            this.userId,
            this.page,
            this.h,
            this.v,
-            customImageName,
-            this.callback
+            customImageName
          )
        })
 
        it('should execute the synctex binary in a custom docker image', function () {
          const outputFilePath = `${this.compileDir}/output.pdf`
-          this.CommandRunner.run
-            .calledWith(
+          expect(this.CommandRunner.promises.run).to.have.been.calledWith(
            `${this.projectId}-${this.userId}`,
            [
              'synctex',
@@ -509,27 +507,25 @@ describe('CompileManager', function () {
            60000,
            {}
          )
-            .should.equal(true)
        })
      })
    })
  })
 
  describe('wordcount', function () {
-    beforeEach(function () {
+    beforeEach(async function () {
      this.stdout = 'Encoding: ascii\nWords in text: 2'
-      this.fs.readFile.yields(null, this.stdout)
+      this.fsPromises.readFile.resolves(this.stdout)
 
      this.timeout = 60 * 1000
      this.filename = 'main.tex'
      this.image = 'example.com/image'
 
-      this.CompileManager.wordcount(
+      this.result = await this.CompileManager.promises.wordcount(
        this.projectId,
        this.userId,
        this.filename,
-        this.image,
-        this.callback
+        this.image
      )
    })
 
@@ -543,8 +539,7 @@ describe('CompileManager', function () {
        `-out=${this.filePath}.wc`,
      ]
 
-      this.CommandRunner.run
-        .calledWith(
+      expect(this.CommandRunner.promises.run).to.have.been.calledWith(
        `${this.projectId}-${this.userId}`,
        this.command,
        this.compileDir,
@@ -552,12 +547,10 @@ describe('CompileManager', function () {
        this.timeout,
        {}
      )
-        .should.equal(true)
    })
 
-    it('should call the callback with the parsed output', function () {
-      this.callback
-        .calledWith(null, {
+    it('should return the parsed output', function () {
+      expect(this.result).to.deep.equal({
        encode: 'ascii',
        textWords: 2,
        headWords: 0,
@@ -569,7 +562,6 @@ describe('CompileManager', function () {
        errors: 0,
        messages: '',
      })
-        .should.equal(true)
    })
  })
 })
@@ -1,88 +1,71 @@
-/* eslint-disable
-    no-return-assign,
-    no-unused-vars,
-*/
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS102: Remove unnecessary code created because of implicit returns
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
-const SandboxedModule = require('sandboxed-module')
+const { expect } = require('chai')
 const sinon = require('sinon')
-const modulePath = require('path').join(
-  __dirname,
-  '../../../app/js/LockManager'
-)
-const Path = require('path')
+const mockFs = require('mock-fs')
+const OError = require('@overleaf/o-error')
+const LockManager = require('../../../app/js/LockManager')
 const Errors = require('../../../app/js/Errors')
 
-describe('DockerLockManager', function () {
+describe('LockManager', function () {
   beforeEach(function () {
-    this.LockManager = SandboxedModule.require(modulePath, {
-      requires: {
-        '@overleaf/settings': {},
-        fs: {
-          lstat: sinon.stub().callsArgWith(1),
-          readdir: sinon.stub().callsArgWith(1),
-        },
-        lockfile: (this.Lockfile = {}),
-      },
+    this.lockFile = '/local/compile/directory/.project-lock'
+    mockFs({
+      '/local/compile/directory': {},
     })
-    return (this.lockFile = '/local/compile/directory/.project-lock')
+    this.clock = sinon.useFakeTimers()
   })
 
-  return describe('runWithLock', function () {
-    beforeEach(function () {
-      this.runner = sinon.stub().callsArgWith(0, null, 'foo', 'bar')
-      return (this.callback = sinon.stub())
+  afterEach(function () {
+    mockFs.restore()
+    this.clock.restore()
   })
 
-    describe('normally', function () {
-      beforeEach(function () {
-        this.Lockfile.lock = sinon.stub().callsArgWith(2, null)
-        this.Lockfile.unlock = sinon.stub().callsArgWith(1, null)
-        return this.LockManager.runWithLock(
-          this.lockFile,
-          this.runner,
-          this.callback
-        )
+  describe('when the lock is available', function () {
+    it('the lock can be acquired', async function () {
+      await LockManager.acquire(this.lockFile)
     })
 
-      it('should run the compile', function () {
-        return this.runner.calledWith().should.equal(true)
-      })
-      return it('should call the callback with the response from the compile', function () {
-        return this.callback
-          .calledWithExactly(null, 'foo', 'bar')
-          .should.equal(true)
+    it('acquiring a lock in a nonexistent directory throws an error with debug info', async function () {
+      const err = await expect(
+        LockManager.acquire('/invalid/path/.project-lock')
+      ).to.be.rejected
+      const info = OError.getFullInfo(err)
+      expect(info).to.have.keys(['statLock', 'statDir', 'readdirDir'])
+      expect(info.statLock.code).to.equal('ENOENT')
+      expect(info.statDir.code).to.equal('ENOENT')
+      expect(info.readdirDir.code).to.equal('ENOENT')
    })
  })
 
-  return describe('when the project is locked', function () {
-    beforeEach(function () {
-      this.error = new Error()
-      this.error.code = 'EEXIST'
-      this.Lockfile.lock = sinon.stub().callsArgWith(2, this.error)
-      this.Lockfile.unlock = sinon.stub().callsArgWith(1, null)
-      return this.LockManager.runWithLock(
-        this.lockFile,
-        this.runner,
-        this.callback
-      )
+  describe('after the lock is acquired', function () {
+    beforeEach(async function () {
+      this.lock = await LockManager.acquire(this.lockFile)
    })
 
-    it('should not run the compile', function () {
-      return this.runner.called.should.equal(false)
+    it("the lock can't be acquired again", function (done) {
+      const promise = LockManager.acquire(this.lockFile)
+      // runAllAsync() will advance through time until there are no pending
+      // timers or promises. It interferes with Mocha's promise interface, so
+      // we use Mocha's callback interface for this test.
+      this.clock.runAllAsync()
+      expect(promise)
+        .to.be.rejectedWith(Errors.AlreadyCompilingError)
+        .then(() => {
+          done()
+        })
+        .catch(err => {
+          done(err)
+        })
    })
 
-    it('should return an error', function () {
-      this.callback
-        .calledWithExactly(sinon.match(Errors.AlreadyCompilingError))
-        .should.equal(true)
+    it('the lock can be acquired again after an expiry period', async function () {
+      // The expiry time is 5 minutes. Let's wait 10 minutes.
+      this.clock.tick(10 * 60 * 1000)
+      await LockManager.acquire(this.lockFile)
+    })
+
+    it('the lock can be acquired again after it was released', async function () {
+      this.lock.release()
+      await LockManager.acquire(this.lockFile)
    })
  })
 })