[clsi] avoid downloads from compile directory (#23975)
* [clsi] make error copying output files a fatal compile error
* [clsi] remove unused endpoints for downloading files from compileDir
* [clsi] avoid useless clone of output files
* [clsi] add test for output files when compile did not produce a PDF

Co-authored-by: Rebeka <rebeka.dekany@overleaf.com>
GitOrigin-RevId: cb998b99b4d96cb48ddd70987958f614ad3b40fc
parent 0a270e0870
commit d8b0ab9436

5 changed files with 41 additions and 69 deletions
@@ -128,26 +128,6 @@ const ForbidSymlinks = require('./app/js/StaticServerForbidSymlinks')
 // create a static server which does not allow access to any symlinks
 // avoids possible mismatch of root directory between middleware check
 // and serving the files
-const staticCompileServer = ForbidSymlinks(
-  express.static,
-  Settings.path.compilesDir,
-  {
-    setHeaders(res, path, stat) {
-      if (Path.basename(path) === 'output.pdf') {
-        // Calculate an etag in the same way as nginx
-        // https://github.com/tj/send/issues/65
-        const etag = (path, stat) =>
-          `"${Math.ceil(+stat.mtime / 1000).toString(16)}` +
-          '-' +
-          Number(stat.size).toString(16) +
-          '"'
-        res.set('Etag', etag(path, stat))
-      }
-      res.set('Content-Type', ContentTypeMapper.map(path))
-    },
-  }
-)
-
 const staticOutputServer = ForbidSymlinks(
   express.static,
   Settings.path.outputDir,
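Note: the setHeaders hook removed above reproduced nginx's ETag format (hex mtime in seconds, a dash, hex content length), presumably so the ETag matched whichever server handed out output.pdf. A minimal standalone sketch of that calculation; the mtime and size below are assumed values, purely for illustration:

// Sketch of the nginx-compatible ETag the removed helper produced.
// Assumed inputs: mtime 2024-01-01T00:00:00Z (1704067200000 ms), size 1234 bytes.
const nginxStyleEtag = (mtimeMs, sizeBytes) =>
  `"${Math.ceil(mtimeMs / 1000).toString(16)}-${sizeBytes.toString(16)}"`

console.log(nginxStyleEtag(1704067200000, 1234)) // prints "65920080-4d2"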
@@ -213,32 +193,6 @@ app.get(
   }
 )
 
-app.get(
-  '/project/:project_id/user/:user_id/output/*',
-  function (req, res, next) {
-    // for specific user get the path to the top level file
-    logger.warn(
-      { url: req.url },
-      'direct request for file in compile directory'
-    )
-    req.url = `/${req.params.project_id}-${req.params.user_id}/${req.params[0]}`
-    staticCompileServer(req, res, next)
-  }
-)
-
-app.get('/project/:project_id/output/*', function (req, res, next) {
-  logger.warn({ url: req.url }, 'direct request for file in compile directory')
-  if (req.query?.build?.match(OutputCacheManager.BUILD_REGEX)) {
-    // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
-    req.url =
-      `/${req.params.project_id}/` +
-      OutputCacheManager.path(req.query.build, `/${req.params[0]}`)
-  } else {
-    req.url = `/${req.params.project_id}/${req.params[0]}`
-  }
-  staticCompileServer(req, res, next)
-})
-
 app.get('/oops', function (req, res, next) {
   logger.error({ err: 'hello' }, 'test error')
   res.send('error\n')
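Both removed handlers followed the same delegation pattern: log a "direct request for file in compile directory" warning, rewrite req.url to the on-disk layout of the compile directory, then hand the request to the symlink-forbidding static server. A minimal sketch of that pattern only; the directory and middleware here are illustrative stand-ins, without the ForbidSymlinks wrapper or build handling:

const express = require('express')

const app = express()
// stand-in for the removed staticCompileServer (ForbidSymlinks-wrapped express.static)
const serveCompileDir = express.static('/tmp/compiles')

app.get('/project/:project_id/output/*', function (req, res, next) {
  // map the public URL onto the per-project directory layout,
  // then let the static middleware serve the file
  req.url = `/${req.params.project_id}/${req.params[0]}`
  serveCompileDir(req, res, next)
})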
@@ -117,8 +117,7 @@ function compile(req, res, next) {
           url:
             `${Settings.apis.clsi.url}/project/${request.project_id}` +
             (request.user_id != null ? `/user/${request.user_id}` : '') +
-            (file.build != null ? `/build/${file.build}` : '') +
-            `/output/${file.path}`,
+            `/build/${file.build}/output/${file.path}`,
           ...file,
         })),
       },
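With the controller change above, every output file URL handed back is build-scoped; the compile-directory form without /build/<id> is gone. A small sketch of the URL this template now produces, where the CLSI base URL, the ids and the build id are assumed values for illustration:

// Assumed values: Settings.apis.clsi.url, the project/user ids and the build id
// below are illustrative, not taken from a real deployment.
const clsiUrl = 'http://clsi:3013'
const projectId = '507f1f77bcf86cd799439011'
const userId = '507f191e810c19729de860ea'
const file = { build: '185b0a8d9d1-30dcb8f8dbb381cd', path: 'output.pdf' }

const url =
  `${clsiUrl}/project/${projectId}` +
  (userId != null ? `/user/${userId}` : '') +
  `/build/${file.build}/output/${file.path}`
console.log(url)
// http://clsi:3013/project/507f1f77bcf86cd799439011/user/507f191e810c19729de860ea/build/185b0a8d9d1-30dcb8f8dbb381cd/output/output.pdf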
@@ -313,24 +313,16 @@ async function _saveOutputFiles({
   )
   const outputDir = getOutputDir(request.project_id, request.user_id)
 
-  let { outputFiles, allEntries } =
+  const { outputFiles: rawOutputFiles, allEntries } =
     await OutputFileFinder.promises.findOutputFiles(resourceList, compileDir)
 
-  let buildId
-
-  try {
-    const saveResult = await OutputCacheManager.promises.saveOutputFiles(
+  const { buildId, outputFiles } =
+    await OutputCacheManager.promises.saveOutputFiles(
       { request, stats, timings },
-      outputFiles,
+      rawOutputFiles,
       compileDir,
       outputDir
     )
-    buildId = saveResult.buildId
-    outputFiles = saveResult.outputFiles
-  } catch (err) {
-    const { project_id: projectId, user_id: userId } = request
-    logger.err({ projectId, userId, err }, 'failed to save output files')
-  }
 
   timings.output = timer.done()
  return { outputFiles, allEntries, buildId }
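The try/catch and the "failed to save output files" fallback are gone, so a failure to copy output files into the cache now rejects _saveOutputFiles and, per the commit message, becomes a fatal compile error instead of being logged and swallowed. A minimal sketch of the behavioural difference; saveOutputFiles here is a stand-in, not the real OutputCacheManager call:

// Stand-in for a failing copy into the output cache (error text is illustrative).
async function saveOutputFiles() {
  throw new Error('failed to copy output file into cache')
}

// Old behaviour (the removed catch): log, then carry on with buildId and
// outputFiles left undefined, so the compile still appeared to succeed.
// New behaviour: no catch, so the rejection propagates to the caller and the
// compile request fails.
saveOutputFiles().catch(err => {
  console.error('compile now fails:', err.message)
})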
@@ -245,7 +245,7 @@ module.exports = OutputCacheManager = {
            { err, directory: cacheDir },
            'error creating cache directory'
          )
-         callback(err, outputFiles)
+         callback(err)
        } else {
          // copy all the output files into the new cache directory
          const results = []
@@ -263,7 +263,6 @@ module.exports = OutputCacheManager = {
              return cb()
            }
            // copy other files into cache directory if valid
-           const newFile = _.clone(file)
            const src = Path.join(compileDir, file.path)
            const dst = Path.join(cacheDir, file.path)
            OutputCacheManager._checkIfShouldCopy(
@@ -279,8 +278,8 @@ module.exports = OutputCacheManager = {
                if (err) {
                  return cb(err)
                }
-               newFile.build = buildId // attach a build id if we cached the file
-               results.push(newFile)
+               file.build = buildId
+               results.push(file)
                cb()
              })
            }
@@ -288,8 +287,7 @@ module.exports = OutputCacheManager = {
          },
          function (err) {
            if (err) {
-             // pass back the original files if we encountered *any* error
-             callback(err, outputFiles)
+             callback(err)
              // clean up the directory we just created
              fs.rm(cacheDir, { force: true, recursive: true }, function (err) {
                if (err) {
@@ -301,7 +299,7 @@ module.exports = OutputCacheManager = {
              })
            } else {
              // pass back the list of new files in the cache
-             callback(err, results)
+             callback(null, results)
              // let file expiry run in the background, expire all previous files if per-user
              cleanupDirectory(outputDir, {
                keep: buildId,
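Taken together, the OutputCacheManager hunks switch to the usual error-first callback contract, callback(err) on failure and callback(null, results) on success with no partial file list on error, and they tag each output file in place with file.build = buildId instead of pushing a _.clone. A self-contained sketch of that contract; the function name, signature and build id value are illustrative, not the real OutputCacheManager API:

// Illustrative error-first callback mirroring the behaviour after this change.
function saveToCache(files, cb) {
  const buildId = '185b0a8d9d1-30dcb8f8dbb381cd' // assumed build id value
  try {
    for (const file of files) {
      file.build = buildId // tag the original object in place, no clone
    }
  } catch (err) {
    return cb(err) // failure: error only, no file list
  }
  cb(null, files) // success: null error plus the tagged files
}

saveToCache([{ path: 'output.pdf' }, { path: 'output.log' }], (err, results) => {
  if (err) return console.error('treated as a fatal compile error', err)
  console.log(results.map(f => `${f.path} -> build ${f.build}`))
})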
@@ -58,9 +58,23 @@ Hello world
      )
    })

-    return it('should return a failure status', function () {
+    it('should return a failure status', function () {
      return this.body.compile.status.should.equal('failure')
    })
+
+    it('should return output files', function () {
+      // NOTE: No output.pdf file.
+      this.body.compile.outputFiles
+        .map(f => f.path)
+        .should.deep.equal([
+          'output.aux',
+          'output.fdb_latexmk',
+          'output.fls',
+          'output.log',
+          'output.stderr',
+          'output.stdout',
+        ])
+    })
  })

  return describe('on second run', function () {
@@ -80,8 +94,23 @@ Hello world
      })
    })

-    return it('should return a failure status', function () {
+    it('should return a failure status', function () {
      return this.body.compile.status.should.equal('failure')
    })
+
+    it('should return output files', function () {
+      // NOTE: No output.pdf file.
+      this.body.compile.outputFiles
+        .map(f => f.path)
+        .should.deep.equal([
+          'output.aux',
+          'output.fdb_latexmk',
+          'output.fls',
+          'output.log',
+          'output.pdfxref',
+          'output.stderr',
+          'output.stdout',
+        ])
+    })
  })
})
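For context on what these new assertions inspect: this.body.compile is the JSON compile response. Only status, outputFiles[].path and the build-scoped url are established by this diff, so the build id, base URL and any other per-file fields in the sketch below are assumptions:

// Rough, assumed shape of this.body.compile for the failing compile above.
const exampleCompileResponse = {
  compile: {
    status: 'failure',
    outputFiles: [
      {
        path: 'output.log',
        build: '185b0a8d9d1-30dcb8f8dbb381cd', // assumed build id
        url: 'http://clsi:3013/project/507f1f77bcf86cd799439011/build/185b0a8d9d1-30dcb8f8dbb381cd/output/output.log',
      },
      // ...plus output.aux, output.fdb_latexmk, output.fls, output.stderr,
      // output.stdout (and output.pdfxref on the second run); no output.pdf
    ],
  },
}
console.log(exampleCompileResponse.compile.status) // 'failure'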