Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-21 20:47:08 -05:00
cleanup and logging
This commit is contained in:
parent a877dd36e7
commit 5fdd1d4f47

2 changed files with 93 additions and 33 deletions
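In outline, the change below moves each compile's output into a per-build cache directory whose name is Date.now() rendered in hex, and adds logging on every failure path. A minimal CoffeeScript sketch of that naming scheme, with a hypothetical compileDir for illustration (CACHE_SUBDIR is the constant from the diff):

Path = require "path"

compileDir = "/compiles/project-123"   # hypothetical compile directory
CACHE_SUBDIR = '.cache/clsi'           # constant introduced in the diff below

buildId = Date.now().toString(16)      # e.g. "14b2c9e8f10"
cacheDir = Path.join(compileDir, CACHE_SUBDIR, buildId)

console.log cacheDir                   # /compiles/project-123/.cache/clsi/14b2c9e8f10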
OutputCacheManager.coffee

@@ -8,44 +8,101 @@ _ = require "underscore"
 OutputFileOptimiser = require "./OutputFileOptimiser"
 
 module.exports = OutputCacheManager =
-	CACHE_DIR: '.cache/clsi'
+	CACHE_SUBDIR: '.cache/clsi'
 	BUILD_REGEX: /^[0-9a-f]+$/ # build id is Date.now() converted to hex
+	CACHE_LIMIT: 32 # maximum of 32 cache directories
+	CACHE_AGE: 60*60*1000 # up to one hour old
 
-	saveOutputFiles: (outputFiles, target, callback) ->
-		# make a target/build_id directory and
+	path: (buildId) ->
+		# used by static server, given build id return '.cache/clsi/buildId'
+		return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId)
+
+	saveOutputFiles: (outputFiles, compileDir, callback = (error) ->) ->
+		# make a compileDir/CACHE_SUBDIR/build_id directory and
 		# copy all the output files into it
 		#
-		# TODO: use Path module
-		buildId = Date.now()
-		relDir = OutputCacheManager.CACHE_DIR + '/' + buildId
-		newDir = target + '/' + relDir
-		OutputCacheManager.expireOutputFiles target
-		fse.ensureDir newDir, (err) ->
-			if err?
-				callback(err, outputFiles)
-			else
-				async.mapSeries outputFiles, (file, cb) ->
-					newFile = _.clone(file)
-					src = target + '/' + file.path
-					dst = target + '/' + relDir + '/' + file.path
+		cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
+		# Put the files into a new cache subdirectory
+		buildId = Date.now().toString(16)
+		cacheDir = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR, buildId)
+		# let file expiry run in the background
+		OutputCacheManager.expireOutputFiles cacheRoot, {keep: buildId}
+
+		checkFile = (src, callback) ->
+			# check if we have a valid file to copy into the cache
 			fs.stat src, (err, stats) ->
 				if err?
-					cb(err)
-				else if stats.isFile()
-					fse.copy src, dst, (err) ->
-						OutputFileOptimiser.optimiseFile src, dst, (err, result) ->
-							newFile.build = buildId
-							cb(err, newFile)
+					# some problem reading the file
+					logger.error err: err, file: src, "stat error for file in cache"
+					callback(err)
 				else if not stats.isFile()
+					# other filetype - reject it
+					logger.error err: err, src: src, dst: dst, stat: stats, "nonfile output - refusing to copy to cache"
+					callback(new Error("output file is not a file"), file)
 				else
-					# other filetype - shouldn't happen
-					cb(new Error("output file is not a file"), file)
+					# it's a plain file, ok to copy
+					callback(null)
+
+		copyFile = (src, dst, callback) ->
+			# copy output file into the cache
+			fse.copy src, dst, (err) ->
+				if err?
+					logger.error err: err, src: src, dst: dst, "copy error for file in cache"
+					callback(err)
+				else
+					# call the optimiser for the file too
+					OutputFileOptimiser.optimiseFile src, dst, callback
+
+		# make the new cache directory
+		fse.ensureDir cacheDir, (err) ->
+			if err?
+				logger.error err: err, directory: cacheDir, "error creating cache directory"
+				callback(err, outputFiles)
+			else
+				# copy all the output files into the new cache directory
+				async.mapSeries outputFiles, (file, cb) ->
+					newFile = _.clone(file)
+					[src, dst] = [Path.join(compileDir, file.path), Path.join(cacheDir, file.path)]
+					checkFile src, (err) ->
+						copyFile src, dst, (err) ->
+							if not err?
+								newFile.build = buildId # attach a build id if we cached the file
+							cb(err, newFile)
 				, (err, results) ->
 					if err?
-						callback err, outputFiles
+						# pass back the original files if we encountered *any* error
+						callback(err, outputFiles)
 					else
+						# pass back the list of new files in the cache
 						callback(err, results)
 
-	expireOutputFiles: (target, callback) ->
-		# look in target for build dirs and delete if > N or age of mod time > T
-		cacheDir = target + '/' + OutputCacheManager.CACHE_DIR
-		fs.readdir cacheDir, (err, results) ->
-			callback(err) if callback?
+	expireOutputFiles: (cacheRoot, options, callback = (error) ->) ->
+		# look in compileDir for build dirs and delete if > N or age of mod time > T
+		fs.readdir cacheRoot, (err, results) ->
+			if err?
+				logger.error err: err, project_id: cacheRoot, "error clearing cache"
+				return callback(err)
+
+			dirs = results.sort().reverse()
+			currentTime = Date.now()
+
+			isExpired = (dir, index) ->
+				return false if options?.keep == dir
+				# remove any directories over the hard limit
+				return true if index > OutputCacheManager.CACHE_LIMIT
+				# we can get the build time from the directory name
+				dirTime = parseInt(dir, 16)
+				age = currentTime - dirTime
+				return age > OutputCacheManager.CACHE_AGE
+
+			toRemove = _.filter(dirs, isExpired)
+
+			removeDir = (dir, cb) ->
+				fse.remove Path.join(cacheRoot, dir), (err, result) ->
+					logger.log cache: cacheRoot, dir: dir, "removed expired cache dir"
+					if err?
+						logger.error err: err, dir: dir, "cache remove error"
+					cb(err, result)
+
+			async.eachSeries toRemove, (dir, cb) ->
+				removeDir dir, cb
+			, callback
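The expiry rule added above keeps the directory of the build that was just written, caps the number of build directories, and drops anything older than an hour. A standalone sketch of that predicate, assuming the constants mirror CACHE_LIMIT and CACHE_AGE and that options carries the same {keep: buildId} hint saveOutputFiles passes:

CACHE_LIMIT = 32            # keep at most this many build directories
CACHE_AGE = 60 * 60 * 1000  # anything older than one hour is expired

# dir names are hex build ids, listed newest-first as in expireOutputFiles
isExpired = (dir, index, options = {}) ->
  return false if options.keep == dir   # never expire the build just written
  return true if index > CACHE_LIMIT    # over the hard limit on cache entries
  age = Date.now() - parseInt(dir, 16)  # the hex name encodes the creation time
  age > CACHE_AGE

# a two-hour-old build id is expired, the one just written is not
oldId = (Date.now() - 2 * 60 * 60 * 1000).toString(16)
newId = Date.now().toString(16)
console.log isExpired(oldId, 0)                 # true
console.log isExpired(newId, 0, keep: newId)    # false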
OutputFileOptimiser.coffee

@@ -6,6 +6,8 @@ logger = require "logger-sharelatex"
 module.exports = OutputFileOptimiser =
 
 	optimiseFile: (src, dst, callback = (error) ->) ->
+		# check output file (src) and see if we can optimise it, storing
+		# the result in the build directory (dst)
 		if src.match(/\.pdf$/)
 			OutputFileOptimiser.optimisePDF src, dst, callback
 		else

@@ -26,5 +28,6 @@ module.exports = OutputFileOptimiser =
 			logger.warn {directory, code}, "qpdf returned error"
 			return callback null
 		fs.rename tmpOutput, dst, (err) ->
-			# could log an error here
-			callback null
+			if err?
+				logger.warn {tmpOutput, dst}, "failed to rename output of qpdf command"
+			callback err
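Read together, the two modules give the flow a caller would follow after a compile: save the output files into a fresh cache directory (optimising PDFs along the way), then resolve each cached file to the path the static server expects via OutputCacheManager.path. A hypothetical caller, assuming the module is required from the same directory and that compileDir and outputFiles stand in for the real CLSI inputs:

Path = require "path"
OutputCacheManager = require "./OutputCacheManager"

compileDir = "/compiles/project-123"                           # hypothetical compile directory
outputFiles = [ {path: "output.pdf"}, {path: "output.log"} ]   # hypothetical compiler output

OutputCacheManager.saveOutputFiles outputFiles, compileDir, (err, files) ->
  return console.error err if err?
  for file in files when file.build?
    # e.g. '.cache/clsi/<buildId>/output.pdf', relative to compileDir
    console.log Path.join(OutputCacheManager.path(file.build), file.path)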