Initial open source commit
commit 46b36839e2
95 changed files with 16218 additions and 0 deletions
12
services/clsi/.gitignore
vendored
Normal file
@@ -0,0 +1,12 @@
**.swp
node_modules
app/js
test/unit/js
test/acceptance/js
test/acceptance/fixtures/tmp
compiles
app.js
.DS_Store
*~
cache
.vagrant
99
services/clsi/Gruntfile.coffee
Normal file
@@ -0,0 +1,99 @@
module.exports = (grunt) ->
    grunt.initConfig
        coffee:
            app_src:
                expand: true,
                flatten: true,
                cwd: "app"
                src: ['coffee/*.coffee'],
                dest: 'app/js/',
                ext: '.js'

            app:
                src: "app.coffee"
                dest: "app.js"

            unit_tests:
                expand: true
                cwd: "test/unit/coffee"
                src: ["**/*.coffee"]
                dest: "test/unit/js/"
                ext: ".js"

            acceptance_tests:
                expand: true
                cwd: "test/acceptance/coffee"
                src: ["**/*.coffee"]
                dest: "test/acceptance/js/"
                ext: ".js"

            smoke_tests:
                expand: true
                cwd: "test/smoke/coffee"
                src: ["**/*.coffee"]
                dest: "test/smoke/js"
                ext: ".js"

        watch:
            app:
                files: ['app/coffee/*.coffee']
                tasks: ['coffee']

        clean:
            app: ["app/js/"]
            unit_tests: ["test/unit/js"]
            acceptance_tests: ["test/acceptance/js"]
            smoke_tests: ["test/smoke/js"]

        nodemon:
            dev:
                options:
                    file: 'app.js'

        concurrent:
            dev:
                tasks: ['nodemon', 'watch']
                options:
                    logConcurrentOutput: true

        mochaTest:
            unit:
                options:
                    reporter: "spec"
                src: ["test/unit/js/**/*.js"]
            acceptance:
                options:
                    reporter: "spec"
                    timeout: 40000
                src: ["test/acceptance/js/**/*.js"]
            smoke:
                options:
                    reporter: "spec"
                    timeout: 10000
                src: ["test/smoke/js/**/*.js"]

    grunt.loadNpmTasks 'grunt-contrib-coffee'
    grunt.loadNpmTasks 'grunt-contrib-watch'
    grunt.loadNpmTasks 'grunt-contrib-clean'
    grunt.loadNpmTasks 'grunt-nodemon'
    grunt.loadNpmTasks 'grunt-concurrent'
    grunt.loadNpmTasks 'grunt-mocha-test'
    grunt.loadNpmTasks 'grunt-shell'

    grunt.registerTask 'compile:app', ['clean:app', 'coffee:app', 'coffee:app_src']
    grunt.registerTask 'run', ['compile:app', 'concurrent']

    grunt.registerTask 'compile:unit_tests', ['clean:unit_tests', 'coffee:unit_tests']
    grunt.registerTask 'test:unit', ['compile:app', 'compile:unit_tests', 'mochaTest:unit']

    grunt.registerTask 'compile:acceptance_tests', ['clean:acceptance_tests', 'coffee:acceptance_tests']
    grunt.registerTask 'test:acceptance', ['compile:acceptance_tests', 'mochaTest:acceptance']

    grunt.registerTask 'compile:smoke_tests', ['clean:smoke_tests', 'coffee:smoke_tests']
    grunt.registerTask 'test:smoke', ['compile:smoke_tests', 'mochaTest:smoke']

    grunt.registerTask 'install', 'compile:app'

    grunt.registerTask 'default', ['run']
36
services/clsi/app.coffee
Normal file
@@ -0,0 +1,36 @@
CompileController = require "./app/js/CompileController"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
logger.initialize("clsi")
smokeTest = require "smoke-test-sharelatex"

ProjectPersistenceManager = require "./app/js/ProjectPersistenceManager"

require("./app/js/db").sync()

express = require "express"
app = express()

app.post "/project/:project_id/compile", express.bodyParser(), CompileController.compile
app.del "/project/:project_id", CompileController.clearCache

staticServer = express.static(Settings.path.compilesDir)
app.get "/project/:project_id/output/*", (req, res, next) ->
    req.url = "/#{req.params.project_id}/#{req.params[0]}"
    staticServer(req, res, next)

app.get "/status", (req, res, next) ->
    res.send "CLSI is alive\n"

app.get "/health_check", smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))

app.use (error, req, res, next) ->
    logger.error err: error, "server error"
    res.send 500

app.listen port = (Settings.internal?.clsi?.port or 3013), host = (Settings.internal?.clsi?.host or "localhost"), (error) ->
    logger.log "CLSI listening on #{host}:#{port}"

setInterval () ->
    ProjectPersistenceManager.clearExpiredProjects()
, tenMinutes = 10 * 60 * 1000
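For reference, a minimal sketch (not part of the commit) of a client exercising the compile endpoint above, assuming the default localhost:3013 from the testing settings and a hypothetical project id:

request = require "request"

# POST a compile request; the body shape matches what RequestParser expects below
request.post {
    url: "http://localhost:3013/project/demo-project-id/compile"
    json:
        compile:
            resources: [
                path: "main.tex"
                content: "\\documentclass{article}\\begin{document}Hello\\end{document}"
            ]
}, (error, res, body) ->
    throw error if error?
    console.log body.compile.status, body.compile.outputFiles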
12
services/clsi/app/coffee/CommandRunner.coffee
Normal file
@@ -0,0 +1,12 @@
spawn = require("child_process").spawn
logger = require "logger-sharelatex"

module.exports = CommandRunner =
    run: (project_id, command, directory, timeout, callback = (error) ->) ->
        command = (arg.replace('$COMPILE_DIR', directory) for arg in command)
        logger.log project_id: project_id, command: command, directory: directory, "running command"
        logger.warn "timeouts and sandboxing are not enabled with CommandRunner"

        proc = spawn command[0], command.slice(1), stdio: "inherit", cwd: directory
        proc.on "close", () ->
            callback()
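An illustrative sketch (not part of the commit; the directory path is hypothetical) of the $COMPILE_DIR substitution that run performs:

command = ["latexmk", "-outdir=$COMPILE_DIR", "$COMPILE_DIR/main.tex"]
directory = "/var/clsi/compiles/project-123"
# Same comprehension as in CommandRunner.run:
substituted = (arg.replace('$COMPILE_DIR', directory) for arg in command)
# substituted is now:
# ["latexmk", "-outdir=/var/clsi/compiles/project-123", "/var/clsi/compiles/project-123/main.tex"]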
40
services/clsi/app/coffee/CompileController.coffee
Normal file
@@ -0,0 +1,40 @@
RequestParser = require "./RequestParser"
CompileManager = require "./CompileManager"
Settings = require "settings-sharelatex"
Metrics = require "./Metrics"
ProjectPersistenceManager = require "./ProjectPersistenceManager"
logger = require "logger-sharelatex"

module.exports = CompileController =
    compile: (req, res, next = (error) ->) ->
        timer = new Metrics.Timer("compile-request")
        RequestParser.parse req.body, (error, request) ->
            return next(error) if error?
            request.project_id = req.params.project_id
            ProjectPersistenceManager.markProjectAsJustAccessed request.project_id, (error) ->
                return next(error) if error?
                CompileManager.doCompile request, (error, outputFiles = []) ->
                    if error?
                        logger.error err: error, project_id: request.project_id, "error running compile"
                        error = error.message or error
                        status = "failure"
                    else
                        status = "failure"
                        for file in outputFiles
                            if file.type == "pdf"
                                status = "success"

                    timer.done()
                    res.send JSON.stringify {
                        compile:
                            status: status
                            error: error
                            outputFiles: outputFiles.map (file) ->
                                url: "#{Settings.apis.clsi.url}/project/#{request.project_id}/output/#{file.path}"
                                type: file.type
                    }

    clearCache: (req, res, next = (error) ->) ->
        ProjectPersistenceManager.clearProject req.params.project_id, (error) ->
            return next(error) if error?
            res.send 204 # No content
39
services/clsi/app/coffee/CompileManager.coffee
Normal file
@@ -0,0 +1,39 @@
ResourceWriter = require "./ResourceWriter"
LatexRunner = require "./LatexRunner"
OutputFileFinder = require "./OutputFileFinder"
Settings = require("settings-sharelatex")
Path = require "path"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
rimraf = require "rimraf"

module.exports = CompileManager =
    doCompile: (request, callback = (error, outputFiles) ->) ->
        compileDir = Path.join(Settings.path.compilesDir, request.project_id)

        timer = new Metrics.Timer("write-to-disk")
        logger.log project_id: request.project_id, "starting compile"
        ResourceWriter.syncResourcesToDisk request.project_id, request.resources, compileDir, (error) ->
            return callback(error) if error?
            logger.log project_id: request.project_id, time_taken: Date.now() - timer.start, "written files to disk"
            timer.done()

            timer = new Metrics.Timer("run-compile")
            Metrics.inc("compiles")
            LatexRunner.runLatex request.project_id, {
                directory: compileDir
                mainFile: request.rootResourcePath
                compiler: request.compiler
                timeout: request.timeout
            }, (error) ->
                return callback(error) if error?
                logger.log project_id: request.project_id, time_taken: Date.now() - timer.start, "done compile"
                timer.done()

                OutputFileFinder.findOutputFiles request.resources, compileDir, (error, outputFiles) ->
                    return callback(error) if error?
                    callback null, outputFiles

    clearProject: (project_id, callback = (error) ->) ->
        compileDir = Path.join(Settings.path.compilesDir, project_id)
        rimraf compileDir, callback
57
services/clsi/app/coffee/LatexRunner.coffee
Normal file
@@ -0,0 +1,57 @@
Path = require "path"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
CommandRunner = require(Settings.clsi?.commandRunner or "./CommandRunner")

module.exports = LatexRunner =
    runLatex: (project_id, options, callback = (error) ->) ->
        {directory, mainFile, compiler, timeout} = options
        compiler ||= "pdflatex"
        timeout ||= 60000 # milliseconds

        logger.log directory: directory, compiler: compiler, timeout: timeout, mainFile: mainFile, "starting compile"

        # We want to run latexmk on the .tex file, which it will automatically
        # generate from the .Rtex file.
        mainFile = mainFile.replace(/\.Rtex$/, ".tex")

        if compiler == "pdflatex"
            command = LatexRunner._pdflatexCommand mainFile
        else if compiler == "latex"
            command = LatexRunner._latexCommand mainFile
        else if compiler == "xelatex"
            command = LatexRunner._xelatexCommand mainFile
        else if compiler == "lualatex"
            command = LatexRunner._lualatexCommand mainFile
        else
            return callback new Error("unknown compiler: #{compiler}")

        CommandRunner.run project_id, command, directory, timeout, callback

    _latexmkBaseCommand: [ "latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR"]

    _pdflatexCommand: (mainFile) ->
        LatexRunner._latexmkBaseCommand.concat [
            "-pdf", "-e", "$pdflatex='pdflatex -interaction=batchmode %O %S'",
            Path.join("$COMPILE_DIR", mainFile)
        ]

    _latexCommand: (mainFile) ->
        LatexRunner._latexmkBaseCommand.concat [
            "-pdfdvi", "-e", "$latex='latex -interaction=batchmode %O %S'",
            Path.join("$COMPILE_DIR", mainFile)
        ]

    _xelatexCommand: (mainFile) ->
        LatexRunner._latexmkBaseCommand.concat [
            "-xelatex", "-e", "$pdflatex='xelatex -interaction=batchmode %O %S'",
            Path.join("$COMPILE_DIR", mainFile)
        ]

    _lualatexCommand: (mainFile) ->
        LatexRunner._latexmkBaseCommand.concat [
            "-pdf", "-e", "$pdflatex='lualatex -interaction=batchmode %O %S'",
            Path.join("$COMPILE_DIR", mainFile)
        ]
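For reference, a sketch (not part of the commit) of the argv that _pdflatexCommand assembles for a main.tex, before CommandRunner substitutes $COMPILE_DIR:

LatexRunner._pdflatexCommand("main.tex")
# => ["latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR",
#     "-pdf", "-e", "$pdflatex='pdflatex -interaction=batchmode %O %S'", "$COMPILE_DIR/main.tex"]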
23
services/clsi/app/coffee/Metrics.coffee
Normal file
@@ -0,0 +1,23 @@
StatsD = require('lynx')
statsd = new StatsD('localhost', 8125, {on_error: ->})

buildKey = (key) -> "clsi.#{process.env.NODE_ENV or "testing"}.#{key}"

module.exports =
    set: (key, value, sampleRate = 1) ->
        statsd.set buildKey(key), value, sampleRate

    inc: (key, sampleRate = 1) ->
        statsd.increment buildKey(key), sampleRate

    Timer: class
        constructor: (key, sampleRate = 1) ->
            this.start = new Date()
            this.key = buildKey(key)
            this.sampleRate = sampleRate
        done: ->
            timeSpan = new Date - this.start
            statsd.timing(this.key, timeSpan, this.sampleRate)

    gauge: (key, value, sampleRate = 1) ->
        statsd.gauge key, value, sampleRate
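A minimal usage sketch of the module above (not part of the commit; assumes a statsd daemon on localhost:8125, though lynx's on_error handler swallows failures if none is running):

Metrics = require "./Metrics"

timer = new Metrics.Timer("compile-request") # records the start time
# ... do some timed work ...
timer.done()                                 # sends the elapsed milliseconds as a timing stat
Metrics.inc("compiles")                      # increments clsi.<NODE_ENV>.compiles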
58
services/clsi/app/coffee/OutputFileFinder.coffee
Normal file
@@ -0,0 +1,58 @@
async = require "async"
fs = require "fs"
Path = require "path"
wrench = require "wrench"

module.exports = OutputFileFinder =
    findOutputFiles: (resources, directory, callback = (error, outputFiles) ->) ->
        incomingResources = {}
        for resource in resources
            incomingResources[resource.path] = true

        OutputFileFinder._getAllFiles directory, (error, allFiles) ->
            jobs = []
            outputFiles = []
            for file in allFiles
                do (file) ->
                    jobs.push (callback) ->
                        if incomingResources[file.path]
                            return callback()
                        else
                            OutputFileFinder._isDirectory Path.join(directory, file.path), (error, isDirectory) ->
                                return callback(error) if error?
                                if !isDirectory
                                    outputFiles.push file
                                callback()

            async.series jobs, (error) ->
                return callback(error) if error?
                callback null, outputFiles

    _isDirectory: (path, callback = (error, directory) ->) ->
        fs.stat path, (error, stat) ->
            callback error, stat?.isDirectory()

    _getAllFiles: (directory, _callback = (error, outputFiles) ->) ->
        # Make sure the callback is only called once
        callback = (error, outputFiles) ->
            _callback(error, outputFiles)
            _callback = () ->

        outputFiles = []

        wrench.readdirRecursive directory, (error, files) =>
            if error?
                if error.code == "ENOENT"
                    # Directory doesn't exist, which is not a problem
                    return callback(null, [])
                else
                    return callback(error)

            # readdirRecursive returns multiple times and finishes with a null response
            if !files?
                return callback(null, outputFiles)

            for file in files
                outputFiles.push
                    path: file
                    type: file.match(/\.([^\.]+)$/)?[1]
54
services/clsi/app/coffee/ProjectPersistenceManager.coffee
Normal file
@@ -0,0 +1,54 @@
UrlCache = require "./UrlCache"
CompileManager = require "./CompileManager"
db = require "./db"
async = require "async"
logger = require "logger-sharelatex"

module.exports = ProjectPersistenceManager =
    EXPIRY_TIMEOUT: oneDay = 24 * 60 * 60 * 1000 # ms

    markProjectAsJustAccessed: (project_id, callback = (error) ->) ->
        db.Project.findOrCreate(project_id: project_id)
            .success(
                (project) ->
                    project.updateAttributes(lastAccessed: new Date())
                        .success(() -> callback())
                        .error callback
            )
            .error callback

    clearExpiredProjects: (callback = (error) ->) ->
        ProjectPersistenceManager._findExpiredProjectIds (error, project_ids) ->
            return callback(error) if error?
            logger.log project_ids: project_ids, "clearing expired projects"
            jobs = for project_id in (project_ids or [])
                do (project_id) ->
                    (callback) ->
                        ProjectPersistenceManager.clearProject project_id, (err) ->
                            if err?
                                logger.error err: err, project_id: project_id, "error clearing project"
                            callback()
            async.series jobs, callback

    clearProject: (project_id, callback = (error) ->) ->
        logger.log project_id: project_id, "clearing project"
        CompileManager.clearProject project_id, (error) ->
            return callback(error) if error?
            UrlCache.clearProject project_id, (error) ->
                return callback(error) if error?
                ProjectPersistenceManager._clearProjectFromDatabase project_id, (error) ->
                    return callback(error) if error?
                    callback()

    _clearProjectFromDatabase: (project_id, callback = (error) ->) ->
        db.Project.destroy(project_id: project_id)
            .success(() -> callback())
            .error callback

    _findExpiredProjectIds: (callback = (error, project_ids) ->) ->
        db.Project.findAll(where: ["lastAccessed < ?", new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT)])
            .success(
                (projects) ->
                    callback null, projects.map((project) -> project.project_id)
            )
            .error callback
74
services/clsi/app/coffee/RequestParser.coffee
Normal file
@@ -0,0 +1,74 @@
module.exports = RequestParser =
    VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"]
    MAX_TIMEOUT: 60

    parse: (body, callback = (error, data) ->) ->
        response = {}

        if !body.compile?
            return callback "top level object should have a compile attribute"

        compile = body.compile
        compile.options ||= {}

        try
            response.compiler = @_parseAttribute "compiler",
                compile.options.compiler,
                validValues: @VALID_COMPILERS
                default: "pdflatex"
                type: "string"
            response.timeout = @_parseAttribute "timeout",
                compile.options.timeout,
                default: RequestParser.MAX_TIMEOUT
                type: "number"

            if response.timeout > RequestParser.MAX_TIMEOUT
                response.timeout = RequestParser.MAX_TIMEOUT
            response.timeout = response.timeout * 1000 # milliseconds

            response.resources = (@_parseResource(resource) for resource in (compile.resources or []))
            response.rootResourcePath = @_parseAttribute "rootResourcePath",
                compile.rootResourcePath,
                default: "main.tex"
                type: "string"
        catch error
            return callback error

        callback null, response

    _parseResource: (resource) ->
        if !resource.path? or typeof resource.path != "string"
            throw "all resources should have a path attribute"

        if resource.modified?
            modified = new Date(resource.modified)
            if isNaN(modified.getTime())
                throw "resource modified date could not be understood: #{resource.modified}"

        if !resource.url? and !resource.content?
            throw "all resources should have either a url or content attribute"
        if resource.content? and typeof resource.content != "string"
            throw "content attribute should be a string"
        if resource.url? and typeof resource.url != "string"
            throw "url attribute should be a string"

        return {
            path: resource.path
            modified: modified
            url: resource.url
            content: resource.content
        }

    _parseAttribute: (name, attribute, options) ->
        if attribute?
            if options.validValues?
                if options.validValues.indexOf(attribute) == -1
                    throw "#{name} attribute should be one of: #{options.validValues.join(", ")}"
            if options.type?
                if typeof attribute != options.type
                    throw "#{name} attribute should be a #{options.type}"
        else
            return options.default if options.default?
            throw "Default not implemented"
        return attribute
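An illustrative sketch (not part of the commit) of a body that parse() accepts, and how it is normalized:

body =
    compile:
        options:
            compiler: "pdflatex"
            timeout: 120   # seconds; parse() clamps this to MAX_TIMEOUT
        rootResourcePath: "main.tex"
        resources: [
            path: "main.tex"
            content: "\\documentclass{article}..."
        ]

RequestParser.parse body, (error, request) ->
    throw error if error?
    # request.compiler == "pdflatex"
    # request.timeout  == 60000 (clamped to 60s, then converted to milliseconds)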
68
services/clsi/app/coffee/ResourceWriter.coffee
Normal file
@@ -0,0 +1,68 @@
UrlCache = require "./UrlCache"
Path = require "path"
fs = require "fs"
async = require "async"
mkdirp = require "mkdirp"
OutputFileFinder = require "./OutputFileFinder"
Metrics = require "./Metrics"

module.exports = ResourceWriter =
    syncResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) ->
        @_removeExtraneousFiles resources, basePath, (error) =>
            return callback(error) if error?
            jobs = for resource in resources
                do (resource) =>
                    (callback) => @_writeResourceToDisk(project_id, resource, basePath, callback)
            async.series jobs, callback

    _removeExtraneousFiles: (resources, basePath, _callback = (error) ->) ->
        timer = new Metrics.Timer("unlink-output-files")
        callback = (error) ->
            timer.done()
            _callback(error)

        OutputFileFinder.findOutputFiles resources, basePath, (error, outputFiles) ->
            return callback(error) if error?

            jobs = []
            for file in outputFiles or []
                do (file) ->
                    path = file.path
                    should_delete = true
                    if path.match(/^output\./) or path.match(/\.aux$/)
                        should_delete = false
                    if path == "output.pdf" or path == "output.dvi" or path == "output.log"
                        should_delete = true
                    if should_delete
                        jobs.push (callback) -> ResourceWriter._deleteFileIfNotDirectory Path.join(basePath, path), callback

            async.series jobs, callback

    _deleteFileIfNotDirectory: (path, callback = (error) ->) ->
        fs.stat path, (error, stat) ->
            return callback(error) if error?
            if stat.isFile()
                fs.unlink path, callback
            else
                callback()

    _writeResourceToDisk: (project_id, resource, basePath, callback = (error) ->) ->
        path = Path.normalize(Path.join(basePath, resource.path))
        if (path.slice(0, basePath.length) != basePath)
            return callback new Error("resource path is outside root directory")

        mkdirp Path.dirname(path), (error) ->
            return callback(error) if error?
            # TODO: Don't overwrite file if it hasn't been modified
            if resource.url?
                UrlCache.downloadUrlToFile(
                    project_id,
                    resource.url,
                    path,
                    resource.modified,
                    callback
                )
            else
                fs.writeFile path, resource.content, callback
113
services/clsi/app/coffee/UrlCache.coffee
Normal file
@@ -0,0 +1,113 @@
db = require("./db")
UrlFetcher = require("./UrlFetcher")
Settings = require("settings-sharelatex")
crypto = require("crypto")
fs = require("fs")
logger = require "logger-sharelatex"
async = require "async"

module.exports = UrlCache =
    downloadUrlToFile: (project_id, url, destPath, lastModified, callback = (error) ->) ->
        UrlCache._ensureUrlIsInCache project_id, url, lastModified, (error, pathToCachedUrl) =>
            return callback(error) if error?
            UrlCache._copyFile(pathToCachedUrl, destPath, callback)

    clearProject: (project_id, callback = (error) ->) ->
        UrlCache._findAllUrlsInProject project_id, (error, urls) ->
            logger.log project_id: project_id, url_count: urls.length, "clearing project URLs"
            return callback(error) if error?
            jobs = for url in (urls or [])
                do (url) ->
                    (callback) ->
                        UrlCache._clearUrlFromCache project_id, url, (error) ->
                            if error?
                                logger.error err: error, project_id: project_id, url: url, "error clearing project URL"
                            callback()
            async.series jobs, callback

    _ensureUrlIsInCache: (project_id, url, lastModified, callback = (error, pathOnDisk) ->) ->
        if lastModified?
            # MySQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds.
            # So round down to seconds
            lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
        UrlCache._doesUrlNeedDownloading project_id, url, lastModified, (error, needsDownloading) =>
            return callback(error) if error?
            if needsDownloading
                logger.log url: url, lastModified: lastModified, "downloading URL"
                UrlFetcher.pipeUrlToFile url, UrlCache._cacheFilePathForUrl(project_id, url), (error) =>
                    return callback(error) if error?
                    UrlCache._updateOrCreateUrlDetails project_id, url, lastModified, (error) =>
                        return callback(error) if error?
                        callback null, UrlCache._cacheFilePathForUrl(project_id, url)
            else
                logger.log url: url, lastModified: lastModified, "URL is up to date in cache"
                callback null, UrlCache._cacheFilePathForUrl(project_id, url)

    _doesUrlNeedDownloading: (project_id, url, lastModified, callback = (error, needsDownloading) ->) ->
        if !lastModified?
            return callback null, true

        UrlCache._findUrlDetails project_id, url, (error, urlDetails) ->
            return callback(error) if error?
            if !urlDetails? or !urlDetails.lastModified? or urlDetails.lastModified.getTime() < lastModified.getTime()
                return callback null, true
            else
                return callback null, false

    _cacheFileNameForUrl: (project_id, url) ->
        project_id + ":" + crypto.createHash("md5").update(url).digest("hex")

    _cacheFilePathForUrl: (project_id, url) ->
        "#{Settings.path.clsiCacheDir}/#{UrlCache._cacheFileNameForUrl(project_id, url)}"

    _copyFile: (from, to, _callback = (error) ->) ->
        callbackOnce = (error) ->
            _callback(error)
            _callback = () ->
        writeStream = fs.createWriteStream(to)
        readStream = fs.createReadStream(from)
        writeStream.on "error", callbackOnce
        readStream.on "error", callbackOnce
        writeStream.on "close", () -> callbackOnce()
        readStream.pipe(writeStream)

    _clearUrlFromCache: (project_id, url, callback = (error) ->) ->
        UrlCache._clearUrlDetails project_id, url, (error) ->
            return callback(error) if error?
            UrlCache._deleteUrlCacheFromDisk project_id, url, (error) ->
                return callback(error) if error?
                callback null

    _deleteUrlCacheFromDisk: (project_id, url, callback = (error) ->) ->
        fs.unlink UrlCache._cacheFilePathForUrl(project_id, url), callback

    _findUrlDetails: (project_id, url, callback = (error, urlDetails) ->) ->
        db.UrlCache.find(where: { url: url, project_id: project_id })
            .success((urlDetails) -> callback null, urlDetails)
            .error callback

    _updateOrCreateUrlDetails: (project_id, url, lastModified, callback = (error) ->) ->
        db.UrlCache.findOrCreate(url: url, project_id: project_id)
            .success(
                (urlDetails) ->
                    urlDetails.updateAttributes(lastModified: lastModified)
                        .success(() -> callback())
                        .error(callback)
            )
            .error callback

    _clearUrlDetails: (project_id, url, callback = (error) ->) ->
        db.UrlCache.destroy(url: url, project_id: project_id)
            .success(() -> callback null)
            .error callback

    _findAllUrlsInProject: (project_id, callback = (error, urls) ->) ->
        db.UrlCache.findAll(where: { project_id: project_id })
            .success(
                (urlEntries) ->
                    callback null, urlEntries.map((entry) -> entry.url)
            )
            .error callback
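A usage sketch for the cache above (not part of the commit; the paths and project id are hypothetical, and the Sequelize models must have been synced first):

UrlCache = require "./UrlCache"

UrlCache.downloadUrlToFile "project-123",
    "http://example.com/lion.png",
    "/var/clsi/compiles/project-123/lion.png",
    new Date(),
    (error) ->
        throw error if error?
        # lion.png is now cached under Settings.path.clsiCacheDir (keyed by
        # project id plus the md5 of the URL) and copied into the compile directory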
23
services/clsi/app/coffee/UrlFetcher.coffee
Normal file
@@ -0,0 +1,23 @@
request = require("request").defaults(jar: false)
fs = require("fs")

module.exports = UrlFetcher =
    pipeUrlToFile: (url, filePath, _callback = (error) ->) ->
        callbackOnce = (error) ->
            _callback(error)
            _callback = () ->

        urlStream = request.get(url)
        fileStream = fs.createWriteStream(filePath)

        urlStream.on "response", (res) ->
            if res.statusCode >= 200 and res.statusCode < 300
                urlStream.pipe(fileStream)
            else
                callbackOnce(new Error("URL returned non-success status code: #{res.statusCode}"))

        urlStream.on "error", (error) ->
            callbackOnce(error or new Error("Something went wrong downloading the URL"))

        urlStream.on "end", () ->
            callbackOnce()
24
services/clsi/app/coffee/db.coffee
Normal file
@@ -0,0 +1,24 @@
Sequelize = require("sequelize")
Settings = require("settings-sharelatex")

sequelize = new Sequelize(
    Settings.mysql.clsi.database,
    Settings.mysql.clsi.username,
    Settings.mysql.clsi.password,
    Settings.mysql.clsi
)

module.exports =
    UrlCache: sequelize.define("UrlCache", {
        url: Sequelize.STRING
        project_id: Sequelize.STRING
        lastModified: Sequelize.DATE
    })

    Project: sequelize.define("Project", {
        project_id: Sequelize.STRING
        lastAccessed: Sequelize.DATE
    })

    sync: () -> sequelize.sync()
35
services/clsi/config/settings.testing.coffee
Normal file
@@ -0,0 +1,35 @@
Path = require "path"

module.exports =
    # Options are passed to Sequelize.
    # See http://sequelizejs.com/documentation#usage-options for details
    mysql:
        clsi:
            database: "clsi"
            username: "clsi"
            password: null

    path:
        compilesDir: Path.resolve(__dirname + "/../compiles")
        clsiCacheDir: Path.resolve(__dirname + "/../cache")

    clsi:
        # commandRunner: "docker-runner-sharelatex"
        # docker:
        #     image: "quay.io/sharelatex/texlive-full"
        #     env:
        #         PATH: "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/2013/bin/x86_64-linux/"
        #         HOME: "/tmp"
        #     socketPath: "/var/run/docker.sock"
        #     user: "tex"

    internal:
        clsi:
            port: 3013
            host: ""

    apis:
        clsi:
            url: "http://localhost:3013"
36
services/clsi/package.json
Normal file
@@ -0,0 +1,36 @@
{
    "name": "node-clsi",
    "description": "A Node.js implementation of the CLSI LaTeX web-API",
    "version": "0.0.1-dev",
    "author": "James Allen <james@sharelatex.com>",
    "dependencies": {
        "async": "0.2.9",
        "express": "3.3.1",
        "lynx": "0.0.11",
        "mkdirp": "0.3.5",
        "mysql": "2.0.0-alpha7",
        "request": "~2.21.0",
        "rimraf": "2.1.4",
        "logger-sharelatex": "git+ssh://git@bitbucket.org:sharelatex/logger-sharelatex.git#bunyan",
        "settings-sharelatex": "git+ssh://git@bitbucket.org:sharelatex/settings-sharelatex.git#master",
        "sequelize": "~2.0.0-beta.2",
        "wrench": "~1.5.4",
        "smoke-test-sharelatex": "git+ssh://git@bitbucket.org:sharelatex/smoke-test-sharelatex.git#master"
    },
    "devDependencies": {
        "mocha": "1.10.0",
        "coffee-script": "1.6.0",
        "chai": "~1.8.1",
        "sinon": "~1.7.3",
        "grunt": "~0.4.2",
        "grunt-contrib-coffee": "~0.7.0",
        "grunt-contrib-watch": "~0.5.3",
        "grunt-concurrent": "~0.4.2",
        "grunt-nodemon": "~0.1.2",
        "grunt-contrib-clean": "~0.5.0",
        "grunt-shell": "~0.6.1",
        "grunt-mocha-test": "~0.8.1",
        "sandboxed-module": "~0.3.0",
        "timekeeper": "0.0.4"
    }
}
@@ -0,0 +1,46 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()

describe "Broken LaTeX file", ->
    before ->
        @broken_request =
            resources: [
                path: "main.tex"
                content: '''
                    \\documentclass{articl % :(
                    \\begin{documen % :(
                    Broken
                    \\end{documen % :(
                '''
            ]
        @correct_request =
            resources: [
                path: "main.tex"
                content: '''
                    \\documentclass{article}
                    \\begin{document}
                    Hello world
                    \\end{document}
                '''
            ]

    describe "on first run", ->
        before (done) ->
            @project_id = Client.randomId()
            Client.compile @project_id, @broken_request, (@error, @res, @body) => done()

        it "should return a failure status", ->
            @body.compile.status.should.equal "failure"

    describe "on second run", ->
        before (done) ->
            @project_id = Client.randomId()
            Client.compile @project_id, @correct_request, () =>
                Client.compile @project_id, @broken_request, (@error, @res, @body) =>
                    done()

        it "should return a failure status", ->
            @body.compile.status.should.equal "failure"
@@ -0,0 +1,34 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()

describe "Deleting Old Files", ->
    before ->
        @request =
            resources: [
                path: "main.tex"
                content: '''
                    \\documentclass{article}
                    \\begin{document}
                    Hello world
                    \\end{document}
                '''
            ]

    describe "on first run", ->
        before (done) ->
            @project_id = Client.randomId()
            Client.compile @project_id, @request, (@error, @res, @body) => done()

        it "should return a success status", ->
            @body.compile.status.should.equal "success"

        describe "after file has been deleted", ->
            before (done) ->
                @request.resources = []
                Client.compile @project_id, @request, (@error, @res, @body) =>
                    done()

            it "should return a failure status", ->
                @body.compile.status.should.equal "failure"
@@ -0,0 +1,79 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
fs = require "fs"
ChildProcess = require "child_process"

fixturePath = (path) -> __dirname + "/../fixtures/" + path

convertToPng = (pdfPath, pngPath, callback = (error) ->) ->
    convert = ChildProcess.exec "convert #{fixturePath(pdfPath)} #{fixturePath(pngPath)}"
    convert.on "exit", () ->
        callback()

compare = (originalPath, generatedPath, callback = (error, same) ->) ->
    proc = ChildProcess.exec "compare -metric mae #{fixturePath(originalPath)} #{fixturePath(generatedPath)} #{fixturePath("tmp/diff.png")}"
    stderr = ""
    proc.stderr.on "data", (chunk) -> stderr += chunk
    proc.on "exit", () ->
        if stderr.trim() == "0 (0)"
            callback null, true
        else
            console.log stderr
            callback null, false

compareMultiplePages = (project_id, callback = (error) ->) ->
    compareNext = (page_no, callback) ->
        path = "tmp/#{project_id}-source-#{page_no}.png"
        fs.stat fixturePath(path), (error, stat) ->
            if error?
                callback()
            else
                compare "tmp/#{project_id}-source-#{page_no}.png", "tmp/#{project_id}-generated-#{page_no}.png", (error, same) =>
                    throw error if error?
                    same.should.equal true
                    compareNext page_no + 1, callback
    compareNext 0, callback

downloadAndComparePdf = (project_id, example_dir, url, callback = (error) ->) ->
    writeStream = fs.createWriteStream(fixturePath("tmp/#{project_id}.pdf"))
    request.get(url).pipe(writeStream)
    writeStream.on "close", () =>
        convertToPng "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png", (error) =>
            throw error if error?
            convertToPng "examples/#{example_dir}/output.pdf", "tmp/#{project_id}-source.png", (error) =>
                throw error if error?
                fs.stat fixturePath("tmp/#{project_id}-source-0.png"), (error, stat) =>
                    if error?
                        compare "tmp/#{project_id}-source.png", "tmp/#{project_id}-generated.png", (error, same) =>
                            throw error if error?
                            same.should.equal true
                            callback()
                    else
                        compareMultiplePages project_id, (error) ->
                            throw error if error?
                            callback()

Client.runServer(4242, fixturePath("examples"))

describe "Example Documents", ->
    before (done) ->
        ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on "exit", () -> done()

    for example_dir in fs.readdirSync fixturePath("examples")
        do (example_dir) ->
            describe example_dir, ->
                before ->
                    @project_id = Client.randomId()

                it "should generate the correct pdf", (done) ->
                    Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
                        pdf = Client.getOutputFile body, "pdf"
                        downloadAndComparePdf(@project_id, example_dir, pdf.url, done)

                it "should generate the correct pdf on the second run as well", (done) ->
                    Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
                        pdf = Client.getOutputFile body, "pdf"
                        downloadAndComparePdf(@project_id, example_dir, pdf.url, done)
@@ -0,0 +1,39 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()

describe "Simple LaTeX file", ->
    before (done) ->
        @project_id = Client.randomId()
        @request =
            resources: [
                path: "main.tex"
                content: '''
                    \\documentclass{article}
                    \\begin{document}
                    Hello world
                    \\end{document}
                '''
            ]
        Client.compile @project_id, @request, (@error, @res, @body) => done()

    it "should return the PDF", ->
        pdf = Client.getOutputFile(@body, "pdf")
        pdf.type.should.equal "pdf"

    it "should return the log", ->
        log = Client.getOutputFile(@body, "log")
        log.type.should.equal "log"

    it "should provide the pdf for download", (done) ->
        pdf = Client.getOutputFile(@body, "pdf")
        request.get pdf.url, (error, res, body) ->
            res.statusCode.should.equal 200
            done()

    it "should provide the log for download", (done) ->
        log = Client.getOutputFile(@body, "log")
        request.get log.url, (error, res, body) ->
            res.statusCode.should.equal 200
            done()
27
services/clsi/test/acceptance/coffee/TimeoutTests.coffee
Normal file
@@ -0,0 +1,27 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()

describe "Timed out compile", ->
    before (done) ->
        @request =
            options:
                timeout: 0.01 # seconds
            resources: [
                path: "main.tex"
                content: '''
                    \\documentclass{article}
                    \\begin{document}
                    Hello world
                    \\end{document}
                '''
            ]
        @project_id = Client.randomId()
        Client.compile @project_id, @request, (@error, @res, @body) => done()

    it "should return a timeout error", ->
        @body.compile.error.should.equal "container timed out"

    it "should return a failure status", ->
        @body.compile.status.should.equal "failure"
220
services/clsi/test/acceptance/coffee/UrlCachingTests.coffee
Normal file
@@ -0,0 +1,220 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
sinon = require "sinon"

host = "localhost"

Server =
    run: () ->
        express = require "express"
        app = express()

        staticServer = express.static __dirname + "/../fixtures/"
        app.get "/:random_id/*", (req, res, next) =>
            @getFile(req.url)
            req.url = "/" + req.params[0]
            staticServer(req, res, next)

        app.listen 31415, host

    getFile: () ->

    randomId: () ->
        Math.random().toString(16).slice(2)

Server.run()

describe "Url Caching", ->
    describe "Downloading an image for the first time", ->
        before (done) ->
            @project_id = Client.randomId()
            @file = "#{Server.randomId()}/lion.png"
            @request =
                resources: [{
                    path: "main.tex"
                    content: '''
                        \\documentclass{article}
                        \\usepackage{graphicx}
                        \\begin{document}
                        \\includegraphics{lion.png}
                        \\end{document}
                    '''
                }, {
                    path: "lion.png"
                    url: "http://#{host}:31415/#{@file}"
                }]

            sinon.spy Server, "getFile"
            Client.compile @project_id, @request, (@error, @res, @body) => done()

        afterEach ->
            Server.getFile.restore()

        it "should download the image", ->
            Server.getFile
                .calledWith("/" + @file)
                .should.equal true

    describe "When an image is in the cache and the last modified date is unchanged", ->
        before (done) ->
            @project_id = Client.randomId()
            @file = "#{Server.randomId()}/lion.png"
            @request =
                resources: [{
                    path: "main.tex"
                    content: '''
                        \\documentclass{article}
                        \\usepackage{graphicx}
                        \\begin{document}
                        \\includegraphics{lion.png}
                        \\end{document}
                    '''
                }, @image_resource = {
                    path: "lion.png"
                    url: "http://#{host}:31415/#{@file}"
                    modified: Date.now()
                }]

            Client.compile @project_id, @request, (@error, @res, @body) =>
                sinon.spy Server, "getFile"
                Client.compile @project_id, @request, (@error, @res, @body) =>
                    done()

        after ->
            Server.getFile.restore()

        it "should not download the image again", ->
            Server.getFile.called.should.equal false

    describe "When an image is in the cache and the last modified date is advanced", ->
        before (done) ->
            @project_id = Client.randomId()
            @file = "#{Server.randomId()}/lion.png"
            @request =
                resources: [{
                    path: "main.tex"
                    content: '''
                        \\documentclass{article}
                        \\usepackage{graphicx}
                        \\begin{document}
                        \\includegraphics{lion.png}
                        \\end{document}
                    '''
                }, @image_resource = {
                    path: "lion.png"
                    url: "http://#{host}:31415/#{@file}"
                    modified: @last_modified = Date.now()
                }]

            Client.compile @project_id, @request, (@error, @res, @body) =>
                sinon.spy Server, "getFile"
                @image_resource.modified = new Date(@last_modified + 3000)
                Client.compile @project_id, @request, (@error, @res, @body) =>
                    done()

        afterEach ->
            Server.getFile.restore()

        it "should download the image again", ->
            Server.getFile.called.should.equal true

    describe "When an image is in the cache and the last modified date is further in the past", ->
        before (done) ->
            @project_id = Client.randomId()
            @file = "#{Server.randomId()}/lion.png"
            @request =
                resources: [{
                    path: "main.tex"
                    content: '''
                        \\documentclass{article}
                        \\usepackage{graphicx}
                        \\begin{document}
                        \\includegraphics{lion.png}
                        \\end{document}
                    '''
                }, @image_resource = {
                    path: "lion.png"
                    url: "http://#{host}:31415/#{@file}"
                    modified: @last_modified = Date.now()
                }]

            Client.compile @project_id, @request, (@error, @res, @body) =>
                sinon.spy Server, "getFile"
                @image_resource.modified = new Date(@last_modified - 3000)
                Client.compile @project_id, @request, (@error, @res, @body) =>
                    done()

        afterEach ->
            Server.getFile.restore()

        it "should not download the image again", ->
            Server.getFile.called.should.equal false

    describe "When an image is in the cache and the last modified date is not specified", ->
        before (done) ->
            @project_id = Client.randomId()
            @file = "#{Server.randomId()}/lion.png"
            @request =
                resources: [{
                    path: "main.tex"
                    content: '''
                        \\documentclass{article}
                        \\usepackage{graphicx}
                        \\begin{document}
                        \\includegraphics{lion.png}
                        \\end{document}
                    '''
                }, @image_resource = {
                    path: "lion.png"
                    url: "http://#{host}:31415/#{@file}"
                    modified: @last_modified = Date.now()
                }]

            Client.compile @project_id, @request, (@error, @res, @body) =>
                sinon.spy Server, "getFile"
                delete @image_resource.modified
                Client.compile @project_id, @request, (@error, @res, @body) =>
                    done()

        afterEach ->
            Server.getFile.restore()

        it "should download the image again", ->
            Server.getFile.called.should.equal true

    describe "After clearing the cache", ->
        before (done) ->
            @project_id = Client.randomId()
            @file = "#{Server.randomId()}/lion.png"
            @request =
                resources: [{
                    path: "main.tex"
                    content: '''
                        \\documentclass{article}
                        \\usepackage{graphicx}
                        \\begin{document}
                        \\includegraphics{lion.png}
                        \\end{document}
                    '''
                }, @image_resource = {
                    path: "lion.png"
                    url: "http://#{host}:31415/#{@file}"
                    modified: @last_modified = Date.now()
                }]

            Client.compile @project_id, @request, (error) =>
                throw error if error?
                Client.clearCache @project_id, (error, res, body) =>
                    throw error if error?
                    sinon.spy Server, "getFile"
                    Client.compile @project_id, @request, (@error, @res, @body) =>
                        done()

        afterEach ->
            Server.getFile.restore()

        it "should download the image again", ->
            Server.getFile.called.should.equal true
69
services/clsi/test/acceptance/coffee/helpers/Client.coffee
Normal file
@@ -0,0 +1,69 @@
request = require "request"
fs = require "fs"
Settings = require "../../../../app/js/Settings"

host = "localhost"

module.exports = Client =
    host: Settings.externalUrl

    randomId: () ->
        Math.random().toString(16).slice(2)

    compile: (project_id, data, callback = (error, res, body) ->) ->
        request.post {
            url: "#{@host}/project/#{project_id}/compile"
            json:
                compile: data
        }, callback

    clearCache: (project_id, callback = (error, res, body) ->) ->
        request.del "#{@host}/project/#{project_id}", callback

    getOutputFile: (response, type) ->
        for file in response.compile.outputFiles
            if file.type == type
                return file
        return null

    runServer: (port, directory) ->
        express = require("express")
        app = express()
        app.use express.static(directory)
        app.listen(port, host)

    compileDirectory: (project_id, baseDirectory, directory, serverPort, callback = (error, res, body) ->) ->
        resources = []
        entities = fs.readdirSync("#{baseDirectory}/#{directory}")
        rootResourcePath = "main.tex"
        while (entities.length > 0)
            entity = entities.pop()
            stat = fs.statSync("#{baseDirectory}/#{directory}/#{entity}")
            if stat.isDirectory()
                entities = entities.concat fs.readdirSync("#{baseDirectory}/#{directory}/#{entity}").map (subEntity) ->
                    if subEntity == "main.tex"
                        rootResourcePath = "#{entity}/#{subEntity}"
                    return "#{entity}/#{subEntity}"
            else if stat.isFile() and entity != "output.pdf"
                extension = entity.split(".").pop()
                if ["tex", "bib", "cls", "sty", "pdf_tex", "Rtex"].indexOf(extension) > -1
                    resources.push
                        path: entity
                        content: fs.readFileSync("#{baseDirectory}/#{directory}/#{entity}").toString()
                else if ["eps", "ttf", "png", "jpg", "pdf", "jpeg"].indexOf(extension) > -1
                    resources.push
                        path: entity
                        url: "http://#{host}:#{serverPort}/#{directory}/#{entity}"
                        modified: stat.mtime

        fs.readFile "#{baseDirectory}/#{directory}/options.json", (error, body) =>
            req =
                resources: resources
                rootResourcePath: rootResourcePath

            if !error?
                body = JSON.parse body
                req.options = body

            @compile project_id, req, callback
@@ -0,0 +1,9 @@
@book{DouglasAdams,
    title={The Hitchhiker's Guide to the Galaxy},
    author={Adams, Douglas},
    isbn={9781417642595},
    url={http://books.google.com/books?id=W-xMPgAACAAJ},
    year={1995},
    publisher={San Val}
}
@@ -0,0 +1,12 @@
\documentclass{article}

\usepackage[backend=biber]{biblatex}
\addbibresource{bibliography.bib}

\begin{document}

The meaning of life, the universe and everything is 42 \cite{DouglasAdams}

\printbibliography

\end{document}
@@ -0,0 +1,48 @@
% $ biblatex auxiliary file $
% $ biblatex version 1.5 $
% $ biber version 0.9.3 $
% Do not modify the above lines!
%
% This is an auxiliary file used by the 'biblatex' package.
% This file may safely be deleted. It will be recreated by
% biber or bibtex as required.
%
\begingroup
\makeatletter
\@ifundefined{ver@biblatex.sty}
  {\@latex@error
     {Missing 'biblatex' package}
     {The bibliography requires the 'biblatex' package.}
      \aftergroup\endinput}
  {}
\endgroup


\refsection{0}
  \entry{DouglasAdams}{book}{}
    \name{labelname}{1}{}{%
      {{}{Adams}{A\bibinitperiod}{Douglas}{D\bibinitperiod}{}{}{}{}}%
    }
    \name{author}{1}{}{%
      {{}{Adams}{A\bibinitperiod}{Douglas}{D\bibinitperiod}{}{}{}{}}%
    }
    \list{publisher}{1}{%
      {San Val}%
    }
    \strng{namehash}{AD1}
    \strng{fullhash}{AD1}
    \field{sortinit}{A}
    \field{isbn}{9781417642595}
    \field{title}{The Hitchhiker's Guide to the Galaxy}
    \field{year}{1995}
    \verb{url}
    \verb http://books.google.com/books?id=W-xMPgAACAAJ
    \endverb
  \endentry

  \lossort
  \endlossort

\endrefsection
\endinput
Binary file not shown.
@@ -0,0 +1,84 @@
<?xml version="1.0" standalone="yes"?>
<!-- logreq request file -->
<!-- logreq version 1.0 / dtd version 1.0 -->
<!-- Do not edit this file! -->
<!DOCTYPE requests [
  <!ELEMENT requests (internal | external)*>
  <!ELEMENT internal (generic, (provides | requires)*)>
  <!ELEMENT external (generic, cmdline?, input?, output?, (provides | requires)*)>
  <!ELEMENT cmdline (binary, (option | infile | outfile)*)>
  <!ELEMENT input (file)+>
  <!ELEMENT output (file)+>
  <!ELEMENT provides (file)+>
  <!ELEMENT requires (file)+>
  <!ELEMENT generic (#PCDATA)>
  <!ELEMENT binary (#PCDATA)>
  <!ELEMENT option (#PCDATA)>
  <!ELEMENT infile (#PCDATA)>
  <!ELEMENT outfile (#PCDATA)>
  <!ELEMENT file (#PCDATA)>
  <!ATTLIST requests
    version CDATA #REQUIRED
  >
  <!ATTLIST internal
    package CDATA #REQUIRED
    priority (9) #REQUIRED
    active (0 | 1) #REQUIRED
  >
  <!ATTLIST external
    package CDATA #REQUIRED
    priority (1 | 2 | 3 | 4 | 5 | 6 | 7 | 8) #REQUIRED
    active (0 | 1) #REQUIRED
  >
  <!ATTLIST provides
    type (static | dynamic | editable) #REQUIRED
  >
  <!ATTLIST requires
    type (static | dynamic | editable) #REQUIRED
  >
  <!ATTLIST file
    type CDATA #IMPLIED
  >
]>
<requests version="1.0">
  <internal package="biblatex" priority="9" active="0">
    <generic>latex</generic>
    <provides type="dynamic">
      <file>output.bcf</file>
    </provides>
    <requires type="dynamic">
      <file>output.bbl</file>
    </requires>
    <requires type="static">
      <file>blx-compat.def</file>
      <file>biblatex.def</file>
      <file>numeric.bbx</file>
      <file>standard.bbx</file>
      <file>numeric.cbx</file>
      <file>biblatex.cfg</file>
      <file>english.lbx</file>
    </requires>
  </internal>
  <external package="biblatex" priority="5" active="0">
    <generic>biber</generic>
    <cmdline>
      <binary>biber</binary>
      <infile>output</infile>
    </cmdline>
    <input>
      <file>output.bcf</file>
    </input>
    <output>
      <file>output.bbl</file>
    </output>
    <provides type="dynamic">
      <file>output.bbl</file>
    </provides>
    <requires type="dynamic">
      <file>output.bcf</file>
    </requires>
    <requires type="editable">
      <file>bibliography.bib</file>
    </requires>
  </external>
</requests>
Binary file not shown.
6673
services/clsi/test/acceptance/fixtures/examples/epstopdf/image.eps
Normal file
File diff suppressed because it is too large
@@ -0,0 +1,10 @@
\documentclass{article}

\usepackage{graphicx}
\usepackage{epstopdf}

\begin{document}

\includegraphics[width=\textwidth]{image}

\end{document}
Binary file not shown.
@@ -0,0 +1,28 @@
\documentclass[a4paper]{article}
\usepackage{feynmf}

\begin{document}

\setlength{\unitlength}{1mm}

\begin{fmffile}{diagram}

\begin{center}
\begin{fmfgraph*}(41,17)
    \fmfleftn{i}{2}
    \fmfrightn{o}{2}
    \fmflabel{$g_2$}{i1}
    \fmflabel{$g_1$}{i2}
    \fmflabel{$p_2$}{o1}
    \fmflabel{$p_1$}{o2}
    \fmf{quark}{i1,v1}
    \fmf{quark}{i2,v1}
    \fmfblob{.35w}{v1}
    \fmf{quark}{v1,o1}
    \fmf{quark}{v1,o2}
\end{fmfgraph*}
\end{center}

\end{fmffile}

\end{document}
Binary file not shown.
@@ -0,0 +1,28 @@
\documentclass[a4paper]{article}
\usepackage{feynmp}

\begin{document}

\setlength{\unitlength}{1mm}

\begin{fmffile}{diagram}

\begin{center}
\begin{fmfgraph*}(41,17)
    \fmfleftn{i}{2}
    \fmfrightn{o}{2}
    \fmflabel{$g_2$}{i1}
    \fmflabel{$g_1$}{i2}
    \fmflabel{$p_2$}{o1}
    \fmflabel{$p_1$}{o2}
    \fmf{quark}{i1,v1}
    \fmf{quark}{i2,v1}
    \fmfblob{.35w}{v1}
    \fmf{quark}{v1,o1}
    \fmf{quark}{v1,o2}
\end{fmfgraph*}
\end{center}

\end{fmffile}

\end{document}
@@ -0,0 +1,3 @@
{
    "compiler": "latex"
}
Binary file not shown.
@@ -0,0 +1,17 @@
\documentclass{article}

\usepackage{glossaries}
\makeglossaries

\newglossaryentry{Physics}{
    name=Physics,
    description={is the study of stuff}
}

\begin{document}

To solve various problems in \Gls{Physics} it can be useful to express any arbitrary piecewise-smooth function as a Fourier Series composed of multiple sine and cosine functions.

\printglossaries

\end{document}
@@ -0,0 +1,7 @@
This is makeindex, version 2.15 [TeX Live 2011] (kpathsea + Thai support).
Scanning style file ./output.ist...........................done (27 attributes redefined, 0 ignored).
Scanning input file output.glo....done (1 entries accepted, 0 rejected).
Sorting entries...done (0 comparisons).
Generating output file output.gls....done (6 lines written, 0 warnings).
Output written in output.gls.
Transcript written in output.glg.
@@ -0,0 +1 @@
\glossaryentry{Physics?\glossaryentryfield{Physics}{\glsnamefont{Physics}}{is the study of stuff}{\relax }|setentrycounter[]{page}\glsnumberformat}{1}
@@ -0,0 +1,6 @@
\glossarysection[\glossarytoctitle]{\glossarytitle}\glossarypreamble
\begin{theglossary}\glossaryheader
\glsgroupheading{P}\relax \glsresetentrylist %
\glossaryentryfield{Physics}{\glsnamefont{Physics}}{is the study of stuff}{\relax }{\glossaryentrynumbers{\relax
\setentrycounter[]{page}\glsnumberformat{1}}}%
\end{theglossary}\glossarypostamble
|
@@ -0,0 +1,29 @@
% makeindex style file created by the glossaries package
% for document 'output' on 2013-7-28
actual '?'
encap '|'
level '!'
quote '"'
keyword "\\glossaryentry"
preamble "\\glossarysection[\\glossarytoctitle]{\\glossarytitle}\\glossarypreamble\n\\begin{theglossary}\\glossaryheader\n"
postamble "\%\n\\end{theglossary}\\glossarypostamble\n"
group_skip "\\glsgroupskip\n"
item_0 "\%\n"
item_1 "\%\n"
item_2 "\%\n"
item_01 "\%\n"
item_x1 "\\relax \\glsresetentrylist\n"
item_12 "\%\n"
item_x2 "\\relax \\glsresetentrylist\n"
delim_0 "\{\\glossaryentrynumbers\{\\relax "
delim_1 "\{\\glossaryentrynumbers\{\\relax "
delim_2 "\{\\glossaryentrynumbers\{\\relax "
delim_t "\}\}"
delim_n "\\delimN "
delim_r "\\delimR "
headings_flag 1
heading_prefix "\\glsgroupheading\{"
heading_suffix "\}\\relax \\glsresetentrylist "
symhead_positive "glssymbols"
numhead_positive "glsnumbers"
page_compositor "."
Binary file not shown.
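The four generated files above (.glg transcript, .glo input, .gls output, .ist style) are exactly what one makeindex pass over the glossary produces. A sketch of the equivalent manual invocation; -s (style), -o (output) and -t (transcript) are standard makeindex flags:

# Re-run the makeindex step recorded in the transcript above.
{exec} = require "child_process"

exec "makeindex -s output.ist -o output.gls -t output.glg output.glo", (error, stdout) ->
  throw error if error?
  console.log stdout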
@@ -0,0 +1,26 @@
\documentclass{article}
\usepackage{pgfplots}
\usepackage{nopageno}

\pgfplotsset{compat=newest}

\begin{document}

\begin{tikzpicture}
\begin{axis}
\addplot +[no markers,
    raw gnuplot,
    thick,
    empty line = jump
] gnuplot {
    set contour base;
    set cntrparam levels discrete 0.003;
    unset surface;
    set view map;
    set isosamples 500;
    splot x**3-3*x+3-y**2;
};
\end{axis}
\end{tikzpicture}

\end{document}
Binary file not shown.
@@ -0,0 +1,13 @@
\documentclass{article}
\begin{document}

Hello world $x^2 = 0$.

%% chunk options: cache this chunk
%% begin.rcode my-cache, cache=TRUE
% set.seed(123)
% x = runif(10)
% sd(x) # standard deviation
%% end.rcode

\end{document}
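The .Rtex source above is knitr input: the `%% begin.rcode` chunks are knitted into plain LaTeX before compilation, and the LatexRunner tests later in this commit expect the runner to point LaTeX at the resulting .tex file. A sketch of that filename rewrite, as an assumption about the real logic:

# Assumed equivalent of the rewrite asserted in LatexRunnerTests below.
texEquivalent = (mainFile) ->
  mainFile.replace /\.Rtex$/, ".tex"

console.log texEquivalent("main-file.Rtex")  # => "main-file.tex"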
BIN
services/clsi/test/acceptance/fixtures/examples/knitr/output.pdf
Normal file
Binary file not shown.
File diff suppressed because it is too large
@@ -0,0 +1,9 @@
\documentclass{article}

\usepackage{graphicx}

\begin{document}

\includegraphics[width=\textwidth]{image.eps}

\end{document}
@@ -0,0 +1,3 @@
{
  "compiler": "latex"
}
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,8 @@
\documentclass{article}
\usepackage{luacode}

\begin{document}
\begin{luacode}
tex.print("Hello world")
\end{luacode}
\end{document}
@@ -0,0 +1,3 @@
{
  "compiler": "lualatex"
}
Binary file not shown.
@@ -0,0 +1,12 @@
\documentclass{article}

\usepackage{makeidx}
\makeindex

\begin{document}

To solve various problems in Physics \index{Physics} it can be useful to express any arbitrary piecewise-smooth function as a Fourier Series \index{Fourier Series} composed of multiple sine and cosine functions.

\printindex

\end{document}
Binary file not shown.
@@ -0,0 +1,10 @@
\documentclass{article}
\usepackage{minted}
\begin{document}
\begin{minted}{c}
int main() {
  printf("hello, world");
  return 0;
}
\end{minted}
\end{document}
Binary file not shown.
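minted shells out to the external Pygments highlighter, so the fixture above only compiles when LaTeX runs with shell escape enabled. A sketch of the direct invocation; the file name main.tex is an assumption:

# Compile the minted fixture outside the service (sketch).
{exec} = require "child_process"

exec "pdflatex -shell-escape main.tex", (error) ->
  throw error if error?
  console.log "compiled with minted"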
@@ -0,0 +1,15 @@
@book{DouglasAdams,
  title={The Hitchhiker's Guide to the Galaxy},
  author={Adams, Douglas},
  isbn={9781417642595},
  url={http://books.google.com/books?id=W-xMPgAACAAJ},
  year={1995},
  publisher={San Val}
}

@book{Tolkien,
  title={The Hobbit},
  author={Tolkien, J. R. R.},
  year={1904?}
}

@@ -0,0 +1,23 @@
\documentclass{report}

\usepackage{multibib}
\newcites{one}{First references}

\begin{document}

\chapter{First chapter}

The answer to life, the universe and everything is 42 \citeone{DouglasAdams}

\bibliographystyleone{plain}
\bibliographyone{bibliography}

\chapter{Second chapter}

All that glitters is not gold \cite{Tolkien}

\bibliographystyle{plain}
\bibliography{bibliography}

\end{document}

@@ -0,0 +1,8 @@
\begin{thebibliography}{1}

\bibitem{DouglasAdams}
Douglas Adams.
\newblock {\em The Hitchhiker's Guide to the Galaxy}.
\newblock San Val, 1995.

\end{thebibliography}
@@ -0,0 +1,8 @@
\begin{thebibliography}{1}

\bibitem{Tolkien}
J.~R.~R. Tolkien.
\newblock {\em The Hobbit}.
\newblock 1904?

\end{thebibliography}
Binary file not shown.
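multibib keeps one auxiliary file per \newcites name, which is why two .bbl files are committed above: one for the `one` citation set and one for the default set. A sketch of the extra BibTeX pass this implies; the aux names are assumptions based on how multibib conventionally works (the default aux takes the jobname, the other the \newcites key):

# Sketch: one bibtex run per citation set ("output" jobname is assumed).
{exec} = require "child_process"

exec "bibtex output && bibtex one", (error) ->
  throw error if error?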
@@ -0,0 +1 @@
\ref{two}
@@ -0,0 +1,2 @@
\section{Two}
\label{two}
@@ -0,0 +1,8 @@
\documentclass{article}

\begin{document}

\include{chapter1}
\include{chapter2}

\end{document}
Binary file not shown.
@@ -0,0 +1,9 @@
@book{DouglasAdams,
  title={The Hitchhiker's Guide to the Galaxy},
  author={Adams, Douglas},
  isbn={9781417642595},
  url={http://books.google.com/books?id=W-xMPgAACAAJ},
  year={1995},
  publisher={San Val}
}

@@ -0,0 +1,10 @@
\documentclass{article}

\begin{document}

The meaning of life, the universe and everything is 42 \cite{DouglasAdams}

\bibliographystyle{plain}
\bibliography{bibliography}

\end{document}
@ -0,0 +1,8 @@
|
|||
\begin{thebibliography}{1}
|
||||
|
||||
\bibitem{DouglasAdams}
|
||||
Douglas Adams.
|
||||
\newblock {\em The Hitchhiker's Guide to the Galaxy}.
|
||||
\newblock San Val, 1995.
|
||||
|
||||
\end{thebibliography}
|
Binary file not shown.
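A \cite plus \bibliography fixture like this needs the classic multi-pass cycle before the references resolve, which is what the committed .bbl captures. A sketch with the conventional commands; the jobname main is an assumption:

# Sketch of the manual pdflatex/bibtex cycle for this fixture.
{execSync} = require "child_process"

execSync cmd for cmd in [
  "pdflatex main.tex"
  "bibtex main"
  "pdflatex main.tex"
  "pdflatex main.tex"
]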
@@ -0,0 +1 @@
This is chapter2.tex, included from main.tex. It's not in the same directory but can still be found.
Binary file not shown.
@@ -0,0 +1,10 @@
@book{DouglasAdams,
  title={The Hitchhiker's Guide to the Galaxy},
  author={Adams, Douglas},
  isbn={9781417642595},
  url={http://books.google.com/books?id=W-xMPgAACAAJ},
  year={1995},
  publisher={San Val}
}


@@ -0,0 +1 @@
This is chapter1.tex, included from main.tex
Binary file not shown.
Size: 10 KiB
@@ -0,0 +1,19 @@
\documentclass{article}

\usepackage{graphicx}

\begin{document}

Hello world, I'm in a subdirectory \cite{DouglasAdams}

\input{chapter1.tex}
\input{chapter2.tex}

\begin{centering}
\includegraphics[width=0.5\textwidth]{image.png}
\end{centering}

\bibliographystyle{plain}
\bibliography{bibliography}

\end{document}
Binary file not shown.
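This fixture exercises resource paths with directory components (chapters, the image); the ResourceWriter tests near the end of this commit expect the parent directory to be created before each write. A sketch of that step, using the same call shapes as those tests:

# Sketch: create the parent directory, then write the resource content.
fs = require "fs"
path = require "path"
mkdirp = require "mkdirp"

writeResource = (basePath, resource, callback) ->
  filePath = path.join(basePath, resource.path)
  mkdirp path.dirname(filePath), (error) ->
    return callback(error) if error?
    fs.writeFile filePath, resource.content, callback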
@@ -0,0 +1,7 @@
\documentclass[11pt]{article}
\usepackage{fontspec}
\setmainfont[Ligatures=TeX]{Zapfino.ttf}
\begin{document}
The quick brown fox jumps over the lazy dog
\end{document}

@@ -0,0 +1,3 @@
{
  "compiler": "xelatex"
}
Binary file not shown.
BIN
services/clsi/test/acceptance/fixtures/lion.png
Normal file
Binary file not shown.
Size: 6.3 KiB
35
services/clsi/test/smoke/coffee/SmokeTests.coffee
Normal file
@@ -0,0 +1,35 @@
chai = require("chai")
chai.should()
expect = chai.expect
request = require "request"
Settings = require "../../../app/js/Settings"

buildUrl = (path) -> "http://localhost:#{Settings.listen.port}/#{path}"

describe "Running a compile", ->
  before (done) ->
    request.post {
      url: buildUrl("project/smoketest/compile")
      json:
        compile:
          resources: [
            path: "main.tex"
            content: """
              \\documentclass{article}
              \\begin{document}
              Hello world
              \\end{document}
            """
          ]
    }, (@error, @response, @body) =>
      done()

  it "should return the pdf", ->
    for file in @body.compile.outputFiles
      return if file.type == "pdf"
    throw new Error("no pdf returned")

  it "should return the log", ->
    for file in @body.compile.outputFiles
      return if file.type == "log"
    throw new Error("no log returned")
64
services/clsi/test/smoke/js/SmokeTests.js
Normal file
@@ -0,0 +1,64 @@
(function() {
  var Settings, buildUrl, chai, expect, request;

  chai = require("chai");

  chai.should();

  expect = chai.expect;

  request = require("request");

  Settings = require("../../../app/js/Settings");

  buildUrl = function(path) {
    return "http://localhost:" + Settings.listen.port + "/" + path;
  };

  describe("Running a compile", function() {
    before(function(done) {
      var _this = this;
      return request.post({
        url: buildUrl("project/smoketest/compile"),
        json: {
          compile: {
            resources: [
              {
                path: "main.tex",
                content: "\\documentclass{article}\n\\begin{document}\nHello world\n\\end{document}"
              }
            ]
          }
        }
      }, function(error, response, body) {
        _this.error = error;
        _this.response = response;
        _this.body = body;
        return done();
      });
    });
    it("should return the pdf", function() {
      var file, _i, _len, _ref;
      _ref = this.body.compile.outputFiles;
      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
        file = _ref[_i];
        if (file.type === "pdf") {
          return;
        }
      }
      throw new Error("no pdf returned");
    });
    return it("should return the log", function() {
      var file, _i, _len, _ref;
      _ref = this.body.compile.outputFiles;
      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
        file = _ref[_i];
        if (file.type === "log") {
          return;
        }
      }
      throw new Error("no log returned");
    });
  });

}).call(this);
92
services/clsi/test/unit/coffee/CompileControllerTests.coffee
Normal file
@ -0,0 +1,92 @@
|
|||
SandboxedModule = require('sandboxed-module')
|
||||
sinon = require('sinon')
|
||||
require('chai').should()
|
||||
modulePath = require('path').join __dirname, '../../../app/js/CompileController'
|
||||
tk = require("timekeeper")
|
||||
|
||||
describe "CompileController", ->
|
||||
beforeEach ->
|
||||
@CompileController = SandboxedModule.require modulePath, requires:
|
||||
"./CompileManager": @CompileManager = {}
|
||||
"./RequestParser": @RequestParser = {}
|
||||
"settings-sharelatex": @Settings =
|
||||
apis:
|
||||
clsi:
|
||||
url: "http://clsi.example.com"
|
||||
"./ProjectPersistenceManager": @ProjectPersistenceManager = {}
|
||||
"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
|
||||
@Settings.externalUrl = "http://www.example.com"
|
||||
@req = {}
|
||||
@res = {}
|
||||
|
||||
describe "compile", ->
|
||||
beforeEach ->
|
||||
@req.body = {
|
||||
compile: "mock-body"
|
||||
}
|
||||
@req.params =
|
||||
project_id: @project_id = "project-id-123"
|
||||
@request = {
|
||||
compile: "mock-parsed-request"
|
||||
}
|
||||
@request_with_project_id =
|
||||
compile: @request.compile
|
||||
project_id: @project_id
|
||||
@output_files = [{
|
||||
path: "output.pdf"
|
||||
type: "pdf"
|
||||
}, {
|
||||
path: "output.log"
|
||||
type: "log"
|
||||
}]
|
||||
@RequestParser.parse = sinon.stub().callsArgWith(1, null, @request)
|
||||
@ProjectPersistenceManager.markProjectAsJustAccessed = sinon.stub().callsArg(1)
|
||||
@res.send = sinon.stub()
|
||||
|
||||
describe "successfully", ->
|
||||
beforeEach ->
|
||||
@CompileManager.doCompile = sinon.stub().callsArgWith(1, null, @output_files)
|
||||
@CompileController.compile @req, @res
|
||||
|
||||
it "should parse the request", ->
|
||||
@RequestParser.parse
|
||||
.calledWith(@req.body)
|
||||
.should.equal true
|
||||
|
||||
it "should run the compile for the specified project", ->
|
||||
@CompileManager.doCompile
|
||||
.calledWith(@request_with_project_id)
|
||||
.should.equal true
|
||||
|
||||
it "should mark the project as accessed", ->
|
||||
@ProjectPersistenceManager.markProjectAsJustAccessed
|
||||
.calledWith(@project_id)
|
||||
.should.equal true
|
||||
|
||||
it "should return the JSON response", ->
|
||||
@res.send
|
||||
.calledWith(JSON.stringify
|
||||
compile:
|
||||
status: "success"
|
||||
error: null
|
||||
outputFiles: @output_files.map (file) =>
|
||||
url: "#{@Settings.apis.clsi.url}/project/#{@project_id}/output/#{file.path}"
|
||||
type: file.type
|
||||
)
|
||||
.should.equal true
|
||||
|
||||
describe "with an error", ->
|
||||
beforeEach ->
|
||||
@CompileManager.doCompile = sinon.stub().callsArgWith(1, new Error(@message = "error message"), null)
|
||||
@CompileController.compile @req, @res
|
||||
|
||||
it "should return the JSON response with the error", ->
|
||||
@res.send
|
||||
.calledWith(JSON.stringify
|
||||
compile:
|
||||
status: "failure"
|
||||
error: @message
|
||||
outputFiles: []
|
||||
)
|
||||
.should.equal true
|
||||
|
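The two "should return the JSON response" expectations above pin down the controller's wire format end to end. For reference, a sketch of the success body a client receives; the field names come from the test, while the host and ids are its placeholder values:

# Sketch only: shape taken from the expectations above.
response =
  compile:
    status: "success"
    error: null
    outputFiles: [
      url: "http://clsi.example.com/project/project-id-123/output/output.pdf"
      type: "pdf"
    ,
      url: "http://clsi.example.com/project/project-id-123/output/output.log"
      type: "log"
    ]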
73
services/clsi/test/unit/coffee/CompileManagerTests.coffee
Normal file
@ -0,0 +1,73 @@
|
|||
SandboxedModule = require('sandboxed-module')
|
||||
sinon = require('sinon')
|
||||
require('chai').should()
|
||||
modulePath = require('path').join __dirname, '../../../app/js/CompileManager'
|
||||
tk = require("timekeeper")
|
||||
|
||||
describe "CompileManager", ->
|
||||
beforeEach ->
|
||||
@CompileManager = SandboxedModule.require modulePath, requires:
|
||||
"./LatexRunner": @LatexRunner = {}
|
||||
"./ResourceWriter": @ResourceWriter = {}
|
||||
"./OutputFileFinder": @OutputFileFinder = {}
|
||||
"settings-sharelatex": @Settings = { path: compilesDir: "/compiles/dir" }
|
||||
"logger-sharelatex": @logger = { log: sinon.stub() }
|
||||
"rimraf": @rimraf = sinon.stub().callsArg(1)
|
||||
@callback = sinon.stub()
|
||||
|
||||
describe "doCompile", ->
|
||||
beforeEach ->
|
||||
@output_files = [{
|
||||
path: "output.log"
|
||||
type: "log"
|
||||
}, {
|
||||
path: "output.pdf"
|
||||
type: "pdf"
|
||||
}]
|
||||
@request =
|
||||
resources: @resources = "mock-resources"
|
||||
rootResourcePath: @rootResourcePath = "main.tex"
|
||||
project_id: @project_id = "project-id-123"
|
||||
compiler: @compiler = "pdflatex"
|
||||
timeout: @timeout = 42000
|
||||
@Settings.compileDir = "compiles"
|
||||
@compileDir = "#{@Settings.path.compilesDir}/#{@project_id}"
|
||||
@ResourceWriter.syncResourcesToDisk = sinon.stub().callsArg(3)
|
||||
@LatexRunner.runLatex = sinon.stub().callsArg(2)
|
||||
@OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, @output_files)
|
||||
@CompileManager.doCompile @request, @callback
|
||||
|
||||
it "should write the resources to disk", ->
|
||||
@ResourceWriter.syncResourcesToDisk
|
||||
.calledWith(@project_id, @resources, @compileDir)
|
||||
.should.equal true
|
||||
|
||||
it "should run LaTeX", ->
|
||||
@LatexRunner.runLatex
|
||||
.calledWith(@project_id, {
|
||||
directory: @compileDir
|
||||
mainFile: @rootResourcePath
|
||||
compiler: @compiler
|
||||
timeout: @timeout
|
||||
})
|
||||
.should.equal true
|
||||
|
||||
it "should find the output files", ->
|
||||
@OutputFileFinder.findOutputFiles
|
||||
.calledWith(@resources, @compileDir)
|
||||
.should.equal true
|
||||
|
||||
it "should return the output files", ->
|
||||
@callback.calledWith(null, @output_files).should.equal true
|
||||
|
||||
describe "clearProject", ->
|
||||
beforeEach ->
|
||||
@Settings.compileDir = "compiles"
|
||||
@CompileManager.clearProject @project_id, @callback
|
||||
|
||||
it "should remove the project directory", ->
|
||||
@rimraf.calledWith("#{@Settings.compileDir}/#{@project_id}")
|
||||
.should.equal true
|
||||
|
||||
it "should call the callback", ->
|
||||
@callback.called.should.equal true
|
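The three doCompile expectations above trace the compile pipeline: sync resources to disk, run LaTeX, collect output files. A rough sketch of that flow, with module names taken from the stubbed requires above; the callback signatures are inferred from the stubs, so treat them as assumptions rather than the real implementation:

# Inferred from the stubs above, not the actual module.
Settings = require "settings-sharelatex"
ResourceWriter = require "./ResourceWriter"
LatexRunner = require "./LatexRunner"
OutputFileFinder = require "./OutputFileFinder"

doCompile = (request, callback) ->
  compileDir = "#{Settings.path.compilesDir}/#{request.project_id}"
  ResourceWriter.syncResourcesToDisk request.project_id, request.resources, compileDir, (error) ->
    return callback(error) if error?
    LatexRunner.runLatex request.project_id,
      directory: compileDir
      mainFile: request.rootResourcePath
      compiler: request.compiler
      timeout: request.timeout
    , (error) ->
      return callback(error) if error?
      OutputFileFinder.findOutputFiles request.resources, compileDir, callback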
56
services/clsi/test/unit/coffee/LatexRunnerTests.coffee
Normal file
@@ -0,0 +1,56 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/LatexRunner'
Path = require "path"

describe "LatexRunner", ->
  beforeEach ->
    @LatexRunner = SandboxedModule.require modulePath, requires:
      "settings-sharelatex": @Settings =
        docker:
          socketPath: "/var/run/docker.sock"
      "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
      "./Metrics":
        Timer: class Timer
          done: () ->
      "./CommandRunner": @CommandRunner = {}

    @directory = "/local/compile/directory"
    @mainFile = "main-file.tex"
    @compiler = "pdflatex"
    @callback = sinon.stub()
    @project_id = "project-id-123"

  describe "runLatex", ->
    beforeEach ->
      @CommandRunner.run = sinon.stub().callsArg(4)

    describe "normally", ->
      beforeEach ->
        @LatexRunner.runLatex @project_id,
          directory: @directory
          mainFile: @mainFile
          compiler: @compiler
          timeout: @timeout = 42000
          @callback

      it "should run the latex command", ->
        @CommandRunner.run
          .calledWith(@project_id, sinon.match.any, @directory, @timeout)
          .should.equal true

    describe "with an .Rtex main file", ->
      beforeEach ->
        @LatexRunner.runLatex @project_id,
          directory: @directory
          mainFile: "main-file.Rtex"
          compiler: @compiler
          timeout: @timeout = 42000
          @callback

      it "should run the latex command on the equivalent .tex file", ->
        command = @CommandRunner.run.args[0][1]
        mainFile = command.slice(-1)[0]
        mainFile.should.equal "$COMPILE_DIR/main-file.tex"
41
services/clsi/test/unit/coffee/OutputFileFinderTests.coffee
Normal file
@@ -0,0 +1,41 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/OutputFileFinder'
path = require "path"
expect = require("chai").expect

describe "OutputFileFinder", ->
  beforeEach ->
    @OutputFileFinder = SandboxedModule.require modulePath, requires:
      "fs": @fs = {}
      "wrench": @wrench = {}
    @directory = "/test/dir"
    @callback = sinon.stub()

  describe "findOutputFiles", ->
    beforeEach ->
      @resource_path = "resource/path.tex"
      @output_paths = ["output.pdf", "extra", "extra/file.tex"]
      @resources = [
        path: @resource_path = "resource/path.tex"
      ]
      @OutputFileFinder._isDirectory = (dirPath, callback = (error, directory) ->) =>
        callback null, dirPath == path.join(@directory, "extra")

      @wrench.readdirRecursive = (dir, callback) =>
        callback(null, [@resource_path].concat(@output_paths))
        callback(null, null)
      sinon.spy @wrench, "readdirRecursive"
      @OutputFileFinder.findOutputFiles @resources, @directory, (error, @outputFiles) =>

    it "should only return the output files, not directories or resource paths", ->
      expect(@outputFiles).to.deep.equal [{
        path: "output.pdf"
        type: "pdf"
      }, {
        path: "extra/file.tex"
        type: "tex"
      }]
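The expected result above implies the finder skips directories and input resources and derives `type` from the file extension of whatever remains. A distilled sketch of that rule; this is not the real module, just its observable behaviour:

# Sketch of the classification implied by the expectation above.
path = require "path"

classifyOutputs = (resourcePaths, candidatePaths, isDirectory) ->
  for p in candidatePaths when p not in resourcePaths and not isDirectory(p)
    { path: p, type: path.extname(p).replace(".", "") }

console.log classifyOutputs(
  ["resource/path.tex"],
  ["resource/path.tex", "output.pdf", "extra", "extra/file.tex"],
  (p) -> p == "extra"
)
# => [ { path: 'output.pdf', type: 'pdf' }, { path: 'extra/file.tex', type: 'tex' } ]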
@@ -0,0 +1,60 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/ProjectPersistenceManager'
tk = require("timekeeper")

describe "ProjectPersistenceManager", ->
  beforeEach ->
    @ProjectPersistenceManager = SandboxedModule.require modulePath, requires:
      "./UrlCache": @UrlCache = {}
      "./CompileManager": @CompileManager = {}
      "logger-sharelatex": @logger = { log: sinon.stub() }
      "./db": @db = {}
    @callback = sinon.stub()
    @project_id = "project-id-123"

  describe "clearExpiredProjects", ->
    beforeEach ->
      @project_ids = [
        "project-id-1"
        "project-id-2"
      ]
      @ProjectPersistenceManager._findExpiredProjectIds = sinon.stub().callsArgWith(0, null, @project_ids)
      @ProjectPersistenceManager.clearProject = sinon.stub().callsArg(1)
      @ProjectPersistenceManager.clearExpiredProjects @callback

    it "should clear each expired project", ->
      for project_id in @project_ids
        @ProjectPersistenceManager.clearProject
          .calledWith(project_id)
          .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true

  describe "clearProject", ->
    beforeEach ->
      @ProjectPersistenceManager._clearProjectFromDatabase = sinon.stub().callsArg(1)
      @UrlCache.clearProject = sinon.stub().callsArg(1)
      @CompileManager.clearProject = sinon.stub().callsArg(1)
      @ProjectPersistenceManager.clearProject @project_id, @callback

    it "should clear the project from the database", ->
      @ProjectPersistenceManager._clearProjectFromDatabase
        .calledWith(@project_id)
        .should.equal true

    it "should clear all the cached Urls for the project", ->
      @UrlCache.clearProject
        .calledWith(@project_id)
        .should.equal true

    it "should clear the project compile folder", ->
      @CompileManager.clearProject
        .calledWith(@project_id)
        .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true
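clearProject fans out to the three stores exercised above: the database entry, the URL cache, and the compile directory. A sketch of that fan-out; the call order is an assumption (the tests only assert that each store is cleared), and the module references come from the stubbed requires:

# Sketch inferred from the stubs above; sequential order is assumed.
UrlCache = require "./UrlCache"
CompileManager = require "./CompileManager"

clearProject = (project_id, callback) ->
  ProjectPersistenceManager._clearProjectFromDatabase project_id, (error) ->
    return callback(error) if error?
    UrlCache.clearProject project_id, (error) ->
      return callback(error) if error?
      CompileManager.clearProject project_id, callback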
209
services/clsi/test/unit/coffee/RequestParserTests.coffee
Normal file
@@ -0,0 +1,209 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/RequestParser'
tk = require("timekeeper")

describe "RequestParser", ->
  beforeEach ->
    tk.freeze()
    @callback = sinon.stub()
    @validResource =
      path: "main.tex"
      date: "12:00 01/02/03"
      content: "Hello world"
    @validRequest =
      compile:
        token: "token-123"
        options:
          compiler: "pdflatex"
          timeout: 42
        resources: []
    @RequestParser = SandboxedModule.require modulePath

  afterEach ->
    tk.reset()

  describe "without a top level object", ->
    beforeEach ->
      @RequestParser.parse [], @callback

    it "should return an error", ->
      @callback.calledWith("top level object should have a compile attribute")
        .should.equal true

  describe "without a compile attribute", ->
    beforeEach ->
      @RequestParser.parse {}, @callback

    it "should return an error", ->
      @callback.calledWith("top level object should have a compile attribute")
        .should.equal true

  describe "without a valid compiler", ->
    beforeEach ->
      @validRequest.compile.options.compiler = "not-a-compiler"
      @RequestParser.parse @validRequest, @callback

    it "should return an error", ->
      @callback.calledWith("compiler attribute should be one of: pdflatex, latex, xelatex, lualatex")
        .should.equal true

  describe "without a compiler specified", ->
    beforeEach ->
      delete @validRequest.compile.options.compiler
      @RequestParser.parse @validRequest, (error, @data) =>

    it "should set the compiler to pdflatex by default", ->
      @data.compiler.should.equal "pdflatex"

  describe "without a timeout specified", ->
    beforeEach ->
      delete @validRequest.compile.options.timeout
      @RequestParser.parse @validRequest, (error, @data) =>

    it "should set the timeout to MAX_TIMEOUT", ->
      @data.timeout.should.equal @RequestParser.MAX_TIMEOUT * 1000

  describe "with a timeout larger than the maximum", ->
    beforeEach ->
      @validRequest.compile.options.timeout = @RequestParser.MAX_TIMEOUT + 1
      @RequestParser.parse @validRequest, (error, @data) =>

    it "should set the timeout to MAX_TIMEOUT", ->
      @data.timeout.should.equal @RequestParser.MAX_TIMEOUT * 1000

  describe "with a timeout", ->
    beforeEach ->
      @RequestParser.parse @validRequest, (error, @data) =>

    it "should set the timeout (in milliseconds)", ->
      @data.timeout.should.equal @validRequest.compile.options.timeout * 1000

  describe "with a resource without a path", ->
    beforeEach ->
      delete @validResource.path
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback

    it "should return an error", ->
      @callback.calledWith("all resources should have a path attribute")
        .should.equal true

  describe "with a resource with a path", ->
    beforeEach ->
      @validResource.path = @path = "test.tex"
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return the path in the parsed response", ->
      @data.resources[0].path.should.equal @path

  describe "with a resource with a malformed modified date", ->
    beforeEach ->
      @validResource.modified = "not-a-date"
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback

    it "should return an error", ->
      @callback
        .calledWith(
          "resource modified date could not be understood: " +
          @validResource.modified
        )
        .should.equal true

  describe "with a resource with a valid date", ->
    beforeEach ->
      @date = "12:00 01/02/03"
      @validResource.modified = @date
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return the date as a Javascript Date object", ->
      (@data.resources[0].modified instanceof Date).should.equal true
      @data.resources[0].modified.getTime().should.equal Date.parse(@date)

  describe "with a resource without either a content or URL attribute", ->
    beforeEach ->
      delete @validResource.url
      delete @validResource.content
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback

    it "should return an error", ->
      @callback.calledWith("all resources should have either a url or content attribute")
        .should.equal true

  describe "with a resource where the content is not a string", ->
    beforeEach ->
      @validResource.content = []
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse (@validRequest), @callback

    it "should return an error", ->
      @callback.calledWith("content attribute should be a string")
        .should.equal true

  describe "with a resource where the url is not a string", ->
    beforeEach ->
      @validResource.url = []
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse (@validRequest), @callback

    it "should return an error", ->
      @callback.calledWith("url attribute should be a string")
        .should.equal true

  describe "with a resource with a url", ->
    beforeEach ->
      @validResource.url = @url = "www.example.com"
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse (@validRequest), @callback
      @data = @callback.args[0][1]

    it "should return the url in the parsed response", ->
      @data.resources[0].url.should.equal @url

  describe "with a resource with a content attribute", ->
    beforeEach ->
      @validResource.content = @content = "Hello world"
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse (@validRequest), @callback
      @data = @callback.args[0][1]

    it "should return the content in the parsed response", ->
      @data.resources[0].content.should.equal @content

  describe "without a root resource path", ->
    beforeEach ->
      delete @validRequest.compile.rootResourcePath
      @RequestParser.parse (@validRequest), @callback
      @data = @callback.args[0][1]

    it "should set the root resource path to 'main.tex' by default", ->
      @data.rootResourcePath.should.equal "main.tex"

  describe "with a root resource path", ->
    beforeEach ->
      @validRequest.compile.rootResourcePath = @path = "test.tex"
      @RequestParser.parse (@validRequest), @callback
      @data = @callback.args[0][1]

    it "should return the root resource path in the parsed response", ->
      @data.rootResourcePath.should.equal @path

  describe "with a root resource path that is not a string", ->
    beforeEach ->
      @validRequest.compile.rootResourcePath = []
      @RequestParser.parse (@validRequest), @callback

    it "should return an error", ->
      @callback.calledWith("rootResourcePath attribute should be a string")
        .should.equal true
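Taken together, these cases define the full request schema the parser accepts. A sketch of a request exercising every attribute, using the same placeholder values as the tests; the noted defaults and limits are the ones asserted above:

# Every attribute the parser accepts, per the expectations above.
validRequest =
  compile:
    token: "token-123"
    options:
      compiler: "pdflatex"          # one of: pdflatex, latex, xelatex, lualatex
      timeout: 42                   # seconds; converted to ms, capped at MAX_TIMEOUT
    rootResourcePath: "main.tex"    # defaults to "main.tex" when omitted
    resources: [
      path: "main.tex"
      modified: "12:00 01/02/03"    # parsed into a JavaScript Date
      content: "Hello world"        # alternatively: url: "www.example.com"
    ]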
152
services/clsi/test/unit/coffee/ResourceWriterTests.coffee
Normal file
@@ -0,0 +1,152 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/ResourceWriter'
path = require "path"

describe "ResourceWriter", ->
  beforeEach ->
    @ResourceWriter = SandboxedModule.require modulePath, requires:
      "fs": @fs = {}
      "wrench": @wrench = {}
      "./UrlCache" : @UrlCache = {}
      "mkdirp" : @mkdirp = sinon.stub().callsArg(1)
      "./OutputFileFinder": @OutputFileFinder = {}
      "./Metrics": @Metrics =
        Timer: class Timer
          done: sinon.stub()
    @project_id = "project-id-123"
    @basePath = "/path/to/write/files/to"
    @callback = sinon.stub()

  describe "syncResourcesToDisk", ->
    beforeEach ->
      @resources = [
        "resource-1-mock"
        "resource-2-mock"
        "resource-3-mock"
      ]
      @ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3)
      @ResourceWriter._removeExtraneousFiles = sinon.stub().callsArg(2)
      @ResourceWriter.syncResourcesToDisk(@project_id, @resources, @basePath, @callback)

    it "should remove old files", ->
      @ResourceWriter._removeExtraneousFiles
        .calledWith(@resources, @basePath)
        .should.equal true

    it "should write each resource to disk", ->
      for resource in @resources
        @ResourceWriter._writeResourceToDisk
          .calledWith(@project_id, resource, @basePath)
          .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true

  describe "_removeExtraneousFiles", ->
    beforeEach ->
      @output_files = [{
        path: "output.pdf"
        type: "pdf"
      }, {
        path: "extra/file.tex"
        type: "tex"
      }, {
        path: "extra.aux"
        type: "aux"
      }]
      @resources = "mock-resources"
      @OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, @output_files)
      @ResourceWriter._deleteFileIfNotDirectory = sinon.stub().callsArg(1)
      @ResourceWriter._removeExtraneousFiles(@resources, @basePath, @callback)

    it "should find the existing output files", ->
      @OutputFileFinder.findOutputFiles
        .calledWith(@resources, @basePath)
        .should.equal true

    it "should delete the output files", ->
      @ResourceWriter._deleteFileIfNotDirectory
        .calledWith(path.join(@basePath, "output.pdf"))
        .should.equal true

    it "should delete the extra files", ->
      @ResourceWriter._deleteFileIfNotDirectory
        .calledWith(path.join(@basePath, "extra/file.tex"))
        .should.equal true

    it "should not delete the extra aux files", ->
      @ResourceWriter._deleteFileIfNotDirectory
        .calledWith(path.join(@basePath, "extra.aux"))
        .should.equal false

    it "should call the callback", ->
      @callback.called.should.equal true

    it "should time the request", ->
      @Metrics.Timer::done.called.should.equal true

  describe "_writeResourceToDisk", ->
    describe "with a url based resource", ->
      beforeEach ->
        @resource =
          path: "main.tex"
          url: "http://www.example.com/main.tex"
          modified: Date.now()
        @UrlCache.downloadUrlToFile = sinon.stub().callsArg(4)
        @ResourceWriter._writeResourceToDisk(@project_id, @resource, @basePath, @callback)

      it "should ensure the directory exists", ->
        @mkdirp
          .calledWith(path.dirname(path.join(@basePath, @resource.path)))
          .should.equal true

      it "should write the URL from the cache", ->
        @UrlCache.downloadUrlToFile
          .calledWith(@project_id, @resource.url, path.join(@basePath, @resource.path), @resource.modified)
          .should.equal true

      it "should call the callback", ->
        @callback.called.should.equal true

    describe "with a content based resource", ->
      beforeEach ->
        @resource =
          path: "main.tex"
          content: "Hello world"
        @fs.writeFile = sinon.stub().callsArg(2)
        @ResourceWriter._writeResourceToDisk(@project_id, @resource, @basePath, @callback)

      it "should ensure the directory exists", ->
        @mkdirp
          .calledWith(path.dirname(path.join(@basePath, @resource.path)))
          .should.equal true

      it "should write the contents to disk", ->
        @fs.writeFile
          .calledWith(path.join(@basePath, @resource.path), @resource.content)
          .should.equal true

      it "should call the callback", ->
        @callback.called.should.equal true

    describe "with a file path that breaks out of the root folder", ->
      beforeEach ->
        @resource =
          path: "../../main.tex"
          content: "Hello world"
        @fs.writeFile = sinon.stub().callsArg(2)
        @ResourceWriter._writeResourceToDisk(@project_id, @resource, @basePath, @callback)

      it "should not write to disk", ->
        @fs.writeFile.called.should.equal false

      it "should return an error", ->
        @callback
          .calledWith(new Error("resource path is outside root directory"))
          .should.equal true
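The last case above is a path-traversal guard: a resource path like `../../main.tex` must never be written. A sketch of the check it implies; the real module's check may differ in detail, but it must reject any path that resolves outside the base directory:

# Sketch of the guard implied by the last test above.
path = require "path"

isInsideRoot = (basePath, resourcePath) ->
  resolved = path.resolve(basePath, resourcePath)
  resolved.indexOf(basePath) == 0

console.log isInsideRoot("/path/to/write/files/to", "main.tex")        # true
console.log isInsideRoot("/path/to/write/files/to", "../../main.tex")  # false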
200
services/clsi/test/unit/coffee/UrlCacheTests.coffee
Normal file
@@ -0,0 +1,200 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/UrlCache'
EventEmitter = require("events").EventEmitter

describe "UrlCache", ->
  beforeEach ->
    @callback = sinon.stub()
    @url = "www.example.com/file"
    @project_id = "project-id-123"
    @UrlCache = SandboxedModule.require modulePath, requires:
      "./db" : {}
      "./UrlFetcher" : @UrlFetcher = {}
      "logger-sharelatex": @logger = {log: sinon.stub()}
      "settings-sharelatex": @Settings = { path: clsiCacheDir: "/cache/dir" }
      "fs": @fs = {}

  describe "_doesUrlNeedDownloading", ->
    beforeEach ->
      @lastModified = new Date()
      @lastModifiedRoundedToSeconds = new Date(Math.floor(@lastModified.getTime() / 1000) * 1000)

    describe "when URL does not exist in cache", ->
      beforeEach ->
        @UrlCache._findUrlDetails = sinon.stub().callsArgWith(2, null, null)
        @UrlCache._doesUrlNeedDownloading(@project_id, @url, @lastModified, @callback)

      it "should return the callback with true", ->
        @callback.calledWith(null, true).should.equal true

    describe "when URL does exist in cache", ->
      beforeEach ->
        @urlDetails = {}
        @UrlCache._findUrlDetails = sinon.stub().callsArgWith(2, null, @urlDetails)

      describe "when the modified date is more recent than the cached modified date", ->
        beforeEach ->
          @urlDetails.lastModified = new Date(@lastModified.getTime() - 1000)
          @UrlCache._doesUrlNeedDownloading(@project_id, @url, @lastModified, @callback)

        it "should get the url details", ->
          @UrlCache._findUrlDetails
            .calledWith(@project_id, @url)
            .should.equal true

        it "should return the callback with true", ->
          @callback.calledWith(null, true).should.equal true

      describe "when the cached modified date is more recent than the modified date", ->
        beforeEach ->
          @urlDetails.lastModified = new Date(@lastModified.getTime() + 1000)
          @UrlCache._doesUrlNeedDownloading(@project_id, @url, @lastModified, @callback)

        it "should return the callback with false", ->
          @callback.calledWith(null, false).should.equal true

      describe "when the cached modified date is equal to the modified date", ->
        beforeEach ->
          @urlDetails.lastModified = @lastModified
          @UrlCache._doesUrlNeedDownloading(@project_id, @url, @lastModified, @callback)

        it "should return the callback with false", ->
          @callback.calledWith(null, false).should.equal true

      describe "when the provided modified date does not exist", ->
        beforeEach ->
          @lastModified = null
          @UrlCache._doesUrlNeedDownloading(@project_id, @url, @lastModified, @callback)

        it "should return the callback with true", ->
          @callback.calledWith(null, true).should.equal true

      describe "when the URL does not have a modified date", ->
        beforeEach ->
          @urlDetails.lastModified = null
          @UrlCache._doesUrlNeedDownloading(@project_id, @url, @lastModified, @callback)

        it "should return the callback with true", ->
          @callback.calledWith(null, true).should.equal true

  describe "_ensureUrlIsInCache", ->
    beforeEach ->
      @UrlFetcher.pipeUrlToFile = sinon.stub().callsArg(2)
      @UrlCache._updateOrCreateUrlDetails = sinon.stub().callsArg(3)

    describe "when the URL needs updating", ->
      beforeEach ->
        @UrlCache._doesUrlNeedDownloading = sinon.stub().callsArgWith(3, null, true)
        @UrlCache._ensureUrlIsInCache(@project_id, @url, @lastModified, @callback)

      it "should check that the url needs downloading", ->
        @UrlCache._doesUrlNeedDownloading
          .calledWith(@project_id, @url, @lastModifiedRoundedToSeconds)
          .should.equal true

      it "should download the URL to the cache file", ->
        @UrlFetcher.pipeUrlToFile
          .calledWith(@url, @UrlCache._cacheFilePathForUrl(@project_id, @url))
          .should.equal true

      it "should update the database entry", ->
        @UrlCache._updateOrCreateUrlDetails
          .calledWith(@project_id, @url, @lastModifiedRoundedToSeconds)
          .should.equal true

      it "should return the callback with the cache file path", ->
        @callback
          .calledWith(null, @UrlCache._cacheFilePathForUrl(@project_id, @url))
          .should.equal true

    describe "when the URL does not need updating", ->
      beforeEach ->
        @UrlCache._doesUrlNeedDownloading = sinon.stub().callsArgWith(3, null, false)
        @UrlCache._ensureUrlIsInCache(@project_id, @url, @lastModified, @callback)

      it "should not download the URL to the cache file", ->
        @UrlFetcher.pipeUrlToFile
          .called.should.equal false

      it "should return the callback with the cache file path", ->
        @callback
          .calledWith(null, @UrlCache._cacheFilePathForUrl(@project_id, @url))
          .should.equal true

  describe "downloadUrlToFile", ->
    beforeEach ->
      @cachePath = "path/to/cached/url"
      @destPath = "path/to/destination"
      @UrlCache._copyFile = sinon.stub().callsArg(2)
      @UrlCache._ensureUrlIsInCache = sinon.stub().callsArgWith(3, null, @cachePath)
      @UrlCache.downloadUrlToFile(@project_id, @url, @destPath, @lastModified, @callback)

    it "should ensure the URL is downloaded and updated in the cache", ->
      @UrlCache._ensureUrlIsInCache
        .calledWith(@project_id, @url, @lastModified)
        .should.equal true

    it "should copy the file to the new location", ->
      @UrlCache._copyFile
        .calledWith(@cachePath, @destPath)
        .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true

  describe "_deleteUrlCacheFromDisk", ->
    beforeEach ->
      @fs.unlink = sinon.stub().callsArg(1)
      @UrlCache._deleteUrlCacheFromDisk(@project_id, @url, @callback)

    it "should delete the cache file", ->
      @fs.unlink
        .calledWith(@UrlCache._cacheFilePathForUrl(@project_id, @url))
        .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true

  describe "_clearUrlFromCache", ->
    beforeEach ->
      @UrlCache._deleteUrlCacheFromDisk = sinon.stub().callsArg(2)
      @UrlCache._clearUrlDetails = sinon.stub().callsArg(2)
      @UrlCache._clearUrlFromCache @project_id, @url, @callback

    it "should delete the file on disk", ->
      @UrlCache._deleteUrlCacheFromDisk
        .calledWith(@project_id, @url)
        .should.equal true

    it "should clear the entry in the database", ->
      @UrlCache._clearUrlDetails
        .calledWith(@project_id, @url)
        .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true

  describe "clearProject", ->
    beforeEach ->
      @urls = [
        "www.example.com/file1"
        "www.example.com/file2"
      ]
      @UrlCache._findAllUrlsInProject = sinon.stub().callsArgWith(1, null, @urls)
      @UrlCache._clearUrlFromCache = sinon.stub().callsArg(2)
      @UrlCache.clearProject @project_id, @callback

    it "should clear the cache for each url in the project", ->
      for url in @urls
        @UrlCache._clearUrlFromCache
          .calledWith(@project_id, url)
          .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true
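The six _doesUrlNeedDownloading cases above reduce to a single freshness rule: download when either side lacks a modified date or when the cached copy is strictly older than the provided date. A distilled sketch of that rule, not the real module:

# Freshness rule distilled from the cases above.
doesUrlNeedDownloading = (urlDetails, lastModified) ->
  return true unless urlDetails? and urlDetails.lastModified? and lastModified?
  urlDetails.lastModified.getTime() < lastModified.getTime()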
74
services/clsi/test/unit/coffee/UrlFetcherTests.coffee
Normal file
@@ -0,0 +1,74 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/UrlFetcher'
EventEmitter = require("events").EventEmitter

describe "UrlFetcher", ->
  beforeEach ->
    @callback = sinon.stub()
    @url = "www.example.com/file"
    @UrlFetcher = SandboxedModule.require modulePath, requires:
      request: defaults: @defaults = sinon.stub().returns(@request = {})
      fs: @fs = {}

  it "should turn off the cookie jar in request", ->
    @defaults.calledWith(jar: false)
      .should.equal true

  describe "_pipeUrlToFile", ->
    beforeEach ->
      @path = "/path/to/file/on/disk"
      @request.get = sinon.stub().returns(@urlStream = new EventEmitter)
      @urlStream.pipe = sinon.stub()
      @fs.createWriteStream = sinon.stub().returns(@fileStream = "write-stream-stub")
      @UrlFetcher.pipeUrlToFile(@url, @path, @callback)

    it "should request the URL", ->
      @request.get
        .calledWith(@url)
        .should.equal true

    it "should open the file for writing", ->
      @fs.createWriteStream
        .calledWith(@path)
        .should.equal true

    describe "successfully", ->
      beforeEach ->
        @res = statusCode: 200
        @urlStream.emit "response", @res
        @urlStream.emit "end"

      it "should pipe the URL to the file", ->
        @urlStream.pipe
          .calledWith(@fileStream)
          .should.equal true

      it "should call the callback", ->
        @callback.called.should.equal true

    describe "with non success status code", ->
      beforeEach ->
        @res = statusCode: 404
        @urlStream.emit "response", @res
        @urlStream.emit "end"

      it "should call the callback with an error", ->
        @callback
          .calledWith(new Error("URL returned non-success status code: 404"))
          .should.equal true

    describe "with error", ->
      beforeEach ->
        @urlStream.emit "error", @error = new Error("something went wrong")

      it "should call the callback with the error", ->
        @callback
          .calledWith(@error)
          .should.equal true

      it "should only call the callback once, even if end is called", ->
        @urlStream.emit "end"
        @callback.calledOnce.should.equal true
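The final expectation pins down a once-only callback contract: an "error" followed by "end" must not fire the callback twice. A sketch of the guard this implies; the assumption is that the real module uses an equivalent flag:

# Wrap a callback so repeated stream events cannot fire it twice.
callbackOnce = (callback) ->
  called = false
  (args...) ->
    return if called
    called = true
    callback args...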