Merge pull request #569 from sharelatex/bg-compile-from-redis

compile from redis

commit f9d1650c6a
13 changed files with 535 additions and 69 deletions
@@ -3,17 +3,31 @@ async = require "async"
Settings = require "settings-sharelatex"
request = require('request')
Project = require("../../models/Project").Project
ProjectGetter = require("../Project/ProjectGetter")
ProjectEntityHandler = require("../Project/ProjectEntityHandler")
logger = require "logger-sharelatex"
Url = require("url")
ClsiCookieManager = require("./ClsiCookieManager")
ClsiStateManager = require("./ClsiStateManager")
_ = require("underscore")
async = require("async")
ClsiFormatChecker = require("./ClsiFormatChecker")
DocumentUpdaterHandler = require "../DocumentUpdater/DocumentUpdaterHandler"
Metrics = require('metrics-sharelatex')

module.exports = ClsiManager =

	sendRequest: (project_id, user_id, options = {}, callback = (error, status, outputFiles, clsiServerId, validationProblems) ->) ->
	sendRequest: (project_id, user_id, options = {}, callback) ->
		ClsiManager.sendRequestOnce project_id, user_id, options, (error, status, result...) ->
			return callback(error) if error?
			if status is 'conflict'
				options = _.clone(options)
				options.syncType = "full" # force full compile
				ClsiManager.sendRequestOnce project_id, user_id, options, callback # try again
			else
				callback(error, status, result...)

	sendRequestOnce: (project_id, user_id, options = {}, callback = (error, status, outputFiles, clsiServerId, validationProblems) ->) ->
		ClsiManager._buildRequest project_id, options, (error, req) ->
			return callback(error) if error?
			logger.log project_id: project_id, "sending compile to CLSI"
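The wrapper above retries exactly once: when the CLSI reports a sync conflict, the options are cloned and resent with syncType "full". A minimal, self-contained sketch of that pattern follows (illustrative only, not part of the commit; compileOnce is a hypothetical stand-in for sendRequestOnce, and underscore is assumed to be installed as it is in this codebase):

	# Illustrative sketch, not part of the commit: the retry-on-conflict pattern used by
	# sendRequest above, with a hypothetical compileOnce in place of sendRequestOnce.
	_ = require "underscore"

	compileOnce = (options, callback) ->
		# pretend the CLSI rejects incremental syncs until a full sync is requested
		if options.syncType is "full" then callback(null, "success") else callback(null, "conflict")

	compile = (options, callback) ->
		compileOnce options, (error, status) ->
			return callback(error) if error?
			if status is "conflict"
				options = _.clone(options)
				options.syncType = "full" # force full compile
				compileOnce options, callback # try again
			else
				callback(null, status)

	compile {}, (error, status) -> console.log status # prints "success"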
@@ -28,7 +42,7 @@ module.exports = ClsiManager =
				if error?
					logger.err err:error, project_id:project_id, "error sending request to clsi"
					return callback(error)
				logger.log project_id: project_id, outputFilesLength: response?.outputFiles?.length, status: response?.status, "received compile response from CLSI"
				logger.log project_id: project_id, outputFilesLength: response?.outputFiles?.length, status: response?.status, compile_status: response?.compile?.status, "received compile response from CLSI"
				ClsiCookieManager._getServerId project_id, (err, clsiServerId)->
					if err?
						logger.err err:err, project_id:project_id, "error getting server id"
@@ -85,6 +99,8 @@ module.exports = ClsiManager =
					callback null, body
				else if response.statusCode == 413
					callback null, compile:status:"project-too-large"
				else if response.statusCode == 409
					callback null, compile:status:"conflict"
				else
					error = new Error("CLSI returned non-success code: #{response.statusCode}")
					logger.error err: error, project_id: project_id, "CLSI returned failure code"
@@ -101,19 +117,77 @@ module.exports = ClsiManager =
		return outputFiles

	VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"]

	_buildRequest: (project_id, options={}, callback = (error, request) ->) ->
		Project.findById project_id, {compiler: 1, rootDoc_id: 1, imageName: 1}, (error, project) ->
		ProjectGetter.getProject project_id, {compiler: 1, rootDoc_id: 1, imageName: 1, rootFolder:1}, (error, project) ->
			return callback(error) if error?
			return callback(new Errors.NotFoundError("project does not exist: #{project_id}")) if !project?

			if project.compiler not in ClsiManager.VALID_COMPILERS
				project.compiler = "pdflatex"

			if options.incrementalCompilesEnabled or options.syncType? # new way, either incremental or full
				timer = new Metrics.Timer("editor.compile-getdocs-redis")
				ClsiManager.getContentFromDocUpdaterIfMatch project_id, project, (error, projectStateHash, docUpdaterDocs) ->
					timer.done()
					return callback(error) if error?
					logger.log project_id: project_id, projectStateHash: projectStateHash, docs: docUpdaterDocs?, "checked project state"
					# see if we can send an incremental update to the CLSI
					if docUpdaterDocs? and options.syncType isnt "full"
						# Workaround: for now, always flush project to mongo on compile
						# until we have automatic periodic flushing on the docupdater
						# side, to prevent documents staying in redis too long.
						DocumentUpdaterHandler.flushProjectToMongo project_id, (error) ->
							return callback(error) if error?
							Metrics.inc "compile-from-redis"
							ClsiManager._buildRequestFromDocupdater project_id, options, project, projectStateHash, docUpdaterDocs, callback
					else
						Metrics.inc "compile-from-mongo"
						ClsiManager._buildRequestFromMongo project_id, options, project, projectStateHash, callback
			else # old way, always from mongo
				timer = new Metrics.Timer("editor.compile-getdocs-mongo")
				ClsiManager._getContentFromMongo project_id, (error, docs, files) ->
					timer.done()
					return callback(error) if error?
					ClsiManager._finaliseRequest project_id, options, project, docs, files, callback

	getContentFromDocUpdaterIfMatch: (project_id, project, callback = (error, projectStateHash, docs) ->) ->
		ClsiStateManager.computeHash project, (error, projectStateHash) ->
			return callback(error) if error?
			DocumentUpdaterHandler.getProjectDocsIfMatch project_id, projectStateHash, (error, docs) ->
				return callback(error) if error?
				callback(null, projectStateHash, docs)

	_buildRequestFromDocupdater: (project_id, options, project, projectStateHash, docUpdaterDocs, callback = (error, request) ->) ->
		ProjectEntityHandler.getAllDocPathsFromProject project, (error, docPath) ->
			return callback(error) if error?
			docs = {}
			for doc in docUpdaterDocs or []
				path = docPath[doc._id]
				docs[path] = doc
			# send new docs but not files as those are already on the clsi
			options = _.clone(options)
			options.syncType = "incremental"
			options.syncState = projectStateHash
			ClsiManager._finaliseRequest project_id, options, project, docs, [], callback

	_buildRequestFromMongo: (project_id, options, project, projectStateHash, callback = (error, request) ->) ->
		ClsiManager._getContentFromMongo project_id, (error, docs, files) ->
			return callback(error) if error?
			options = _.clone(options)
			options.syncType = "full"
			options.syncState = projectStateHash
			ClsiManager._finaliseRequest project_id, options, project, docs, files, callback

	_getContentFromMongo: (project_id, callback = (error, docs, files) ->) ->
		DocumentUpdaterHandler.flushProjectToMongo project_id, (error) ->
			return callback(error) if error?
			ProjectEntityHandler.getAllDocs project_id, (error, docs = {}) ->
				return callback(error) if error?
				ProjectEntityHandler.getAllFiles project_id, (error, files = {}) ->
					return callback(error) if error?
					callback(null, docs, files)

	_finaliseRequest: (project_id, options, project, docs, files, callback = (error, params) -> ) ->
		resources = []
		rootResourcePath = null
		rootResourcePathOverride = null
@@ -148,6 +222,8 @@ module.exports = ClsiManager =
					imageName: project.imageName
					draft: !!options.draft
					check: options.check
					syncType: options.syncType
					syncState: options.syncState
				rootResourcePath: rootResourcePath
				resources: resources
			}
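For reference, _finaliseRequest assembles a request of roughly the following shape for the CLSI. This is a sketch based on the fields visible above and in the unit tests further down; the concrete values are illustrative, not fixed:

	# Illustrative example only: the object ClsiManager hands to the CLSI after this change.
	exampleRequest =
		compile:
			options:
				compiler: "pdflatex"            # one of VALID_COMPILERS
				timeout: 100                    # from the compile options
				imageName: undefined            # project.imageName, if set
				draft: false
				check: undefined
				syncType: "incremental"         # "incremental", "full", or undefined (legacy path)
				syncState: "01234567890abcdef"  # the ClsiStateManager hash
			rootResourcePath: "main.tex"
			resources: [
				{ path: "main.tex", content: "\\documentclass{article}..." }
			]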
@@ -0,0 +1,31 @@
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
crypto = require "crypto"
ProjectEntityHandler = require "../Project/ProjectEntityHandler"

# The "state" of a project is a hash of the relevant attributes in the
# project object in this case we only need the rootFolder.
#
# The idea is that it will change if any doc or file is
# created/renamed/deleted, and also if the content of any file (not
# doc) changes.
#
# When the hash changes the full set of files on the CLSI will need to
# be updated. If it doesn't change then we can overwrite changed docs
# in place on the clsi, getting them from the docupdater.
#
# The docupdater is responsible for setting the key in redis, and
# unsetting it if it removes any documents from the doc updater.

buildState = (s) ->
	return crypto.createHash('sha1').update(s, 'utf8').digest('hex')

module.exports = ClsiStateManager =

	computeHash: (project, callback = (err, hash) ->) ->
		ProjectEntityHandler.getAllEntitiesFromProject project, (err, docs, files) ->
			fileList = ("#{f.file._id}:#{f.file.rev}:#{f.file.created}:#{f.path}" for f in files or [])
			docList = ("#{d.doc._id}:#{d.path}" for d in docs or [])
			sortedEntityList = [docList..., fileList...].sort()
			hash = buildState(sortedEntityList.join("\n"))
			callback(null, hash)
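The state string computeHash builds can be reproduced by hand. A small, runnable sketch using the same entity formats as above (the ids, revs and paths are example data, not values from any real project):

	# Illustrative sketch, not part of the commit: recomputing a project state hash by hand.
	crypto = require "crypto"

	docList  = [ "doc-id-1:/main.tex", "doc-id-2:/folder/sub.tex" ]   # "#{doc._id}:#{path}"
	fileList = [ "file-id-1:123:aaaaaa:/figure.pdf" ]                 # "#{file._id}:#{rev}:#{created}:#{path}"

	state = [docList..., fileList...].sort().join("\n")
	console.log crypto.createHash('sha1').update(state, 'utf8').digest('hex')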
@@ -30,6 +30,8 @@ module.exports = CompileController =
			options.draft = req.body.draft
		if req.body?.check in ['validate', 'error', 'silent']
			options.check = req.body.check
		if req.body?.incrementalCompilesEnabled
			options.incrementalCompilesEnabled = true
		logger.log {options:options, project_id:project_id, user_id:user_id}, "got compile request"
		CompileManager.compile project_id, user_id, options, (error, status, outputFiles, clsiServerId, limits, validationProblems) ->
			return next(error) if error?
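The new incrementalCompilesEnabled flag arrives in the request body posted by the editor. The fields the controller reads correspond roughly to the payload sketched below (values are illustrative; the client-side change further down only sets incrementalCompilesEnabled for beta-program users):

	# Illustrative example only: shape of the POST body CompileController.compile reads.
	examplePayload =
		rootDoc_id: null                    # optional root doc override
		draft: false
		check: "silent"                     # one of 'validate', 'error', 'silent'
		incrementalCompilesEnabled: true    # sent by the editor only for beta users
		_csrf: "csrf-token-value"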
@@ -1,7 +1,6 @@
Settings = require('settings-sharelatex')
RedisWrapper = require("../../infrastructure/RedisWrapper")
rclient = RedisWrapper.client("clsi_recently_compiled")
DocumentUpdaterHandler = require "../DocumentUpdater/DocumentUpdaterHandler"
Project = require("../../models/Project").Project
ProjectRootDocManager = require "../Project/ProjectRootDocManager"
UserGetter = require "../User/UserGetter"
@@ -30,8 +29,6 @@ module.exports = CompileManager =
			return callback null, "too-recently-compiled", []

		CompileManager._ensureRootDocumentIsSet project_id, (error) ->
			return callback(error) if error?
			DocumentUpdaterHandler.flushProjectToMongo project_id, (error) ->
				return callback(error) if error?
			CompileManager.getProjectCompileLimits project_id, (error, limits) ->
				return callback(error) if error?
@@ -123,6 +123,36 @@ module.exports = DocumentUpdaterHandler =
				logger.error project_id:project_id, doc_id:doc_id, url: url, "doc updater returned a non-success status code: #{res.statusCode}"
				callback new Error("doc updater returned a non-success status code: #{res.statusCode}")

	getProjectDocsIfMatch: (project_id, projectStateHash, callback = (error, docs) ->) ->
		# If the project state hasn't changed, we can get all the latest
		# docs from redis via the docupdater. Otherwise we will need to
		# fall back to getting them from mongo.
		timer = new metrics.Timer("get-project-docs")
		url = "#{settings.apis.documentupdater.url}/project/#{project_id}/doc?state=#{projectStateHash}"
		logger.log project_id:project_id, "getting project docs from document updater"
		request.get url, (error, res, body)->
			timer.done()
			if error?
				logger.error err:error, url:url, project_id:project_id, "error getting project docs from doc updater"
				return callback(error)
			if res.statusCode is 409 # HTTP response code "409 Conflict"
				# Docupdater has checked the projectStateHash and found that
				# it has changed. This means that the docs currently in redis
				# aren't the only change to the project and the full set of
				# docs/files should be retreived from docstore/filestore
				# instead.
				return callback()
			else if res.statusCode >= 200 and res.statusCode < 300
				logger.log project_id:project_id, "got project docs from document document updater"
				try
					docs = JSON.parse(body)
				catch error
					return callback(error)
				callback null, docs
			else
				logger.error project_id:project_id, url: url, "doc updater returned a non-success status code: #{res.statusCode}"
				callback new Error("doc updater returned a non-success status code: #{res.statusCode}")

	acceptChanges: (project_id, doc_id, change_ids = [], callback = (error) ->) ->
		timer = new metrics.Timer("accept-changes")
		reqSettings =
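ClsiManager.getContentFromDocUpdaterIfMatch above consumes this contract: docs on a state match, undefined after a 409 conflict. A hedged sketch of how a caller can distinguish the two success outcomes (the function and variable names here are illustrative, not part of the codebase):

	# Illustrative sketch, not part of the commit: consuming getProjectDocsIfMatch's
	# two success outcomes (docs on a state match, undefined after a 409 conflict).
	fetchDocsForCompile = (handler, project_id, projectStateHash, callback) ->
		handler.getProjectDocsIfMatch project_id, projectStateHash, (error, docs) ->
			return callback(error) if error?
			if docs?
				callback null, "redis", docs   # state matched: live docs straight from redis
			else
				callback null, "mongo"         # state changed: fall back to docstore/filestore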
@@ -21,18 +21,10 @@ CooldownManager = require '../Cooldown/CooldownManager'
module.exports = ProjectEntityHandler =
	getAllFolders: (project_id, callback) ->
		logger.log project_id:project_id, "getting all folders for project"
		folders = {}
		processFolder = (basePath, folder) ->
			folders[basePath] = folder
			for childFolder in (folder.folders or [])
				if childFolder.name?
					processFolder path.join(basePath, childFolder.name), childFolder

		ProjectGetter.getProjectWithoutDocLines project_id, (err, project) ->
			return callback(err) if err?
			return callback("no project") if !project?
			processFolder "/", project.rootFolder[0]
			callback null, folders
			ProjectEntityHandler.getAllFoldersFromProject project, callback

	getAllDocs: (project_id, callback) ->
		logger.log project_id:project_id, "getting all docs for project"
@@ -74,6 +66,43 @@ module.exports = ProjectEntityHandler =
					files[path.join(folderPath, file.name)] = file
			callback null, files

	getAllFoldersFromProject: (project, callback) ->
		folders = {}
		processFolder = (basePath, folder) ->
			folders[basePath] = folder
			for childFolder in (folder.folders or [])
				if childFolder.name?
					processFolder path.join(basePath, childFolder.name), childFolder

		processFolder "/", project.rootFolder[0]
		callback null, folders

	getAllEntitiesFromProject: (project, callback) ->
		logger.log project:project, "getting all files for project"
		@getAllFoldersFromProject project, (err, folders = {}) ->
			return callback(err) if err?
			docs = []
			files = []
			for folderPath, folder of folders
				for doc in (folder.docs or [])
					if doc?
						docs.push({path: path.join(folderPath, doc.name), doc:doc})
				for file in (folder.fileRefs or [])
					if file?
						files.push({path: path.join(folderPath, file.name), file:file})
			callback null, docs, files

	getAllDocPathsFromProject: (project, callback) ->
		logger.log project:project, "getting all docs for project"
		@getAllFoldersFromProject project, (err, folders = {}) ->
			return callback(err) if err?
			docPath = {}
			for folderPath, folder of folders
				for doc in (folder.docs or [])
					docPath[doc._id] = path.join(folderPath, doc.name)
			logger.log count:_.keys(docPath).length, project_id:project._id, "returning docPaths for project"
			callback null, docPath

	flushProjectToThirdPartyDataStore: (project_id, callback) ->
		self = @
		logger.log project_id:project_id, "flushing project to tpds"
@@ -18,7 +18,7 @@ block content
					| #{translate("beta_program_badge_description")}
					span.beta-feature-badge
				p.text-centered
					strong We're not currently testing anything in beta, but keep checking back!
					strong We're currently testing lower latency compilation features in beta.
				.row.text-centered
					.col-md-12
						if user.betaProgram
@@ -105,6 +105,7 @@ define [
				rootDoc_id: options.rootDocOverride_id or null
				draft: $scope.draft
				check: checkType
				incrementalCompilesEnabled: window.user?.betaProgram
				_csrf: window.csrfToken
			}, {params: params}
@@ -12,6 +12,8 @@ describe "ClsiManager", ->
			getCookieJar: sinon.stub().callsArgWith(1, null, @jar)
			setServerId: sinon.stub().callsArgWith(2)
			_getServerId:sinon.stub()
		@ClsiStateManager =
			computeHash: sinon.stub().callsArgWith(1, null, "01234567890abcdef")
		@ClsiFormatChecker =
			checkRecoursesForProblems:sinon.stub().callsArgWith(1)
		@ClsiManager = SandboxedModule.require modulePath, requires:
@@ -26,10 +28,18 @@ describe "ClsiManager", ->
					url: "https://clsipremium.example.com"
			"../../models/Project": Project: @Project = {}
			"../Project/ProjectEntityHandler": @ProjectEntityHandler = {}
			"../Project/ProjectGetter": @ProjectGetter = {}
			"../DocumentUpdater/DocumentUpdaterHandler": @DocumentUpdaterHandler =
				getProjectDocsIfMatch: sinon.stub().callsArgWith(2,null,null)
			"./ClsiCookieManager": @ClsiCookieManager
			"./ClsiStateManager": @ClsiStateManager
			"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), warn: sinon.stub() }
			"request": @request = sinon.stub()
			"./ClsiFormatChecker": @ClsiFormatChecker
			"metrics-sharelatex": @Metrics =
				Timer: class Timer
					done: sinon.stub()
				inc: sinon.stub()
		@project_id = "project-id"
		@user_id = "user-id"
		@callback = sinon.stub()
@@ -93,6 +103,25 @@ describe "ClsiManager", ->
			it "should call the callback with a failure statue", ->
				@callback.calledWith(null, @status).should.equal true

		describe "with a sync conflict", ->
			beforeEach ->
				@ClsiManager.sendRequestOnce = sinon.stub()
				@ClsiManager.sendRequestOnce.withArgs(@project_id, @user_id, {syncType:"full"}).callsArgWith(3, null, @status = "success")
				@ClsiManager.sendRequestOnce.withArgs(@project_id, @user_id, {}).callsArgWith(3, null, "conflict")
				@ClsiManager.sendRequest @project_id, @user_id, {}, @callback

			it "should call the sendRequestOnce method twice", ->
				@ClsiManager.sendRequestOnce.calledTwice.should.equal true

			it "should call the sendRequestOnce method with syncType:full", ->
				@ClsiManager.sendRequestOnce.calledWith(@project_id, @user_id, {syncType:"full"}).should.equal true

			it "should call the sendRequestOnce method without syncType:full", ->
				@ClsiManager.sendRequestOnce.calledWith(@project_id, @user_id, {}).should.equal true

			it "should call the callback with a success status", ->
				@callback.calledWith(null, @status, ).should.equal true

	describe "deleteAuxFiles", ->
		beforeEach ->
			@ClsiManager._makeRequest = sinon.stub().callsArg(2)
@@ -144,6 +173,8 @@ describe "ClsiManager", ->
			@Project.findById = sinon.stub().callsArgWith(2, null, @project)
			@ProjectEntityHandler.getAllDocs = sinon.stub().callsArgWith(1, null, @docs)
			@ProjectEntityHandler.getAllFiles = sinon.stub().callsArgWith(1, null, @files)
			@ProjectGetter.getProject = sinon.stub().callsArgWith(2, null, @project)
			@DocumentUpdaterHandler.flushProjectToMongo = sinon.stub().callsArgWith(1, null)

		describe "with a valid project", ->
			beforeEach (done) ->
@@ -152,8 +183,13 @@ describe "ClsiManager", ->
					done()

			it "should get the project with the required fields", ->
				@Project.findById
					.calledWith(@project_id, {compiler:1, rootDoc_id: 1, imageName: 1})
				@ProjectGetter.getProject
					.calledWith(@project_id, {compiler:1, rootDoc_id: 1, imageName: 1, rootFolder: 1})
					.should.equal true

			it "should flush the project to the database", ->
				@DocumentUpdaterHandler.flushProjectToMongo
					.calledWith(@project_id)
					.should.equal true

			it "should get all the docs", ->
@@ -175,6 +211,8 @@ describe "ClsiManager", ->
							imageName: @image
							draft: false
							check: undefined
							syncType: undefined # "full"
							syncState: undefined # "01234567890abcdef"
						rootResourcePath: "main.tex"
						resources: [{
							path: "main.tex"
@@ -189,6 +227,51 @@ describe "ClsiManager", ->
						}]
				)

		describe "with the incremental compile option", ->
			beforeEach (done) ->
				@ClsiStateManager.computeHash = sinon.stub().callsArgWith(1, null, @project_state_hash = "01234567890abcdef")
				@DocumentUpdaterHandler.getProjectDocsIfMatch = sinon.stub().callsArgWith(2, null, [{_id:@doc_1._id, lines: @doc_1.lines, v: 123}])
				@ProjectEntityHandler.getAllDocPathsFromProject = sinon.stub().callsArgWith(1, null, {"mock-doc-id-1":"main.tex"})
				@ClsiManager._buildRequest @project_id, {timeout:100, incrementalCompilesEnabled:true}, (error, request) =>
					@request = request
					done()

			it "should get the project with the required fields", ->
				@ProjectGetter.getProject
					.calledWith(@project_id, {compiler:1, rootDoc_id: 1, imageName: 1, rootFolder: 1})
					.should.equal true

			it "should flush the project to the database", ->
				@DocumentUpdaterHandler.flushProjectToMongo
					.calledWith(@project_id)
					.should.equal true

			it "should get only the live docs from the docupdater", ->
				@DocumentUpdaterHandler.getProjectDocsIfMatch
					.calledWith(@project_id)
					.should.equal true

			it "should not get any of the files", ->
				@ProjectEntityHandler.getAllFiles
					.called.should.equal false

			it "should build up the CLSI request", ->
				expect(@request).to.deep.equal(
					compile:
						options:
							compiler: @compiler
							timeout : 100
							imageName: @image
							draft: false
							check: undefined
							syncType: "incremental"
							syncState: "01234567890abcdef"
						rootResourcePath: "main.tex"
						resources: [{
							path: "main.tex"
							content: @doc_1.lines.join("\n")
						}]
				)

		describe "when root doc override is valid", ->
			beforeEach (done) ->
@@ -0,0 +1,148 @@
sinon = require('sinon')
chai = require('chai')
should = chai.should()
expect = chai.expect
modulePath = "../../../../app/js/Features/Compile/ClsiStateManager.js"
SandboxedModule = require('sandboxed-module')

describe "ClsiStateManager", ->
	beforeEach ->
		@ClsiStateManager = SandboxedModule.require modulePath, requires:
			"settings-sharelatex": @settings = {}
			"../Project/ProjectEntityHandler": @ProjectEntityHandler = {}
			"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), warn: sinon.stub() }
		@project = "project"
		@callback = sinon.stub()

	describe "computeHash", ->
		beforeEach (done) ->
			@docs = [
				{path: "/main.tex", doc: {_id: "doc-id-1"}}
				{path: "/folder/sub.tex", doc: {_id: "doc-id-2"}}
			]
			@files = [
				{path: "/figure.pdf", file: {_id: "file-id-1", rev: 123, created: "aaaaaa"}}
				{path: "/folder/fig2.pdf", file: {_id: "file-id-2", rev: 456, created: "bbbbbb"}}
			]
			@ProjectEntityHandler.getAllEntitiesFromProject = sinon.stub().callsArgWith(1, null, @docs, @files)
			@ClsiStateManager.computeHash @project, (err, hash) =>
				@hash0 = hash
				done()

		describe "with a sample project", ->
			beforeEach ->
				@ClsiStateManager.computeHash @project, @callback

			it "should call the callback with a hash value", ->
				@callback
					.calledWith(null, "9c2c2428e4147db63cacabf6f357af483af6551d")
					.should.equal true

		describe "when the files and docs are in a different order", ->
			beforeEach ->
				[@docs[0], @docs[1]] = [@docs[1], @docs[0]]
				[@files[0], @files[1]] = [@files[1], @files[0]]
				@ClsiStateManager.computeHash @project, @callback

			it "should call the callback with the same hash value", ->
				@callback
					.calledWith(null, @hash0)
					.should.equal true

		describe "when a doc is renamed", ->
			beforeEach (done) ->
				@docs[0].path = "/new.tex"
				@ClsiStateManager.computeHash @project, (err, hash) =>
					@hash1 = hash
					done()

			it "should call the callback with a different hash value", ->
				@callback
					.neverCalledWith(null, @hash0)
					.should.equal true

		describe "when a file is renamed", ->
			beforeEach (done) ->
				@files[0].path = "/newfigure.pdf"
				@ClsiStateManager.computeHash @project, (err, hash) =>
					@hash1 = hash
					done()

			it "should call the callback with a different hash value", ->
				@callback
					.neverCalledWith(null, @hash0)
					.should.equal true

		describe "when a doc is added", ->
			beforeEach (done) ->
				@docs.push { path: "/newdoc.tex", doc: {_id: "newdoc-id"}}
				@ClsiStateManager.computeHash @project, (err, hash) =>
					@hash1 = hash
					done()

			it "should call the callback with a different hash value", ->
				@callback
					.neverCalledWith(null, @hash0)
					.should.equal true

		describe "when a file is added", ->
			beforeEach (done) ->
				@files.push { path: "/newfile.tex", file: {_id: "newfile-id", rev: 123}}
				@ClsiStateManager.computeHash @project, (err, hash) =>
					@hash1 = hash
					done()

			it "should call the callback with a different hash value", ->
				@callback
					.neverCalledWith(null, @hash0)
					.should.equal true

		describe "when a doc is removed", ->
			beforeEach (done) ->
				@docs.pop()
				@ClsiStateManager.computeHash @project, (err, hash) =>
					@hash1 = hash
					done()

			it "should call the callback with a different hash value", ->
				@callback
					.neverCalledWith(null, @hash0)
					.should.equal true

		describe "when a file is removed", ->
			beforeEach (done) ->
				@files.pop()
				@ClsiStateManager.computeHash @project, (err, hash) =>
					@hash1 = hash
					done()

			it "should call the callback with a different hash value", ->
				@callback
					.neverCalledWith(null, @hash0)
					.should.equal true

		describe "when a file's revision is updated", ->
			beforeEach (done) ->
				@files[0].file.rev++
				@ClsiStateManager.computeHash @project, (err, hash) =>
					@hash1 = hash
					done()

			it "should call the callback with a different hash value", ->
				@callback
					.neverCalledWith(null, @hash0)
					.should.equal true

		describe "when a file's date is updated", ->
			beforeEach (done) ->
				@files[0].file.created = "zzzzzz"
				@ClsiStateManager.computeHash @project, (err, hash) =>
					@hash1 = hash
					done()

			it "should call the callback with a different hash value", ->
				@callback
					.neverCalledWith(null, @hash0)
					.should.equal true
@@ -17,7 +17,6 @@ describe "CompileManager", ->
				redis: web: {host: "localhost", port: 42}
			"../../infrastructure/RedisWrapper":
				client: () => @rclient = { auth: () -> }
			"../DocumentUpdater/DocumentUpdaterHandler": @DocumentUpdaterHandler = {}
			"../Project/ProjectRootDocManager": @ProjectRootDocManager = {}
			"../../models/Project": Project: @Project = {}
			"../User/UserGetter": @UserGetter = {}
@@ -40,7 +39,6 @@ describe "CompileManager", ->
		beforeEach ->
			@CompileManager._checkIfRecentlyCompiled = sinon.stub().callsArgWith(2, null, false)
			@CompileManager._ensureRootDocumentIsSet = sinon.stub().callsArgWith(1, null)
			@DocumentUpdaterHandler.flushProjectToMongo = sinon.stub().callsArgWith(1, null)
			@CompileManager.getProjectCompileLimits = sinon.stub().callsArgWith(1, null, @limits)
			@ClsiManager.sendRequest = sinon.stub().callsArgWith(3, null, @status = "mock-status", @outputFiles = "mock output files", @output = "mock output")
@@ -54,11 +52,6 @@ describe "CompileManager", ->
				.calledWith(@project_id, @user_id)
				.should.equal true

		it "should flush the project to the database", ->
			@DocumentUpdaterHandler.flushProjectToMongo
				.calledWith(@project_id)
				.should.equal true

		it "should ensure that the root document is set", ->
			@CompileManager._ensureRootDocumentIsSet
				.calledWith(@project_id)
@@ -252,6 +252,47 @@ describe 'DocumentUpdaterHandler', ->
					.calledWith(new Error("doc updater returned failure status code: 500"))
					.should.equal true

	describe "getProjectDocsIfMatch", ->
		beforeEach ->
			@callback = sinon.stub()
			@project_state_hash = "1234567890abcdef"

		describe "successfully", ->
			beforeEach ->
				@doc0 =
					_id: @doc_id
					lines: @lines
					v: @version
				@docs = [ @doc0, @doc0, @doc0 ]
				@body = JSON.stringify @docs
				@request.get = sinon.stub().callsArgWith(1, null, {statusCode: 200}, @body)
				@handler.getProjectDocsIfMatch @project_id, @project_state_hash, @callback

			it 'should get the documenst from the document updater', ->
				url = "#{@settings.apis.documentupdater.url}/project/#{@project_id}/doc?state=#{@project_state_hash}"
				@request.get.calledWith(url).should.equal true

			it "should call the callback with the documents", ->
				@callback.calledWithExactly(null, @docs).should.equal true

		describe "when the document updater API returns an error", ->
			beforeEach ->
				@request.get = sinon.stub().callsArgWith(1, @error = new Error("something went wrong"), null, null)
				@handler.getProjectDocsIfMatch @project_id, @project_state_hash, @callback

			it "should return an error to the callback", ->
				@callback.calledWith(@error).should.equal true

		describe "when the document updater returns a conflict error code", ->
			beforeEach ->
				@request.get = sinon.stub().callsArgWith(1, null, { statusCode: 409 }, "Conflict")
				@handler.getProjectDocsIfMatch @project_id, @project_state_hash, @callback

			it "should return the callback with no documents", ->
				@callback
					.alwaysCalledWithExactly()
					.should.equal true

	describe "acceptChanges", ->
		beforeEach ->
			@change_id = "mock-change-id-1"
@@ -781,6 +781,41 @@ describe 'ProjectEntityHandler', ->
				})
				.should.equal true

	describe "getAllFoldersFromProject", ->
		beforeEach ->
			@callback = sinon.stub()
			@ProjectEntityHandler.getAllFoldersFromProject @project, @callback

		it "should call the callback with the folders", ->
			@callback
				.calledWith(null, {
					"/": @project.rootFolder[0]
					"/folder1": @folder1
				})
				.should.equal true

	describe "getAllDocPathsFromProject", ->
		beforeEach ->
			@docs = [{
				_id: @doc1._id
				lines: @lines1 = ["one"]
				rev: @rev1 = 1
			}, {
				_id: @doc2._id
				lines: @lines2 = ["two"]
				rev: @rev2 = 2
			}]
			@callback = sinon.stub()
			@ProjectEntityHandler.getAllDocPathsFromProject @project, @callback

		it "should call the callback with the path for each doc_id", ->
			@expected = {}
			@expected[@doc1._id] = "/#{@doc1.name}"
			@expected[@doc2._id] = "/folder1/#{@doc2.name}"
			@callback
				.calledWith(null, @expected)
				.should.equal true

	describe "flushProjectToThirdPartyDataStore", ->
		beforeEach (done) ->
			@project = {