Mirror of https://github.com/overleaf/overleaf.git (synced 2024-11-21 20:47:08 -05:00)
Merge branch 'master' into pr-ol-beta-theme

commit 70a56d0474
45 changed files with 5953 additions and 1804 deletions
@@ -20,6 +20,8 @@ module.exports = (grunt) ->
  grunt.loadNpmTasks 'grunt-parallel'
  grunt.loadNpmTasks 'grunt-exec'
  grunt.loadNpmTasks 'grunt-postcss'
  grunt.loadNpmTasks 'grunt-forever'
  grunt.loadNpmTasks 'grunt-shell'
  # grunt.loadNpmTasks 'grunt-contrib-imagemin'
  # grunt.loadNpmTasks 'grunt-sprity'

@@ -33,6 +35,10 @@ module.exports = (grunt) ->
    cssmin_ol:
      command: "node_modules/clean-css/bin/cleancss --s0 --source-map -o public/stylesheets/ol-style.css public/stylesheets/ol-style.css"

    forever:
      app:
        options:
          index: "app.js"

    watch:
      coffee:

@@ -257,8 +263,11 @@ module.exports = (grunt) ->
      pattern: "@@RELEASE@@"
      replacement: process.env.BUILD_NUMBER || "(unknown build)"

    shell:
      fullAcceptanceTests:
        command: "bash ./test/acceptance/scripts/full-test.sh"
      dockerTests:
        command: 'docker run -v "$(pwd):/app" --env SHARELATEX_ALLOW_PUBLIC_ACCESS=true --rm sharelatex/acceptance-test-runner'

    availabletasks:
      tasks:

@@ -409,6 +418,18 @@ module.exports = (grunt) ->
  grunt.registerTask 'test:acceptance', 'Run the acceptance tests (use --grep=<regex> or --feature=<feature> for individual tests)', ['compile:acceptance_tests', 'mochaTest:acceptance']
  grunt.registerTask 'test:smoke', 'Run the smoke tests', ['compile:smoke_tests', 'mochaTest:smoke']

  grunt.registerTask(
    'test:acceptance:full',
    "Start server and run acceptance tests",
    ['shell:fullAcceptanceTests']
  )

  grunt.registerTask(
    'test:acceptance:docker',
    "Run acceptance tests inside docker container",
    ['compile:acceptance_tests', 'shell:dockerTests']
  )

  grunt.registerTask 'test:modules:unit', 'Run the unit tests for the modules', ['compile:modules:server', 'compile:modules:unit_tests'].concat(moduleUnitTestTasks)

  grunt.registerTask 'run:watch', "Compile and run the web-sharelatex server", ['compile', 'env:run', 'parallel']
|
13
services/web/Jenkinsfile
vendored
13
services/web/Jenkinsfile
vendored
|
@@ -42,6 +42,7 @@ pipeline {
        checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'modules/templates'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/templates-webmodule.git']]])
        checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'modules/track-changes'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/track-changes-web-module.git']]])
        checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'modules/overleaf-integration'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/overleaf-integration-web-module.git']]])
        checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'modules/overleaf-account-merge'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/overleaf-account-merge.git']]])
      }
    }

@@ -59,6 +60,9 @@ pipeline {
        sh 'mv app/views/external/googlebdb0f8f7f4a17241.html public/googlebdb0f8f7f4a17241.html'
        sh 'npm install'
        sh 'npm rebuild'
        // It's too easy to end up shrinkwrapping to an outdated version of translations.
        // Ensure translations are always latest, regardless of shrinkwrap
        sh 'npm install git+https://github.com/sharelatex/translations-sharelatex.git#master'
        sh 'npm install --quiet grunt'
        sh 'npm install --quiet grunt-cli'
        sh 'ls -l node_modules/.bin'

@@ -74,6 +78,8 @@ pipeline {
      }
      steps {
        sh 'node_modules/.bin/grunt compile --verbose'
        // replace the build number placeholder for sentry
        sh 'node_modules/.bin/grunt version'
      }
    }

@@ -113,6 +119,13 @@ pipeline {
      }
    }

    stage('Acceptance Tests') {
      steps {
        sh 'docker pull sharelatex/acceptance-test-runner'
        sh 'docker run --rm -v $(pwd):/app --env SHARELATEX_ALLOW_PUBLIC_ACCESS=true sharelatex/acceptance-test-runner'
      }
    }

    stage('Package') {
      steps {
        sh 'rm -rf ./node_modules/grunt*'
@@ -157,3 +157,55 @@ module.exports = CollaboratorsHandler =
        return callback(error)
      {owner, members} = ProjectEditorHandler.buildOwnerAndMembersViews(rawMembers)
      callback(null, members)

  transferProjects: (from_user_id, to_user_id, callback=(err, projects) ->) ->
    MEMBER_KEYS = ['collaberator_refs', 'readOnly_refs']

    # Find all the projects this user is part of so we can flush them to TPDS
    query =
      $or:
        [{ owner_ref: from_user_id }]
        .concat(
          MEMBER_KEYS.map (key) ->
            q = {}
            q[key] = from_user_id
            return q
        ) # [{ collaberator_refs: from_user_id }, ...]
    Project.find query, { _id: 1 }, (error, projects = []) ->
      return callback(error) if error?

      project_ids = projects.map (p) -> p._id
      logger.log {project_ids, from_user_id, to_user_id}, "transferring projects"

      update_jobs = []
      update_jobs.push (cb) ->
        Project.update { owner_ref: from_user_id }, { $set: { owner_ref: to_user_id }}, { multi: true }, cb
      for key in MEMBER_KEYS
        do (key) ->
          update_jobs.push (cb) ->
            query = {}
            addNewUserUpdate = $addToSet: {}
            removeOldUserUpdate = $pull: {}
            query[key] = from_user_id
            removeOldUserUpdate.$pull[key] = from_user_id
            addNewUserUpdate.$addToSet[key] = to_user_id
            # Mongo won't let us pull and addToSet in the same query, so do it in
            # two. Note we need to add first, since the query is based on the old user.
            Project.update query, addNewUserUpdate, { multi: true }, (error) ->
              return cb(error) if error?
              Project.update query, removeOldUserUpdate, { multi: true }, cb

      # Flush each project to TPDS to add files to new user's Dropbox
      ProjectEntityHandler = require("../Project/ProjectEntityHandler")
      flush_jobs = []
      for project_id in project_ids
        do (project_id) ->
          flush_jobs.push (cb) ->
            ProjectEntityHandler.flushProjectToThirdPartyDataStore project_id, cb

      # Flush in background, no need to block on this
      async.series flush_jobs, (error) ->
        if error?
          logger.err {err: error, project_ids, from_user_id, to_user_id}, "error flushing tranferred projects to TPDS"

      async.series update_jobs, callback
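For reference, the $or built above expands to one clause per ownership or membership field. A standalone sketch of the construction (buildTransferQuery is a name introduced here for illustration only):

  MEMBER_KEYS = ['collaberator_refs', 'readOnly_refs']
  buildTransferQuery = (from_user_id) ->
    $or: [{ owner_ref: from_user_id }].concat(
      MEMBER_KEYS.map (key) ->
        q = {}
        q[key] = from_user_id
        q
    )
  console.log JSON.stringify(buildTransferQuery("user-a"))
  # {"$or":[{"owner_ref":"user-a"},{"collaberator_refs":"user-a"},{"readOnly_refs":"user-a"}]}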
@@ -29,7 +29,11 @@ module.exports = ClsiManager =

  sendRequestOnce: (project_id, user_id, options = {}, callback = (error, status, outputFiles, clsiServerId, validationProblems) ->) ->
    ClsiManager._buildRequest project_id, options, (error, req) ->
      return callback(error) if error?
      if error?
        if error.message is "no main file specified"
          return callback(null, "validation-problems", null, null, {mainFile:error.message})
        else
          return callback(error)
      logger.log project_id: project_id, "sending compile to CLSI"
      ClsiFormatChecker.checkRecoursesForProblems req.compile?.resources, (err, validationProblems)->
        if err?

@@ -38,17 +42,17 @@ module.exports = ClsiManager =
        if validationProblems?
          logger.log project_id:project_id, validationProblems:validationProblems, "problems with users latex before compile was attempted"
          return callback(null, "validation-problems", null, null, validationProblems)
        ClsiManager._postToClsi project_id, user_id, req, options.compileGroup, (error, response) ->
          if error?
            logger.err err:error, project_id:project_id, "error sending request to clsi"
            return callback(error)
          logger.log project_id: project_id, outputFilesLength: response?.outputFiles?.length, status: response?.status, compile_status: response?.compile?.status, "received compile response from CLSI"
          ClsiCookieManager._getServerId project_id, (err, clsiServerId)->
            if err?
              logger.err err:err, project_id:project_id, "error getting server id"
              return callback(err)
            outputFiles = ClsiManager._parseOutputFiles(project_id, response?.compile?.outputFiles)
            callback(null, response?.compile?.status, outputFiles, clsiServerId)
        ClsiManager._postToClsi project_id, user_id, req, options.compileGroup, (error, response) ->
          if error?
            logger.err err:error, project_id:project_id, "error sending request to clsi"
            return callback(error)
          logger.log project_id: project_id, outputFilesLength: response?.outputFiles?.length, status: response?.status, compile_status: response?.compile?.status, "received compile response from CLSI"
          ClsiCookieManager._getServerId project_id, (err, clsiServerId)->
            if err?
              logger.err err:err, project_id:project_id, "error getting server id"
              return callback(err)
            outputFiles = ClsiManager._parseOutputFiles(project_id, response?.compile?.outputFiles)
            callback(null, response?.compile?.status, outputFiles, clsiServerId)

  stopCompile: (project_id, user_id, options, callback = (error) ->) ->
    compilerUrl = @_getCompilerUrl(options?.compileGroup, project_id, user_id, "compile/stop")

@@ -107,6 +111,8 @@ module.exports = ClsiManager =
        callback null, compile:status:"project-too-large"
      else if response.statusCode == 409
        callback null, compile:status:"conflict"
      else if response.statusCode == 423
        callback null, compile:status:"compile-in-progress"
      else
        error = new Error("CLSI returned non-success code: #{response.statusCode}")
        logger.error err: error, project_id: project_id, "CLSI returned failure code"

@@ -144,13 +150,8 @@ module.exports = ClsiManager =
      logger.log project_id: project_id, projectStateHash: projectStateHash, docs: docUpdaterDocs?, "checked project state"
      # see if we can send an incremental update to the CLSI
      if docUpdaterDocs? and (options.syncType isnt "full") and not error?
        # Workaround: for now, always flush project to mongo on compile
        # until we have automatic periodic flushing on the docupdater
        # side, to prevent documents staying in redis too long.
        DocumentUpdaterHandler.flushProjectToMongo project_id, (error) ->
          return callback(error) if error?
          Metrics.inc "compile-from-redis"
          ClsiManager._buildRequestFromDocupdater project_id, options, project, projectStateHash, docUpdaterDocs, callback
        Metrics.inc "compile-from-redis"
        ClsiManager._buildRequestFromDocupdater project_id, options, project, projectStateHash, docUpdaterDocs, callback
      else
        Metrics.inc "compile-from-mongo"
        ClsiManager._buildRequestFromMongo project_id, options, project, projectStateHash, callback

@@ -183,7 +184,7 @@ module.exports = ClsiManager =
    # present in the docupdater. This allows finaliseRequest to
    # identify the root doc.
    possibleRootDocIds = [options.rootDoc_id, project.rootDoc_id]
    for rootDoc_id in possibleRootDocIds when rootDoc_id?
    for rootDoc_id in possibleRootDocIds when rootDoc_id? and rootDoc_id of docPath
      path = docPath[rootDoc_id]
      docs[path] ?= {_id: rootDoc_id, path: path}
    ClsiManager._finaliseRequest project_id, options, project, docs, [], callback

@@ -209,9 +210,12 @@ module.exports = ClsiManager =
    resources = []
    rootResourcePath = null
    rootResourcePathOverride = null
    hasMainFile = false
    numberOfDocsInProject = 0

    for path, doc of docs
      path = path.replace(/^\//, "") # Remove leading /
      numberOfDocsInProject++
      if doc.lines? # add doc to resources unless it is just a stub entry
        resources.push
          path: path

@@ -220,11 +224,20 @@ module.exports = ClsiManager =
        rootResourcePath = path
      if options.rootDoc_id? and doc._id.toString() == options.rootDoc_id.toString()
        rootResourcePathOverride = path
      if path is "main.tex"
        hasMainFile = true

    rootResourcePath = rootResourcePathOverride if rootResourcePathOverride?
    if !rootResourcePath?
      logger.warn {project_id}, "no root document found, setting to main.tex"
      rootResourcePath = "main.tex"
      if hasMainFile
        logger.warn {project_id}, "no root document found, setting to main.tex"
        rootResourcePath = "main.tex"
      else if numberOfDocsInProject is 1 # only one file, must be the main document
        for path, doc of docs
          rootResourcePath = path.replace(/^\//, "") # Remove leading /
        logger.warn {project_id, rootResourcePath: rootResourcePath}, "no root document found, single document in project"
      else
        return callback new Error("no main file specified")

    for path, file of files
      path = path.replace(/^\//, "") # Remove leading /
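A distilled, standalone sketch of the new root-document fallback above (the function name and inputs are illustrative, not the module's API):

  pickRootResourcePath = (docs, override, hasMainFile) ->
    return override if override?
    paths = Object.keys(docs).map (p) -> p.replace(/^\//, "")
    if hasMainFile
      "main.tex"
    else if paths.length is 1 # only one doc, must be the main document
      paths[0]
    else
      null # the real code fails the compile with "no main file specified"

  console.log pickRootResourcePath({"/main.tex": {}}, null, true)  # main.tex
  console.log pickRootResourcePath({"/ch1.tex": {}}, null, false)  # ch1.tex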
@@ -18,7 +18,7 @@ module.exports = CompileManager =
      timer.done()
      _callback(args...)

    @_checkIfAutoCompileLimitHasBeenHit options.isAutoCompile, (err, canCompile)->
    @_checkIfAutoCompileLimitHasBeenHit options.isAutoCompile, "everyone", (err, canCompile)->
      if !canCompile
        return callback null, "autocompile-backoff", []
      logger.log project_id: project_id, user_id: user_id, "compiling project"

@@ -34,12 +34,16 @@ module.exports = CompileManager =
        return callback(error) if error?
        for key, value of limits
          options[key] = value
        # only pass user_id down to clsi if this is a per-user compile
        compileAsUser = if Settings.disablePerUserCompiles then undefined else user_id
        ClsiManager.sendRequest project_id, compileAsUser, options, (error, status, outputFiles, clsiServerId, validationProblems) ->
          return callback(error) if error?
          logger.log files: outputFiles, "output files"
          callback(null, status, outputFiles, clsiServerId, limits, validationProblems)
        # Put a lower limit on autocompiles for free users, based on compileGroup
        CompileManager._checkCompileGroupAutoCompileLimit options.isAutoCompile, limits.compileGroup, (err, canCompile)->
          if !canCompile
            return callback null, "autocompile-backoff", []
          # only pass user_id down to clsi if this is a per-user compile
          compileAsUser = if Settings.disablePerUserCompiles then undefined else user_id
          ClsiManager.sendRequest project_id, compileAsUser, options, (error, status, outputFiles, clsiServerId, validationProblems) ->
            return callback(error) if error?
            logger.log files: outputFiles, "output files"
            callback(null, status, outputFiles, clsiServerId, limits, validationProblems)

  stopCompile: (project_id, user_id, callback = (error) ->) ->

@@ -72,18 +76,30 @@ module.exports = CompileManager =
    else
      return callback null, true

  _checkIfAutoCompileLimitHasBeenHit: (isAutoCompile, callback = (err, canCompile)->)->
  _checkCompileGroupAutoCompileLimit: (isAutoCompile, compileGroup, callback = (err, canCompile)->)->
    if !isAutoCompile
      return callback(null, true)
    opts =
    if compileGroup is "standard"
      # apply extra limits to the standard compile group
      CompileManager._checkIfAutoCompileLimitHasBeenHit isAutoCompile, compileGroup, callback
    else
      Metrics.inc "auto-compile-#{compileGroup}"
      return callback(null, true) # always allow priority group users to compile

  _checkIfAutoCompileLimitHasBeenHit: (isAutoCompile, compileGroup, callback = (err, canCompile)->)->
    if !isAutoCompile
      return callback(null, true)
    Metrics.inc "auto-compile-#{compileGroup}"
    opts =
      endpointName:"auto_compile"
      timeInterval:20
      subjectName:"everyone"
      throttle: 25
      subjectName:compileGroup
      throttle: Settings?.rateLimit?.autoCompile?[compileGroup] || 25
    rateLimiter.addCount opts, (err, canCompile)->
      if err?
        canCompile = false
      logger.log canCompile:canCompile, opts:opts, "checking if auto compile limit has been hit"
      if !canCompile
        Metrics.inc "auto-compile-#{compileGroup}-limited"
      callback err, canCompile

  _ensureRootDocumentIsSet: (project_id, callback = (error) ->) ->
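A standalone sketch of the per-group throttle lookup above, using the limit values from the settings hunk later in this diff. One caveat worth flagging: that settings hunk adds the limits under a rateLimits key, while this lookup reads Settings?.rateLimit?, so it is worth double-checking which key the deployed settings actually use.

  Settings = rateLimit: autoCompile: { everyone: 100, standard: 25 }

  throttleFor = (compileGroup) ->
    Settings?.rateLimit?.autoCompile?[compileGroup] || 25

  console.log throttleFor("standard")  # 25
  console.log throttleFor("priority")  # 25 (no entry, falls back to the default)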
@@ -128,9 +128,9 @@ module.exports = DocumentUpdaterHandler =
    # docs from redis via the docupdater. Otherwise we will need to
    # fall back to getting them from mongo.
    timer = new metrics.Timer("get-project-docs")
    url = "#{settings.apis.documentupdater.url}/project/#{project_id}/doc?state=#{projectStateHash}"
    url = "#{settings.apis.documentupdater.url}/project/#{project_id}/get_and_flush_if_old?state=#{projectStateHash}"
    logger.log project_id:project_id, "getting project docs from document updater"
    request.get url, (error, res, body)->
    request.post url, (error, res, body)->
      timer.done()
      if error?
        logger.error err:error, url:url, project_id:project_id, "error getting project docs from doc updater"
@@ -7,7 +7,7 @@ module.exports =
    doc_id = req.params.doc_id
    plain = req?.query?.plain == 'true'
    logger.log doc_id:doc_id, project_id:project_id, "receiving get document request from api (docupdater)"
    ProjectEntityHandler.getDoc project_id, doc_id, (error, lines, rev, version, ranges) ->
    ProjectEntityHandler.getDoc project_id, doc_id, {pathname: true}, (error, lines, rev, version, ranges, pathname) ->
      if error?
        logger.err err:error, doc_id:doc_id, project_id:project_id, "error finding element for getDocument"
        return next(error)

@@ -20,6 +20,7 @@ module.exports =
        lines: lines
        version: version
        ranges: ranges
        pathname: pathname
      }

  setDocument: (req, res, next = (error) ->) ->

@@ -33,6 +34,3 @@ module.exports =
        return next(error)
      logger.log doc_id:doc_id, project_id:project_id, "finished receiving set document request from api (docupdater)"
      res.sendStatus 200
@@ -6,7 +6,8 @@ AuthenticationController = require "../Authentication/AuthenticationController"
module.exports = HistoryController =
  proxyToHistoryApi: (req, res, next = (error) ->) ->
    user_id = AuthenticationController.getLoggedInUserId req
    url = settings.apis.trackchanges.url + req.url
    url = HistoryController.buildHistoryServiceUrl() + req.url

    logger.log url: url, "proxying to track-changes api"
    getReq = request(
      url: url

@@ -18,3 +19,9 @@ module.exports = HistoryController =
    getReq.on "error", (error) ->
      logger.error err: error, "track-changes API error"
      next(error)

  buildHistoryServiceUrl: () ->
    if settings.apis.project_history?.enabled
      return settings.apis.project_history.url
    else
      return settings.apis.trackchanges.url
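A minimal standalone sketch of the URL selection above, using the service URLs that appear in the settings hunk later in this diff:

  settings = apis:
    trackchanges: url: "http://localhost:3015"
    project_history: { enabled: false, url: "http://localhost:3054" }

  buildHistoryServiceUrl = ->
    if settings.apis.project_history?.enabled
      settings.apis.project_history.url
    else
      settings.apis.trackchanges.url

  console.log buildHistoryServiceUrl()  # "http://localhost:3015" until the flag is enabled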
@@ -1,28 +0,0 @@ (file deleted)
settings = require "settings-sharelatex"
request = require "request"
logger = require "logger-sharelatex"

module.exports = HistoryManager =
  flushProject: (project_id, callback = (error) ->) ->
    logger.log project_id: project_id, "flushing project in track-changes api"
    url = "#{settings.apis.trackchanges.url}/project/#{project_id}/flush"
    request.post url, (error, res, body) ->
      return callback(error) if error?
      if 200 <= res.statusCode < 300
        callback(null)
      else
        error = new Error("track-changes api responded with non-success code: #{res.statusCode} #{url}")
        logger.error err: error, project_id: project_id, "error flushing project in track-changes api"
        callback(error)

  archiveProject: (project_id, callback = ()->)->
    logger.log project_id: project_id, "archving project in track-changes api"
    url = "#{settings.apis.trackchanges.url}/project/#{project_id}/archive"
    request.post url, (error, res, body) ->
      return callback(error) if error?
      if 200 <= res.statusCode < 300
        callback(null)
      else
        error = new Error("track-changes api responded with non-success code: #{res.statusCode} #{url}")
        logger.error err: error, project_id: project_id, "error archving project in track-changes api"
        callback(error)
@@ -229,6 +229,7 @@ module.exports = ProjectController =
        else if event?
          return cb(null, false)
        else
          logger.log { user_id, event }, "track changes onboarding not shown yet to this user"
          return cb(null, true)

    showPerUserTCNotice: (cb) ->
      cb = underscore.once(cb)

@@ -247,6 +248,34 @@ module.exports = ProjectController =
        else if event?
          return cb(null, false)
        else
          logger.log { user_id, event }, "per user track changes notice not shown yet to this user"
          return cb(null, true)

    showAutoCompileOnboarding: (cb) ->
      cb = underscore.once(cb)
      if !user_id?
        return cb()

      # Extract data from user's ObjectId
      timestamp = parseInt(user_id.toString().substring(0, 8), 16)
      counter = parseInt(user_id.toString().substring(18, 24), 16)

      rolloutPercentage = 5 # Percentage of users to roll out to
      if counter % 100 > rolloutPercentage
        # Don't show if user is not part of roll out
        return cb(null, false)
      userSignupDate = new Date(timestamp * 1000)
      if userSignupDate > new Date("2017-10-16")
        # Don't show for users who registered after it was released
        return cb(null, false)
      timeout = setTimeout cb, 500
      AnalyticsManager.getLastOccurance user_id, "shown-autocompile-onboarding", (error, event) ->
        clearTimeout timeout
        if error?
          return cb(null, false)
        else if event?
          return cb(null, false)
        else
          logger.log { user_id, event }, "autocompile onboarding not shown yet to this user"
          return cb(null, true)
  }, (err, results)->
    if err?

@@ -255,9 +284,9 @@ module.exports = ProjectController =
    project = results.project
    user = results.user
    subscription = results.subscription
    { showTrackChangesOnboarding, showPerUserTCNotice } = results
    { showTrackChangesOnboarding, showPerUserTCNotice, showAutoCompileOnboarding } = results

    daysSinceLastUpdated = (new Date() - project.lastUpdated) /86400000
    daysSinceLastUpdated = (new Date() - project.lastUpdated) / 86400000
    logger.log project_id:project_id, daysSinceLastUpdated:daysSinceLastUpdated, "got db results for loading editor"

    AuthorizationManager.getPrivilegeLevelForProject user_id, project_id, (error, privilegeLevel)->

@@ -300,6 +329,7 @@ module.exports = ProjectController =
        trackChangesState: project.track_changes
        showTrackChangesOnboarding: !!showTrackChangesOnboarding
        showPerUserTCNotice: !!showPerUserTCNotice
        showAutoCompileOnboarding: !!showAutoCompileOnboarding
        privilegeLevel: privilegeLevel
        chatUrl: Settings.apis.chat.url
        anonymous: anonymous
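The rollout check above keys off two fields packed into a Mongo ObjectId: the leading 4 bytes are a creation timestamp and the trailing 3 bytes are an insert counter. A standalone sketch with a hypothetical id:

  user_id = "59e0ab7dfa12cd00017e8a3c" # hypothetical 24-hex-char ObjectId
  timestamp = parseInt(user_id.substring(0, 8), 16)  # seconds since the epoch
  counter = parseInt(user_id.substring(18, 24), 16)  # per-process insert counter

  inRollout = (counter % 100) <= 5                   # roughly 5% of users
  signupDate = new Date(timestamp * 1000)
  console.log signupDate.toISOString(), inRollout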
@@ -9,7 +9,7 @@ Errors = require("../Errors/Errors")

module.exports = ProjectDetailsHandler =
  getDetails: (project_id, callback)->
    ProjectGetter.getProject project_id, {name:true, description:true, compiler:true, features:true, owner_ref:true}, (err, project)->
    ProjectGetter.getProject project_id, {name:true, description:true, compiler:true, features:true, owner_ref:true, overleaf:true}, (err, project)->
      if err?
        logger.err err:err, project_id:project_id, "error getting project"
        return callback(err)

@@ -21,7 +21,11 @@ module.exports = ProjectDetailsHandler =
        description: project.description
        compiler: project.compiler
        features: user.features
      logger.log project_id:project_id, details:details, "getting project details"

      if project.overleaf?
        details.overleaf = project.overleaf

      logger.log project_id:project_id, details: details, "getting project details"
      callback(err, details)

  getProjectDescription: (project_id, callback)->

@@ -53,7 +57,7 @@ module.exports = ProjectDetailsHandler =

  MAX_PROJECT_NAME_LENGTH: 150
  validateProjectName: (name, callback = (error) ->) ->
    if name.length == 0
    if !name? or name.length == 0
      return callback(new Errors.InvalidNameError("Project name cannot be blank"))
    else if name.length > @MAX_PROJECT_NAME_LENGTH
      return callback(new Errors.InvalidNameError("Project name is too long"))

@@ -66,4 +70,4 @@ module.exports = ProjectDetailsHandler =
    logger.log project_id: project_id, level: newAccessLevel, "set public access level"
    if project_id? && newAccessLevel? and _.include [PublicAccessLevels.READ_ONLY, PublicAccessLevels.READ_AND_WRITE, PublicAccessLevels.PRIVATE], newAccessLevel
      Project.update {_id:project_id},{publicAccesLevel:newAccessLevel}, (err)->
        callback()
        callback()
@@ -131,7 +131,7 @@ module.exports = ProjectEntityHandler =
  setRootDoc: (project_id, newRootDocID, callback = (error) ->)->
    logger.log project_id: project_id, rootDocId: newRootDocID, "setting root doc"
    Project.update {_id:project_id}, {rootDoc_id:newRootDocID}, {}, callback

  unsetRootDoc: (project_id, callback = (error) ->) ->
    logger.log project_id: project_id, "removing root doc"
    Project.update {_id:project_id}, {$unset: {rootDoc_id: true}}, {}, callback

@@ -140,8 +140,15 @@ module.exports = ProjectEntityHandler =
    if typeof(options) == "function"
      callback = options
      options = {}
    DocstoreManager.getDoc project_id, doc_id, options, callback

    if options["pathname"]
      delete options["pathname"]
      projectLocator.findElement {project_id: project_id, element_id: doc_id, type: 'doc'}, (error, doc, path) =>
        return callback(error) if error?
        DocstoreManager.getDoc project_id, doc_id, options, (error, lines, rev, version, ranges) =>
          callback(error, lines, rev, version, ranges, path.fileSystem)
    else
      DocstoreManager.getDoc project_id, doc_id, options, callback

  addDoc: (project_id, folder_id, docName, docLines, callback = (error, doc, folder_id) ->)=>
    ProjectGetter.getProjectWithOnlyFolders project_id, (err, project) ->

@@ -158,7 +165,7 @@ module.exports = ProjectEntityHandler =
      # Put doc in docstore first, so that if it errors, we don't have a doc_id in the project
      # which hasn't been created in docstore.
      DocstoreManager.updateDoc project_id.toString(), doc._id.toString(), docLines, 0, {}, (err, modified, rev) ->
        return callback(err) if err?
        return callback(err) if err?

        ProjectEntityHandler._putElement project, folder_id, doc, "doc", (err, result)=>
          return callback(err) if err?

@@ -207,7 +214,7 @@ module.exports = ProjectEntityHandler =
  replaceFile: (project_id, file_id, fsPath, callback)->
    ProjectGetter.getProject project_id, {name:true}, (err, project) ->
      return callback(err) if err?
      findOpts =
      findOpts =
        project_id:project._id
        element_id:file_id
        type:"file"

@@ -280,7 +287,7 @@ module.exports = ProjectEntityHandler =
    procesFolder = (previousFolders, folderName, callback)=>
      previousFolders = previousFolders || []
      parentFolder = previousFolders[previousFolders.length-1]
      if parentFolder?
      if parentFolder?
        parentFolder_id = parentFolder._id
      builtUpPath = "#{builtUpPath}/#{folderName}"
      projectLocator.findElementByPath project, builtUpPath, (err, foundFolder)=>

@@ -360,7 +367,7 @@ module.exports = ProjectEntityHandler =
      return callback(err) if err?
      projectLocator.findElement {project:project, element_id:entity_id, type:entityType}, (err, entity, path)->
        return callback(err) if err?

        if entityType.match(/folder/)
          ensureFolderIsNotMovedIntoChild = (callback = (error) ->) ->
            projectLocator.findElement {project: project, element_id: folder_id, type:"folder"}, (err, destEntity, destPath) ->

@@ -372,7 +379,7 @@ module.exports = ProjectEntityHandler =
              callback()
        else
          ensureFolderIsNotMovedIntoChild = (callback = () ->) -> callback()

        ensureFolderIsNotMovedIntoChild (error) ->
          return callback(error) if error?
          self._removeElementFromMongoArray Project, project_id, path.mongo, (err)->

@@ -382,7 +389,7 @@ module.exports = ProjectEntityHandler =
            return callback(err) if err?
            ProjectEntityHandler._putElement project, destinationFolder_id, entity, entityType, (err, result)->
              return callback(err) if err?
              opts =
              opts =
                project_id:project_id
                project_name:project.name
                startPath:path.fileSystem

@@ -506,7 +513,7 @@ module.exports = ProjectEntityHandler =

  _countElements : (project, callback)->

    countFolder = (folder, cb = (err, count)->)->

      jobs = _.map folder?.folders, (folder)->
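The getDoc change above relies on a common CoffeeScript idiom: the options argument may be omitted, in which case the function shifts the callback over. A runnable standalone sketch of that normalization (names illustrative):

  getDoc = (project_id, doc_id, options, callback) ->
    if typeof(options) == "function"
      callback = options
      options = {}
    callback null, options

  getDoc "project-1", "doc-1", (err, opts) -> console.log opts  # {} (options omitted)
  getDoc "project-1", "doc-1", {pathname: true}, (err, opts) -> console.log opts  # { pathname: true }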
@@ -1,56 +1,105 @@
child = require "child_process"
logger = require "logger-sharelatex"
metrics = require "metrics-sharelatex"
fs = require "fs"
Path = require "path"
fse = require "fs-extra"
yauzl = require "yauzl"
Settings = require "settings-sharelatex"
_ = require("underscore")

ONE_MEG = 1024 * 1024

module.exports = ArchiveManager =

  _isZipTooLarge: (source, callback = (err, isTooLarge)->)->
    callback = _.once callback

    unzip = child.spawn("unzip", ["-l", source])
    totalSizeInBytes = null
    yauzl.open source, {lazyEntries: true}, (err, zipfile) ->
      return callback(err) if err?

      output = ""
      unzip.stdout.on "data", (d)->
        output += d
      if Settings.maxEntitiesPerProject? and zipfile.entryCount > Settings.maxEntitiesPerProject
        return callback(null, true) # too many files in zip file

      error = null
      unzip.stderr.on "data", (chunk) ->
        error ||= ""
        error += chunk
      zipfile.on "error", callback

      unzip.on "error", (err) ->
        logger.error {err, source}, "unzip failed"
        if err.code == "ENOENT"
          logger.error "unzip command not found. Please check the unzip command is installed"
        callback(err)
      # read all the entries
      zipfile.readEntry()
      zipfile.on "entry", (entry) ->
        totalSizeInBytes += entry.uncompressedSize
        zipfile.readEntry() # get the next entry

      unzip.on "close", (exitCode) ->
        if error?
          error = new Error(error)
          logger.warn err:error, source: source, "error checking zip size"
      # no more entries to read
      zipfile.on "end", () ->
        if !totalSizeInBytes? or isNaN(totalSizeInBytes)
          logger.err source:source, totalSizeInBytes:totalSizeInBytes, "error getting bytes of zip"
          return callback(new Error("error getting bytes of zip"))
        isTooLarge = totalSizeInBytes > (ONE_MEG * 300)
        callback(null, isTooLarge)

        lines = output.split("\n")
        lastLine = lines[lines.length - 2]?.trim()
        totalSizeInBytes = lastLine?.split(" ")?[0]

  _checkFilePath: (entry, destination, callback = (err, destFile) ->) ->
    # check if the entry is a directory
    endsWithSlash = /\/$/
    if endsWithSlash.test(entry.fileName)
      return callback() # don't give a destfile for directory
    # check that the file does not use a relative path
    for dir in entry.fileName.split('/')
      if dir == '..'
        return callback(new Error("relative path"))
    # check that the destination file path is normalized
    dest = "#{destination}/#{entry.fileName}"
    if dest != Path.normalize(dest)
      return callback(new Error("unnormalized path"))
    else
      return callback(null, dest)

    totalSizeInBytesAsInt = parseInt(totalSizeInBytes)

  _writeFileEntry: (zipfile, entry, destFile, callback = (err)->) ->
    callback = _.once callback

    if !totalSizeInBytesAsInt? or isNaN(totalSizeInBytesAsInt)
      logger.err source:source, totalSizeInBytes:totalSizeInBytes, totalSizeInBytesAsInt:totalSizeInBytesAsInt, lastLine:lastLine, exitCode:exitCode, "error getting bytes of zip"
      return callback(new Error("error getting bytes of zip"))

    zipfile.openReadStream entry, (err, readStream) ->
      return callback(err) if err?
      readStream.on "error", callback
      readStream.on "end", callback

      isTooLarge = totalSizeInBytes > (ONE_MEG * 300)

      errorHandler = (err) -> # clean up before calling callback
        readStream.unpipe()
        readStream.destroy()
        callback(err)

      callback(error, isTooLarge)

      fse.ensureDir Path.dirname(destFile), (err) ->
        return errorHandler(err) if err?
        writeStream = fs.createWriteStream destFile
        writeStream.on 'error', errorHandler
        readStream.pipe(writeStream)

  _extractZipFiles: (source, destination, callback = (err) ->) ->
    callback = _.once callback

    yauzl.open source, {lazyEntries: true}, (err, zipfile) ->
      return callback(err) if err?
      zipfile.on "error", callback
      # read all the entries
      zipfile.readEntry()
      zipfile.on "entry", (entry) ->
        logger.log {source:source, fileName: entry.fileName}, "processing zip file entry"
        ArchiveManager._checkFilePath entry, destination, (err, destFile) ->
          if err?
            logger.warn err:err, source:source, destination:destination, "skipping bad file path"
            zipfile.readEntry() # bad path, just skip to the next file
            return
          if destFile? # only write files
            ArchiveManager._writeFileEntry zipfile, entry, destFile, (err) ->
              if err?
                logger.error err:err, source:source, destFile:destFile, "error unzipping file entry"
                zipfile.close() # bail out, stop reading file entries
                return callback(err)
              else
                zipfile.readEntry() # continue to the next file
          else # if it's a directory, continue
            zipfile.readEntry()
      # no more entries to read
      zipfile.on "end", callback

  extractZipArchive: (source, destination, _callback = (err) ->) ->
    callback = (args...) ->
      _callback(args...)

@@ -62,36 +111,19 @@ module.exports = ArchiveManager =
        return callback(err)

      if isTooLarge
        return callback(new Error("zip_too_large"))
        return callback(new Error("zip_too_large"))

      timer = new metrics.Timer("unzipDirectory")
      logger.log source: source, destination: destination, "unzipping file"

      unzip = child.spawn("unzip", [source, "-d", destination])

      # don't remove this line, some zips need
      # us to listen on this for some unknow reason
      unzip.stdout.on "data", (d)->

      error = null
      unzip.stderr.on "data", (chunk) ->
        error ||= ""
        error += chunk

      unzip.on "error", (err) ->
        logger.error {err, source, destination}, "unzip failed"
        if err.code == "ENOENT"
          logger.error "unzip command not found. Please check the unzip command is installed"
        callback(err)

      unzip.on "close", () ->
      ArchiveManager._extractZipFiles source, destination, (err) ->
        timer.done()
        if error?
          error = new Error(error)
          logger.error err:error, source: source, destination: destination, "error unzipping file"
        callback(error)
        if err?
          logger.error {err, source, destination}, "unzip failed"
          callback(err)
        else
          callback()

  findTopLevelDirectory: (directory, callback = (error, topLevelDir) ->) ->
    fs.readdir directory, (error, files) ->
      return callback(error) if error?
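The rewrite above replaces shelling out to the unzip binary with yauzl's pull-based API: nothing is read until readEntry() is called, each "entry" event hands back one header, and "end" fires when the entries are exhausted. A minimal standalone sketch of that traversal pattern (the zip path is hypothetical):

  yauzl = require "yauzl"

  totalSize = 0
  yauzl.open "/tmp/example.zip", {lazyEntries: true}, (err, zipfile) ->
    throw err if err?
    zipfile.readEntry() # nothing is emitted until the first readEntry()
    zipfile.on "entry", (entry) ->
      totalSize += entry.uncompressedSize
      zipfile.readEntry() # pull the next entry
    zipfile.on "end", ->
      console.log "entries:", zipfile.entryCount, "uncompressed bytes:", totalSize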
@@ -18,6 +18,8 @@ module.exports = FileTypeManager =

  IGNORE_FILENAMES : [
    "__MACOSX"
    ".git"
    ".gitignore"
  ]

  MAX_TEXT_FILE_SIZE: 1 * 1024 * 1024 # 1 MB
@@ -4,6 +4,7 @@ UserDeleter = require("./UserDeleter")
UserUpdater = require("./UserUpdater")
sanitize = require('sanitizer')
AuthenticationController = require('../Authentication/AuthenticationController')
ObjectId = require("mongojs").ObjectId

module.exports = UserController =
  getLoggedInUsersPersonalInfo: (req, res, next = (error) ->) ->

@@ -19,8 +20,17 @@ module.exports = UserController =
      UserController.sendFormattedPersonalInfo(user, res, next)

  getPersonalInfo: (req, res, next = (error) ->) ->
    UserGetter.getUser req.params.user_id, { _id: true, first_name: true, last_name: true, email: true}, (error, user) ->
      logger.log user_id: req.params.user_id, "reciving request for getting users personal info"
    {user_id} = req.params

    if user_id.match(/^\d+$/)
      query = { "overleaf.id": parseInt(user_id, 10) }
    else if user_id.match(/^[a-f0-9]{24}$/)
      query = { _id: ObjectId(user_id) }
    else
      return res.send(400)

    UserGetter.getUser query, { _id: true, first_name: true, last_name: true, email: true}, (error, user) ->
      logger.log user_id: req.params.user_id, "receiving request for getting users personal info"
      return next(error) if error?
      return res.send(404) if !user?
      UserController.sendFormattedPersonalInfo(user, res, next)
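The dispatch above accepts either a numeric Overleaf id or a 24-hex-character Mongo ObjectId and builds the matching query; anything else is rejected with a 400. A standalone sketch (ids are hypothetical; the real code wraps the hex case in ObjectId):

  classifyUserId = (user_id) ->
    if user_id.match(/^\d+$/)
      { "overleaf.id": parseInt(user_id, 10) }
    else if user_id.match(/^[a-f0-9]{24}$/)
      { _id: user_id }
    else
      null # caller responds with 400

  console.log classifyUserId("12345")                    # { 'overleaf.id': 12345 }
  console.log classifyUserId("59e0ab7dfa12cd00017e8a3c") # { _id: '59e0ab7d...' }
  console.log classifyUserId("not-an-id")                # null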
@@ -122,6 +122,7 @@ block requirejs
      window.trackChangesState = data.trackChangesState;
      window.showTrackChangesOnboarding = #{!!showTrackChangesOnboarding};
      window.showPerUserTCNotice = #{!!showPerUserTCNotice};
      window.showAutoCompileOnboarding = #{!!showAutoCompileOnboarding}
      window.wikiEnabled = #{!!(settings.apis.wiki && settings.apis.wiki.url)};
      window.requirejs = {
        "paths" : {
@@ -94,4 +94,17 @@ div.full-size(
    ng-show="ui.view == 'pdf'"
  )
    include ./pdf

  #onboarding-autocompile.onboarding-autocompile.popover(
    ng-controller="AutoCompileOnboardingController"
    ng-if="onboarding.autoCompile == 'show'"
    ng-class="placement"
  )
    .popover-inner
      h3.popover-title #{translate("auto_compile")}
      .popover-content
        p #{translate("try_out_auto_compile_setting")}
        img(src="/img/onboarding/autocompile/setting-dropdown.png" width="100%")
        p #{translate("auto_compile_onboarding_description")}
        button.btn.btn-default.btn-block(ng-click="dismiss()")
          | #{translate("got_it")}
@@ -33,7 +33,7 @@
          loop
        )
          source(ng-src="{{ '/img/onboarding/review-panel/open-review.mp4' }}", type="video/mp4")
          img(src="/img/onboarding/review-panel/open-review.gif")
          img(ng-src="{{ '/img/onboarding/review-panel/open-review.gif' }}", alt="Open review panel demo")
      div(ng-show="onboarding.innerStep === 2;")
        video.feat-onboard-video(
          video-play-state="onboarding.innerStep === 2;"

@@ -41,7 +41,7 @@
          loop
        )
          source(ng-src="{{ '/img/onboarding/review-panel/commenting.mp4' }}", type="video/mp4")
          img(src="/img/onboarding/review-panel/commenting.gif")
          img(ng-src="{{ '/img/onboarding/review-panel/commenting.gif' }}", alt="Commenting demo")
      div(ng-show="onboarding.innerStep === 3;")
        video.feat-onboard-video(
          video-play-state="onboarding.innerStep === 3;"

@@ -49,7 +49,7 @@
          loop
        )
          source(ng-src="{{ '/img/onboarding/review-panel/add-changes.mp4' }}", type="video/mp4")
          img(src="/img/onboarding/review-panel/add-changes.gif")
          img(ng-src="{{ '/img/onboarding/review-panel/add-changes.gif' }}", alt="Add changes demo")
      div(ng-show="onboarding.innerStep === 4;")
        video.feat-onboard-video(
          video-play-state="onboarding.innerStep === 4;"

@@ -57,7 +57,7 @@
          loop
        )
          source(ng-src="{{ '/img/onboarding/review-panel/accept-changes.mp4' }}", type="video/mp4")
          img(src="/img/onboarding/review-panel/accept-changes.gif")
          img(ng-src="{{ '/img/onboarding/review-panel/accept-changes.gif' }}", alt="Accept changes demo")
      button.btn.btn-primary.feat-onboard-nav-btn(
        ng-click="gotoNextStep();"
        ng-disabled="onboarding.innerStep === onboarding.nSteps;")
@@ -1,6 +1,6 @@
div.full-size.pdf(ng-controller="PdfController")
  .toolbar.toolbar-tall
    .btn-group(
    .btn-group#recompile(
      dropdown,
      tooltip-html="'"+translate('recompile_pdf')+" <span class=\"keyboard-shortcut\">({{modifierKey}} + Enter)</span>'"
      tooltip-class="keyboard-tooltip"

@@ -26,8 +26,8 @@ div.full-size.pdf(ng-controller="PdfController")
      )
        span.caret
      ul.dropdown-menu.dropdown-menu-left
        // Only show on beta program?
        if user.betaProgram
        // Only show if on beta program or part of rollout
        if user.betaProgram || showAutoCompileOnboarding
          li.dropdown-header #{translate("auto_compile")}
          li
            a(href, ng-click="autocompile_enabled = true")

@@ -317,6 +317,9 @@ div.full-size.pdf(ng-controller="PdfController")
          div
            li(ng-repeat="entry in pdf.validation.conflictedPaths") {{ '/'+entry['path'] }}

        .alert.alert-danger(ng-show="pdf.validation.mainFile")
          strong #{translate("main_file_not_found")}
          span #{translate("please_set_main_file")}

      .pdf-errors(ng-switch-when="errors")

@@ -344,6 +347,10 @@ div.full-size.pdf(ng-controller="PdfController")
          strong #{translate("pdf_compile_rate_limit_hit")}
          span #{translate("project_flagged_too_many_compiles")}

        .alert.alert-danger(ng-show="pdf.compileInProgress")
          strong #{translate("pdf_compile_in_progress_error")}.
          span #{translate("pdf_compile_try_again")}

        .alert.alert-danger(ng-show="pdf.timedout")
          p
            strong #{translate("timedout")}.

@@ -387,6 +394,12 @@ div.full-size.pdf(ng-controller="PdfController")
            ng-click="startFreeTrial('compile-timeout')"
          ) #{translate("start_free_trial")}

        .alert.alert-danger(ng-show="pdf.autoCompileDisabled")
          p
            strong #{translate("autocompile_disabled")}.
            span #{translate("autocompile_disabled_reason")}

        .alert.alert-danger(ng-show="pdf.projectTooLarge")
          strong #{translate("project_too_large")}
          span #{translate("project_too_large_please_reduce")}
@@ -104,6 +104,9 @@ module.exports = settings =
      url : "http://localhost:3005"
    trackchanges:
      url : "http://localhost:3015"
    project_history:
      enabled: false
      url : "http://localhost:3054"
    docstore:
      url : "http://localhost:3016"
      pubUrl: "http://localhost:3016"

@@ -437,3 +440,8 @@ module.exports = settings =
  #  name : "all projects",
  #  url: "/templates/all"
  #}]

  rateLimits:
    autoCompile:
      everyone: 100
      standard: 25
services/web/npm-shrinkwrap.json (6325 lines changed, generated): file diff suppressed because it is too large.
@@ -25,11 +25,13 @@
    "dateformat": "1.0.4-1.2.3",
    "express": "4.13.0",
    "express-session": "^1.14.2",
    "fs-extra": "^4.0.2",
    "heapdump": "^0.3.7",
    "helmet": "^3.8.1",
    "http-proxy": "^1.8.1",
    "ioredis": "^2.4.0",
    "jade": "~1.3.1",
    "jsonwebtoken": "^8.0.1",
    "ldapjs": "^0.7.1",
    "lodash": "^4.13.1",
    "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#master",

@@ -68,16 +70,17 @@
    "underscore": "1.6.0",
    "uuid": "^3.0.1",
    "v8-profiler": "^5.2.3",
    "xml2js": "0.2.0"
    "xml2js": "0.2.0",
    "yauzl": "^2.8.0"
  },
  "devDependencies": {
    "autoprefixer": "^6.6.1",
    "bunyan": "0.22.1",
    "chai": "3.5.0",
    "chai-spies": "",
    "grunt": "0.4.5",
    "clean-css": "^3.4.18",
    "es6-promise": "^4.0.5",
    "grunt": "0.4.5",
    "grunt-available-tasks": "0.4.1",
    "grunt-bunyan": "0.5.0",
    "grunt-contrib-clean": "0.5.0",

@@ -89,12 +92,14 @@
    "grunt-exec": "^0.4.7",
    "grunt-execute": "^0.2.2",
    "grunt-file-append": "0.0.6",
    "grunt-forever": "^0.4.7",
    "grunt-git-rev-parse": "^0.1.4",
    "grunt-mocha-test": "0.9.0",
    "grunt-newer": "^1.2.0",
    "grunt-parallel": "^0.5.1",
    "grunt-postcss": "^0.8.0",
    "grunt-sed": "^0.1.1",
    "grunt-shell": "^2.1.0",
    "sandboxed-module": "0.2.0",
    "sinon": "^1.17.0",
    "timekeeper": "",
@@ -12,7 +12,8 @@ define [
  "ide/labels/LabelsManager"
  "ide/review-panel/ReviewPanelManager"
  "ide/SafariScrollPatcher"
  "ide/FeatureOnboardingController"
  "ide/FeatureOnboardingController",
  "ide/AutoCompileOnboardingController",
  "ide/settings/index"
  "ide/share/index"
  "ide/chat/index"

@@ -71,11 +72,16 @@ define [
      view: "editor"
      chatOpen: false
      pdfLayout: 'sideBySide'
      reviewPanelOpen: localStorage("ui.reviewPanelOpen.#{window.project_id}")
      miniReviewPanelVisible: false
      pdfHidden: false,
      pdfWidth: 0,
      reviewPanelOpen: localStorage("ui.reviewPanelOpen.#{window.project_id}"),
      miniReviewPanelVisible: false,
    }
    $scope.onboarding = {
      autoCompile: if window.showAutoCompileOnboarding then 'unseen' else 'dismissed'
    }
    $scope.user = window.user

    $scope.$watch "project.features.trackChangesVisible", (visible) ->
      return if !visible?
      $scope.ui.showCollabFeaturesOnboarding = window.showTrackChangesOnboarding and visible

@@ -99,6 +105,10 @@ define [
      if value?
        localStorage "ui.reviewPanelOpen.#{window.project_id}", value

    $scope.$on "layout:pdf:resize", (_, layoutState) ->
      $scope.ui.pdfHidden = layoutState.east.initClosed
      $scope.ui.pdfWidth = layoutState.east.size

    # Tracking code.
    $scope.$watch "ui.view", (newView, oldView) ->
      if newView? and newView != "editor" and newView != "pdf"

@@ -181,6 +191,20 @@ define [
    if ide.browserIsSafari
      ide.safariScrollPatcher = new SafariScrollPatcher($scope)

    # Fix Chrome 61 and 62 text-shadow rendering
    browserIsChrome61or62 = false
    try
      chromeVersion = parseFloat(navigator.userAgent.split(" Chrome/")[1]) || null;
      browserIsChrome61or62 = (
        chromeVersion? &&
        (chromeVersion == 61 || chromeVersion == 62)
      )
      if browserIsChrome61or62
        document.styleSheets[0].insertRule(".ace_editor.ace_autocomplete .ace_completion-highlight { text-shadow: none !important; }", 1)
    catch err
      console.error err

    # User can append ?ft=somefeature to url to activate a feature toggle
    ide.featureToggle = location?.search?.match(/^\?ft=(\w+)$/)?[1]
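A standalone sketch of the feature-toggle parsing on the last line above (the query string is hypothetical):

  search = "?ft=autocompile"
  featureToggle = search?.match(/^\?ft=(\w+)$/)?[1]
  console.log featureToggle  # "autocompile"; undefined when no ?ft=... is present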
@@ -0,0 +1,26 @@ (new file)
define [
  "base"
], (App) ->
  App.controller "AutoCompileOnboardingController", ($scope, event_tracking) ->
    recompileBtn = angular.element('#recompile')
    popover = angular.element('#onboarding-autocompile')
    { top, left } = recompileBtn.offset()

    # If pdf panel smaller than recompile button + popover, show to left.
    # Otherwise show to right
    if $scope.ui.pdfWidth < 475
      $scope.placement = 'left'
      popover.offset({
        top: top,
        left: left - popover.width()
      })
    else
      $scope.placement = 'right'
      popover.offset({
        top: top,
        left: left + recompileBtn.width()
      })

    $scope.dismiss = () ->
      $scope.onboarding.autoCompile = 'dismissed'
      event_tracking.sendMB "shown-autocompile-onboarding"
@@ -115,7 +115,10 @@ define [
      $scope.resetHoverState()

    $scope.displayName = (user) ->
      full_name = "#{user.first_name} #{user.last_name}"
      if user.name?
        full_name = user.name
      else
        full_name = "#{user.first_name} #{user.last_name}"
      fallback_name = "Unknown"
      if !user?
        fallback_name
@@ -68,7 +68,7 @@ define [
    $scope.$on "project:joined", () ->
      return if !autoCompile
      autoCompile = false
      $scope.recompile(isAutoCompile: true)
      $scope.recompile(isAutoCompileOnLoad: true)
      $scope.hasPremiumCompile = $scope.project.features.compileGroup == "priority"

    $scope.$on "pdf:error:display", () ->

@@ -77,7 +77,7 @@ define [

    autoCompileTimeout = null
    triggerAutoCompile = () ->
      return if autoCompileTimeout
      return if autoCompileTimeout or $scope.ui.pdfHidden

      timeSinceLastCompile = Date.now() - $scope.recompiledAt
      # If time is non-monotonic, assume that the user's system clock has been

@@ -86,7 +86,7 @@ define [

      if isTimeNonMonotonic || timeSinceLastCompile >= AUTO_COMPILE_TIMEOUT
        if (!ide.$scope.hasLintingError)
          $scope.recompile(isBackgroundAutoCompile: true)
          $scope.recompile(isAutoCompileOnChange: true)
      else
        # Extend remainder of timeout
        autoCompileTimeout = setTimeout () ->

@@ -109,7 +109,7 @@ define [
        toggleAutoCompile(newValue)
        event_tracking.sendMB "autocompile-setting-changed", { value: newValue }

    if window.user?.betaProgram and $scope.autocompile_enabled
    if (window.user?.betaProgram or window.showAutoCompileOnboarding) and $scope.autocompile_enabled
      toggleAutoCompile(true)

    # abort compile if syntax checks fail

@@ -127,7 +127,7 @@ define [
    sendCompileRequest = (options = {}) ->
      url = "/project/#{$scope.project_id}/compile"
      params = {}
      if options.isAutoCompile
      if options.isAutoCompileOnLoad or options.isAutoCompileOnChange
        params["auto_compile"]=true
      # if the previous run was a check, clear the error logs
      $scope.pdf.logEntries = [] if $scope.check

@@ -144,9 +144,9 @@ define [
        rootDoc_id: options.rootDocOverride_id or null
        draft: $scope.draft
        check: checkType
        # use incremental compile for beta users but revert to a full
        # use incremental compile for all users but revert to a full
        # compile if there is a server error
        incrementalCompilesEnabled: window.user?.betaProgram and not $scope.pdf.error
        incrementalCompilesEnabled: not $scope.pdf.error
        _csrf: window.csrfToken
      }, {params: params}

@@ -167,6 +167,8 @@ define [
      $scope.pdf.compileTerminated = false
      $scope.pdf.compileExited = false
      $scope.pdf.failedCheck = false
      $scope.pdf.compileInProgress = false
      $scope.pdf.autoCompileDisabled = false

      # make a cache to look up files by name
      fileByPath = {}

@@ -205,7 +207,13 @@ define [
        $scope.shouldShowLogs = true
        fetchLogs(fileByPath)
      else if response.status == "autocompile-backoff"
        $scope.pdf.view = 'uncompiled'
        if $scope.pdf.isAutoCompileOnLoad # initial autocompile
          $scope.pdf.view = 'uncompiled'
        else # background autocompile from typing
          $scope.pdf.view = 'errors'
          $scope.pdf.autoCompileDisabled = true
        $scope.autocompile_enabled = false # disable any further autocompiles
        event_tracking.sendMB "autocompile-rate-limited", {hasPremiumCompile: $scope.hasPremiumCompile}
      else if response.status == "project-too-large"
        $scope.pdf.view = 'errors'
        $scope.pdf.projectTooLarge = true

@@ -223,6 +231,10 @@ define [
      else if response.status == "validation-problems"
        $scope.pdf.view = "validation-problems"
        $scope.pdf.validation = response.validationProblems
        $scope.shouldShowLogs = false
      else if response.status == "compile-in-progress"
        $scope.pdf.view = 'errors'
        $scope.pdf.compileInProgress = true
      else if response.status == "success"
        $scope.pdf.view = 'pdf'
        $scope.shouldShowLogs = false

@@ -408,9 +420,13 @@ define [
    $scope.recompile = (options = {}) ->
      return if $scope.pdf.compiling

      if !options.isAutoCompileOnLoad and $scope.onboarding.autoCompile == 'unseen'
        $scope.onboarding.autoCompile = 'show'

      event_tracking.sendMBSampled "editor-recompile-sampled", options

      $scope.pdf.compiling = true
      $scope.pdf.isAutoCompileOnLoad = options?.isAutoCompileOnLoad # initial autocompile

      if options?.force
        # for forced compile, turn off validation check and ignore errors
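A distilled sketch of the backoff branch above: the two kinds of autocompile get different UI treatment when the rate limit trips (function and argument names are illustrative):

  handleAutoCompileBackoff = (pdf, isAutoCompileOnLoad) ->
    if isAutoCompileOnLoad  # initial compile on project load: quietly show 'uncompiled'
      pdf.view = 'uncompiled'
    else                    # background compile while typing: surface an error notice
      pdf.view = 'errors'
      pdf.autoCompileDisabled = true
    pdf # further autocompiles are also switched off in the real code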
@@ -55,7 +55,11 @@ define [

    $scope.$watch "project.rootDoc_id", (rootDoc_id, oldRootDoc_id) =>
      return if @ignoreUpdates
      if oldRootDoc_id? and rootDoc_id != oldRootDoc_id
      # don't save on initialisation, Angular passes oldRootDoc_id as
      # undefined in this case.
      return if typeof oldRootDoc_id is "undefined"
      # otherwise only save changes, null values are allowed
      if (rootDoc_id != oldRootDoc_id)
        settings.saveProjectSettings({rootDocId: rootDoc_id})
BIN services/web/public/img/ol_plus_sl.png (new file; binary not shown; 61 KiB)
BIN (second new image, name not captured by this view; binary not shown; 22 KiB)
@@ -98,4 +98,48 @@ a.feat-onboard-dismiss {
      color: #FFF;
      opacity: 1;
    }
  }
}

.onboarding-autocompile {
  display: block;
  top: 10px;

  img {
    margin-bottom: 10px;
    border: 1px solid @gray-lighter;
  }

  &::before, &::after {
    content: '';
    border-width: 11px;
    border-style: solid;
    border-color: transparent;
    top: 7px;
    display: block;
    position: absolute;
  }

  &.right::before {
    border-left-width: 0;
    border-right-color: rgba(0, 0, 0, .3);
    left: -11px;
  }

  &.right::after {
    border-left-width: 0;
    border-right-color: #f7f7f7;
    left: -9.5px;
  }

  &.left::before {
    border-right-width: 0;
    border-left-color: rgba(0, 0, 0, .3);
    right: -11px
  }

  &.left::after {
    border-right-width: 0;
    border-left-color: #f7f7f7;
    right: -9.5px;
  }
}
@@ -344,3 +344,103 @@ describe "CollaboratorsHandler", ->
    it 'should not call ProjectEditorHandler.buildOwnerAndMembersViews', ->
      @ProjectEditorHandler.buildOwnerAndMembersViews.callCount.should.equal 0

  describe 'transferProjects', ->
    beforeEach ->
      @from_user_id = "from-user-id"
      @to_user_id = "to-user-id"
      @projects = [{
        _id: "project-id-1"
      }, {
        _id: "project-id-2"
      }]
      @Project.find = sinon.stub().yields(null, @projects)
      @Project.update = sinon.stub().yields()
      @ProjectEntityHandler.flushProjectToThirdPartyDataStore = sinon.stub().yields()

    describe "successfully", ->
      beforeEach ->
        @CollaboratorHandler.transferProjects @from_user_id, @to_user_id, @callback

      it "should look up the affected projects", ->
        @Project.find
          .calledWith({
            $or : [
              { owner_ref: @from_user_id }
              { collaberator_refs: @from_user_id }
              { readOnly_refs: @from_user_id }
            ]
          })
          .should.equal true

      it "should transfer owned projects", ->
        @Project.update
          .calledWith({
            owner_ref: @from_user_id
          }, {
            $set: { owner_ref: @to_user_id }
          }, {
            multi: true
          })
          .should.equal true

      it "should transfer collaborator projects", ->
        @Project.update
          .calledWith({
            collaberator_refs: @from_user_id
          }, {
            $addToSet: { collaberator_refs: @to_user_id }
          }, {
            multi: true
          })
          .should.equal true
        @Project.update
          .calledWith({
            collaberator_refs: @from_user_id
          }, {
            $pull: { collaberator_refs: @from_user_id }
          }, {
            multi: true
          })
          .should.equal true

      it "should transfer read only collaborator projects", ->
        @Project.update
          .calledWith({
            readOnly_refs: @from_user_id
          }, {
            $addToSet: { readOnly_refs: @to_user_id }
          }, {
            multi: true
          })
          .should.equal true
        @Project.update
          .calledWith({
            readOnly_refs: @from_user_id
          }, {
            $pull: { readOnly_refs: @from_user_id }
          }, {
            multi: true
          })
          .should.equal true

      it "should flush each project to the TPDS", ->
        for project in @projects
          @ProjectEntityHandler.flushProjectToThirdPartyDataStore
            .calledWith(project._id)
            .should.equal true

      it "should call the callback", ->
        @callback.called.should.equal true

    describe "when flushing to TPDS fails", ->
      beforeEach ->
        @ProjectEntityHandler.flushProjectToThirdPartyDataStore = sinon.stub().yields(new Error('oops'))
        @CollaboratorHandler.transferProjects @from_user_id, @to_user_id, @callback

      it "should log an error", ->
        @logger.err.called.should.equal true

      it "should not return an error since it happens in the background", ->
        @callback.called.should.equal true
        @callback.calledWith(new Error('oops')).should.equal false

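For orientation, the behaviour these new tests pin down suggests a handler along the following lines. This is a hedged sketch reconstructed from the assertions above, not the shipped CollaboratorsHandler source: the query and update shapes come straight from the tests (including the real, misspelled schema field `collaberator_refs`), while the `async` helper and the exact log message are assumptions.

  async = require "async"

  # Sketch only: move every project the old user owns or is a member of
  # over to the new user, then flush each project to the TPDS in the
  # background (failures there are logged, never returned).
  transferProjects = (from_user_id, to_user_id, callback = (error) ->) ->
    query = $or: [
      { owner_ref: from_user_id }
      { collaberator_refs: from_user_id }
      { readOnly_refs: from_user_id }
    ]
    Project.find query, (error, projects = []) ->
      return callback(error) if error?
      updates = [
        [{ owner_ref: from_user_id },         { $set: { owner_ref: to_user_id } }]
        [{ collaberator_refs: from_user_id }, { $addToSet: { collaberator_refs: to_user_id } }]
        [{ collaberator_refs: from_user_id }, { $pull: { collaberator_refs: from_user_id } }]
        [{ readOnly_refs: from_user_id },     { $addToSet: { readOnly_refs: to_user_id } }]
        [{ readOnly_refs: from_user_id },     { $pull: { readOnly_refs: from_user_id } }]
      ]
      async.eachSeries updates,
        ([q, update], cb) -> Project.update q, update, { multi: true }, cb
        (error) ->
          return callback(error) if error?
          callback() # don't make the caller wait for the TPDS flush
          for project in projects
            do (project) ->
              ProjectEntityHandler.flushProjectToThirdPartyDataStore project._id, (error) ->
                logger.err {err: error, project_id: project._id}, "error flushing project to TPDS" if error?
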
@@ -33,7 +33,7 @@ describe "ClsiManager", ->
        getProjectDocsIfMatch: sinon.stub().callsArgWith(2,null,null)
      "./ClsiCookieManager": @ClsiCookieManager
      "./ClsiStateManager": @ClsiStateManager
-     "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), warn: sinon.stub() }
+     "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), err: sinon.stub(), warn: sinon.stub() }
      "request": @request = sinon.stub()
      "./ClsiFormatChecker": @ClsiFormatChecker
      "metrics-sharelatex": @Metrics =

@@ -122,6 +122,21 @@ describe "ClsiManager", ->
    it "should call the callback with a success status", ->
      @callback.calledWith(null, @status).should.equal true

  describe "when the resources fail the precompile check", ->
    beforeEach ->
      @ClsiFormatChecker.checkRecoursesForProblems = sinon.stub().callsArgWith(1, new Error("failed"))
      @ClsiManager._postToClsi = sinon.stub().callsArgWith(4, null, {
        compile:
          status: @status = "failure"
      })
      @ClsiManager.sendRequest @project_id, @user_id, {}, @callback

    it "should call the callback only once", ->
      @callback.calledOnce.should.equal true

    it "should call the callback with an error", ->
      @callback.calledWithExactly(new Error("failed")).should.equal true

  describe "deleteAuxFiles", ->
    beforeEach ->
      @ClsiManager._makeRequest = sinon.stub().callsArg(2)

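The new describe block pins down two behaviours: a failing precompile check must surface its error, and the callback must fire exactly once. A minimal sketch of that guard follows; it is inferred from the stubs above, not copied from ClsiManager (the once-only wrapper and the resources argument are assumptions; `checkRecoursesForProblems`, sic, is the name the test stubs).

  sendRequest: (project_id, user_id, options = {}, _callback = (error, status) ->) ->
    callback = (args...) ->
      _callback(args...)
      _callback = () ->   # guarantee a single invocation
    @_buildRequest project_id, options, (error, req) =>
      return callback(error) if error?
      ClsiFormatChecker.checkRecoursesForProblems req.compile?.resources, (error) =>
        # fail fast before anything is posted to the CLSI
        return callback(error) if error?
        @_postToClsi project_id, user_id, req, options.compileGroup, (error, response) ->
          return callback(error) if error?
          callback null, response?.compile?.status
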
@@ -247,12 +262,12 @@ describe "ClsiManager", ->
        .calledWith(@project_id, {compiler:1, rootDoc_id: 1, imageName: 1, rootFolder: 1})
        .should.equal true

-    it "should flush the project to the database", ->
+    it "should not explicitly flush the project to the database", ->
      @DocumentUpdaterHandler.flushProjectToMongo
        .calledWith(@project_id)
-        .should.equal true
+        .should.equal false

-    it "should get only the live docs from the docupdater", ->
+    it "should get only the live docs from the docupdater with a background flush in docupdater", ->
      @DocumentUpdaterHandler.getProjectDocsIfMatch
        .calledWith(@project_id)
        .should.equal true

@@ -331,7 +346,49 @@ describe "ClsiManager", ->

    it "should set to main.tex", ->
      @request.compile.rootResourcePath.should.equal "main.tex"

  describe "when there is no valid root document and no main.tex document", ->
    beforeEach () ->
      @project.rootDoc_id = "not-valid"
      @docs = {
        "/other.tex": @doc_1 = {
          name: "other.tex"
          _id: "mock-doc-id-1"
          lines: ["Hello", "world"]
        },
        "/chapters/chapter1.tex": @doc_2 = {
          name: "chapter1.tex"
          _id: "mock-doc-id-2"
          lines: [
            "Chapter 1"
          ]
        }
      }
      @ProjectEntityHandler.getAllDocs = sinon.stub().callsArgWith(1, null, @docs)
      @ClsiManager._buildRequest @project, null, @callback

    it "should report an error", ->
      @callback.calledWith(new Error("no main file specified")).should.equal true

  describe "when there is no valid root document and a single document which is not main.tex", ->
    beforeEach (done) ->
      @project.rootDoc_id = "not-valid"
      @docs = {
        "/other.tex": @doc_1 = {
          name: "other.tex"
          _id: "mock-doc-id-1"
          lines: ["Hello", "world"]
        }
      }
      @ProjectEntityHandler.getAllDocs = sinon.stub().callsArgWith(1, null, @docs)
      @ClsiManager._buildRequest @project, null, (@error, @request) =>
        done()

    it "should set it to the only file", ->
      @request.compile.rootResourcePath.should.equal "other.tex"

  describe "with the draft option", ->
    it "should add the draft option into the request", (done) ->
      @ClsiManager._buildRequest @project_id, {timeout:100, draft: true}, (error, request) =>

@@ -44,7 +44,7 @@ describe "CompileManager", ->

  describe "successfully", ->
    beforeEach ->
-      @CompileManager._checkIfAutoCompileLimitHasBeenHit = (_, cb)-> cb(null, true)
+      @CompileManager._checkIfAutoCompileLimitHasBeenHit = (isAutoCompile, compileGroup, cb)-> cb(null, true)
      @CompileManager.compile @project_id, @user_id, {}, @callback

    it "should check the project has not been recently compiled", ->

@@ -84,7 +84,7 @@ describe "CompileManager", ->

  describe "when the project has been recently compiled", ->
    it "should return", (done)->
-      @CompileManager._checkIfAutoCompileLimitHasBeenHit = (_, cb)-> cb(null, true)
+      @CompileManager._checkIfAutoCompileLimitHasBeenHit = (isAutoCompile, compileGroup, cb)-> cb(null, true)
      @CompileManager._checkIfRecentlyCompiled = sinon.stub().callsArgWith(2, null, true)
      @CompileManager.compile @project_id, @user_id, {}, (err, status)->
        status.should.equal "too-recently-compiled"

@@ -92,7 +92,7 @@ describe "CompileManager", ->

  describe "should check the rate limit", ->
    it "should return", (done)->
-      @CompileManager._checkIfAutoCompileLimitHasBeenHit = sinon.stub().callsArgWith(1, null, false)
+      @CompileManager._checkIfAutoCompileLimitHasBeenHit = sinon.stub().callsArgWith(2, null, false)
      @CompileManager.compile @project_id, @user_id, {}, (err, status)->
        status.should.equal "autocompile-backoff"
        done()

@@ -222,14 +222,14 @@ describe "CompileManager", ->
  describe "_checkIfAutoCompileLimitHasBeenHit", ->

    it "should be able to compile if it is not an autocompile", (done)->
-      @ratelimiter.addCount.callsArgWith(1, null, true)
-      @CompileManager._checkIfAutoCompileLimitHasBeenHit false, (err, canCompile)=>
+      @ratelimiter.addCount.callsArgWith(2, null, true)
+      @CompileManager._checkIfAutoCompileLimitHasBeenHit false, "everyone", (err, canCompile)=>
        canCompile.should.equal true
        done()

    it "should be able to compile if rate limit has remaining", (done)->
      @ratelimiter.addCount.callsArgWith(1, null, true)
-      @CompileManager._checkIfAutoCompileLimitHasBeenHit true, (err, canCompile)=>
+      @CompileManager._checkIfAutoCompileLimitHasBeenHit true, "everyone", (err, canCompile)=>
        args = @ratelimiter.addCount.args[0][0]
        args.throttle.should.equal 25
        args.subjectName.should.equal "everyone"

@@ -240,13 +240,13 @@ describe "CompileManager", ->

    it "should not be able to compile if rate limit has no remaining", (done)->
      @ratelimiter.addCount.callsArgWith(1, null, false)
-      @CompileManager._checkIfAutoCompileLimitHasBeenHit true, (err, canCompile)=>
+      @CompileManager._checkIfAutoCompileLimitHasBeenHit true, "everyone", (err, canCompile)=>
        canCompile.should.equal false
        done()

    it "should return false if there is an error in the rate limit", (done)->
      @ratelimiter.addCount.callsArgWith(1, "error")
-      @CompileManager._checkIfAutoCompileLimitHasBeenHit true, (err, canCompile)=>
+      @CompileManager._checkIfAutoCompileLimitHasBeenHit true, "everyone", (err, canCompile)=>
        canCompile.should.equal false
        done()

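The widened signature threads the compile group through as the rate-limit subject. A hedged sketch of what the tests describe, not the shipped CompileManager: the throttle of 25 and the subject name come from the assertions, while `endpointName` and `timeInterval` are assumed values.

  _checkIfAutoCompileLimitHasBeenHit: (isAutoCompile, compileGroup, callback = (err, canCompile) ->) ->
    # manual compiles are never rate limited
    return callback(null, true) if !isAutoCompile
    opts =
      endpointName: "auto_compile"   # assumed
      timeInterval: 20               # assumed
      subjectName: compileGroup
      throttle: 25
    RateLimiter.addCount opts, (err, canCompile) ->
      # on a rate-limiter error, fail closed and refuse the autocompile
      canCompile = false if err? or !canCompile
      callback null, canCompile
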
@@ -265,19 +265,19 @@ describe 'DocumentUpdaterHandler', ->
        v: @version
      @docs = [ @doc0, @doc0, @doc0 ]
      @body = JSON.stringify @docs
-      @request.get = sinon.stub().callsArgWith(1, null, {statusCode: 200}, @body)
+      @request.post = sinon.stub().callsArgWith(1, null, {statusCode: 200}, @body)
      @handler.getProjectDocsIfMatch @project_id, @project_state_hash, @callback

    it 'should get the documents from the document updater', ->
-      url = "#{@settings.apis.documentupdater.url}/project/#{@project_id}/doc?state=#{@project_state_hash}"
-      @request.get.calledWith(url).should.equal true
+      url = "#{@settings.apis.documentupdater.url}/project/#{@project_id}/get_and_flush_if_old?state=#{@project_state_hash}"
+      @request.post.calledWith(url).should.equal true

    it "should call the callback with the documents", ->
      @callback.calledWithExactly(null, @docs).should.equal true

  describe "when the document updater API returns an error", ->
    beforeEach ->
-      @request.get = sinon.stub().callsArgWith(1, @error = new Error("something went wrong"), null, null)
+      @request.post = sinon.stub().callsArgWith(1, @error = new Error("something went wrong"), null, null)
      @handler.getProjectDocsIfMatch @project_id, @project_state_hash, @callback

    it "should return an error to the callback", ->

@@ -285,7 +285,7 @@ describe 'DocumentUpdaterHandler', ->

  describe "when the document updater returns a conflict error code", ->
    beforeEach ->
-      @request.get = sinon.stub().callsArgWith(1, null, { statusCode: 409 }, "Conflict")
+      @request.post = sinon.stub().callsArgWith(1, null, { statusCode: 409 }, "Conflict")
      @handler.getProjectDocsIfMatch @project_id, @project_state_hash, @callback

    it "should return the callback with no documents", ->

@@ -312,7 +312,7 @@ describe 'DocumentUpdaterHandler', ->

  describe "when the document updater API returns an error", ->
    beforeEach ->
-      @request.get = sinon.stub().callsArgWith(1, @error = new Error("something went wrong"), null, null)
+      @request.post = sinon.stub().callsArgWith(1, @error = new Error("something went wrong"), null, null)
      @handler.getProjectDocsIfMatch @project_id, @project_state_hash, @callback

    it "should return an error to the callback", ->

@@ -320,7 +320,7 @@ describe 'DocumentUpdaterHandler', ->

  describe "when the document updater returns a conflict error code", ->
    beforeEach ->
-      @request.get = sinon.stub().callsArgWith(1, null, { statusCode: 409 }, "Conflict")
+      @request.post = sinon.stub().callsArgWith(1, null, { statusCode: 409 }, "Conflict")
      @handler.getProjectDocsIfMatch @project_id, @project_state_hash, @callback

    it "should return the callback with no documents", ->

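The switch from GET `/doc` to POST `/get_and_flush_if_old` is what lets the docupdater start its own background flush instead of the web service flushing to Mongo explicitly. A sketch of the handler shape the tests imply, with status-code handling taken from the 200/409/error cases above (everything else is an assumption):

  getProjectDocsIfMatch: (project_id, projectStateHash, callback = (error, docs) ->) ->
    # POST so the docupdater can kick off a background flush while
    # returning the live docs; 409 means the state hash didn't match.
    url = "#{settings.apis.documentupdater.url}/project/#{project_id}/get_and_flush_if_old?state=#{projectStateHash}"
    request.post url, (error, res, body) ->
      return callback(error) if error?
      if res.statusCode is 409
        callback() # conflict: no docs, let the caller fall back
      else if 200 <= res.statusCode < 300
        try
          docs = JSON.parse(body)
        catch err
          return callback(err)
        callback null, docs
      else
        callback new Error("doc updater returned a non-success status code: #{res.statusCode}")
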
@@ -24,6 +24,7 @@ describe "DocumentController", ->
    @doc_lines = ["one", "two", "three"]
    @version = 42
    @ranges = {"mock": "ranges"}
    @pathname = '/a/b/c/file.tex'
    @rev = 5

  describe "getDocument", ->

@@ -34,12 +35,12 @@ describe "DocumentController", ->

    describe "when the document exists", ->
      beforeEach ->
-        @ProjectEntityHandler.getDoc = sinon.stub().callsArgWith(2, null, @doc_lines, @rev, @version, @ranges)
+        @ProjectEntityHandler.getDoc = sinon.stub().callsArgWith(3, null, @doc_lines, @rev, @version, @ranges, @pathname)
        @DocumentController.getDocument(@req, @res, @next)

      it "should get the document from Mongo", ->
        @ProjectEntityHandler.getDoc
-          .calledWith(@project_id, @doc_id)
+          .calledWith(@project_id, @doc_id, pathname: true)
          .should.equal true

      it "should return the document data to the client as JSON", ->

@@ -48,10 +49,11 @@ describe "DocumentController", ->
          lines: @doc_lines
          version: @version
          ranges: @ranges
          pathname: @pathname

    describe "when the document doesn't exist", ->
      beforeEach ->
-        @ProjectEntityHandler.getDoc = sinon.stub().callsArgWith(2, new Errors.NotFoundError("not found"), null)
+        @ProjectEntityHandler.getDoc = sinon.stub().callsArgWith(3, new Errors.NotFoundError("not found"), null)
        @DocumentController.getDocument(@req, @res, @next)

      it "should call next with the NotFoundError", ->

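A minimal sketch of the controller side these tests exercise, assuming Express-style req/res and the `Project_id`/`doc_id` param names (both assumptions; only the getDoc call shape and the JSON payload come from the assertions):

  getDocument: (req, res, next = (error) ->) ->
    {Project_id, doc_id} = req.params
    ProjectEntityHandler.getDoc Project_id, doc_id, pathname: true, (error, lines, rev, version, ranges, pathname) ->
      return next(error) if error?
      # the new pathname field rides along with the existing doc data
      res.type "json"
      res.send JSON.stringify {lines, version, ranges, pathname}
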
@@ -22,37 +22,60 @@ describe "HistoryController", ->
    @next = sinon.stub()
    @settings.apis =
      trackchanges:
        enabled: false
        url: "http://trackchanges.example.com"
      project_history:
        url: "http://project_history.example.com"
    @proxy =
      events: {}
      pipe: sinon.stub()
      on: (event, handler) -> @events[event] = handler
    @request.returns @proxy
-    @HistoryController.proxyToHistoryApi @req, @res, @next
-
-  describe "successfully", ->
-    it "should get the user id", ->
-      @AuthenticationController.getLoggedInUserId
-        .calledWith(@req)
-        .should.equal true
-
-    it "should call the track changes api", ->
-      @request
-        .calledWith({
-          url: "#{@settings.apis.trackchanges.url}#{@req.url}"
-          method: @req.method
-          headers:
-            "X-User-Id": @user_id
-        })
-        .should.equal true
-
-    it "should pipe the response to the client", ->
-      @proxy.pipe
-        .calledWith(@res)
-        .should.equal true

  describe "with project history enabled", ->
    beforeEach ->
      @settings.apis.project_history.enabled = true
      @HistoryController.proxyToHistoryApi @req, @res, @next

    it "should get the user id", ->
      @AuthenticationController.getLoggedInUserId
        .calledWith(@req)
        .should.equal true

    it "should call the project history api", ->
      @request
        .calledWith({
          url: "#{@settings.apis.project_history.url}#{@req.url}"
          method: @req.method
          headers:
            "X-User-Id": @user_id
        })
        .should.equal true

    it "should pipe the response to the client", ->
      @proxy.pipe
        .calledWith(@res)
        .should.equal true

  describe "with project history disabled", ->
    beforeEach ->
      @settings.apis.project_history.enabled = false
      @HistoryController.proxyToHistoryApi @req, @res, @next

    it "should call the track changes api", ->
      @request
        .calledWith({
          url: "#{@settings.apis.trackchanges.url}#{@req.url}"
          method: @req.method
          headers:
            "X-User-Id": @user_id
        })
        .should.equal true

  describe "with an error", ->
    beforeEach ->
      @HistoryController.proxyToHistoryApi @req, @res, @next
      @proxy.events["error"].call(@proxy, @error = new Error("oops"))

    it "should pass the error up the call chain", ->

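The restructured tests boil down to one routing decision: proxy to the new project-history service when its flag is on, otherwise keep the old track-changes backend. A hedged sketch assembled from the stubs above (the `?.` guard and error wiring are assumptions):

  proxyToHistoryApi: (req, res, next = (error) ->) ->
    user_id = AuthenticationController.getLoggedInUserId req
    # route on the feature flag; fall back to the legacy service
    base = if settings.apis.project_history?.enabled
      settings.apis.project_history.url
    else
      settings.apis.trackchanges.url
    proxy = request
      url: "#{base}#{req.url}"
      method: req.method
      headers:
        "X-User-Id": user_id
    proxy.pipe res
    proxy.on "error", (error) -> next(error)
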
@@ -1,63 +0,0 @@
chai = require('chai')
expect = chai.expect
chai.should()
sinon = require("sinon")
modulePath = "../../../../app/js/Features/History/HistoryManager"
SandboxedModule = require('sandboxed-module')

describe "HistoryManager", ->
  beforeEach ->
    @HistoryManager = SandboxedModule.require modulePath, requires:
      "request" : @request = sinon.stub()
      "settings-sharelatex": @settings =
        apis:
          trackchanges:
            url: "trackchanges.sharelatex.com"
      "logger-sharelatex": @logger = {log: sinon.stub(), error: sinon.stub()}
    @project_id = "project-id-123"
    @callback = sinon.stub()
    @request.post = sinon.stub()

  describe "flushProject", ->
    describe "with a successful response code", ->
      beforeEach ->
        @request.post = sinon.stub().callsArgWith(1, null, statusCode: 204, "")
        @HistoryManager.flushProject @project_id, @callback

      it "should flush the project in the track changes api", ->
        @request.post
          .calledWith("#{@settings.apis.trackchanges.url}/project/#{@project_id}/flush")
          .should.equal true

      it "should call the callback without an error", ->
        @callback.calledWith(null).should.equal true

    describe "with a failed response code", ->
      beforeEach ->
        @request.post = sinon.stub().callsArgWith(1, null, statusCode: 500, "")
        @HistoryManager.flushProject @project_id, @callback

      it "should call the callback with an error", ->
        @callback.calledWith(new Error("track-changes api responded with a non-success code: 500")).should.equal true

      it "should log the error", ->
        @logger.error
          .calledWith({
            err: new Error("track-changes api responded with a non-success code: 500")
            project_id: @project_id
          }, "error flushing project in track-changes api")
          .should.equal true

  describe "ArchiveProject", ->

    it "should call the post endpoint", (done)->
      @request.post.callsArgWith(1, null, {})
      @HistoryManager.archiveProject @project_id, (err)=>
        @request.post.calledWith("#{@settings.apis.trackchanges.url}/project/#{@project_id}/archive")
        done()

    it "should return an error on a non success", (done)->
      @request.post.callsArgWith(1, null, {statusCode:500})
      @HistoryManager.archiveProject @project_id, (err)=>
        expect(err).to.exist
        done()

@@ -12,7 +12,7 @@ describe 'ProjectDetailsHandler', ->
  beforeEach ->
    @project_id = "321l3j1kjkjl"
    @user_id = "user-id-123"
    @project =
      name: "project"
      description: "this is a great project"
      something: "should not exist"

@@ -20,7 +20,7 @@ describe 'ProjectDetailsHandler', ->
      owner_ref: @user_id
    @user =
      features: "mock-features"
    @ProjectGetter =
      getProjectWithoutDocLines: sinon.stub().callsArgWith(1, null, @project)
      getProject: sinon.stub().callsArgWith(2, null, @project)
    @ProjectModel =

@@ -42,7 +42,7 @@ describe 'ProjectDetailsHandler', ->
  describe "getDetails", ->

    it "should find the project and owner", (done)->
      @handler.getDetails @project_id, (err, details)=>
        details.name.should.equal @project.name
        details.description.should.equal @project.description
        details.compiler.should.equal @project.compiler

@@ -50,6 +50,13 @@ describe 'ProjectDetailsHandler', ->
        assert.equal(details.something, undefined)
        done()

    it "should find overleaf metadata if it exists", (done)->
      @project.overleaf = { id: 'id' }
      @handler.getDetails @project_id, (err, details)=>
        details.overleaf.should.equal @project.overleaf
        assert.equal(details.something, undefined)
        done()

    it "should return an error for a non-existent project", (done)->
      @ProjectGetter.getProject.callsArg(2, null, null)
      err = new Errors.NotFoundError("project not found")

@@ -79,7 +86,7 @@ describe 'ProjectDetailsHandler', ->
      @handler.getProjectDescription @project_id, (returnedErr, returnedDescription)=>
        err.should.equal returnedErr
        description.should.equal returnedDescription
        done()

  describe "setProjectDescription", ->

@@ -110,7 +117,7 @@ describe 'ProjectDetailsHandler', ->
      @handler.renameProject @project_id, @newName, =>
        @tpdsUpdateSender.moveEntity.calledWith({project_id:@project_id, project_name:@project.name, newProjectName:@newName}).should.equal true
        done()

    it "should not do anything with an invalid name", (done) ->
      @handler.validateProjectName = sinon.stub().yields(new Error("invalid name"))
      @handler.renameProject @project_id, @newName, =>

@@ -119,6 +126,12 @@ describe 'ProjectDetailsHandler', ->
      done()

  describe "validateProjectName", ->

    it "should reject undefined names", (done) ->
      @handler.validateProjectName undefined, (error) ->
        expect(error).to.exist
        done()

    it "should reject empty names", (done) ->
      @handler.validateProjectName "", (error) ->
        expect(error).to.exist

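The two new cases only require that undefined and empty names are rejected; a minimal sketch satisfying them (the error message and the trim are assumptions):

  validateProjectName: (name, callback = (error) ->) ->
    # reject undefined, null, empty and whitespace-only names
    if !name? or name.trim().length is 0
      return callback(new Error("project name cannot be blank"))
    callback()
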
@@ -14,17 +14,17 @@ describe 'ProjectEntityHandler', ->
  doc_id = '4eecb1c1bffa66588e0000a2'
  folder_id = "4eecaffcbffa66588e000008"
  rootFolderId = "4eecaffcbffa66588e000007"

  beforeEach ->
    @FileStoreHandler =
      uploadFileFromDisk: (project_id, fileRef, localImagePath, callback)-> callback()
      copyFile: sinon.stub().callsArgWith(4, null)
    @tpdsUpdateSender =
      addDoc: sinon.stub().callsArg(1)
      addFile: sinon.stub().callsArg(1)
      addFolder: sinon.stub().callsArg(1)
    @rootFolder =
      _id: rootFolderId,
      folders: [
        {name: "level1", folders: []}
      ]

@@ -46,7 +46,7 @@ describe 'ProjectEntityHandler', ->
    @FileModel = class File
      constructor: (options)->
        {@name} = options
        @._id = "file_id"
        @rev = 0
    @FolderModel = class Folder
      constructor: (options)->

@@ -57,12 +57,12 @@ describe 'ProjectEntityHandler', ->

-    @ProjectModel.findById = (project_id, callback)=> callback(null, @project)
+    @ProjectModel.getProject = (project_id, fields, callback)=> callback(null, @project)
    @ProjectGetter =
      getProjectWithOnlyFolders : (project_id, callback)=> callback(null, @project)
      getProjectWithoutDocLines : (project_id, callback)=> callback(null, @project)
      getProject: sinon.stub()
    @projectUpdater = markAsUpdated: sinon.stub()
    @projectLocator =
      findElement : sinon.stub()
    @settings =
      maxEntitiesPerProject: 200

@@ -97,8 +97,8 @@ describe 'ProjectEntityHandler', ->
        else
          cb null, @parentFolder
      @ProjectEntityHandler.addFolder = (project_id, parentFolder_id, folderName, callback)=>
        callback null, {name: folderName}, @parentFolder_id

    it 'should return the root folder if the path is just a slash', (done)->
      path = "/"
      @ProjectEntityHandler.mkdirp project_id, path, (err, folders, lastFolder)=>

@@ -239,7 +239,7 @@ describe 'ProjectEntityHandler', ->
      @ProjectEntityHandler._putElement = sinon.stub().callsArgWith(4, null, path: @pathAfterMove)
      @ProjectGetter.getProject.callsArgWith(2, null, @project)
      @tpdsUpdateSender.moveEntity = sinon.stub().callsArg(1)

    describe "moving a doc", ->
      beforeEach (done) ->
        @docId = "4eecaffcbffa66588e000009"

@@ -257,10 +257,10 @@ describe 'ProjectEntityHandler', ->
      it 'should remove the element from its current position', ->
        @ProjectEntityHandler._removeElementFromMongoArray
          .calledWith(@ProjectModel, project_id, @path.mongo).should.equal true

      it "should put the element back in the new folder", ->
        @ProjectEntityHandler._putElement.calledWith(@project, folder_id, @doc, "docs").should.equal true

      it 'should tell the third party data store', ->
        @tpdsUpdateSender.moveEntity
          .calledWith({

@@ -271,7 +271,7 @@ describe 'ProjectEntityHandler', ->
            rev: @doc.rev
          })
          .should.equal true

    describe "moving a folder", ->
      beforeEach ->
        @folder_id = "folder-to-move"

@@ -294,7 +294,7 @@ describe 'ProjectEntityHandler', ->
          else
            console.log "UNKNOWN ID", options
        sinon.spy @projectLocator, "findElement"

      describe "when the destination folder is outside the moving folder", ->
        beforeEach (done) ->
          @path.fileSystem = "/one/directory"

@@ -318,7 +318,7 @@ describe 'ProjectEntityHandler', ->
            @path.mongo
          )
          .should.equal true

        it "should put the element back in the new folder", ->
          @ProjectEntityHandler._putElement
            .calledWith(

@@ -328,7 +328,7 @@ describe 'ProjectEntityHandler', ->
            "folder"
          )
          .should.equal true

        it 'should tell the third party data store', ->
          @tpdsUpdateSender.moveEntity
            .calledWith({

@@ -339,7 +339,7 @@ describe 'ProjectEntityHandler', ->
            rev: @folder.rev
          })
          .should.equal true

      describe "when the destination folder is inside the moving folder", ->
        beforeEach ->
          @path.fileSystem = "/one/two"

@@ -355,7 +355,7 @@ describe 'ProjectEntityHandler', ->
            project: @project
          })
          .should.equal true

        it "should return an error", ->
          @callback
            .calledWith(new Error("destination folder is a child folder of me"))

@@ -385,16 +385,41 @@ describe 'ProjectEntityHandler', ->
      @rev = 5
      @version = 42
      @ranges = {"mock": "ranges"}

      @DocstoreManager.getDoc = sinon.stub().callsArgWith(3, null, @lines, @rev, @version, @ranges)
-      @ProjectEntityHandler.getDoc project_id, doc_id, @callback
-
-    it "should call the docstore", ->
-      @DocstoreManager.getDoc
-        .calledWith(project_id, doc_id)
-        .should.equal true

    describe 'without pathname option', ->
      beforeEach ->
        @ProjectEntityHandler.getDoc project_id, doc_id, @callback

      it "should call the docstore", ->
        @DocstoreManager.getDoc
          .calledWith(project_id, doc_id)
          .should.equal true

      it "should call the callback with the lines, version and rev", ->
        @callback.calledWith(null, @lines, @rev, @version, @ranges).should.equal true

    describe 'with pathname option', ->
      beforeEach ->
        @project = 'a project'
        @path = mongo: "mongo.path", fileSystem: "/file/system/path"
        @projectLocator.findElement = sinon.stub().callsArgWith(1, null, {}, @path)
        @ProjectEntityHandler.getDoc project_id, doc_id, {pathname: true}, @callback

      it "should call the project locator", ->
        @projectLocator.findElement
          .calledWith({project_id: project_id, element_id: doc_id, type: 'doc'})
          .should.equal true

      it "should call the docstore", ->
        @DocstoreManager.getDoc
          .calledWith(project_id, doc_id)
          .should.equal true

      it "should return the pathname if option given", ->
        @callback.calledWith(null, @lines, @rev, @version, @ranges, @path.fileSystem).should.equal true

      it "should call the callback with the lines, version and rev", ->
        @callback.calledWith(null, @lines, @rev, @version, @ranges).should.equal true

  describe 'addDoc', ->
    beforeEach ->

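The getDoc restructure adds an optional options argument and an extra pathname result. A hedged sketch of the signature the tests pin down; the options normalisation and the assumed third argument to DocstoreManager.getDoc are guesses, while the locator call shape matches the assertion exactly:

  getDoc: (project_id, doc_id, options, callback) ->
    # allow getDoc(project_id, doc_id, callback) for old callers
    if typeof options is "function"
      [callback, options] = [options, {}]
    DocstoreManager.getDoc project_id, doc_id, {}, (error, lines, rev, version, ranges) ->
      return callback(error) if error?
      if options.pathname
        ProjectLocator.findElement {project_id, element_id: doc_id, type: 'doc'}, (error, element, path) ->
          return callback(error) if error?
          callback null, lines, rev, version, ranges, path.fileSystem
      else
        callback null, lines, rev, version, ranges
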
@@ -876,7 +901,7 @@ describe 'ProjectEntityHandler', ->
          path: path
        })
        .should.equal true

  describe "setRootDoc", ->
    it "should call Project.update", ->
      @project_id = "project-id-123234adfs"

@@ -907,22 +932,22 @@ describe 'ProjectEntityHandler', ->

    it 'should copy the file in FileStoreHandler', (done)->
      @ProjectEntityHandler._putElement = sinon.stub().callsArgWith(4, null, {path:{fileSystem:"somehintg"}})
      @ProjectEntityHandler.copyFileFromExistingProject project_id, folder_id, oldProject_id, oldFileRef, (err, fileRef, parentFolder)=>
        @FileStoreHandler.copyFile.calledWith(oldProject_id, oldFileRef._id, project_id, fileRef._id).should.equal true
        done()

    it 'should put file into folder by calling put element', (done)->
      @ProjectEntityHandler._putElement = (passedProject, passedFolder_id, passedFileRef, passedType, callback)->
        passedProject._id.should.equal project_id
        passedFolder_id.should.equal folder_id
        passedFileRef.name.should.equal fileName
        passedType.should.equal 'file'
        done()

      @ProjectEntityHandler.copyFileFromExistingProject project_id, folder_id, oldProject_id, oldFileRef, (err, fileRef, parentFolder)->

    it 'should return doc and parent folder', (done)->
      @ProjectEntityHandler.copyFileFromExistingProject project_id, folder_id, oldProject_id, oldFileRef, (err, fileRef, parentFolder)->
        parentFolder.should.equal folder_id
        fileRef.name.should.equal fileName
        done()

@@ -942,7 +967,7 @@ describe 'ProjectEntityHandler', ->
        options.rev.should.equal 0
        done()

      @ProjectEntityHandler.copyFileFromExistingProject project_id, folder_id, oldProject_id, oldFileRef, (err, fileRef, parentFolder)->

  describe "renameEntity", ->

@@ -1054,7 +1079,7 @@ describe 'ProjectEntityHandler', ->
      @folder =
        _id: ObjectId()
        name: "someFolder"
      @doc =
        _id: ObjectId()
        name: "new.tex"
      @path = mongo: "mongo.path", fileSystem: "/file/system/old.tex"

@@ -1064,7 +1089,7 @@ describe 'ProjectEntityHandler', ->

    describe "updating the project", ->

      it "should use the correct mongo path", (done)->
        @ProjectEntityHandler._putElement @project, @folder._id, @doc, "docs", (err)=>

@@ -1089,12 +1114,12 @@ describe 'ProjectEntityHandler', ->
        done()

      it "should error if the element has no _id", (done)->
        doc =
          name: "something"
        @ProjectEntityHandler._putElement @project, @folder._id, doc, "doc", (err)=>
          @ProjectModel.update.called.should.equal false
          done()

  describe "_countElements", ->

@@ -1109,7 +1134,7 @@ describe 'ProjectEntityHandler', ->
        fileRefs: {}
        folders: [
          {
            docs: [_id:1234],
            fileRefs: [{_id:23123}, {_id:123213}, {_id:2312}]
            folders: [
              {

@@ -1131,7 +1156,7 @@ describe 'ProjectEntityHandler', ->
            }
          ]
        }
      ]
    ]

    it "should return the correct number", (done)->
      @ProjectEntityHandler._countElements @project, (err, count)->

@@ -1142,19 +1167,19 @@ describe 'ProjectEntityHandler', ->
      @project.rootFolder[0].folders[0].folders = undefined
      @ProjectEntityHandler._countElements @project, (err, count)->
        count.should.equal 17
        done()

    it "should deal with null docs", (done)->
      @project.rootFolder[0].folders[0].docs = undefined
      @ProjectEntityHandler._countElements @project, (err, count)->
        count.should.equal 23
        done()

    it "should deal with null fileRefs", (done)->
      @project.rootFolder[0].folders[0].folders[0].fileRefs = undefined
      @ProjectEntityHandler._countElements @project, (err, count)->
        count.should.equal 23
        done()

@@ -10,24 +10,22 @@ describe "ArchiveManager", ->
  beforeEach ->
    @logger =
      error: sinon.stub()
      warn: sinon.stub()
      err: ->
      log: sinon.stub()
-    @process = new events.EventEmitter
-    @process.stdout = new events.EventEmitter
-    @process.stderr = new events.EventEmitter
-
-    @child =
-      spawn: sinon.stub().returns(@process)
    @metrics =
      Timer: class Timer
        done: sinon.stub()
    @zipfile = new events.EventEmitter
    @zipfile.readEntry = sinon.stub()
    @zipfile.close = sinon.stub()

    @ArchiveManager = SandboxedModule.require modulePath, requires:
-      "child_process": @child
      "yauzl": @yauzl = {open: sinon.stub().callsArgWith(2, null, @zipfile)}
      "logger-sharelatex": @logger
      "metrics-sharelatex": @metrics
      "fs": @fs = {}
      "fs-extra": @fse = {}

  describe "extractZipArchive", ->
    beforeEach ->

@@ -39,10 +37,10 @@ describe "ArchiveManager", ->
    describe "successfully", ->
      beforeEach (done) ->
        @ArchiveManager.extractZipArchive @source, @destination, done
-        @process.emit "close"
        @zipfile.emit "end"

-      it "should run unzip", ->
-        @child.spawn.calledWithExactly("unzip", [@source, "-d", @destination]).should.equal true
      it "should run yauzl", ->
        @yauzl.open.calledWith(@source).should.equal true

      it "should time the unzip", ->
        @metrics.Timer::done.called.should.equal true

@@ -50,13 +48,12 @@ describe "ArchiveManager", ->
      it "should log the unzip", ->
        @logger.log.calledWith(sinon.match.any, "unzipping file").should.equal true

-    describe "with an error on stderr", ->
    describe "with an error in the zip file header", ->
      beforeEach (done) ->
        @yauzl.open = sinon.stub().callsArgWith(2, new Error("Something went wrong"))
        @ArchiveManager.extractZipArchive @source, @destination, (error) =>
          @callback(error)
          done()
-        @process.stderr.emit "data", "Something went wrong"
-        @process.emit "close"

      it "should return the callback with an error", ->
        @callback.calledWithExactly(new Error("Something went wrong")).should.equal true

@@ -74,60 +71,177 @@ describe "ArchiveManager", ->
      it "should return the callback with an error", ->
        @callback.calledWithExactly(new Error("zip_too_large")).should.equal true

-      it "should not call spawn", ->
-        @child.spawn.called.should.equal false
      it "should not call yauzl.open", ->
        @yauzl.open.called.should.equal false

-    describe "with an error on the process", ->
    describe "with an error in the extracted files", ->
      beforeEach (done) ->
        @ArchiveManager.extractZipArchive @source, @destination, (error) =>
          @callback(error)
          done()
-        @process.emit "error", new Error("Something went wrong")
        @zipfile.emit "error", new Error("Something went wrong")

      it "should return the callback with an error", ->
        @callback.calledWithExactly(new Error("Something went wrong")).should.equal true

      it "should log out the error", ->
        @logger.error.called.should.equal true

    describe "with a relative extracted file path", ->
      beforeEach (done) ->
        @zipfile.openReadStream = sinon.stub()
        @ArchiveManager.extractZipArchive @source, @destination, (error) =>
          @callback(error)
          done()
        @zipfile.emit "entry", {fileName: "../testfile.txt"}
        @zipfile.emit "end"

      it "should not try to read the file entry", ->
        @zipfile.openReadStream.called.should.equal false

      it "should log out a warning", ->
        @logger.warn.called.should.equal true

    describe "with an unnormalized extracted file path", ->
      beforeEach (done) ->
        @zipfile.openReadStream = sinon.stub()
        @ArchiveManager.extractZipArchive @source, @destination, (error) =>
          @callback(error)
          done()
        @zipfile.emit "entry", {fileName: "foo/./testfile.txt"}
        @zipfile.emit "end"

      it "should not try to read the file entry", ->
        @zipfile.openReadStream.called.should.equal false

      it "should log out a warning", ->
        @logger.warn.called.should.equal true

    describe "with a directory entry", ->
      beforeEach (done) ->
        @zipfile.openReadStream = sinon.stub()
        @ArchiveManager.extractZipArchive @source, @destination, (error) =>
          @callback(error)
          done()
        @zipfile.emit "entry", {fileName: "testdir/"}
        @zipfile.emit "end"

      it "should not try to read the entry", ->
        @zipfile.openReadStream.called.should.equal false

      it "should not log out a warning", ->
        @logger.warn.called.should.equal false

    describe "with an error opening the file read stream", ->
      beforeEach (done) ->
        @zipfile.openReadStream = sinon.stub().callsArgWith(1, new Error("Something went wrong"))
        @writeStream = new events.EventEmitter
        @ArchiveManager.extractZipArchive @source, @destination, (error) =>
          @callback(error)
          done()
        @zipfile.emit "entry", {fileName: "testfile.txt"}
        @zipfile.emit "end"

      it "should return the callback with an error", ->
        @callback.calledWithExactly(new Error("Something went wrong")).should.equal true

      it "should log out the error", ->
        @logger.error.called.should.equal true

      it "should close the zipfile", ->
        @zipfile.close.called.should.equal true

    describe "with an error in the file read stream", ->
      beforeEach (done) ->
        @readStream = new events.EventEmitter
        @readStream.pipe = sinon.stub()
        @zipfile.openReadStream = sinon.stub().callsArgWith(1, null, @readStream)
        @writeStream = new events.EventEmitter
        @fs.createWriteStream = sinon.stub().returns @writeStream
        @fse.ensureDir = sinon.stub().callsArg(1)
        @ArchiveManager.extractZipArchive @source, @destination, (error) =>
          @callback(error)
          done()
        @zipfile.emit "entry", {fileName: "testfile.txt"}
        @readStream.emit "error", new Error("Something went wrong")
        @zipfile.emit "end"

      it "should return the callback with an error", ->
        @callback.calledWithExactly(new Error("Something went wrong")).should.equal true

      it "should log out the error", ->
        @logger.error.called.should.equal true

      it "should close the zipfile", ->
        @zipfile.close.called.should.equal true

    describe "with an error in the file write stream", ->
      beforeEach (done) ->
        @readStream = new events.EventEmitter
        @readStream.pipe = sinon.stub()
        @readStream.unpipe = sinon.stub()
        @readStream.destroy = sinon.stub()
        @zipfile.openReadStream = sinon.stub().callsArgWith(1, null, @readStream)
        @writeStream = new events.EventEmitter
        @fs.createWriteStream = sinon.stub().returns @writeStream
        @fse.ensureDir = sinon.stub().callsArg(1)
        @ArchiveManager.extractZipArchive @source, @destination, (error) =>
          @callback(error)
          done()
        @zipfile.emit "entry", {fileName: "testfile.txt"}
        @writeStream.emit "error", new Error("Something went wrong")
        @zipfile.emit "end"

      it "should return the callback with an error", ->
        @callback.calledWithExactly(new Error("Something went wrong")).should.equal true

      it "should log out the error", ->
        @logger.error.called.should.equal true

      it "should unpipe from the readstream", ->
        @readStream.unpipe.called.should.equal true

      it "should destroy the readstream", ->
        @readStream.destroy.called.should.equal true

      it "should close the zipfile", ->
        @zipfile.close.called.should.equal true

  describe "_isZipTooLarge", ->
    beforeEach ->
-      @output = (totalSize)-> " Length Date Time Name \n-------- ---- ---- ---- \n241 03-12-16 12:20 main.tex \n108801 03-12-16 12:20 ddd/x1J5kHh.jpg \n-------- ------- \n#{totalSize} 2 files\n"

    it "should return false with small output", (done)->
      @ArchiveManager._isZipTooLarge @source, (error, isTooLarge) =>
        isTooLarge.should.equal false
        done()
-      @process.stdout.emit "data", @output("109042")
-      @process.emit "close"
      @zipfile.emit "entry", {uncompressedSize: 109042}
      @zipfile.emit "end"

    it "should return true with large bytes", (done)->
      @ArchiveManager._isZipTooLarge @source, (error, isTooLarge) =>
        isTooLarge.should.equal true
        done()
-      @process.stdout.emit "data", @output("1090000000000000042")
-      @process.emit "close"
      @zipfile.emit "entry", {uncompressedSize: 1090000000000000042}
      @zipfile.emit "end"

    it "should return error on no data", (done)->
      @ArchiveManager._isZipTooLarge @source, (error, isTooLarge) =>
        expect(error).to.exist
        done()
-      @process.stdout.emit "data", ""
-      @process.emit "close"
      @zipfile.emit "entry", {}
      @zipfile.emit "end"

    it "should return error if it didn't get a number", (done)->
      @ArchiveManager._isZipTooLarge @source, (error, isTooLarge) =>
        expect(error).to.exist
        done()
-      @process.stdout.emit "data", @output("total_size_string")
-      @process.emit "close"
      @zipfile.emit "entry", {uncompressedSize: "random-error"}
      @zipfile.emit "end"

-    it "should return error if the is only a bit of data", (done)->
    it "should return error if there is no data", (done)->
      @ArchiveManager._isZipTooLarge @source, (error, isTooLarge) =>
        expect(error).to.exist
        done()
-      @process.stdout.emit "data", " Length Date Time Name \n--------"
-      @process.emit "close"
      @zipfile.emit "end"

  describe "findTopLevelDirectory", ->
    beforeEach ->

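Taken together, the new yauzl-based tests describe an extraction loop that replaces the old `unzip` child process: open the archive, skip directory entries, refuse unsafe paths with a warning, stream each remaining entry to disk, and make sure the zipfile is closed and the callback fires exactly once on any error. The sketch below is reconstructed from those assertions and is not the shipped ArchiveManager; the `lazyEntries` option, the exact path checks, and the once-only wrapper are assumptions.

  path = require "path"
  fs = require "fs"
  fse = require "fs-extra"
  yauzl = require "yauzl"

  extractZipArchive = (source, destination, _callback = (error) ->) ->
    callback = (args...) ->
      _callback(args...)
      _callback = () ->   # the tests require the callback to fire only once
    ArchiveManager._isZipTooLarge source, (error, isTooLarge) ->
      return callback(error) if error?
      return callback(new Error("zip_too_large")) if isTooLarge
      yauzl.open source, { lazyEntries: true }, (error, zipfile) ->
        return callback(error) if error?   # bad zip file header
        zipfile.on "error", (error) -> callback(error)
        zipfile.on "end", () -> callback()
        zipfile.on "entry", (entry) ->
          # directories end in "/": nothing to read, move on quietly
          return zipfile.readEntry() if /\/$/.test(entry.fileName)
          # refuse relative or unnormalized paths that could escape destination
          if entry.fileName.indexOf("..") != -1 or path.normalize(entry.fileName) != entry.fileName
            logger.warn { source, fileName: entry.fileName }, "skipping unsafe zip entry"
            return zipfile.readEntry()
          zipfile.openReadStream entry, (error, readStream) ->
            if error?
              zipfile.close()
              return callback(error)
            destFile = path.join(destination, entry.fileName)
            fse.ensureDir path.dirname(destFile), (error) ->
              return callback(error) if error?
              writeStream = fs.createWriteStream destFile
              readStream.on "error", (error) ->
                zipfile.close()
                callback(error)
              writeStream.on "error", (error) ->
                readStream.unpipe writeStream
                readStream.destroy()
                zipfile.close()
                callback(error)
              writeStream.on "finish", () -> zipfile.readEntry()
              readStream.pipe writeStream
        zipfile.readEntry()   # with lazyEntries, pull the first entry explicitly
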
@@ -12,9 +12,9 @@ ObjectId = require("mongojs").ObjectId

describe "UserInfoController", ->
  beforeEach ->
    @UserDeleter =
      deleteUser: sinon.stub().callsArgWith(1)
    @UserUpdater =
      updatePersonalInfo: sinon.stub()
    @sanitizer = escape: (v)-> v
    sinon.spy @sanitizer, "escape"

@@ -50,23 +50,47 @@ describe "UserInfoController", ->
      .should.equal true

  describe "getPersonalInfo", ->
-    beforeEach ->
-      @user_id = ObjectId().toString()
-      @user =
-        _id: ObjectId(@user_id)
-      @req.params = user_id: @user_id

-    describe "when the user exists", ->
    describe "when the user exists with sharelatex id", ->
      beforeEach ->
        @user_id = ObjectId().toString()
        @user =
          _id: ObjectId(@user_id)
        @req.params = user_id: @user_id
        @UserGetter.getUser = sinon.stub().callsArgWith(2, null, @user)
        @UserInfoController.sendFormattedPersonalInfo = sinon.stub()
        @UserInfoController.getPersonalInfo(@req, @res, @next)

      it "should look up the user in the database", ->
        @UserGetter.getUser
-          .calledWith(@user_id, { _id: true, first_name: true, last_name: true, email: true })
          .calledWith(
            { _id: ObjectId(@user_id) },
            { _id: true, first_name: true, last_name: true, email: true }
          ).should.equal true

      it "should send the formatted details back to the client", ->
        @UserInfoController.sendFormattedPersonalInfo
          .calledWith(@user, @res, @next)
          .should.equal true

    describe "when the user exists with overleaf id", ->
      beforeEach ->
        @user_id = 12345
        @user =
          _id: ObjectId()
          overleaf:
            id: @user_id
        @req.params = user_id: @user_id.toString()
        @UserGetter.getUser = sinon.stub().callsArgWith(2, null, @user)
        @UserInfoController.sendFormattedPersonalInfo = sinon.stub()
        @UserInfoController.getPersonalInfo(@req, @res, @next)

      it "should look up the user in the database", ->
        @UserGetter.getUser
          .calledWith(
            { "overleaf.id": @user_id },
            { _id: true, first_name: true, last_name: true, email: true }
          ).should.equal true

      it "should send the formatted details back to the client", ->
        @UserInfoController.sendFormattedPersonalInfo
          .calledWith(@user, @res, @next)

@@ -74,13 +98,24 @@ describe "UserInfoController", ->

    describe "when the user does not exist", ->
      beforeEach ->
        @user_id = ObjectId().toString()
        @req.params = user_id: @user_id
        @UserGetter.getUser = sinon.stub().callsArgWith(2, null, null)
        @UserInfoController.sendFormattedPersonalInfo = sinon.stub()
        @UserInfoController.getPersonalInfo(@req, @res, @next)

      it "should return 404 to the client", ->
        @res.statusCode.should.equal 404

    describe "when the user id is invalid", ->
      beforeEach ->
        @user_id = "invalid"
        @req.params = user_id: @user_id
        @UserGetter.getUser = sinon.stub().callsArgWith(2, null, null)
        @UserInfoController.getPersonalInfo(@req, @res, @next)

      it "should return 400 to the client", ->
        @res.statusCode.should.equal 400

  describe "sendFormattedPersonalInfo", ->
    beforeEach ->
      @user =

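The new cases encode an id-dispatch rule: a plain integer means a v1 (overleaf) account, a valid ObjectId means a sharelatex account, and anything else is a 400. A hedged sketch of that branching, inferred from the assertions (the regex test, `parseInt`, and `res.sendStatus` are assumptions about the implementation):

  getPersonalInfo: (req, res, next = (error) ->) ->
    {user_id} = req.params
    if /^\d+$/.test(user_id)
      query = "overleaf.id": parseInt(user_id, 10)
    else if ObjectId.isValid(user_id)
      query = _id: ObjectId(user_id)
    else
      return res.sendStatus(400)
    UserGetter.getUser query, { _id: true, first_name: true, last_name: true, email: true }, (error, user) ->
      return next(error) if error?
      return res.sendStatus(404) if !user?
      UserInfoController.sendFormattedPersonalInfo user, res, next
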
@@ -4,6 +4,9 @@ User = require "./helpers/User"
request = require "./helpers/request"
settings = require "settings-sharelatex"

MockDocstoreApi = require './helpers/MockDocstoreApi'
MockDocUpdaterApi = require './helpers/MockDocUpdaterApi'

try_read_access = (user, project_id, test, callback) ->
  async.series [
    (cb) ->

@@ -0,0 +1,12 @@
express = require("express")
app = express()

module.exports = MockDocUpdaterApi =
  run: () ->
    app.post "/project/:project_id/flush", (req, res, next) =>
      res.sendStatus 200

    app.listen 3003, (error) ->
      throw error if error?

MockDocUpdaterApi.run()

@@ -0,0 +1,29 @@
express = require("express")
bodyParser = require "body-parser"
app = express()

module.exports = MockDocStoreApi =
  docs: {}

  run: () ->
    app.post "/project/:project_id/doc/:doc_id", bodyParser.json(), (req, res, next) =>
      {project_id, doc_id} = req.params
      {lines, version, ranges} = req.body
      @docs[project_id] ?= {}
      @docs[project_id][doc_id] = {lines, version, ranges}
      @docs[project_id][doc_id].rev ?= 0
      @docs[project_id][doc_id].rev += 1
      res.json {
        modified: true
        rev: @docs[project_id][doc_id].rev
      }

    app.get "/project/:project_id/doc", (req, res, next) =>
      docs = (doc for doc_id, doc of @docs[req.params.project_id])
      res.send JSON.stringify docs

    app.listen 3016, (error) ->
      throw error if error?

MockDocStoreApi.run()

services/web/test/acceptance/scripts/full-test.sh (new executable file, 26 lines)

@@ -0,0 +1,26 @@
#! /usr/bin/env bash

# If you're running on OS X, you probably need to manually
# 'rm -r node_modules/bcrypt; npm install bcrypt' inside
# the docker container, before it will start.
# npm rebuild bcrypt

echo ">> Starting server..."

grunt --no-color forever:app:start

echo ">> Server started"

sleep 5

echo ">> Running acceptance tests..."
grunt --no-color mochaTest:acceptance
_test_exit_code=$?

echo ">> Killing server"

grunt --no-color forever:app:stop

echo ">> Done"

exit $_test_exit_code