Merge branch 'master' into sk-account-sync

services/web/.gitignore (vendored)

@@ -73,3 +73,4 @@ Gemfile.lock
app/views/external
/modules/
docker-shared.yml

@@ -196,6 +196,7 @@ module.exports = (grunt) ->
"mathjax": "/js/libs/mathjax/MathJax.js?config=TeX-AMS_HTML"
"pdfjs-dist/build/pdf": "libs/#{PackageVersions.lib('pdfjs')}/pdf"
"ace": "#{PackageVersions.lib('ace')}"
"fineuploader": "libs/#{PackageVersions.lib('fineuploader')}"
shim:
"pdfjs-dist/build/pdf":
deps: ["libs/#{PackageVersions.lib('pdfjs')}/compatibility"]
services/web/Jenkinsfile (vendored)

@@ -42,7 +42,7 @@ pipeline {
checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'modules/tpr-webmodule'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/tpr-webmodule.git ']]])
checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'modules/learn-wiki'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@bitbucket.org:sharelatex/learn-wiki-web-module.git']]])
checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'modules/templates'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/templates-webmodule.git']]])
checkout([$class: 'GitSCM', branches: [[name: '*/sk-unlisted-projects']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'modules/track-changes'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/track-changes-web-module.git']]])
checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'modules/track-changes'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/track-changes-web-module.git']]])
checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'modules/overleaf-integration'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/overleaf-integration-web-module.git']]])
checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'modules/overleaf-account-merge'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/overleaf-account-merge.git']]])
}

@@ -70,6 +70,19 @@ pipeline {
sh 'ls -l node_modules/.bin'
}
}

stage('Unit Tests') {
steps {
sh 'make clean install' // Removes js files, so do before compile
sh 'make test_unit MOCHA_ARGS="--reporter=tap"'
}
}

stage('Acceptance Tests') {
steps {
sh 'make test_acceptance MOCHA_ARGS="--reporter=tap"'
}
}

stage('Compile') {
agent {

@@ -109,30 +122,6 @@ pipeline {
}
}

stage('Unit Test') {
agent {
docker {
image 'node:6.9.5'
reuseNode true
}
}
steps {
sh 'env NODE_ENV=development ./node_modules/.bin/grunt mochaTest:unit --reporter=tap'
}
}

stage('Acceptance Tests') {
steps {
// This tagged release of the acceptance test runner is a temporary fix
// to get the acceptance tests working before we move to a
// docker-compose workflow. See:
// https://github.com/sharelatex/web-sharelatex-internal/pull/148

sh 'docker pull sharelatex/sl-acceptance-test-runner:node-6.9-mongo-3.4'
sh 'docker run --rm -v $(pwd):/app --env SHARELATEX_ALLOW_PUBLIC_ACCESS=true sharelatex/sl-acceptance-test-runner:node-6.9-mongo-3.4 || (cat forever/app.log && false)'
}
}

stage('Package') {
steps {
sh 'rm -rf ./node_modules/grunt*'
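The new Unit Tests and Acceptance Tests stages above drive everything through the Makefile added in this branch, so a CI run should be reproducible locally. A rough sketch of the equivalent local invocation (assumes Docker and docker-compose are available on the host):

```
# Reproduce the new Jenkins stages locally (sketch)
set -e

make clean install                                # remove compiled js, then reinstall dependencies via the npm container
make test_unit MOCHA_ARGS="--reporter=tap"        # unit tests in the test_unit service
make test_acceptance MOCHA_ARGS="--reporter=tap"  # start test_acceptance + redis + mongo, run app and module acceptance suites
```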
services/web/Makefile (new file)

@@ -0,0 +1,77 @@
DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
NPM := docker-compose ${DOCKER_COMPOSE_FLAGS} run --rm npm npm -q
BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = web

all: install test
@echo "Run:"
@echo "  make install to set up the project dependencies (in docker)"
@echo "  make test to run all the tests for the project (in docker)"

add: docker-shared.yml
$(NPM) install --save ${P}

add_dev: docker-shared.yml
$(NPM) install --save-dev ${P}

install: docker-shared.yml
$(NPM) install

clean:
rm -f app.js
rm -rf app/js
rm -rf test/unit/js
rm -rf test/acceptance/js
for dir in modules/*; \
do \
rm -f $$dir/index.js; \
rm -rf $$dir/app/js; \
rm -rf $$dir/test/unit/js; \
rm -rf $$dir/test/acceptance/js; \
done
# Regenerate docker-shared.yml - not strictly a 'clean',
# but lets `make clean install` work nicely
bin/generate_volumes_file
# Deletes node_modules volume
docker-compose down --volumes

# Needs regenerating if you change which web modules you have installed
docker-shared.yml:
bin/generate_volumes_file

test: test_unit test_acceptance

test_unit: docker-shared.yml
docker-compose ${DOCKER_COMPOSE_FLAGS} run --rm test_unit npm -q run test:unit -- ${MOCHA_ARGS}

test_acceptance: test_acceptance_app test_acceptance_modules

test_acceptance_app: test_acceptance_app_start_service test_acceptance_app_run test_acceptance_app_stop_service

test_acceptance_app_start_service: test_acceptance_app_stop_service docker-shared.yml
docker-compose ${DOCKER_COMPOSE_FLAGS} up -d test_acceptance

test_acceptance_app_stop_service: docker-shared.yml
docker-compose ${DOCKER_COMPOSE_FLAGS} stop test_acceptance redis mongo

test_acceptance_app_run: docker-shared.yml
docker-compose ${DOCKER_COMPOSE_FLAGS} exec -T test_acceptance npm -q run test:acceptance -- ${MOCHA_ARGS}

test_acceptance_modules: docker-shared.yml
# Break and error on any module failure
set -e; \
for dir in modules/*; \
do \
if [ -e $$dir/Makefile ]; then \
(make test_acceptance_module MODULE=$$dir) \
fi \
done

test_acceptance_module: docker-shared.yml
cd $(MODULE) && make test_acceptance

.PHONY:
all add install update test test_unit test_acceptance \
test_acceptance_start_service test_acceptance_stop_service \
test_acceptance_run
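The targets above are meant to be driven from the host, with everything they do running inside docker-compose services. A usage sketch (the package name and module path below are illustrative, not part of this change):

```
make install                                            # one-off dependency install into the node_modules volume
make add P=some-package                                 # npm install --save some-package via the npm service
make add_dev P=some-dev-package                         # npm install --save-dev via the npm service
make test_unit MOCHA_ARGS='--grep=SomeSuite'            # unit tests, optionally filtered
make test_acceptance_module MODULE=modules/some-module  # acceptance tests for a single module
```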
@@ -17,6 +17,69 @@ web-sharelatex uses [Grunt](http://gruntjs.com/) to build its front-end related

Image processing tasks are commented out in the Gruntfile and the packages they need aren't currently in the project's `package.json`. If the images need to be processed again (minified and sprited), start by fetching the packages (`npm install grunt-contrib-imagemin grunt-sprity`), then uncomment the tasks in `Gruntfile.coffee`. After that, the tasks can be run explicitly via `grunt imagemin` and `grunt sprity`.

New Docker-based build process
------------------------------

Note that the Grunt workflow above should still work, but we are transitioning to a Docker-based testing workflow, which is documented below:

### Running the app

The app runs natively using npm and Node on the local system:

```
$ npm install
$ npm run start
```

*Ideally the app would run in Docker like the tests below, but since host networking is not supported on OS X, we need to run it natively until all services are Dockerised.*

### Unit Tests

The test suites run in Docker.

Unit tests can be run in the `test_unit` container defined in `docker-compose.tests.yml`.

The Makefile contains a shortcut to run these:

```
make install # Only needs running once, or when npm packages are updated
make unit_test
```

During development it is often useful to only run a subset of tests, which can be configured with arguments to the mocha CLI:

```
make unit_test MOCHA_ARGS='--grep=AuthorizationManager'
```

### Acceptance Tests

Acceptance tests are run against a live service, which runs in the `acceptance_test` container defined in `docker-compose.tests.yml`.

To run the tests out-of-the-box, the Makefile defines:

```
make install # Only needs running once, or when npm packages are updated
make acceptance_test
```

However, during development it is often useful to leave the service running for rapid iteration on the acceptance tests. This can be done with:

```
make acceptance_test_start_service
make acceptance_test_run # Run as many times as needed during development
make acceptance_test_stop_service
```

`make acceptance_test` just runs these three commands in sequence.

During development it is often useful to only run a subset of tests, which can be configured with arguments to the mocha CLI:

```
make acceptance_test_run MOCHA_ARGS='--grep=AuthorizationManager'
```

Unit test status
----------------
@@ -204,11 +204,11 @@ module.exports = DocumentUpdaterHandler =
logger.error {project_id, doc_id, thread_id}, "doc updater returned a non-success status code: #{res.statusCode}"
callback new Error("doc updater returned a non-success status code: #{res.statusCode}")

updateProjectStructure : (project_id, userId, oldDocs, newDocs, oldFiles, newFiles, callback = (error) ->)->
updateProjectStructure : (project_id, userId, changes, callback = (error) ->)->
return callback() if !settings.apis.project_history?.enabled

docUpdates = DocumentUpdaterHandler._getRenameUpdates('doc', oldDocs, newDocs)
fileUpdates = DocumentUpdaterHandler._getRenameUpdates('file', oldFiles, newFiles)
docUpdates = DocumentUpdaterHandler._getRenameUpdates('doc', changes.oldDocs, changes.newDocs)
fileUpdates = DocumentUpdaterHandler._getRenameUpdates('file', changes.oldFiles, changes.newFiles)

timer = new metrics.Timer("set-document")
url = "#{settings.apis.documentupdater.url}/project/#{project_id}"

@@ -231,14 +231,25 @@ module.exports = DocumentUpdaterHandler =
callback new Error("doc updater returned a non-success status code: #{res.statusCode}")

_getRenameUpdates: (entityType, oldEntities, newEntities) ->
oldEntities ||= []
newEntities ||= []
updates = []

for oldEntity in oldEntities
id = oldEntity[entityType]._id
newEntity = _.find newEntities, (newEntity) ->
newEntity[entityType]._id.toString() == id.toString()
oldEntitiesHash = _.indexBy oldEntities, (entity) -> entity[entityType]._id.toString()
newEntitiesHash = _.indexBy newEntities, (entity) -> entity[entityType]._id.toString()

if newEntity.path != oldEntity.path
for id, newEntity of newEntitiesHash
oldEntity = oldEntitiesHash[id]

if !oldEntity?
# entity added
updates.push
id: id
pathname: newEntity.path
docLines: newEntity.docLines
url: newEntity.url
else if newEntity.path != oldEntity.path
# entity renamed
updates.push
id: id
pathname: oldEntity.path
@@ -19,48 +19,48 @@ module.exports = EditorController =
DocumentUpdaterHandler.flushDocToMongo project_id, doc_id, callback

addDoc: (project_id, folder_id, docName, docLines, source, callback = (error, doc)->)->
addDoc: (project_id, folder_id, docName, docLines, source, user_id, callback = (error, doc)->)->
LockManager.getLock project_id, (err)->
if err?
logger.err err:err, project_id:project_id, source:source, "could not get lock to addDoc"
return callback(err)
EditorController.addDocWithoutLock project_id, folder_id, docName, docLines, source, (error, doc)->
EditorController.addDocWithoutLock project_id, folder_id, docName, docLines, source, user_id, (error, doc)->
LockManager.releaseLock project_id, ->
callback(error, doc)

addDocWithoutLock: (project_id, folder_id, docName, docLines, source, callback = (error, doc)->)->
addDocWithoutLock: (project_id, folder_id, docName, docLines, source, user_id, callback = (error, doc)->)->
docName = docName.trim()
logger.log {project_id, folder_id, docName, source}, "sending new doc to project"
Metrics.inc "editor.add-doc"
ProjectEntityHandler.addDoc project_id, folder_id, docName, docLines, (err, doc, folder_id)=>
ProjectEntityHandler.addDoc project_id, folder_id, docName, docLines, user_id, (err, doc, folder_id)=>
if err?
logger.err err:err, project_id:project_id, docName:docName, "error adding doc without lock"
return callback(err)
EditorRealTimeController.emitToRoom(project_id, 'reciveNewDoc', folder_id, doc, source)
callback(err, doc)

addFile: (project_id, folder_id, fileName, path, source, callback = (error, file)->)->
addFile: (project_id, folder_id, fileName, path, source, user_id, callback = (error, file)->)->
LockManager.getLock project_id, (err)->
if err?
logger.err err:err, project_id:project_id, source:source, "could not get lock to addFile"
return callback(err)
EditorController.addFileWithoutLock project_id, folder_id, fileName, path, source, (error, file)->
EditorController.addFileWithoutLock project_id, folder_id, fileName, path, source, user_id, (error, file)->
LockManager.releaseLock project_id, ->
callback(error, file)

addFileWithoutLock: (project_id, folder_id, fileName, path, source, callback = (error, file)->)->
addFileWithoutLock: (project_id, folder_id, fileName, path, source, user_id, callback = (error, file)->)->
fileName = fileName.trim()
logger.log {project_id, folder_id, fileName, path}, "sending new file to project"
Metrics.inc "editor.add-file"
ProjectEntityHandler.addFile project_id, folder_id, fileName, path, (err, fileRef, folder_id)=>
ProjectEntityHandler.addFile project_id, folder_id, fileName, path, user_id, (err, fileRef, folder_id)=>
if err?
logger.err err:err, project_id:project_id, folder_id:folder_id, fileName:fileName, "error adding file without lock"
return callback(err)
EditorRealTimeController.emitToRoom(project_id, 'reciveNewFile', folder_id, fileRef, source)
callback(err, fileRef)

replaceFile: (project_id, file_id, fsPath, source, callback = (error) ->)->
ProjectEntityHandler.replaceFile project_id, file_id, fsPath, callback
replaceFile: (project_id, file_id, fsPath, source, user_id, callback = (error) ->)->
ProjectEntityHandler.replaceFile project_id, file_id, fsPath, user_id, callback

addFolder : (project_id, folder_id, folderName, source, callback = (error, folder)->)->
LockManager.getLock project_id, (err)->
@@ -81,10 +81,11 @@ module.exports = EditorHttpController =
project_id = req.params.Project_id
name = req.body.name
parent_folder_id = req.body.parent_folder_id
user_id = AuthenticationController.getLoggedInUserId(req)
logger.log project_id:project_id, name:name, parent_folder_id:parent_folder_id, "getting request to add doc to project"
if !EditorHttpController._nameIsAcceptableLength(name)
return res.sendStatus 400
EditorController.addDoc project_id, parent_folder_id, name, [], "editor", (error, doc) ->
EditorController.addDoc project_id, parent_folder_id, name, [], "editor", user_id, (error, doc) ->
if error == "project_has_to_many_files"
res.status(400).json(req.i18n.translate("project_has_to_many_files"))
else if error?

@@ -113,9 +114,9 @@ module.exports = EditorHttpController =
entity_id = req.params.entity_id
entity_type = req.params.entity_type
name = req.body.name
user_id = AuthenticationController.getLoggedInUserId(req)
if !EditorHttpController._nameIsAcceptableLength(name)
return res.sendStatus 400
user_id = AuthenticationController.getLoggedInUserId(req)
EditorController.renameEntity project_id, entity_id, entity_type, name, user_id, (error) ->
return next(error) if error?
res.sendStatus 204
@@ -21,9 +21,9 @@ module.exports = FileStoreHandler =
return callback(new Error("can not upload symlink"))

_cb = callback
callback = (err) ->
callback = (err, url) ->
callback = -> # avoid double callbacks
_cb(err)
_cb(err, url)

logger.log project_id:project_id, file_id:file_id, fsPath:fsPath, "uploading file from disk"
readStream = fs.createReadStream(fsPath)

@@ -31,9 +31,10 @@ module.exports = FileStoreHandler =
logger.err err:err, project_id:project_id, file_id:file_id, fsPath:fsPath, "something went wrong on the read stream of uploadFileFromDisk"
callback err
readStream.on "open", () ->
url = FileStoreHandler._buildUrl(project_id, file_id)
opts =
method: "post"
uri: FileStoreHandler._buildUrl(project_id, file_id)
uri: url
timeout:fiveMinsInMs
writeStream = request(opts)
writeStream.on "error", (err)->

@@ -45,7 +46,7 @@ module.exports = FileStoreHandler =
logger.err {err, statusCode: response.statusCode}, "error uploading to filestore"
callback(err)
else
callback(null)
callback(null, url)
readStream.pipe writeStream

getFileStream: (project_id, file_id, query, callback)->

@@ -91,7 +92,7 @@ module.exports = FileStoreHandler =
request opts, (err)->
if err?
logger.err err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, newProject_id:newProject_id, newFile_id:newFile_id, "something went wrong telling filestore api to copy file"
callback(err)
callback(err, opts.uri)

_buildUrl: (project_id, file_id)->
return "#{settings.apis.filestore.url}/project/#{project_id}/file/#{file_id}"
@@ -170,6 +170,8 @@ module.exports = ProjectController =
return notification
projects = ProjectController._buildProjectList results.projects, results.v1Projects?.projects
user = results.user
warnings = ProjectController._buildWarningsList results.v1Projects

ProjectController._injectProjectOwners projects, (error, projects) ->
return next(error) if error?
viewModel = {

@@ -181,7 +183,7 @@ module.exports = ProjectController =
user: user
hasSubscription: results.hasSubscription[0]
isShowingV1Projects: results.v1Projects?
noV1Connection: results.v1Projects?.noConnection
warnings: warnings
}

if Settings?.algolia?.app_id? and Settings?.algolia?.read_only_api_key?

@@ -251,7 +253,7 @@ module.exports = ProjectController =
# Extract data from user's ObjectId
timestamp = parseInt(user_id.toString().substring(0, 8), 16)

rolloutPercentage = 10 # Percentage of users to roll out to
rolloutPercentage = 40 # Percentage of users to roll out to
if !ProjectController._isInPercentageRollout('autocompile', user_id, rolloutPercentage)
# Don't show if user is not part of roll out
return cb(null, { enabled: false, showOnboarding: false })

@@ -427,6 +429,14 @@ module.exports = ProjectController =
project.owner = users[project.owner_ref.toString()]
callback null, projects

_buildWarningsList: (v1ProjectData = {}) ->
warnings = []
if v1ProjectData.noConnection
warnings.push 'No V1 Connection'
if v1ProjectData.hasHiddenV1Projects
warnings.push "Looks like you've got a lot of V1 projects! Some of them may be hidden on V2. To view them all, use the V1 dashboard."
return warnings

defaultSettingsForAnonymousUser = (user_id)->
id : user_id
ace:
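For context on the rollout check above: the leading 8 hex characters of a Mongo ObjectId encode its creation time in seconds, which is what the `parseInt(user_id.toString().substring(0, 8), 16)` line extracts before the percentage check. A small shell sketch with a purely illustrative ObjectId:

```
# Illustrative ObjectId; the first 8 hex chars are a unix timestamp in seconds
OBJECT_ID=507f1f77bcf86cd799439011
printf 'created at %d (unix seconds)\n' "0x${OBJECT_ID:0:8}"
```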
@@ -52,7 +52,7 @@ module.exports = ProjectCreationHandler =
return callback(error) if error?
self._buildTemplate "mainbasic.tex", owner_id, projectName, (error, docLines)->
return callback(error) if error?
ProjectEntityHandler.addDoc project._id, project.rootFolder[0]._id, "main.tex", docLines, (error, doc)->
ProjectEntityHandler.addDoc project._id, project.rootFolder[0]._id, "main.tex", docLines, owner_id, (error, doc)->
if error?
logger.err err:error, "error adding doc when creating basic project"
return callback(error)

@@ -67,17 +67,17 @@ module.exports = ProjectCreationHandler =
(callback) ->
self._buildTemplate "main.tex", owner_id, projectName, (error, docLines)->
return callback(error) if error?
ProjectEntityHandler.addDoc project._id, project.rootFolder[0]._id, "main.tex", docLines, (error, doc)->
ProjectEntityHandler.addDoc project._id, project.rootFolder[0]._id, "main.tex", docLines, owner_id, (error, doc)->
return callback(error) if error?
ProjectEntityHandler.setRootDoc project._id, doc._id, callback
(callback) ->
self._buildTemplate "references.bib", owner_id, projectName, (error, docLines)->
return callback(error) if error?
ProjectEntityHandler.addDoc project._id, project.rootFolder[0]._id, "references.bib", docLines, (error, doc)->
ProjectEntityHandler.addDoc project._id, project.rootFolder[0]._id, "references.bib", docLines, owner_id, (error, doc)->
callback(error)
(callback) ->
universePath = Path.resolve(__dirname + "/../../../templates/project_files/universe.jpg")
ProjectEntityHandler.addFile project._id, project.rootFolder[0]._id, "universe.jpg", universePath, callback
ProjectEntityHandler.addFile project._id, project.rootFolder[0]._id, "universe.jpg", universePath, owner_id, callback
], (error) ->
callback(error, project)
@@ -12,7 +12,7 @@ logger = require("logger-sharelatex")

module.exports = ProjectDuplicator =

_copyDocs: (newProject, originalRootDoc, originalFolder, desFolder, docContents, callback)->
_copyDocs: (owner_id, newProject, originalRootDoc, originalFolder, desFolder, docContents, callback)->
setRootDoc = _.once (doc_id)->
projectEntityHandler.setRootDoc newProject._id, doc_id
docs = originalFolder.docs or []

@@ -21,7 +21,7 @@ module.exports = ProjectDuplicator =
if !doc?._id?
return callback()
content = docContents[doc._id.toString()]
projectEntityHandler.addDocWithProject newProject, desFolder._id, doc.name, content.lines, (err, newDoc)->
projectEntityHandler.addDocWithProject newProject, desFolder._id, doc.name, content.lines, owner_id, (err, newDoc)->
if err?
logger.err err:err, "error copying doc"
return callback(err)

@@ -31,15 +31,15 @@ module.exports = ProjectDuplicator =

async.series jobs, callback

_copyFiles: (newProject, originalProject_id, originalFolder, desFolder, callback)->
_copyFiles: (owner_id, newProject, originalProject_id, originalFolder, desFolder, callback)->
fileRefs = originalFolder.fileRefs or []
jobs = fileRefs.map (file)->
return (cb)->
projectEntityHandler.copyFileFromExistingProjectWithProject newProject, desFolder._id, originalProject_id, file, cb
projectEntityHandler.copyFileFromExistingProjectWithProject newProject, desFolder._id, originalProject_id, file, owner_id, cb
async.parallelLimit jobs, 5, callback

_copyFolderRecursivly: (newProject_id, originalProject_id, originalRootDoc, originalFolder, desFolder, docContents, callback)->
_copyFolderRecursivly: (owner_id, newProject_id, originalProject_id, originalRootDoc, originalFolder, desFolder, docContents, callback)->
ProjectGetter.getProject newProject_id, {rootFolder:true, name:true}, (err, newProject)->
if err?
logger.err project_id:newProject_id, "could not get project"

@@ -53,12 +53,12 @@ module.exports = ProjectDuplicator =
return cb()
projectEntityHandler.addFolderWithProject newProject, desFolder?._id, childFolder.name, (err, newFolder)->
return cb(err) if err?
ProjectDuplicator._copyFolderRecursivly newProject_id, originalProject_id, originalRootDoc, childFolder, newFolder, docContents, cb
ProjectDuplicator._copyFolderRecursivly owner_id, newProject_id, originalProject_id, originalRootDoc, childFolder, newFolder, docContents, cb

jobs.push (cb)->
ProjectDuplicator._copyFiles newProject, originalProject_id, originalFolder, desFolder, cb
ProjectDuplicator._copyFiles owner_id, newProject, originalProject_id, originalFolder, desFolder, cb
jobs.push (cb)->
ProjectDuplicator._copyDocs newProject, originalRootDoc, originalFolder, desFolder, docContents, cb
ProjectDuplicator._copyDocs owner_id, newProject, originalRootDoc, originalFolder, desFolder, docContents, cb

async.series jobs, callback

@@ -90,7 +90,7 @@ module.exports = ProjectDuplicator =

projectOptionsHandler.setCompiler newProject._id, originalProject.compiler, ->

ProjectDuplicator._copyFolderRecursivly newProject._id, originalProject_id, originalRootDoc, originalProject.rootFolder[0], newProject.rootFolder[0], docContents, ->
ProjectDuplicator._copyFolderRecursivly owner._id, newProject._id, originalProject_id, originalRootDoc, originalProject.rootFolder[0], newProject.rootFolder[0], docContents, ->
if err?
logger.err err:err, originalProject_id:originalProject_id, newProjectName:newProjectName, "error cloning project"
callback(err, newProject)
@@ -16,7 +16,7 @@ projectUpdateHandler = require('./ProjectUpdateHandler')
DocstoreManager = require "../Docstore/DocstoreManager"
ProjectGetter = require "./ProjectGetter"
CooldownManager = require '../Cooldown/CooldownManager'
DocumentUpdaterHandler = require('../../Features/DocumentUpdater/DocumentUpdaterHandler')

module.exports = ProjectEntityHandler =
getAllFolders: (project_id, callback) ->

@@ -106,8 +106,7 @@ module.exports = ProjectEntityHandler =
flushProjectToThirdPartyDataStore: (project_id, callback) ->
self = @
logger.log project_id:project_id, "flushing project to tpds"
documentUpdaterHandler = require('../../Features/DocumentUpdater/DocumentUpdaterHandler')
documentUpdaterHandler.flushProjectToMongo project_id, (error) ->
DocumentUpdaterHandler.flushProjectToMongo project_id, (error) ->
return callback(error) if error?
ProjectGetter.getProject project_id, {name:true}, (error, project) ->
return callback(error) if error?

@@ -150,14 +149,14 @@ module.exports = ProjectEntityHandler =
else
DocstoreManager.getDoc project_id, doc_id, options, callback

addDoc: (project_id, folder_id, docName, docLines, callback = (error, doc, folder_id) ->)=>
addDoc: (project_id, folder_id, docName, docLines, userId, callback = (error, doc, folder_id) ->)=>
ProjectGetter.getProjectWithOnlyFolders project_id, (err, project) ->
if err?
logger.err project_id:project_id, err:err, "error getting project for add doc"
return callback(err)
ProjectEntityHandler.addDocWithProject project, folder_id, docName, docLines, callback
ProjectEntityHandler.addDocWithProject project, folder_id, docName, docLines, userId, callback

addDocWithProject: (project, folder_id, docName, docLines, callback = (error, doc, folder_id) ->)=>
addDocWithProject: (project, folder_id, docName, docLines, userId, callback = (error, doc, folder_id) ->)=>
project_id = project._id
logger.log project_id: project_id, folder_id: folder_id, doc_name: docName, "adding doc to project with project"
confirmFolder project, folder_id, (folder_id)=>

@@ -177,7 +176,14 @@ module.exports = ProjectEntityHandler =
rev: 0
}, (err) ->
return callback(err) if err?
callback(null, doc, folder_id)
newDocs = [
doc: doc
path: result?.path?.fileSystem
docLines: docLines.join('\n')
]
DocumentUpdaterHandler.updateProjectStructure project_id, userId, {newDocs}, (error) ->
return callback(error) if error?
callback null, doc, folder_id

restoreDoc: (project_id, doc_id, name, callback = (error, doc, folder_id) ->) ->
# getDoc will return the deleted doc's lines, but we don't actually remove

@@ -186,20 +192,20 @@ module.exports = ProjectEntityHandler =
return callback(error) if error?
ProjectEntityHandler.addDoc project_id, null, name, lines, callback

addFile: (project_id, folder_id, fileName, path, callback = (error, fileRef, folder_id) ->)->
addFile: (project_id, folder_id, fileName, path, userId, callback = (error, fileRef, folder_id) ->)->
ProjectGetter.getProjectWithOnlyFolders project_id, (err, project) ->
if err?
logger.err project_id:project_id, err:err, "error getting project for add file"
return callback(err)
ProjectEntityHandler.addFileWithProject project, folder_id, fileName, path, callback
ProjectEntityHandler.addFileWithProject project, folder_id, fileName, path, userId, callback

addFileWithProject: (project, folder_id, fileName, path, callback = (error, fileRef, folder_id) ->)->
addFileWithProject: (project, folder_id, fileName, path, userId, callback = (error, fileRef, folder_id) ->)->
project_id = project._id
logger.log project_id: project._id, folder_id: folder_id, file_name: fileName, path:path, "adding file"
return callback(err) if err?
confirmFolder project, folder_id, (folder_id)->
fileRef = new File name : fileName
FileStoreHandler.uploadFileFromDisk project._id, fileRef._id, path, (err)->
FileStoreHandler.uploadFileFromDisk project._id, fileRef._id, path, (err, fileStoreUrl)->
if err?
logger.err err:err, project_id: project._id, folder_id: folder_id, file_name: fileName, fileRef:fileRef, "error uploading image to s3"
return callback(err)

@@ -209,27 +215,31 @@ module.exports = ProjectEntityHandler =
return callback(err)
tpdsUpdateSender.addFile {project_id:project._id, file_id:fileRef._id, path:result?.path?.fileSystem, project_name:project.name, rev:fileRef.rev}, (err) ->
return callback(err) if err?
callback(null, fileRef, folder_id)
newFiles = [
file: fileRef
path: result?.path?.fileSystem
url: fileStoreUrl
]
DocumentUpdaterHandler.updateProjectStructure project_id, userId, {newFiles}, (error) ->
return callback(error) if error?
callback null, fileRef, folder_id

replaceFile: (project_id, file_id, fsPath, callback)->
ProjectGetter.getProject project_id, {name:true}, (err, project) ->
replaceFile: (project_id, file_id, fsPath, userId, callback)->
self = ProjectEntityHandler
FileStoreHandler.uploadFileFromDisk project_id, file_id, fsPath, (err, fileStoreUrl)->
return callback(err) if err?
findOpts =
project_id:project._id
element_id:file_id
type:"file"
FileStoreHandler.uploadFileFromDisk project._id, file_id, fsPath, (err)->
ProjectGetter.getProject project_id, {rootFolder: true, name:true}, (err, project) ->
return callback(err) if err?
# Note there is a potential race condition here (and elsewhere)
# If the file tree changes between findElement and the Project.update
# then the path to the file element will be out of date. In practice
# this is not a problem so long as we do not do anything longer running
# between them (like waiting for the file to upload.)
projectLocator.findElement findOpts, (err, fileRef, path)=>
projectLocator.findElement {project:project, element_id: file_id, type: 'file'}, (err, fileRef, path)=>
return callback(err) if err?
tpdsUpdateSender.addFile {project_id:project._id, file_id:fileRef._id, path:path.fileSystem, rev:fileRef.rev+1, project_name:project.name}, (error) ->
tpdsUpdateSender.addFile {project_id:project._id, file_id:fileRef._id, path:path.fileSystem, rev:fileRef.rev+1, project_name:project.name}, (err) ->
return callback(err) if err?
conditons = _id:project._id
conditions = _id:project._id
inc = {}
inc["#{path.mongo}.rev"] = 1
set = {}

@@ -237,39 +247,43 @@ module.exports = ProjectEntityHandler =
update =
"$inc": inc
"$set": set
Project.update conditons, update, {}, (err, second)->
callback()
Project.findOneAndUpdate conditions, update, { "new": true}, (err) ->
return callback(err) if err?
newFiles = [
file: fileRef
path: path.fileSystem
url: fileStoreUrl
]
DocumentUpdaterHandler.updateProjectStructure project_id, userId, {newFiles}, callback

copyFileFromExistingProject: (project_id, folder_id, originalProject_id, origonalFileRef, callback = (error, fileRef, folder_id) ->)->
logger.log project_id:project_id, folder_id:folder_id, originalProject_id:originalProject_id, origonalFileRef:origonalFileRef, "copying file in s3"
ProjectGetter.getProject project_id, {name:true}, (err, project) ->
if err?
logger.err project_id:project_id, err:err, "error getting project for copy file from existing project"
return callback(err)
ProjectEntityHandler.copyFileFromExistingProjectWithProject project, folder_id, originalProject_id, origonalFileRef, callback

copyFileFromExistingProjectWithProject: (project, folder_id, originalProject_id, origonalFileRef, callback = (error, fileRef, folder_id) ->)->
copyFileFromExistingProjectWithProject: (project, folder_id, originalProject_id, origonalFileRef, userId, callback = (error, fileRef, folder_id) ->)->
project_id = project._id
logger.log project_id:project_id, folder_id:folder_id, originalProject_id:originalProject_id, origonalFileRef:origonalFileRef, "copying file in s3 with project"
logger.log { project_id, folder_id, originalProject_id, origonalFileRef }, "copying file in s3 with project"
return callback(err) if err?
confirmFolder project, folder_id, (folder_id)=>
if !origonalFileRef?
logger.err project_id:project._id, folder_id:folder_id, originalProject_id:originalProject_id, origonalFileRef:origonalFileRef, "file trying to copy is null"
logger.err { project_id, folder_id, originalProject_id, origonalFileRef }, "file trying to copy is null"
return callback()
fileRef = new File name : origonalFileRef.name
FileStoreHandler.copyFile originalProject_id, origonalFileRef._id, project._id, fileRef._id, (err)->
FileStoreHandler.copyFile originalProject_id, origonalFileRef._id, project._id, fileRef._id, (err, fileStoreUrl)->
if err?
logger.err err:err, project_id:project._id, folder_id:folder_id, originalProject_id:originalProject_id, origonalFileRef:origonalFileRef, "error coping file in s3"
logger.err { err, project_id, folder_id, originalProject_id, origonalFileRef }, "error coping file in s3"
return callback(err)
ProjectEntityHandler._putElement project, folder_id, fileRef, "file", (err, result)=>
if err?
logger.err err:err, project_id:project._id, folder_id:folder_id, "error putting element as part of copy"
logger.err { err, project_id, folder_id }, "error putting element as part of copy"
return callback(err)
tpdsUpdateSender.addFile {project_id:project._id, file_id:fileRef._id, path:result?.path?.fileSystem, rev:fileRef.rev, project_name:project.name}, (err) ->
tpdsUpdateSender.addFile { project_id, file_id:fileRef._id, path:result?.path?.fileSystem, rev:fileRef.rev, project_name:project.name}, (err) ->
if err?
logger.err err:err, project_id:project._id, folder_id:folder_id, originalProject_id:originalProject_id, origonalFileRef:origonalFileRef, "error sending file to tpds worker"
callback(null, fileRef, folder_id)
logger.err { err, project_id, folder_id, originalProject_id, origonalFileRef }, "error sending file to tpds worker"
newFiles = [
file: fileRef
path: result?.path?.fileSystem
url: fileStoreUrl
]
DocumentUpdaterHandler.updateProjectStructure project_id, userId, {newFiles}, (error) ->
return callback(error) if error?
callback null, fileRef, folder_id

mkdirp: (project_id, path, callback = (err, newlyCreatedFolders, lastFolderInPath)->)->
self = @

@@ -381,11 +395,9 @@ module.exports = ProjectEntityHandler =
endPath: result.path.fileSystem,
rev: entity.rev
tpdsUpdateSender.moveEntity opts
self.getAllEntitiesFromProject newProject, (error, newDocs, newFiles
) =>
self.getAllEntitiesFromProject newProject, (error, newDocs, newFiles) =>
return callback(error) if error?
documentUpdaterHandler = require('../../Features/DocumentUpdater/DocumentUpdaterHandler')
documentUpdaterHandler.updateProjectStructure project_id, userId, oldDocs, newDocs, oldFiles, newFiles, callback
DocumentUpdaterHandler.updateProjectStructure project_id, userId, {oldDocs, newDocs, oldFiles, newFiles}, callback

_checkValidMove: (project, entityType, entityPath, destFolderId, callback = (error) ->) ->
return callback() if !entityType.match(/folder/)

@@ -442,8 +454,7 @@ module.exports = ProjectEntityHandler =
return callback(error) if error?
ProjectEntityHandler.getAllEntitiesFromProject newProject, (error, newDocs, newFiles) =>
return callback(error) if error?
documentUpdaterHandler = require('../../Features/DocumentUpdater/DocumentUpdaterHandler')
documentUpdaterHandler.updateProjectStructure project_id, userId, oldDocs, newDocs, oldFiles, newFiles, callback
DocumentUpdaterHandler.updateProjectStructure project_id, userId, {oldDocs, newDocs, oldFiles, newFiles}, callback

_cleanUpEntity: (project, entity, entityType, callback = (error) ->) ->
if(entityType.indexOf("file") != -1)

@@ -466,7 +477,7 @@ module.exports = ProjectEntityHandler =

unsetRootDocIfRequired (error) ->
return callback(error) if error?
require('../../Features/DocumentUpdater/DocumentUpdaterHandler').deleteDoc project_id, doc_id, (error) ->
DocumentUpdaterHandler.deleteDoc project_id, doc_id, (error) ->
return callback(error) if error?
ProjectEntityHandler._insertDeletedDocReference project._id, doc, (error) ->
return callback(error) if error?
@@ -28,7 +28,7 @@ module.exports =
FileTypeManager.isBinary path, fsPath, (err, isFile)->
return callback(err) if err?
if isFile
self.p.processFile project_id, elementId, fsPath, path, source, callback
self.p.processFile project_id, elementId, fsPath, path, source, user_id, callback
else
self.p.processDoc project_id, elementId, user_id, fsPath, path, source, callback

@@ -57,9 +57,9 @@ module.exports =
if err?
logger.err err:err, project_id:project_id, doc_id:doc_id, path:path, "error processing file"
return callback(err)
editorController.addDoc project_id, folder._id, fileName, docLines, source, callback
editorController.addDoc project_id, folder._id, fileName, docLines, source, user_id, callback

processFile: (project_id, file_id, fsPath, path, source, callback)->
processFile: (project_id, file_id, fsPath, path, source, user_id, callback)->
finish = (err)->
logger.log project_id:project_id, file_id:file_id, path:path, "completed processing file update from tpds"
callback(err)

@@ -69,9 +69,9 @@ module.exports =
logger.err err:err, project_id:project_id, file_id:file_id, path:path, "error processing file"
return callback(err)
else if file_id?
editorController.replaceFile project_id, file_id, fsPath, source, finish
editorController.replaceFile project_id, file_id, fsPath, source, user_id, finish
else
editorController.addFile project_id, folder?._id, fileName, fsPath, source, finish
editorController.addFile project_id, folder?._id, fileName, fsPath, source, user_id, finish

writeStreamToDisk: (project_id, file_id, stream, callback = (err, fsPath)->)->
if !file_id?

@@ -28,9 +28,9 @@ module.exports = FileSystemImportManager =
if existingDoc?
EditorController.setDoc project_id, existingDoc._id, user_id, lines, "upload", callback
else
EditorController.addDocWithoutLock project_id, folder_id, name, lines, "upload", callback
EditorController.addDocWithoutLock project_id, folder_id, name, lines, "upload", user_id, callback
else
EditorController.addDocWithoutLock project_id, folder_id, name, lines, "upload", callback
EditorController.addDocWithoutLock project_id, folder_id, name, lines, "upload", user_id, callback

addFile: (user_id, project_id, folder_id, name, path, replace, callback = (error, file)-> )->
FileSystemImportManager._isSafeOnFileSystem path, (err, isSafe)->

@@ -39,7 +39,7 @@ module.exports = FileSystemImportManager =
return callback("path is symlink")

if !replace
EditorController.addFileWithoutLock project_id, folder_id, name, path, "upload", callback
EditorController.addFileWithoutLock project_id, folder_id, name, path, "upload", user_id, callback
else
ProjectLocator.findElement project_id: project_id, element_id: folder_id, type: "folder", (error, folder) ->
return callback(error) if error?

@@ -50,9 +50,9 @@ module.exports = FileSystemImportManager =
existingFile = fileRef
break
if existingFile?
EditorController.replaceFile project_id, existingFile._id, path, "upload", callback
EditorController.replaceFile project_id, existingFile._id, path, "upload", user_id, callback
else
EditorController.addFileWithoutLock project_id, folder_id, name, path, "upload", callback
EditorController.addFileWithoutLock project_id, folder_id, name, path, "upload", user_id, callback

addFolder: (user_id, project_id, folder_id, name, path, replace, callback = (error)-> ) ->
FileSystemImportManager._isSafeOnFileSystem path, (err, isSafe)->
@@ -24,6 +24,7 @@ jsPath =

ace = PackageVersions.lib('ace')
pdfjs = PackageVersions.lib('pdfjs')
fineuploader = PackageVersions.lib('fineuploader')

getFileContent = (filePath)->
filePath = Path.join __dirname, "../../../", "public#{filePath}"

@@ -37,6 +38,7 @@ getFileContent = (filePath)->

logger.log "Generating file fingerprints..."
pathList = [
["#{jsPath}libs/#{fineuploader}.js"]
["#{jsPath}libs/require.js"]
["#{jsPath}ide.js"]
["#{jsPath}main.js"]

@@ -2,6 +2,7 @@ version = {
"pdfjs": "1.7.225"
"moment": "2.9.0"
"ace": "1.2.5"
"fineuploader": "5.15.4"
}

module.exports = {
@@ -132,7 +132,8 @@ html(itemscope, itemtype='http://schema.org/Product')
// minimal requirejs configuration (can be extended/overridden)
window.requirejs = {
"paths" : {
"moment": "libs/#{lib('moment')}"
"moment": "libs/#{lib('moment')}",
"fineuploader": "libs/#{lib('fineuploader')}"
},
"urlArgs": "fingerprint=#{fingerprint(jsPath + 'main.js')}-#{fingerprint(jsPath + 'libs.js')}",
"config":{

@@ -130,7 +130,8 @@ block requirejs
"moment": "libs/#{lib('moment')}",
"pdfjs-dist/build/pdf": "libs/#{lib('pdfjs')}/pdf",
"pdfjs-dist/build/pdf.worker": "#{pdfWorkerPath}",
"ace": "#{lib('ace')}"
"ace": "#{lib('ace')}",
"fineuploader": "libs/#{lib('fineuploader')}"
},
"urlArgs" : "fingerprint=#{fingerprint(jsPath + 'ide.js')}-#{fingerprint(jsPath + 'libs.js')}",
"waitSeconds": 0,
@@ -118,7 +118,11 @@ div.full-size(
h3.popover-title #{translate("link_sharing")}
.popover-content
p #{translate("try_out_link_sharing")}
img(src="/img/onboarding/linksharing/link-sharing.png" width="100%")
img(
src="/img/onboarding/linksharing/link-sharing.png"
alt="Link sharing demo"
width="100%"
)
p #{translate("try_link_sharing_description")}
button.btn.btn-default.btn-block(ng-click="dismiss()")
| #{translate("got_it")}

@@ -1,5 +1,5 @@
aside#file-tree(ng-controller="FileTreeController", ng-class="{ 'multi-selected': multiSelectedCount > 0 }").full-size
.toolbar.toolbar-small.toolbar-alt(ng-if="permissions.write")
.toolbar.toolbar-filetree(ng-if="permissions.write")
a(
href,
ng-click="openNewDocModal()",

@@ -7,9 +7,9 @@ header.toolbar.toolbar-header.toolbar-with-labels(
href,
ng-click="ui.leftMenuShown = true;",
)
i.fa.fa-fw.fa-bars
i.fa.fa-fw.fa-bars.editor-menu-icon
p.toolbar-label #{translate("menu")}
a(
a.toolbar-header-back-projects(
href="/project"
)
i.fa.fa-fw.fa-level-up

@@ -1,6 +1,6 @@
div.full-size.pdf(ng-controller="PdfController")
.toolbar.toolbar-tall
.btn-group#recompile(
.toolbar.toolbar-pdf
.btn-group.btn-recompile-group#recompile(
dropdown,
tooltip-html="'"+translate('recompile_pdf')+" <span class=\"keyboard-shortcut\">({{modifierKey}} + Enter)</span>'"
tooltip-class="keyboard-tooltip"

@@ -8,7 +8,7 @@ div.full-size.pdf(ng-controller="PdfController")
tooltip-append-to-body="true"
tooltip-placement="bottom"
)
a.btn.btn-info(
a.btn.btn-recompile(
href,
ng-disabled="pdf.compiling",
ng-click="recompile()"

@@ -19,7 +19,7 @@ div.full-size.pdf(ng-controller="PdfController")
span(ng-show="!pdf.compiling") #{translate("recompile")}
span(ng-show="pdf.compiling") #{translate("compiling")}...
a.btn.btn-info.dropdown-toggle(
a.btn.btn-recompile.dropdown-toggle(
href,
ng-disabled="pdf.compiling",
dropdown-toggle

@@ -102,6 +102,7 @@ div.full-size.pdf(ng-controller="PdfController")
tooltip-placement="bottom"
tooltip-append-to-body="true"
)
i.fa.fa-expand
i.full-screen
a(
href,

@@ -111,6 +112,7 @@ div.full-size.pdf(ng-controller="PdfController")
tooltip-placement="bottom"
tooltip-append-to-body="true"
)
i.fa.fa-compress
i.split-screen
i.split-screen
// end of toolbar
@@ -313,3 +313,59 @@ script(type="text/ng-template", id="userProfileModalTemplate")

.modal-footer
button.btn.btn-info(ng-click="done()") #{translate("done")}

script(type="text/ng-template", id="v1ImportModalTemplate")
.modal-header
button.close(ng-click="dismiss()") ×
h3 #{translate("import_project_to_v2")}

.modal-body.v1-import-wrapper
.v1-import-step-1(ng-show="step === 1")
img.v1-import-img(
src="/img/v1-import/v2-editor.png"
alt="The new V2 Editor."
)
h2.v1-import-title Try importing your project to V2!
p Some exciting copy about the new features:
ul
li Some stuff
li Some more stuff
li Yet more stuff

.v1-import-step-2(ng-show="step === 2")
div.v1-import-warning(aria-label="Warning symbol.")
i.fa.fa-exclamation-triangle
h2.v1-import-title #[strong Warning:] Overleaf V2 is in beta
p Once you import your project you will lose access to some of the features of Overleaf V1. This includes the git bridge, journal integrations, WYSIWYG and linked files. We’re working on bringing these features to V2!
p Once you have imported a project to V2 you #[strong cannot go back to V1].
p Are you sure you want to import to V2?

.modal-footer.v1-import-footer
div(ng-show="step === 1")
if settings.overleaf && settings.overleaf.host
a.btn.btn-primary.v1-import-btn(
ng-href=settings.overleaf.host + "/{{project.id}}"
) #{translate("open_in_v1")}
button.btn.btn-primary.v1-import-btn(
ng-click="moveToConfirmation()"
) #{translate("import_to_v2")}
div(ng-show="step === 2")
form(
async-form="v1Import",
name="v1ImportForm",
action="{{'/overleaf/project/'+ project.id + '/import'}}"
method="POST"
ng-cloak
)
input(name='_csrf', type='hidden', value=csrfToken)
form-messages(for="v1ImportForm")
if settings.overleaf && settings.overleaf.host
a.btn.btn-primary.v1-import-btn(
ng-href=settings.overleaf.host + "/{{project.id}}"
ng-class="{disabled: v1ImportForm.inflight || v1ImportForm.success}"
) #{translate("never_mind_open_in_v1")}
input.btn.btn-primary.v1-import-btn(
type="submit",
value=translate('yes_im_sure')
ng-disabled="v1ImportForm.inflight || v1ImportForm.success"
)

@@ -114,9 +114,9 @@
) #{translate("delete_forever")}

.row.row-spaced
if noV1Connection
each warning in warnings
.col-xs-12
.alert.alert-warning No V1 Connection
.alert.alert-warning(role="alert")= warning

.col-xs-12
.card.card-thin.project-list-card

@@ -5,11 +5,10 @@
tooltip-append-to-body="true"
)
span
if settings.overleaf && settings.overleaf.host
a.projectName(
href=settings.overleaf.host + "/{{project.id}}"
stop-propagation="click"
) {{project.name}}
button.btn.btn-link.v1ProjectName(
ng-click="openV1ImportModal(project)"
stop-propagation="click"
) {{project.name}}

.col-xs-2
span.owner {{ownerName()}}
services/web/bin/acceptance_test (new executable file)

@@ -0,0 +1,4 @@
#!/bin/bash
set -e;
MOCHA="node_modules/.bin/mocha --recursive --reporter spec --timeout 15000"
$MOCHA "$@"

services/web/bin/compile_acceptance_tests (new executable file)

@@ -0,0 +1,16 @@
#!/bin/bash
set -e;

COFFEE=node_modules/.bin/coffee

echo Compiling test/acceptance/coffee;
$COFFEE -o test/acceptance/js -c test/acceptance/coffee;

for dir in modules/*;
do

if [ -d $dir/test/acceptance ]; then
echo Compiling $dir/test/acceptance/coffee;
$COFFEE -o $dir/test/acceptance/js -c $dir/test/acceptance/coffee;
fi
done

services/web/bin/compile_app (new executable file)

@@ -0,0 +1,23 @@
#!/bin/bash
set -e;

COFFEE=node_modules/.bin/coffee

echo Compiling app.coffee;
$COFFEE -c app.coffee;

echo Compiling app/coffee;
$COFFEE -o app/js -c app/coffee;

for dir in modules/*;
do
if [ -d $dir/app/coffee ]; then
echo Compiling $dir/app/coffee;
$COFFEE -o $dir/app/js -c $dir/app/coffee;
fi

if [ -e $dir/index.coffee ]; then
echo Compiling $dir/index.coffee;
$COFFEE -c $dir/index.coffee;
fi
done

services/web/bin/compile_unit_tests (new executable file)

@@ -0,0 +1,15 @@
#!/bin/bash
set -e;

COFFEE=node_modules/.bin/coffee

echo Compiling test/unit/coffee;
$COFFEE -o test/unit/js -c test/unit/coffee;

for dir in modules/*;
do
if [ -d $dir/test/unit ]; then
echo Compiling $dir/test/unit/coffee;
$COFFEE -o $dir/test/unit/js -c $dir/test/unit/coffee;
fi
done
services/web/bin/generate_volumes_file (new executable file)

@@ -0,0 +1,26 @@
#!/usr/bin/env python2

from os import listdir
from os.path import isfile, isdir, join

volumes = []

for module in listdir("modules/"):
    if module[0] != '.':
        if isfile(join("modules", module, 'index.coffee')):
            volumes.append(join("modules", module, 'index.coffee'))
        for directory in ['app/coffee', 'app/views', 'public/coffee', 'test/unit/coffee', 'test/acceptance/coffee', 'test/acceptance/config', 'test/acceptance/files']:
            if isdir(join("modules", module, directory)):
                volumes.append(join("modules", module, directory))

volumes_string = map(lambda vol: "- ./" + vol + ":/app/" + vol + ":ro", volumes)
volumes_string = "\n ".join(volumes_string)

with open("docker-shared.template.yml", "r") as f:
    docker_shared_file = f.read()

docker_shared_file = docker_shared_file.replace("MODULE_VOLUMES", volumes_string)

with open("docker-shared.yml", "w") as f:
    f.write(docker_shared_file)
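`bin/generate_volumes_file` is what turns the MODULE_VOLUMES placeholder in `docker-shared.template.yml` into concrete read-only volume mounts, one entry per directory found in each installed module. A sketch of running it and the kind of lines it emits (the module name is illustrative):

```
bin/generate_volumes_file
# docker-shared.yml now contains entries such as:
#   - ./modules/some-module/app/coffee:/app/modules/some-module/app/coffee:ro
#   - ./modules/some-module/test/unit/coffee:/app/modules/some-module/test/unit/coffee:ro
```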
services/web/bin/unit_test (new executable file)

@@ -0,0 +1,14 @@
#!/bin/bash
set -e;

MOCHA="node_modules/.bin/mocha --recursive --reporter spec"

$MOCHA "$@" test/unit/js

for dir in modules/*;
do
if [ -d $dir/test/unit/js ]; then
$MOCHA "$@" $dir/test/unit/js
fi
done
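Both `bin/unit_test` and `bin/acceptance_test` pass their arguments straight through to mocha via `"$@"`, so inside the container they accept the usual mocha filters. A usage sketch (the grep pattern is illustrative):

```
bin/compile_unit_tests                     # compile coffee into test/unit/js first
bin/unit_test --grep AuthorizationManager # then run a filtered subset
```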
@ -35,12 +35,12 @@ module.exports = settings =
|
|||
# Databases
|
||||
# ---------
|
||||
mongo:
|
||||
url : 'mongodb://127.0.0.1/sharelatex'
|
||||
url : process.env['MONGO_URL'] || "mongodb://127.0.0.1/sharelatex"
|
||||
|
||||
redis:
|
||||
web:
|
||||
host: "localhost"
|
||||
port: "6379"
|
||||
host: process.env['REDIS_HOST'] || "localhost"
|
||||
port: process.env['REDIS_PORT'] || "6379"
|
||||
password: ""
|
||||
|
||||
# websessions:
|
||||
|
@ -74,8 +74,8 @@ module.exports = settings =
|
|||
# ]
|
||||
|
||||
api:
|
||||
host: "localhost"
|
||||
port: "6379"
|
||||
host: process.env['REDIS_HOST'] || "localhost"
|
||||
port: process.env['REDIS_PORT'] || "6379"
|
||||
password: ""
|
||||
|
||||
# Service locations
|
||||
|
@ -87,6 +87,7 @@ module.exports = settings =
|
|||
internal:
|
||||
web:
|
||||
port: webPort = 3000
|
||||
host: process.env['LISTEN_ADDRESS'] or 'localhost'
|
||||
documentupdater:
|
||||
port: docUpdaterPort = 3003
|
||||
|
||||
|
@ -99,7 +100,7 @@ module.exports = settings =
|
|||
user: httpAuthUser
|
||||
pass: httpAuthPass
|
||||
documentupdater:
|
||||
url : "http://localhost:#{docUpdaterPort}"
|
||||
url : "http://#{process.env['DOCUPDATER_HOST'] or 'localhost'}:#{docUpdaterPort}"
|
||||
thirdPartyDataStore:
|
||||
url : "http://localhost:3002"
|
||||
emptyProjectFlushDelayMiliseconds: 5 * seconds
|
||||
|
@ -110,10 +111,10 @@ module.exports = settings =
|
|||
trackchanges:
|
||||
url : "http://localhost:3015"
|
||||
project_history:
|
||||
enabled: false
|
||||
enabled: process.env.PROJECT_HISTORY_ENABLED == 'true' or false
|
||||
url : "http://localhost:3054"
|
||||
docstore:
|
||||
url : "http://localhost:3016"
|
||||
url : "http://#{process.env['DOCSTORE_HOST'] or 'localhost'}:3016"
|
||||
pubUrl: "http://localhost:3016"
|
||||
chat:
|
||||
url: "http://localhost:3010"
|
||||
|
|
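For reference, a minimal sketch of overriding the newly environment-driven settings when running outside of docker-compose; the variable names are the ones introduced above, while the hostnames are purely illustrative:

  MONGO_URL="mongodb://mongo/sharelatex" \
  REDIS_HOST=redis REDIS_PORT=6379 \
  DOCUPDATER_HOST=docupdater DOCSTORE_HOST=docstore \
  LISTEN_ADDRESS=0.0.0.0 \
  PROJECT_HISTORY_ENABLED=true \
  npm run start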
37
services/web/docker-compose.yml
Normal file
|
@ -0,0 +1,37 @@
|
|||
version: "2"
|
||||
|
||||
volumes:
|
||||
node_modules:
|
||||
|
||||
services:
|
||||
npm:
|
||||
extends:
|
||||
file: docker-shared.yml
|
||||
service: app
|
||||
command: npm install
|
||||
|
||||
test_unit:
|
||||
extends:
|
||||
file: docker-shared.yml
|
||||
service: app
|
||||
command: npm run test:unit
|
||||
|
||||
test_acceptance:
|
||||
extends:
|
||||
file: docker-shared.yml
|
||||
service: app
|
||||
environment:
|
||||
REDIS_HOST: redis
|
||||
MONGO_URL: "mongodb://mongo/sharelatex"
|
||||
SHARELATEX_ALLOW_PUBLIC_ACCESS: 'true'
|
||||
PROJECT_HISTORY_ENABLED: 'true'
|
||||
depends_on:
|
||||
- redis
|
||||
- mongo
|
||||
command: npm run start
|
||||
|
||||
redis:
|
||||
image: redis
|
||||
|
||||
mongo:
|
||||
image: mongo:3.4.6
|
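A hedged sketch of driving these services locally, assuming docker-shared.yml has already been generated from the template by bin/generate_volumes_file:

  docker-compose run --rm npm          # populate the shared node_modules volume
  docker-compose run --rm test_unit    # compile and run the unit tests
  docker-compose up test_acceptance    # brings up redis and mongo, then starts the app for the acceptance suite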
31
services/web/docker-shared.template.yml
Normal file
|
@ -0,0 +1,31 @@
|
|||
version: "2"
|
||||
|
||||
# We mount each directory explicitly, and only the coffee directories, so
|
||||
# that the compiled js is written only inside the container and never back
|
||||
# to the local filesystem, where it would be root-owned and conflict with
|
||||
# working outside of the container.

|
||||
|
||||
services:
|
||||
app:
|
||||
image: node:6.9.5
|
||||
volumes:
|
||||
- ./package.json:/app/package.json
|
||||
- ./npm-shrinkwrap.json:/app/npm-shrinkwrap.json
|
||||
- node_modules:/app/node_modules
|
||||
- ./bin:/app/bin
|
||||
# Copying the whole public dir is fine for now, and needed for
|
||||
# some unit tests to pass, but we will want to isolate the coffee
|
||||
# and vendor js files, so that the compiled js files are not written
|
||||
# back to the local filesystem.
|
||||
- ./public:/app/public
|
||||
- ./app.coffee:/app/app.coffee:ro
|
||||
- ./app/coffee:/app/app/coffee:ro
|
||||
- ./app/templates:/app/app/templates:ro
|
||||
- ./app/views:/app/app/views:ro
|
||||
- ./config:/app/config
|
||||
- ./test/unit/coffee:/app/test/unit/coffee:ro
|
||||
- ./test/acceptance/coffee:/app/test/acceptance/coffee:ro
|
||||
- ./test/acceptance/files:/app/test/acceptance/files:ro
|
||||
- ./test/smoke/coffee:/app/test/smoke/coffee:ro
|
||||
MODULE_VOLUMES
|
||||
working_dir: /app
|
18
services/web/modules/.gitignore
vendored
|
@ -1,12 +1,6 @@
|
|||
*/app/js
|
||||
*/test/unit/js
|
||||
*/index.js
|
||||
ldap
|
||||
admin-panel
|
||||
groovehq
|
||||
launchpad
|
||||
learn-wiki
|
||||
references-search
|
||||
sharelatex-saml
|
||||
templates
|
||||
tpr-webmodule
|
||||
# Ignore all modules except for a whitelist
|
||||
*
|
||||
!dropbox
|
||||
!github-sync
|
||||
!public-registration
|
||||
!.gitignore
|
||||
|
|
|
@ -9,6 +9,17 @@
|
|||
"directories": {
|
||||
"public": "./public"
|
||||
},
|
||||
"scripts": {
|
||||
"test:acceptance:wait_for_app": "echo 'Waiting for app to be accessible' && while (! curl -s -o /dev/null localhost:3000/status) do sleep 1; done",
|
||||
"test:acceptance:run": "bin/acceptance_test $@",
|
||||
"test:acceptance:dir": "npm -q run compile:acceptance_tests && npm -q run test:acceptance:wait_for_app && npm -q run test:acceptance:run -- $@",
|
||||
"test:acceptance": "npm -q run test:acceptance:dir -- $@ test/acceptance/js",
|
||||
"test:unit": "npm -q run compile:app && npm -q run compile:unit_tests && bin/unit_test $@",
|
||||
"compile:unit_tests": "bin/compile_unit_tests",
|
||||
"compile:acceptance_tests": "bin/compile_acceptance_tests",
|
||||
"compile:app": "bin/compile_app",
|
||||
"start": "npm -q run compile:app && node app.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"archiver": "0.9.0",
|
||||
"async": "0.6.2",
|
||||
|
@ -98,6 +109,7 @@
|
|||
"grunt-postcss": "^0.8.0",
|
||||
"grunt-sed": "^0.1.1",
|
||||
"grunt-shell": "^2.1.0",
|
||||
"mkdirp": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz",
|
||||
"sandboxed-module": "0.2.0",
|
||||
"sinon": "^1.17.0",
|
||||
"timekeeper": "",
|
||||
|
|
|
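A rough sketch of how the new script entries chain together; the acceptance flow assumes the app is reachable on localhost:3000, since the wait_for_app helper polls /status there:

  npm -q run test:unit        # compile:app + compile:unit_tests + bin/unit_test
  npm -q run test:acceptance  # compile:acceptance_tests + wait_for_app + bin/acceptance_test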
@ -1,6 +1,6 @@
|
|||
define [
|
||||
"base"
|
||||
"libs/fineuploader"
|
||||
"fineuploader"
|
||||
], (App, qq) ->
|
||||
App.directive 'fineUpload', ($timeout) ->
|
||||
return {
|
||||
|
|
|
@ -15,6 +15,7 @@ define [
|
|||
"ide/metadata/services/metadata"
|
||||
"ide/graphics/services/graphics"
|
||||
"ide/preamble/services/preamble"
|
||||
"ide/files/services/files"
|
||||
], (App, Ace, SearchBox, ModeList, UndoManager, AutoCompleteManager, SpellCheckManager, HighlightsManager, CursorPositionManager, TrackChangesManager, MetadataManager, LabelsManager) ->
|
||||
EditSession = ace.require('ace/edit_session').EditSession
|
||||
ModeList = ace.require('ace/ext/modelist')
|
||||
|
@ -37,7 +38,7 @@ define [
|
|||
url = ace.config._moduleUrl(args...) + "?fingerprint=#{window.aceFingerprint}"
|
||||
return url
|
||||
|
||||
App.directive "aceEditor", ($timeout, $compile, $rootScope, event_tracking, localStorage, $cacheFactory, labels, metadata, graphics, preamble, $http, $q) ->
|
||||
App.directive "aceEditor", ($timeout, $compile, $rootScope, event_tracking, localStorage, $cacheFactory, labels, metadata, graphics, preamble, files, $http, $q) ->
|
||||
monkeyPatchSearch($rootScope, $compile)
|
||||
|
||||
return {
|
||||
|
@ -106,7 +107,7 @@ define [
|
|||
trackChangesManager = new TrackChangesManager(scope, editor, element)
|
||||
labelsManager = new LabelsManager(scope, editor, element, labels)
|
||||
metadataManager = new MetadataManager(scope, editor, element, metadata)
|
||||
autoCompleteManager = new AutoCompleteManager(scope, editor, element, metadataManager, labelsManager, graphics, preamble)
|
||||
autoCompleteManager = new AutoCompleteManager(scope, editor, element, metadataManager, labelsManager, graphics, preamble, files)
|
||||
|
||||
|
||||
# Prevent Ctrl|Cmd-S from triggering save dialog
|
||||
|
|
|
@ -10,7 +10,7 @@ define [
|
|||
aceSnippetManager = ace.require('ace/snippets').snippetManager
|
||||
|
||||
class AutoCompleteManager
|
||||
constructor: (@$scope, @editor, @element, @metadataManager, @labelsManager, @graphics, @preamble) ->
|
||||
constructor: (@$scope, @editor, @element, @metadataManager, @labelsManager, @graphics, @preamble, @files) ->
|
||||
|
||||
@monkeyPatchAutocomplete()
|
||||
|
||||
|
@ -41,6 +41,8 @@ define [
|
|||
|
||||
Graphics = @graphics
|
||||
Preamble = @preamble
|
||||
Files = @files
|
||||
|
||||
GraphicsCompleter =
|
||||
getCompletions: (editor, session, pos, prefix, callback) ->
|
||||
context = Helpers.getContext(editor, pos)
|
||||
|
@ -67,6 +69,27 @@ define [
|
|||
callback null, result
|
||||
|
||||
metadataManager = @metadataManager
|
||||
FilesCompleter =
|
||||
getCompletions: (editor, session, pos, prefix, callback) =>
|
||||
context = Helpers.getContext(editor, pos)
|
||||
{lineUpToCursor, commandFragment, lineBeyondCursor, needsClosingBrace} = context
|
||||
if commandFragment
|
||||
match = commandFragment.match(/^\\(input|include){(\w*)/)
|
||||
if match
|
||||
commandName = match[1]
|
||||
currentArg = match[2]
|
||||
result = []
|
||||
for file in Files.getTeXFiles()
|
||||
if file.id != @$scope.docId
|
||||
path = file.path
|
||||
result.push {
|
||||
caption: "\\#{commandName}{#{path}#{if needsClosingBrace then '}' else ''}",
|
||||
value: "\\#{commandName}{#{path}#{if needsClosingBrace then '}' else ''}",
|
||||
meta: "file",
|
||||
score: 50
|
||||
}
|
||||
callback null, result
|
||||
|
||||
LabelsCompleter =
|
||||
getCompletions: (editor, session, pos, prefix, callback) ->
|
||||
context = Helpers.getContext(editor, pos)
|
||||
|
@ -136,6 +159,7 @@ define [
|
|||
ReferencesCompleter
|
||||
LabelsCompleter
|
||||
GraphicsCompleter
|
||||
FilesCompleter
|
||||
]
|
||||
|
||||
disable: () ->
|
||||
|
|
|
@ -29,7 +29,7 @@ define () ->
|
|||
|
||||
packageSnippets.push {
|
||||
caption: "\\usepackage{}"
|
||||
snippet: "\\usepackage{}"
|
||||
snippet: "\\usepackage{$1}"
|
||||
meta: "pkg"
|
||||
score: 70
|
||||
}
|
||||
|
|
|
@ -22,7 +22,7 @@ define [
|
|||
end = change.end
|
||||
range = new Range(end.row, 0, end.row, end.column)
|
||||
lineUpToCursor = @editor.getSession().getTextRange range
|
||||
if lineUpToCursor.trim() == '%' or lineUpToCursor.startsWith '\\'
|
||||
if lineUpToCursor.trim() == '%' or lineUpToCursor.slice(0, 1) == '\\'
|
||||
range = new Range(end.row, 0, end.row, end.column + 80)
|
||||
lineUpToCursor = @editor.getSession().getTextRange range
|
||||
commandFragment = getLastCommandFragment lineUpToCursor
|
||||
|
@ -44,9 +44,9 @@ define [
|
|||
linesContainLabel or
|
||||
linesContainReqPackage
|
||||
|
||||
lastCommandFragmentIsLabel = commandFragment?.startsWith '\\label{'
|
||||
lastCommandFragmentIsPackage = commandFragment?.startsWith '\\usepackage'
|
||||
lastCommandFragmentIsReqPack = commandFragment?.startsWith '\\RequirePackage'
|
||||
lastCommandFragmentIsLabel = commandFragment?.slice(0, 7) == '\\label{'
|
||||
lastCommandFragmentIsPackage = commandFragment?.slice(0, 11) == '\\usepackage'
|
||||
lastCommandFragmentIsReqPack = commandFragment?.slice(0, 15) == '\\RequirePackage'
|
||||
lastCommandFragmentIsMeta =
|
||||
lastCommandFragmentIsPackage or
|
||||
lastCommandFragmentIsLabel or
|
||||
|
|
|
@ -13,15 +13,15 @@ define [
|
|||
@loadRootFolder()
|
||||
@loadDeletedDocs()
|
||||
@$scope.$emit "file-tree:initialized"
|
||||
|
||||
|
||||
@$scope.$watch "rootFolder", (rootFolder) =>
|
||||
if rootFolder?
|
||||
@recalculateDocList()
|
||||
|
||||
@_bindToSocketEvents()
|
||||
|
||||
|
||||
@$scope.multiSelectedCount = 0
|
||||
|
||||
|
||||
$(document).on "click", =>
|
||||
@clearMultiSelectedEntities()
|
||||
$scope.$digest()
|
||||
|
@ -46,7 +46,7 @@ define [
|
|||
type: "file"
|
||||
}
|
||||
@recalculateDocList()
|
||||
|
||||
|
||||
@ide.socket.on "reciveNewFolder", (parent_folder_id, folder) =>
|
||||
parent_folder = @findEntityById(parent_folder_id) or @$scope.rootFolder
|
||||
@$scope.$apply () =>
|
||||
|
@ -85,25 +85,25 @@ define [
|
|||
@ide.fileTreeManager.forEachEntity (entity) ->
|
||||
entity.selected = false
|
||||
entity.selected = true
|
||||
|
||||
|
||||
toggleMultiSelectEntity: (entity) ->
|
||||
entity.multiSelected = !entity.multiSelected
|
||||
@$scope.multiSelectedCount = @multiSelectedCount()
|
||||
|
||||
|
||||
multiSelectedCount: () ->
|
||||
count = 0
|
||||
@forEachEntity (entity) ->
|
||||
if entity.multiSelected
|
||||
count++
|
||||
return count
|
||||
|
||||
|
||||
getMultiSelectedEntities: () ->
|
||||
entities = []
|
||||
@forEachEntity (e) ->
|
||||
if e.multiSelected
|
||||
entities.push e
|
||||
return entities
|
||||
|
||||
|
||||
getMultiSelectedEntityChildNodes: () ->
|
||||
entities = @getMultiSelectedEntities()
|
||||
paths = {}
|
||||
|
@ -125,13 +125,13 @@ define [
|
|||
if !prefixes[path]?
|
||||
child_entities.push entity
|
||||
return child_entities
|
||||
|
||||
|
||||
clearMultiSelectedEntities: () ->
|
||||
return if @$scope.multiSelectedCount == 0 # Be efficient, this is called a lot on 'click'
|
||||
@forEachEntity (entity) ->
|
||||
entity.multiSelected = false
|
||||
@$scope.multiSelectedCount = 0
|
||||
|
||||
|
||||
multiSelectSelectedEntity: () ->
|
||||
@findSelectedEntity()?.multiSelected = true
|
||||
|
||||
|
@ -140,7 +140,7 @@ define [
|
|||
return false if !folder?
|
||||
entity = @_findEntityByPathInFolder(folder, name)
|
||||
return entity?
|
||||
|
||||
|
||||
findSelectedEntity: () ->
|
||||
selected = null
|
||||
@forEachEntity (entity) ->
|
||||
|
@ -178,7 +178,7 @@ define [
|
|||
parts = path.split("/")
|
||||
name = parts.shift()
|
||||
rest = parts.join("/")
|
||||
|
||||
|
||||
if name == "."
|
||||
return @_findEntityByPathInFolder(folder, rest)
|
||||
|
||||
|
@ -268,7 +268,7 @@ define [
|
|||
type: "doc"
|
||||
deleted: true
|
||||
}
|
||||
|
||||
|
||||
recalculateDocList: () ->
|
||||
@$scope.docs = []
|
||||
@forEachEntity (entity, parentFolder, path) =>
|
||||
|
@ -287,7 +287,7 @@ define [
|
|||
return -1
|
||||
else
|
||||
return 1
|
||||
|
||||
|
||||
getEntityPath: (entity) ->
|
||||
@_getEntityPathInFolder @$scope.rootFolder, entity
|
||||
|
||||
|
@ -349,7 +349,7 @@ define [
|
|||
}
|
||||
|
||||
deleteEntity: (entity, callback = (error) ->) ->
|
||||
# We'll wait for the socket.io notification to
|
||||
# We'll wait for the socket.io notification to
|
||||
# delete from scope.
|
||||
return @ide.queuedHttp {
|
||||
method: "DELETE"
|
||||
|
@ -367,7 +367,7 @@ define [
|
|||
folder_id: parent_folder.id
|
||||
_csrf: window.csrfToken
|
||||
}
|
||||
|
||||
|
||||
_isChildFolder: (parent_folder, child_folder) ->
|
||||
parent_path = @getEntityPath(parent_folder) or "" # null if root folder
|
||||
child_path = @getEntityPath(child_folder) or "" # null if root folder
|
||||
|
|
17
services/web/public/coffee/ide/files/services/files.coffee
Normal file
|
@ -0,0 +1,17 @@
|
|||
define [
|
||||
"base"
|
||||
], (App) ->
|
||||
|
||||
App.factory 'files', (ide) ->
|
||||
|
||||
Files =
|
||||
getTeXFiles: () ->
|
||||
texFiles = []
|
||||
ide.fileTreeManager.forEachEntity (entity, folder, path) ->
|
||||
if entity.type == 'doc' && entity?.name?.match?(/.*\.(tex|txt|md)/)
|
||||
cloned = _.clone(entity)
|
||||
cloned.path = path
|
||||
texFiles.push cloned
|
||||
return texFiles
|
||||
|
||||
return Files
|
|
@ -1,12 +1,10 @@
|
|||
define [
|
||||
"moment"
|
||||
"libs/angular-autocomplete/angular-autocomplete"
|
||||
"libs/ui-bootstrap"
|
||||
"libs/ng-context-menu-0.1.4"
|
||||
"libs/underscore-1.3.3"
|
||||
"libs/algolia-2.5.2"
|
||||
"libs/jquery.storage"
|
||||
"libs/fineuploader"
|
||||
"libs/angular-sanitize-1.2.17"
|
||||
"libs/angular-cookie"
|
||||
"libs/passfield"
|
||||
|
|
|
@ -123,3 +123,13 @@ define [
|
|||
$scope.onComplete = (error, name, response) ->
|
||||
if response.project_id?
|
||||
window.location = '/project/' + response.project_id
|
||||
|
||||
App.controller 'V1ImportModalController', ($scope, $modalInstance, project) ->
|
||||
$scope.project = project
|
||||
$scope.step = 1
|
||||
|
||||
$scope.dismiss = () ->
|
||||
$modalInstance.dismiss('cancel')
|
||||
|
||||
$scope.moveToConfirmation = () ->
|
||||
$scope.step = 2
|
||||
|
|
|
@ -442,6 +442,16 @@ define [
|
|||
path = "/project/#{selected_project_ids[0]}/download/zip"
|
||||
|
||||
window.location = path
|
||||
|
||||
$scope.openV1ImportModal = (project) ->
|
||||
$modal.open(
|
||||
templateUrl: 'v1ImportModalTemplate'
|
||||
controller: 'V1ImportModalController'
|
||||
size: 'lg'
|
||||
windowClass: 'v1-import-modal'
|
||||
resolve:
|
||||
project: () -> project
|
||||
)
|
||||
|
||||
if storedUIOpts?.filter?
|
||||
if storedUIOpts.filter == "tag" and storedUIOpts.selectedTagId?
|
||||
|
|
|
@ -1,18 +1 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 21.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 136 157" style="enable-background:new 0 0 136 157;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#9B9B9B;}
|
||||
</style>
|
||||
<g id="Page-1">
|
||||
<g id="overleaf">
|
||||
<g id="Group">
|
||||
<path id="Fill-1" class="st0" d="M37.2,39.7C14.8,54,0,77.3,0,102.3C0,132.5,24.5,157,54.7,157c30.2,0,54.7-24.5,54.7-54.7
|
||||
c0-23.3-14.6-43.3-35.2-51.1c-4-1.5-12.6-4.2-19.4-3.6c-9.8,6.2-21.8,19-27.4,31.8c8.4-10.1,21.5-14.5,33.2-12.6
|
||||
c17.1,2.8,30.2,17.6,30.2,35.6c0,19.9-16.1,36-36,36c-11,0-20.8-4.9-27.4-12.6C17.5,114.3,15,101.9,17,89.8
|
||||
c6.9-42.4,57.2-66.5,94.6-75.8C99.4,20.5,77.4,31.1,62,42.6c44.9,17.3,52.2-20.5,73.2-37.5C114-3.1,37.3-6.1,37.2,39.7z"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 136 157"><path d="M37.2 39.7C14.8 54 0 77.3 0 102.3 0 132.5 24.5 157 54.7 157c30.2 0 54.7-24.5 54.7-54.7 0-23.3-14.6-43.3-35.2-51.1-4-1.5-12.6-4.2-19.4-3.6-9.8 6.2-21.8 19-27.4 31.8 8.4-10.1 21.5-14.5 33.2-12.6 17.1 2.8 30.2 17.6 30.2 35.6 0 19.9-16.1 36-36 36-11 0-20.8-4.9-27.4-12.6-9.9-11.5-12.4-23.9-10.4-36 6.9-42.4 57.2-66.5 94.6-75.8C99.4 20.5 77.4 31.1 62 42.6c44.9 17.3 52.2-20.5 73.2-37.5-21.2-8.2-97.9-11.2-98 34.6z" fill="#9b9b9b"/></svg>
|
Before Width: | Height: | Size: 947 B After Width: | Height: | Size: 503 B |
1
services/web/public/img/ol-brand/overleaf-o-white.svg
Normal file
|
@ -0,0 +1 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 136 157"><path d="M37.2 39.7C14.8 54 0 77.3 0 102.3 0 132.5 24.5 157 54.7 157s54.7-24.5 54.7-54.7c0-23.3-14.6-43.3-35.2-51.1-4-1.5-12.6-4.2-19.4-3.6-9.8 6.2-21.8 19-27.4 31.8 8.4-10.1 21.5-14.5 33.2-12.6 17.1 2.8 30.2 17.6 30.2 35.6 0 19.9-16.1 36-36 36-11 0-20.8-4.9-27.4-12.6-9.9-11.5-12.4-23.9-10.4-36 6.9-42.4 57.2-66.5 94.6-75.8C99.4 20.5 77.4 31.1 62 42.6c44.9 17.3 52.2-20.5 73.2-37.5-21.2-8.2-97.9-11.2-98 34.6z" fill="#fff"/></svg>
|
After Width: | Height: | Size: 493 B |
|
@ -1,10 +1 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg width="136px" height="157px" viewBox="0 0 136 157" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
|
||||
<g id="overleaf" fill="#4F9C45">
|
||||
<g id="Group">
|
||||
<path d="M37.205,39.652 C14.822,53.982 0,77.339 0,102.326 C0,132.522 24.48,157 54.681,157 C84.879,157 109.355,132.522 109.355,102.326 C109.355,78.986 94.729,59.05 74.151,51.215 C70.193,49.71 61.595,47.002 54.73,47.58 C44.924,53.814 32.979,66.624 27.319,79.389 C35.735,69.296 48.856,64.901 60.489,66.77 C77.615,69.547 90.697,84.408 90.697,102.321 C90.697,122.217 74.571,138.342 54.681,138.342 C43.719,138.342 33.896,133.446 27.293,125.723 C17.516,114.299 15,101.91 16.975,89.809 C23.902,47.434 74.208,23.279 111.611,14.01 C99.404,20.468 77.384,31.084 61.985,42.64 C106.909,59.981 114.169,22.123 135.202,5.181 C114.038,-3.07 37.33,-6.117 37.205,39.652 Z" id="Fill-1"></path>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
<svg width="136" height="157" xmlns="http://www.w3.org/2000/svg"><path d="M37.205 39.652C14.822 53.982 0 77.339 0 102.326 0 132.522 24.48 157 54.681 157c30.198 0 54.674-24.478 54.674-54.674 0-23.34-14.626-43.276-35.204-51.111-3.958-1.505-12.556-4.213-19.421-3.635-9.806 6.234-21.751 19.044-27.411 31.809 8.416-10.093 21.537-14.488 33.17-12.619 17.126 2.777 30.208 17.638 30.208 35.551 0 19.896-16.126 36.021-36.016 36.021-10.962 0-20.785-4.896-27.388-12.619C17.516 114.299 15 101.91 16.975 89.809c6.927-42.375 57.233-66.53 94.636-75.799-12.207 6.458-34.227 17.074-49.626 28.63 44.924 17.341 52.184-20.517 73.217-37.459C114.038-3.07 37.33-6.117 37.205 39.652z" fill="#4F9C45" fill-rule="evenodd"/></svg>
|
Before Width: | Height: | Size: 1 KiB After Width: | Height: | Size: 702 B |
BIN
services/web/public/img/v1-import/v2-editor.png
Normal file
After Width: | Height: | Size: 205 KiB |
7455
services/web/public/js/libs/fineuploader-5.15.4.js
Normal file
|
@ -2,7 +2,7 @@
|
|||
padding: (@line-height-computed / 4) (@line-height-computed / 2);
|
||||
background-color: @state-warning-bg;
|
||||
color: #333;
|
||||
border-bottom: 1px solid @toolbar-border-color;
|
||||
border-bottom: 1px solid @common-border-color;
|
||||
}
|
||||
|
||||
.clickable {
|
||||
|
|
|
@ -26,6 +26,20 @@
|
|||
}
|
||||
}
|
||||
|
||||
.editor-menu-icon when (@is-overleaf = true) {
|
||||
&.fa {
|
||||
width: 1em;
|
||||
background: url(/img/ol-brand/overleaf-o-white.svg) center / contain no-repeat;
|
||||
|
||||
&::before {
|
||||
// Disable the font-awesome icon when in Overleaf by replacing it with a
|
||||
// non-breakable space instead (otherwise the browser would render a
|
||||
// zero-width element).
|
||||
content: "\00a0";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.full-size {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
|
@ -250,8 +264,8 @@
|
|||
.ui-layout-resizer {
|
||||
width: 6px;
|
||||
background-color: #f4f4f4;
|
||||
border-left: 1px solid @toolbar-border-color;
|
||||
border-right: 1px solid @toolbar-border-color;
|
||||
border-left: 1px solid @editor-border-color;
|
||||
border-right: 1px solid @editor-border-color;
|
||||
.ui-layout-toggler {
|
||||
color: #999;
|
||||
font-family: FontAwesome;
|
||||
|
|
|
@ -126,12 +126,12 @@
|
|||
height: @new-message-height;
|
||||
background-color: @gray-lightest;
|
||||
padding: @line-height-computed / 4;
|
||||
border-top: 1px solid @toolbar-border-color;
|
||||
border-top: 1px solid @editor-border-color;
|
||||
textarea {
|
||||
overflow: auto;
|
||||
resize: none;
|
||||
border-radius: @border-radius-base;
|
||||
border: 1px solid @toolbar-border-color;
|
||||
border: 1px solid @editor-border-color;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
color: @gray-dark;
|
||||
|
|
|
@ -1,5 +1,20 @@
|
|||
.fake-full-width-bg(@bg-color) {
|
||||
&::before {
|
||||
content: '\00a0';
|
||||
position: absolute;
|
||||
width: 100%;
|
||||
right: 100%;
|
||||
background-color: @bg-color;
|
||||
}
|
||||
}
|
||||
|
||||
aside#file-tree {
|
||||
#file-tree {
|
||||
.toolbar.toolbar-filetree {
|
||||
.toolbar-small-mixin;
|
||||
.toolbar-alt-mixin;
|
||||
padding: 0 5px;
|
||||
}
|
||||
|
||||
.file-tree-inner {
|
||||
position: absolute;
|
||||
top: 32px;
|
||||
|
@ -7,12 +22,14 @@ aside#file-tree {
|
|||
left: 0;
|
||||
right: 0;
|
||||
overflow-y: auto;
|
||||
background-color: @file-tree-bg;
|
||||
|
||||
&.no-toolbar {
|
||||
top: 0;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Consolidate with "Project files" in Overleaf
|
||||
h3 {
|
||||
font-size: 1rem;
|
||||
border-bottom: 1px solid @gray;
|
||||
|
@ -20,10 +37,13 @@ aside#file-tree {
|
|||
margin: (@line-height-computed / 2);
|
||||
}
|
||||
|
||||
ul.file-tree-list {
|
||||
ul.file-tree-list when (@is-overleaf = false) {
|
||||
font-size: 0.8rem;
|
||||
margin: 0;
|
||||
padding: (@line-height-computed / 4) 0;
|
||||
}
|
||||
|
||||
ul.file-tree-list {
|
||||
margin: 0;
|
||||
overflow-x: hidden;
|
||||
height: 100%;
|
||||
|
||||
|
@ -32,7 +52,7 @@ aside#file-tree {
|
|||
}
|
||||
|
||||
li {
|
||||
line-height: 2.6;
|
||||
line-height: @file-tree-line-height;
|
||||
position: relative;
|
||||
|
||||
.entity {
|
||||
|
@ -40,29 +60,42 @@ aside#file-tree {
|
|||
}
|
||||
|
||||
.entity-name {
|
||||
color: @gray-darker;
|
||||
color: @file-tree-item-color;
|
||||
cursor: pointer;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
&:hover {
|
||||
background-color: @gray-lightest;
|
||||
background-color: @file-tree-item-hover-bg;
|
||||
}
|
||||
&:hover when (@is-overleaf = true) {
|
||||
// When the entity is a subfolder, the DOM element is "indented" via margin-left. This makes the
|
||||
// element not fill the entire file-tree width (as it's spaced from the left-hand side via margin)
|
||||
// and, in consequence, the background gets clipped. The ::before pseudo-selector is used to fill
|
||||
// the empty space.
|
||||
.fake-full-width-bg(@file-tree-item-hover-bg);
|
||||
}
|
||||
input {
|
||||
line-height: 1.6;
|
||||
}
|
||||
&.droppable-hover {
|
||||
background-color: fade(@file-tree-droppable-background-color, 60%);
|
||||
&.droppable-hover when (@is-overleaf = false) {
|
||||
background-color: fade(@file-tree-droppable-bg-color, 60%);
|
||||
}
|
||||
|
||||
&.droppable-hover when (@is-overleaf = true) {
|
||||
background-color: @file-tree-droppable-bg-color;
|
||||
.fake-full-width-bg(@file-tree-droppable-bg-color);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
i.fa {
|
||||
color: @gray-light;
|
||||
color: @file-tree-item-icon-color;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
i.fa-folder-open, i.fa-folder {
|
||||
color: lighten(desaturate(@link-color, 10%), 5%);
|
||||
color: @file-tree-item-folder-color;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
|
@ -70,14 +103,31 @@ aside#file-tree {
|
|||
width: 24px;
|
||||
padding: 6px;
|
||||
font-size: 0.7rem;
|
||||
color: @gray
|
||||
color: @file-tree-item-toggle-color;
|
||||
}
|
||||
|
||||
&.multi-selected {
|
||||
> .entity > .entity-name {
|
||||
background-color: lighten(@brand-info, 40%);
|
||||
> .entity > .entity-name when (@is-overleaf = false) {
|
||||
background-color: @file-tree-multiselect-bg;
|
||||
&:hover {
|
||||
background-color: lighten(@brand-info, 30%);
|
||||
background-color: @file-tree-multiselect-hover-bg;
|
||||
}
|
||||
}
|
||||
> .entity when (@is-overleaf = true) {
|
||||
> .entity-name {
|
||||
> div > i.fa,
|
||||
> i.fa,
|
||||
.entity-menu-toggle i.fa {
|
||||
color: #FFF;
|
||||
}
|
||||
color: #FFF;
|
||||
font-weight: bold;
|
||||
background-color: @file-tree-multiselect-bg;
|
||||
.fake-full-width-bg(@file-tree-multiselect-bg);
|
||||
&:hover {
|
||||
background-color: @file-tree-multiselect-hover-bg;
|
||||
.fake-full-width-bg(@file-tree-multiselect-hover-bg);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -96,6 +146,7 @@ aside#file-tree {
|
|||
top: 1px;
|
||||
left: 44px;
|
||||
right: 32px;
|
||||
color: @file-tree-item-input-color;
|
||||
input {
|
||||
width: 100%;
|
||||
}
|
||||
|
@ -111,7 +162,7 @@ aside#file-tree {
|
|||
|
||||
&:not(.multi-selected) {
|
||||
ul.file-tree-list li.selected {
|
||||
> .entity > .entity-name {
|
||||
> .entity > .entity-name when (@is-overleaf = false) {
|
||||
color: @link-color;
|
||||
border-right: 4px solid @link-color;
|
||||
font-weight: bold;
|
||||
|
@ -123,15 +174,38 @@ aside#file-tree {
|
|||
display: inline;
|
||||
}
|
||||
}
|
||||
|
||||
> .entity when (@is-overleaf = true) {
|
||||
> .entity-name {
|
||||
> div > i.fa,
|
||||
> i.fa,
|
||||
.entity-menu-toggle i.fa {
|
||||
color: #FFF;
|
||||
}
|
||||
background-color: @file-tree-item-selected-bg;
|
||||
font-weight: bold;
|
||||
padding-right: 32px;
|
||||
.fake-full-width-bg(@file-tree-item-selected-bg);
|
||||
|
||||
.entity-menu-toggle {
|
||||
display: inline;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ul.droppable-hover {
|
||||
background-color: fade(@file-tree-droppable-background-color, 60%);
|
||||
ul.droppable-hover.file-tree-list when (@is-overleaf = false) {
|
||||
background-color: fade(@file-tree-droppable-bg-color, 60%);
|
||||
}
|
||||
ul.droppable-hover.file-tree-list when (@is-overleaf = true) {
|
||||
background-color: @file-tree-droppable-bg-color;
|
||||
.fake-full-width-bg(@file-tree-droppable-bg-color);
|
||||
}
|
||||
}
|
||||
|
||||
.editor-dark {
|
||||
// TODO check if the OL Beta theme is OK with darker themes.
|
||||
.editor-dark when (@is-overleaf = false) {
|
||||
aside#file-tree {
|
||||
// background-color: lighten(@editor-dark-background-color, 10%);
|
||||
|
||||
|
@ -159,6 +233,4 @@ aside#file-tree {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -70,7 +70,7 @@
|
|||
}
|
||||
|
||||
aside.change-list {
|
||||
border-left: 1px solid @toolbar-border-color;
|
||||
border-left: 1px solid @editor-border-color;
|
||||
height: 100%;
|
||||
width: @changesListWidth;
|
||||
position: absolute;
|
||||
|
@ -91,7 +91,7 @@
|
|||
|
||||
.day {
|
||||
background-color: #fafafa;
|
||||
border-bottom: 1px solid @toolbar-border-color;
|
||||
border-bottom: 1px solid @editor-border-color;
|
||||
padding: 4px;
|
||||
font-weight: bold;
|
||||
text-align: center;
|
||||
|
@ -132,7 +132,7 @@
|
|||
padding: (@line-height-computed / 4);
|
||||
padding-left: 38px;
|
||||
min-height: 38px;
|
||||
border-bottom: 1px solid @toolbar-border-color;
|
||||
border-bottom: 1px solid @editor-border-color;
|
||||
cursor: pointer;
|
||||
&:hover {
|
||||
background-color: @gray-lightest;
|
||||
|
|
|
@ -1,3 +1,15 @@
|
|||
.pdf .toolbar.toolbar-pdf when (@is-overleaf = true) {
|
||||
.toolbar-small-mixin;
|
||||
.toolbar-alt-mixin;
|
||||
border-bottom: 0;
|
||||
padding-right: 5px;
|
||||
}
|
||||
|
||||
.pdf .toolbar.toolbar-pdf when (@is-overleaf = false) {
|
||||
.toolbar-tall-mixin;
|
||||
padding: 0 (@line-height-computed / 2);
|
||||
}
|
||||
|
||||
.pdf-viewer, .pdf-logs, .pdf-errors, .pdf-uncompiled {
|
||||
.full-size;
|
||||
top: 58px;
|
||||
|
@ -13,6 +25,42 @@
|
|||
}
|
||||
}
|
||||
|
||||
.btn-recompile-group when (@is-overleaf = true) {
|
||||
align-self: stretch;
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
.btn-recompile-group when (@is-overleaf = false) {
|
||||
margin-right: (@line-height-computed / 2);
|
||||
}
|
||||
|
||||
|
||||
.btn-recompile when (@is-overleaf = true) {
|
||||
height: 100%;
|
||||
.btn-primary;
|
||||
padding-top: 3px;
|
||||
padding-bottom: 3px;
|
||||
&:first-child {
|
||||
border-top-left-radius: 0;
|
||||
border-bottom-left-radius: 0;
|
||||
}
|
||||
}
|
||||
|
||||
.btn-recompile when (@is-overleaf = false) {
|
||||
.btn-info;
|
||||
}
|
||||
|
||||
.btn-split-screen when (@is-overleaf = false) {
|
||||
.fa {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
.btn-split-screen when (@is-overleaf = true) {
|
||||
.fa {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
.pdf-viewer {
|
||||
iframe {
|
||||
width: 100%;
|
||||
|
@ -82,31 +130,43 @@
|
|||
|
||||
.pdf .toolbar {
|
||||
.toolbar-right {
|
||||
margin-right: @line-height-computed / 2;
|
||||
a {
|
||||
&:hover {
|
||||
i {
|
||||
i when (@is-overleaf = false) {
|
||||
box-shadow: 1px 1px 0 rgba(0, 0, 0, 0.25);
|
||||
border-color: @gray-dark;
|
||||
}
|
||||
}
|
||||
i {
|
||||
i when (@is-overleaf = false) {
|
||||
display: inline-block;
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
border: 1px solid @gray-light;
|
||||
margin-top: 5px;
|
||||
}
|
||||
|
||||
i.full-screen {
|
||||
border-top-width: 3px;
|
||||
border-radius: 2px;
|
||||
}
|
||||
|
||||
i.full-screen when (@is-overleaf = true) {
|
||||
display: none;
|
||||
}
|
||||
i.split-screen {
|
||||
width: 7px;
|
||||
border-top-width: 3px;
|
||||
border-radius: 2px;
|
||||
margin-left: 2px;
|
||||
}
|
||||
|
||||
i.split-screen when (@is-overleaf = true) {
|
||||
display: none;
|
||||
}
|
||||
|
||||
i.fa when (@is-overleaf = false) {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -902,6 +902,13 @@
|
|||
}
|
||||
}
|
||||
|
||||
.review-icon when (@is-overleaf) {
|
||||
background-position-y: -60px;
|
||||
.toolbar .btn-full-height:hover & {
|
||||
background-position-y: -60px;
|
||||
}
|
||||
}
|
||||
|
||||
.resolved-comments-toggle {
|
||||
font-size: 14px;
|
||||
color: lighten(@rp-type-blue, 25%);
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
.ace_search {
|
||||
background-color: @gray-lightest;
|
||||
border: 1px solid @toolbar-border-color;
|
||||
border: 1px solid @editor-border-color;
|
||||
border-top: 0 none;
|
||||
width: 350px;
|
||||
overflow: hidden;
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
.toolbar {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
height: 40px;
|
||||
border-bottom: 1px solid @toolbar-border-color;
|
||||
border-bottom: @toolbar-border-bottom;
|
||||
|
||||
> a, .toolbar-right > a {
|
||||
position: relative;
|
||||
|
@ -22,13 +24,16 @@
|
|||
.toolbar-left > a:not(.btn),
|
||||
.toolbar-right > a:not(.btn) {
|
||||
display: inline-block;
|
||||
color: @gray-light;
|
||||
padding: 4px 10px 5px;
|
||||
margin: 1px 2px;
|
||||
color: @toolbar-icon-btn-color;
|
||||
padding: 0 5px;
|
||||
border-radius: @border-radius-small;
|
||||
&.toolbar-header-back-projects {
|
||||
padding: 5px 10px 4px;
|
||||
margin-bottom: 1px;
|
||||
}
|
||||
&:hover {
|
||||
text-shadow: 0 1px 0 rgba(0, 0, 0, 0.25);
|
||||
color: @gray-dark;
|
||||
text-shadow: @toolbar-icon-btn-hover-shadow;
|
||||
color: @toolbar-icon-btn-hover-color;
|
||||
text-decoration: none;
|
||||
}
|
||||
&.active, &:active {
|
||||
|
@ -37,7 +42,7 @@
|
|||
}
|
||||
color: white;
|
||||
background-color: @link-color;
|
||||
.box-shadow(inset 0 3px 5px rgba(0, 0, 0, 0.225));
|
||||
box-shadow: @toolbar-icon-btn-hover-boxshadow;
|
||||
&:hover {
|
||||
color: white;
|
||||
}
|
||||
|
@ -48,18 +53,18 @@
|
|||
border: none;
|
||||
border-radius: 0;
|
||||
border-right: 1px solid @toolbar-border-color;
|
||||
color: @link-color;
|
||||
color: @toolbar-btn-color;
|
||||
padding: 3px 10px 5px;
|
||||
font-size: 20px;
|
||||
&:hover {
|
||||
text-shadow: 0 1px 0 rgba(0, 0, 0, 0.15);
|
||||
background-color: darken(white, 10%);
|
||||
color: @link-hover-color;
|
||||
text-shadow: @toolbar-btn-hover-text-shadow;
|
||||
background-color: @toolbar-btn-hover-bg-color;
|
||||
color: @toolbar-btn-hover-color;
|
||||
}
|
||||
&.active, &:active {
|
||||
color: white;
|
||||
background-color: @link-color;
|
||||
.box-shadow(inset 0 3px 5px rgba(0, 0, 0, 0.225));
|
||||
color: @toolbar-btn-active-color;
|
||||
background-color: @toolbar-btn-active-bg-color;
|
||||
box-shadow: @toolbar-btn-active-shadow;
|
||||
}
|
||||
.label {
|
||||
top: 4px;
|
||||
|
@ -72,12 +77,17 @@
|
|||
}
|
||||
|
||||
.toolbar-left {
|
||||
display: flex;
|
||||
float: left;
|
||||
text-align: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.toolbar-right {
|
||||
float: right;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
flex-grow: 1;
|
||||
justify-content: flex-end;
|
||||
.btn-full-height {
|
||||
border-right: 0;
|
||||
border-left: 1px solid @toolbar-border-color;
|
||||
|
@ -96,7 +106,8 @@
|
|||
}
|
||||
|
||||
&.toolbar-header {
|
||||
box-shadow: 0 0 2px #ccc;
|
||||
background-color: @toolbar-header-bg-color;
|
||||
box-shadow: @toolbar-header-shadow;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
|
@ -105,43 +116,30 @@
|
|||
}
|
||||
|
||||
&.toolbar-small {
|
||||
height: 32px;
|
||||
> a, .toolbar-right > a {
|
||||
padding: 2px 4px 1px 4px;
|
||||
margin: 0;
|
||||
margin-top: 2px;
|
||||
}
|
||||
> a {
|
||||
margin-left: 2px;
|
||||
}
|
||||
.toolbar-right > a {
|
||||
margin-left: 0;
|
||||
margin-right: 2px;
|
||||
}
|
||||
.toolbar-small-mixin;
|
||||
}
|
||||
|
||||
&.toolbar-tall {
|
||||
height: 58px;
|
||||
padding-top: 10px;
|
||||
> a, .toolbar-right > a {
|
||||
padding: 4px 10px 5px;
|
||||
}
|
||||
> a.btn, .toolbar-right > a.btn {
|
||||
margin: 0 (@line-height-computed / 2);
|
||||
}
|
||||
.btn-group {
|
||||
margin: 0 (@line-height-computed / 2);
|
||||
> .btn-group {
|
||||
margin: 0;
|
||||
}
|
||||
}
|
||||
.toolbar-small-mixin;
|
||||
}
|
||||
|
||||
&.toolbar-alt {
|
||||
background-color: #fafafa;
|
||||
.toolbar-alt-mixin;
|
||||
}
|
||||
}
|
||||
|
||||
.toolbar-small-mixin() {
|
||||
height: 32px;
|
||||
}
|
||||
|
||||
.toolbar-tall-mixin() {
|
||||
height: 58px;
|
||||
padding-top: 10px;
|
||||
}
|
||||
.toolbar-alt-mixin() {
|
||||
background-color: @toolbar-alt-bg-color;
|
||||
}
|
||||
|
||||
.toolbar-label {
|
||||
display: none;
|
||||
margin: 0 4px;
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
.v1-import-title {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.v1-import-img {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.v1-import-warning {
|
||||
text-align: center;
|
||||
color: #fdce02;
|
||||
font-size: 14em;
|
||||
line-height: 1em;
|
||||
}
|
||||
|
||||
.v1-import-footer {
|
||||
display: flex;
|
||||
justify-content: space-evenly;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.v1-import-btn {
|
||||
width: 20rem;
|
||||
}
|
|
@ -1,3 +1,5 @@
|
|||
@import "./list/v1-import-modal.less";
|
||||
|
||||
@announcements-shadow: 0 2px 20px rgba(0, 0, 0, 0.5);
|
||||
|
||||
@keyframes pulse {
|
||||
|
@ -332,6 +334,10 @@ ul.project-list {
|
|||
.projectName {
|
||||
margin-right: @line-height-computed / 4;
|
||||
}
|
||||
.v1ProjectName {
|
||||
margin-right: @line-height-computed / 4;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.tag-label {
|
||||
margin-left: @line-height-computed / 4;
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
padding: (@line-height-computed / 4) (@line-height-computed / 2);
|
||||
background-color: @state-warning-bg;
|
||||
color: #333;
|
||||
border-bottom: 1px solid @toolbar-border-color;
|
||||
border-bottom: 1px solid @common-border-color;
|
||||
text-align:center;
|
||||
|
||||
img {
|
||||
|
|
|
@ -794,7 +794,9 @@
|
|||
@left-menu-animation-duration: 0.35s;
|
||||
|
||||
@toolbar-border-color: @gray-lighter;
|
||||
@file-tree-droppable-background-color: rgb(252, 231, 199);
|
||||
@common-border-color: @gray-lighter;
|
||||
@editor-border-color: @gray-lighter;
|
||||
@file-tree-droppable-bg-color: rgb(252, 231, 199);
|
||||
|
||||
@editor-dark-background-color: #333;
|
||||
@editor-dark-toolbar-border-color: #222;
|
||||
|
@ -876,16 +878,45 @@
|
|||
@folders-untagged-line-height : 1.7;
|
||||
|
||||
// Progress bars
|
||||
@progress-border-radius : @border-radius-base;
|
||||
@progress-border-width : 1px;
|
||||
@progress-bar-shadow : inset 0 -1px 0 rgba(0,0,0,.15);
|
||||
@progress-border-radius : @border-radius-base;
|
||||
@progress-border-width : 1px;
|
||||
@progress-bar-shadow : inset 0 -1px 0 rgba(0,0,0,.15);
|
||||
|
||||
// Footer
|
||||
@footer-link-color : @link-color;
|
||||
@footer-link-hover-color : @link-hover-color;
|
||||
@footer-bg-color : transparent;
|
||||
@footer-padding : 2em;
|
||||
@footer-link-color : @link-color;
|
||||
@footer-link-hover-color : @link-hover-color;
|
||||
@footer-bg-color : transparent;
|
||||
@footer-padding : 2em;
|
||||
|
||||
// Editor header
|
||||
@toolbar-header-bg-color : transparent;
|
||||
@toolbar-header-shadow : 0 0 2px #ccc;
|
||||
@toolbar-btn-color : @link-color;
|
||||
@toolbar-btn-hover-color : @link-hover-color;
|
||||
@toolbar-btn-hover-bg-color : darken(white, 10%);
|
||||
@toolbar-btn-hover-text-shadow : 0 1px 0 rgba(0, 0, 0, 0.15);
|
||||
@toolbar-btn-active-color : white;
|
||||
@toolbar-btn-active-bg-color : @link-color;
|
||||
@toolbar-btn-active-shadow : inset 0 3px 5px rgba(0, 0, 0, 0.225);
|
||||
@toolbar-alt-bg-color : #fafafa;
|
||||
@toolbar-icon-btn-color : @gray-light;
|
||||
@toolbar-icon-btn-hover-color : @gray-dark;
|
||||
@toolbar-icon-btn-hover-shadow : 0 1px 0 rgba(0, 0, 0, 0.25);
|
||||
@toolbar-icon-btn-hover-boxshadow : inset 0 3px 5px rgba(0, 0, 0, 0.225);
|
||||
@toolbar-border-bottom : 1px solid @toolbar-border-color;
|
||||
|
||||
// Editor file-tree
|
||||
@file-tree-bg : transparent;
|
||||
@file-tree-line-height : 2.6;
|
||||
@file-tree-item-color : @gray-darker;
|
||||
@file-tree-item-toggle-color : @gray;
|
||||
@file-tree-item-icon-color : @gray-light;
|
||||
@file-tree-item-input-color : inherit;
|
||||
@file-tree-item-folder-color : lighten(desaturate(@link-color, 10%), 5%);
|
||||
@file-tree-item-hover-bg : @gray-lightest;
|
||||
@file-tree-item-selected-bg : transparent;
|
||||
@file-tree-multiselect-bg : lighten(@brand-info, 40%);
|
||||
@file-tree-multiselect-hover-bg : lighten(@brand-info, 30%);
|
||||
// Tags
|
||||
@tag-border-radius : 0.25em;
|
||||
@tag-bg-color : @label-default-bg;
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
@ol-blue-gray-1 : #E4E8EE;
|
||||
@ol-blue-gray-2 : #9DA7B7;
|
||||
@ol-blue-gray-3 : #5D6879;
|
||||
@ol-blue-gray-4 : #485973;
|
||||
@ol-blue-gray-4 : #455265;
|
||||
@ol-blue-gray-5 : #2C3645;
|
||||
@ol-blue-gray-6 : #1E2530;
|
||||
|
||||
|
@ -156,6 +156,36 @@
|
|||
@footer-link-hover-color : @ol-dark-green;
|
||||
@footer-padding : 2em 0;
|
||||
|
||||
// Editor header
|
||||
@toolbar-header-bg-color : @ol-blue-gray-6;
|
||||
@toolbar-header-shadow : none;
|
||||
@toolbar-btn-color : #FFF;
|
||||
@toolbar-btn-hover-color : #FFF;
|
||||
@toolbar-btn-hover-bg-color : @ol-blue-gray-5;
|
||||
@toolbar-btn-hover-text-shadow : none;
|
||||
@toolbar-btn-active-color : #FFF;
|
||||
@toolbar-btn-active-bg-color : @ol-green;
|
||||
@toolbar-btn-active-shadow : none;
|
||||
@toolbar-border-color : @ol-blue-gray-5;
|
||||
@toolbar-alt-bg-color : @ol-blue-gray-5;
|
||||
@toolbar-icon-btn-color : #FFF;
|
||||
@toolbar-icon-btn-hover-color : #FFF;
|
||||
@toolbar-icon-btn-hover-shadow : none;
|
||||
@toolbar-icon-btn-hover-boxshadow : none;
|
||||
@toolbar-border-bottom : 1px solid @toolbar-border-color;
|
||||
// Editor file-tree
|
||||
@file-tree-bg : @ol-blue-gray-4;
|
||||
@file-tree-line-height : 2.05;
|
||||
@file-tree-item-color : #FFF;
|
||||
@file-tree-item-input-color : @ol-blue-gray-5;
|
||||
@file-tree-item-toggle-color : @ol-blue-gray-2;
|
||||
@file-tree-item-icon-color : @ol-blue-gray-2;
|
||||
@file-tree-item-folder-color : @ol-blue-gray-2;
|
||||
@file-tree-item-hover-bg : @ol-blue-gray-5;
|
||||
@file-tree-item-selected-bg : @ol-green;
|
||||
@file-tree-multiselect-bg : @ol-blue;
|
||||
@file-tree-multiselect-hover-bg : @ol-dark-blue;
|
||||
@file-tree-droppable-bg-color : tint(@ol-green, 5%);
|
||||
//== Colors
|
||||
//
|
||||
//## Gray and brand colors for use across Bootstrap.
|
||||
|
|
307
services/web/test/acceptance/coffee/ProjectStructureTests.coffee
Normal file
|
@ -0,0 +1,307 @@
|
|||
async = require "async"
|
||||
expect = require("chai").expect
|
||||
mkdirp = require "mkdirp"
|
||||
ObjectId = require("mongojs").ObjectId
|
||||
Path = require "path"
|
||||
fs = require "fs"
|
||||
Settings = require "settings-sharelatex"
|
||||
_ = require "underscore"
|
||||
|
||||
ProjectGetter = require "../../../app/js/Features/Project/ProjectGetter.js"
|
||||
|
||||
MockDocUpdaterApi = require './helpers/MockDocUpdaterApi'
|
||||
MockFileStoreApi = require './helpers/MockFileStoreApi'
|
||||
MockProjectHistoryApi = require './helpers/MockProjectHistoryApi'
|
||||
request = require "./helpers/request"
|
||||
User = require "./helpers/User"
|
||||
|
||||
describe "ProjectStructureChanges", ->
|
||||
before (done) ->
|
||||
@owner = new User()
|
||||
@owner.login done
|
||||
|
||||
describe "creating a project from the example template", ->
|
||||
before (done) ->
|
||||
MockDocUpdaterApi.clearProjectStructureUpdates()
|
||||
@owner.createProject "example-project", {template: "example"}, (error, project_id) =>
|
||||
throw error if error?
|
||||
@example_project_id = project_id
|
||||
done()
|
||||
|
||||
it "should version creating a doc", ->
|
||||
updates = MockDocUpdaterApi.getProjectStructureUpdates(@example_project_id).docUpdates
|
||||
expect(updates.length).to.equal(2)
|
||||
_.each updates, (update) =>
|
||||
expect(update.userId).to.equal(@owner._id)
|
||||
expect(update.docLines).to.be.a('string')
|
||||
expect(_.where(updates, pathname: "/main.tex").length).to.equal 1
|
||||
expect(_.where(updates, pathname: "/references.bib").length).to.equal 1
|
||||
|
||||
it "should version creating a file", ->
|
||||
updates = MockDocUpdaterApi.getProjectStructureUpdates(@example_project_id).fileUpdates
|
||||
expect(updates.length).to.equal(1)
|
||||
update = updates[0]
|
||||
expect(update.userId).to.equal(@owner._id)
|
||||
expect(update.pathname).to.equal("/universe.jpg")
|
||||
expect(update.url).to.be.a('string');
|
||||
|
||||
describe "duplicating a project", ->
|
||||
before (done) ->
|
||||
MockDocUpdaterApi.clearProjectStructureUpdates()
|
||||
@owner.request.post {
|
||||
uri: "/Project/#{@example_project_id}/clone",
|
||||
json:
|
||||
projectName: 'new.tex'
|
||||
}, (error, res, body) =>
|
||||
throw error if error?
|
||||
if res.statusCode < 200 || res.statusCode >= 300
|
||||
throw new Error("failed to add doc #{res.statusCode}")
|
||||
@dup_project_id = body.project_id
|
||||
done()
|
||||
|
||||
it "should version the dosc created", ->
|
||||
updates = MockDocUpdaterApi.getProjectStructureUpdates(@dup_project_id).docUpdates
|
||||
expect(updates.length).to.equal(2)
|
||||
_.each updates, (update) =>
|
||||
expect(update.userId).to.equal(@owner._id)
|
||||
expect(update.docLines).to.be.a('string')
|
||||
expect(_.where(updates, pathname: "/main.tex").length).to.equal(1)
|
||||
expect(_.where(updates, pathname: "/references.bib").length).to.equal(1)
|
||||
|
||||
it "should version the files created", ->
|
||||
updates = MockDocUpdaterApi.getProjectStructureUpdates(@dup_project_id).fileUpdates
|
||||
expect(updates.length).to.equal(1)
|
||||
update = updates[0]
|
||||
expect(update.userId).to.equal(@owner._id)
|
||||
expect(update.pathname).to.equal("/universe.jpg")
|
||||
expect(update.url).to.be.a('string');
|
||||
|
||||
describe "adding a doc", ->
|
||||
before (done) ->
|
||||
MockDocUpdaterApi.clearProjectStructureUpdates()
|
||||
|
||||
ProjectGetter.getProject @example_project_id, (error, projects) =>
|
||||
throw error if error?
|
||||
@owner.request.post {
|
||||
uri: "project/#{@example_project_id}/doc",
|
||||
json:
|
||||
name: 'new.tex'
|
||||
parent_folder_id: projects[0].rootFolder[0]._id
|
||||
}, (error, res, body) =>
|
||||
throw error if error?
|
||||
if res.statusCode < 200 || res.statusCode >= 300
|
||||
throw new Error("failed to add doc #{res.statusCode}")
|
||||
done()
|
||||
|
||||
it "should version the doc added", ->
|
||||
updates = MockDocUpdaterApi.getProjectStructureUpdates(@example_project_id).docUpdates
|
||||
expect(updates.length).to.equal(1)
|
||||
update = updates[0]
|
||||
expect(update.userId).to.equal(@owner._id)
|
||||
expect(update.pathname).to.equal("/new.tex")
|
||||
expect(update.docLines).to.be.a('string');
|
||||
|
||||
describe "uploading a project", ->
|
||||
before (done) ->
|
||||
MockDocUpdaterApi.clearProjectStructureUpdates()
|
||||
|
||||
zip_file = fs.createReadStream(Path.resolve(__dirname + '/../files/test_project.zip'))
|
||||
|
||||
req = @owner.request.post {
|
||||
uri: "project/new/upload",
|
||||
formData:
|
||||
qqfile: zip_file
|
||||
}, (error, res, body) =>
|
||||
throw error if error?
|
||||
if res.statusCode < 200 || res.statusCode >= 300
|
||||
throw new Error("failed to upload project #{res.statusCode}")
|
||||
@uploaded_project_id = JSON.parse(body).project_id
|
||||
done()
|
||||
|
||||
it "should version the dosc created", ->
|
||||
updates = MockDocUpdaterApi.getProjectStructureUpdates(@uploaded_project_id).docUpdates
|
||||
expect(updates.length).to.equal(1)
|
||||
update = updates[0]
|
||||
expect(update.userId).to.equal(@owner._id)
|
||||
expect(update.pathname).to.equal("/main.tex")
|
||||
expect(update.docLines).to.equal("Test")
|
||||
|
||||
it "should version the files created", ->
|
||||
updates = MockDocUpdaterApi.getProjectStructureUpdates(@uploaded_project_id).fileUpdates
|
||||
expect(updates.length).to.equal(1)
|
||||
update = updates[0]
|
||||
expect(update.userId).to.equal(@owner._id)
|
||||
expect(update.pathname).to.equal("/1pixel.png")
|
||||
expect(update.url).to.be.a('string');
|
||||
|
||||
describe "uploading a file", ->
|
||||
before (done) ->
|
||||
ProjectGetter.getProject @example_project_id, (error, projects) =>
|
||||
throw error if error?
|
||||
@root_folder_id = projects[0].rootFolder[0]._id.toString()
|
||||
done()
|
||||
|
||||
beforeEach () ->
|
||||
MockDocUpdaterApi.clearProjectStructureUpdates()
|
||||
|
||||
it "should version a newly uploaded file", (done) ->
|
||||
image_file = fs.createReadStream(Path.resolve(__dirname + '/../files/1pixel.png'))
|
||||
|
||||
req = @owner.request.post {
|
||||
uri: "project/#{@example_project_id}/upload",
|
||||
qs:
|
||||
folder_id: @root_folder_id
|
||||
formData:
|
||||
qqfile:
|
||||
value: image_file
|
||||
options:
|
||||
filename: '1pixel.png',
|
||||
contentType: 'image/png'
|
||||
}, (error, res, body) =>
|
||||
throw error if error?
|
||||
if res.statusCode < 200 || res.statusCode >= 300
|
||||
throw new Error("failed to upload file #{res.statusCode}")
|
||||
|
||||
updates = MockDocUpdaterApi.getProjectStructureUpdates(@example_project_id).fileUpdates
|
||||
expect(updates.length).to.equal(1)
|
||||
update = updates[0]
|
||||
expect(update.userId).to.equal(@owner._id)
|
||||
expect(update.pathname).to.equal("/1pixel.png")
|
||||
expect(update.url).to.be.a('string');
|
||||
@original_file_url = update.url
|
||||
|
||||
done()
|
||||
|
||||
it "should version a replacement file", (done) ->
|
||||
image_file = fs.createReadStream(Path.resolve(__dirname + '/../files/2pixel.png'))
|
||||
|
||||
req = @owner.request.post {
|
||||
uri: "project/#{@example_project_id}/upload",
|
||||
qs:
|
||||
folder_id: @root_folder_id
|
||||
formData:
|
||||
qqfile:
|
||||
value: image_file
|
||||
options:
|
||||
filename: '1pixel.png',
|
||||
contentType: 'image/png'
|
||||
}, (error, res, body) =>
|
||||
throw error if error?
|
||||
if res.statusCode < 200 || res.statusCode >= 300
|
||||
throw new Error("failed to upload file #{res.statusCode}")
|
||||
|
||||
updates = MockDocUpdaterApi.getProjectStructureUpdates(@example_project_id).fileUpdates
|
||||
expect(updates.length).to.equal(1)
|
||||
update = updates[0]
|
||||
expect(update.userId).to.equal(@owner._id)
|
||||
expect(update.pathname).to.equal("/1pixel.png")
|
||||
expect(update.url).to.be.a('string');
|
||||
|
||||
done()
|
||||
|
||||
describe "tpds", ->
|
||||
before (done) ->
|
||||
@tpds_project_name = "tpds-project-#{new ObjectId().toString()}"
|
||||
@owner.createProject @tpds_project_name, (error, project_id) =>
|
||||
throw error if error?
|
||||
@tpds_project_id = project_id
|
||||
mkdirp Settings.path.dumpFolder, done
|
||||
|
||||
beforeEach () ->
|
||||
MockDocUpdaterApi.clearProjectStructureUpdates()
|
||||
|
||||
it "should version adding a doc", (done) ->
|
||||
tex_file = fs.createReadStream(Path.resolve(__dirname + '/../files/test.tex'))
|
||||
|
||||
req = @owner.request.post {
|
||||
uri: "/user/#{@owner._id}/update/#{@tpds_project_name}/test.tex",
|
||||
auth:
|
||||
user: _.keys(Settings.httpAuthUsers)[0]
|
||||
pass: _.values(Settings.httpAuthUsers)[0]
|
||||
sendImmediately: true
|
||||
}
|
||||
|
||||
tex_file.on "error", (err) ->
|
||||
throw err
|
||||
|
||||
req.on "error", (err) ->
|
||||
throw err
|
||||
|
||||
req.on "response", (res) =>
|
||||
if res.statusCode < 200 || res.statusCode >= 300
|
||||
throw new Error("failed to upload file #{res.statusCode}")
|
||||
|
||||
updates = MockDocUpdaterApi.getProjectStructureUpdates(@tpds_project_id).docUpdates
|
||||
expect(updates.length).to.equal(1)
|
||||
update = updates[0]
|
||||
expect(update.userId).to.equal(@owner._id)
|
||||
expect(update.pathname).to.equal("/test.tex")
|
||||
expect(update.docLines).to.equal("Test")
|
||||
|
||||
done()
|
||||
|
||||
tex_file.pipe(req)
|
||||
|
||||
it "should version adding a new file", (done) ->
|
||||
image_file = fs.createReadStream(Path.resolve(__dirname + '/../files/1pixel.png'))
|
||||
|
||||
req = @owner.request.post {
|
||||
uri: "/user/#{@owner._id}/update/#{@tpds_project_name}/1pixel.png",
|
||||
auth:
|
||||
user: _.keys(Settings.httpAuthUsers)[0]
|
||||
pass: _.values(Settings.httpAuthUsers)[0]
|
||||
sendImmediately: true
|
||||
}
|
||||
|
||||
image_file.on "error", (err) ->
|
||||
throw err
|
||||
|
||||
req.on "error", (err) ->
|
||||
throw err
|
||||
|
||||
req.on "response", (res) =>
|
||||
if res.statusCode < 200 || res.statusCode >= 300
|
||||
throw new Error("failed to upload file #{res.statusCode}")
|
||||
|
||||
updates = MockDocUpdaterApi.getProjectStructureUpdates(@tpds_project_id).fileUpdates
|
||||
expect(updates.length).to.equal(1)
|
||||
update = updates[0]
|
||||
expect(update.userId).to.equal(@owner._id)
|
||||
expect(update.pathname).to.equal("/1pixel.png")
|
||||
expect(update.url).to.be.a('string');
|
||||
|
||||
done()
|
||||
|
||||
image_file.pipe(req)
|
||||
|
||||
it "should version replacing a file", (done) ->
|
||||
image_file = fs.createReadStream(Path.resolve(__dirname + '/../files/2pixel.png'))
|
||||
|
||||
req = @owner.request.post {
|
||||
uri: "/user/#{@owner._id}/update/#{@tpds_project_name}/1pixel.png",
|
||||
auth:
|
||||
user: _.keys(Settings.httpAuthUsers)[0]
|
||||
pass: _.values(Settings.httpAuthUsers)[0]
|
||||
sendImmediately: true
|
||||
}
|
||||
|
||||
image_file.on "error", (err) ->
|
||||
throw err
|
||||
|
||||
req.on "error", (err) ->
|
||||
throw err
|
||||
|
||||
req.on "response", (res) =>
|
||||
if res.statusCode < 200 || res.statusCode >= 300
|
||||
throw new Error("failed to upload file #{res.statusCode}")
|
||||
|
||||
updates = MockDocUpdaterApi.getProjectStructureUpdates(@tpds_project_id).fileUpdates
|
||||
expect(updates.length).to.equal(1)
|
||||
update = updates[0]
|
||||
expect(update.userId).to.equal(@owner._id)
|
||||
expect(update.pathname).to.equal("/1pixel.png")
|
||||
expect(update.url).to.be.a('string');
|
||||
|
||||
done()
|
||||
|
||||
image_file.pipe(req)
|
|
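Once the app service from docker-compose.yml is up, this new suite can be exercised on its own; a hedged example, assuming argument forwarding through the npm scripts behaves as the "$@" placeholders intend:

  npm -q run test:acceptance -- --grep "ProjectStructureChanges"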
@ -1,11 +1,38 @@
|
|||
express = require("express")
|
||||
app = express()
|
||||
bodyParser = require "body-parser"
|
||||
jsonParser = bodyParser.json()
|
||||
|
||||
module.exports = MockDocUpdaterApi =
|
||||
updates: {}
|
||||
|
||||
clearProjectStructureUpdates: () ->
|
||||
@updates = {}
|
||||
|
||||
getProjectStructureUpdates: (project_id) ->
|
||||
@updates[project_id] || { docUpdates: [], fileUpdates: [] }
|
||||
|
||||
addProjectStructureUpdates: (project_id, userId, docUpdates, fileUpdates) ->
|
||||
@updates[project_id] ||= { docUpdates: [], fileUpdates: [] }
|
||||
|
||||
for update in docUpdates
|
||||
update.userId = userId
|
||||
@updates[project_id].docUpdates.push(update)
|
||||
|
||||
for update in fileUpdates
|
||||
update.userId = userId
|
||||
@updates[project_id].fileUpdates.push(update)
|
||||
|
||||
run: () ->
|
||||
app.post "/project/:project_id/flush", (req, res, next) =>
|
||||
res.sendStatus 200
|
||||
|
||||
app.post "/project/:project_id", jsonParser, (req, res, next) =>
|
||||
project_id = req.params.project_id
|
||||
{userId, docUpdates, fileUpdates} = req.body
|
||||
@addProjectStructureUpdates(project_id, userId, docUpdates, fileUpdates)
|
||||
res.sendStatus 200
|
||||
|
||||
app.listen 3003, (error) ->
|
||||
throw error if error?
|
||||
.on "error", (error) ->
|
||||
|
|
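The mock simply records whatever the web service posts to it; a hedged example of the request shape it accepts on port 3003, where the project id and payload values are purely illustrative:

  curl -X POST http://localhost:3003/project/507f1f77bcf86cd799439011 \
    -H 'Content-Type: application/json' \
    -d '{"userId": "1234", "docUpdates": [{"pathname": "/new.tex", "docLines": "Test"}], "fileUpdates": []}'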
|
@ -13,6 +13,7 @@ module.exports = MockDocStoreApi =
|
|||
@docs[project_id][doc_id] = {lines, version, ranges}
|
||||
@docs[project_id][doc_id].rev ?= 0
|
||||
@docs[project_id][doc_id].rev += 1
|
||||
@docs[project_id][doc_id]._id = doc_id
|
||||
res.json {
|
||||
modified: true
|
||||
rev: @docs[project_id][doc_id].rev
|
||||
|
|
|
@ -0,0 +1,20 @@
|
|||
express = require("express")
|
||||
app = express()
|
||||
|
||||
module.exports = MockFileStoreApi =
|
||||
files: {}
|
||||
|
||||
run: () ->
|
||||
app.post "/project/:project_id/file/:file_id", (req, res, next) =>
|
||||
req.on 'data', ->
|
||||
|
||||
req.on 'end', ->
|
||||
res.send 200
|
||||
|
||||
app.listen 3009, (error) ->
|
||||
throw error if error?
|
||||
.on "error", (error) ->
|
||||
console.error "error starting MockFileStoreApi:", error.message
|
||||
process.exit(1)
|
||||
|
||||
MockFileStoreApi.run()
|
|
@ -0,0 +1,18 @@
|
|||
express = require("express")
|
||||
app = express()
|
||||
|
||||
module.exports = MockProjectHistoryApi =
|
||||
docs: {}
|
||||
|
||||
run: () ->
|
||||
app.post "/project", (req, res, next) =>
|
||||
res.json project: id: 1
|
||||
|
||||
app.listen 3054, (error) ->
|
||||
throw error if error?
|
||||
.on "error", (error) ->
|
||||
console.error "error starting MockProjectHistoryApi:", error.message
|
||||
process.exit(1)
|
||||
|
||||
|
||||
MockProjectHistoryApi.run()
|
|
@ -1,6 +1,9 @@
|
|||
request = require("./request")
|
||||
_ = require("underscore")
|
||||
settings = require("settings-sharelatex")
|
||||
{db, ObjectId} = require("../../../../app/js/infrastructure/mongojs")
|
||||
UserModel = require("../../../../app/js/models/User").User
|
||||
AuthenticationManager = require("../../../../app/js/Features/Authentication/AuthenticationManager")
|
||||
|
||||
count = 0
|
||||
|
||||
|
@ -17,20 +20,20 @@ class User
|
|||
login: (callback = (error) ->) ->
|
||||
@getCsrfToken (error) =>
|
||||
return callback(error) if error?
|
||||
@request.post {
|
||||
url: "/register" # Register will log in, but also ensure user exists
|
||||
json:
|
||||
email: @email
|
||||
password: @password
|
||||
}, (error, response, body) =>
|
||||
filter = {@email}
|
||||
options = {upsert: true, new: true, setDefaultsOnInsert: true}
|
||||
UserModel.findOneAndUpdate filter, {}, options, (error, user) =>
|
||||
return callback(error) if error?
|
||||
db.users.findOne {email: @email}, (error, user) =>
|
||||
AuthenticationManager.setUserPassword user._id, @password, (error) =>
|
||||
return callback(error) if error?
|
||||
@id = user?._id?.toString()
|
||||
@_id = user?._id?.toString()
|
||||
@first_name = user?.first_name
|
||||
@referal_id = user?.referal_id
|
||||
callback()
|
||||
@request.post {
|
||||
url: "/login"
|
||||
json: { @email, @password }
|
||||
}, callback
|
||||
|
||||
logout: (callback = (error) ->) ->
|
||||
@getCsrfToken (error) =>
|
||||
|
@ -96,16 +99,21 @@ class User
|
|||
getProject: (project_id, callback = (error, project)->) ->
|
||||
db.projects.findOne {_id: ObjectId(project_id.toString())}, callback
|
||||
|
||||
createProject: (name, callback = (error, project_id) ->) ->
|
||||
createProject: (name, options, callback = (error, project_id) ->) ->
|
||||
if typeof options == "function"
|
||||
callback = options
|
||||
options = {}
|
||||
|
||||
@request.post {
|
||||
url: "/project/new",
|
||||
json:
|
||||
projectName: name
|
||||
json: Object.assign({projectName: name}, options)
|
||||
}, (error, response, body) ->
|
||||
return callback(error) if error?
|
||||
if !body?.project_id?
|
||||
console.error "SOMETHING WENT WRONG CREATING PROJECT", response.statusCode, response.headers["location"], body
|
||||
callback(null, body.project_id)
|
||||
error = new Error("SOMETHING WENT WRONG CREATING PROJECT", response.statusCode, response.headers["location"], body)
|
||||
callback error
|
||||
else
|
||||
callback(null, body.project_id)
|
||||
|
||||
deleteProject: (project_id, callback=(error)->) ->
|
||||
@request.delete {
|
||||
|
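A hedged sketch of the new helper signature: createProject now accepts an optional options object that is merged into the /project/new body, and the old two-argument form still works because options defaults to an empty object. The `template` key and the no-argument User construction are purely illustrative; this diff does not confirm either.

    User = require "./User"   # the acceptance-test helper above

    user = new User()
    user.login (error) ->
      throw error if error?
      user.createProject "structure-test", {template: "example"}, (error, project_id) ->
        throw error if error?
        console.log "created project", project_id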
@ -161,13 +169,10 @@ class User
|
|||
|
||||
getCsrfToken: (callback = (error) ->) ->
|
||||
@request.get {
|
||||
url: "/register"
|
||||
url: "/dev/csrf"
|
||||
}, (err, response, body) =>
|
||||
return callback(err) if err?
|
||||
csrfMatches = body.match("window.csrfToken = \"(.*?)\";")
|
||||
if !csrfMatches?
|
||||
return callback(new Error("no csrf token found"))
|
||||
@csrfToken = csrfMatches[1]
|
||||
@csrfToken = body
|
||||
@request = @request.defaults({
|
||||
headers:
|
||||
"x-csrf-token": @csrfToken
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
Settings = require('settings-sharelatex')
|
||||
redis = require('redis-sharelatex')
|
||||
logger = require("logger-sharelatex")
|
||||
Async = require('async')
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
BASE_URL = "http://localhost:3000"
|
||||
BASE_URL = "http://#{process.env["HTTP_TEST_HOST"] or "localhost"}:3000"
|
||||
module.exports = require("request").defaults({
|
||||
baseUrl: BASE_URL,
|
||||
followRedirect: false
|
||||
|
|
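With the BASE_URL change above, the acceptance-test request helper can be pointed at another host by setting HTTP_TEST_HOST before the tests load it. The host name "web" and the /status path below are only examples (e.g. a docker-compose service name and an assumed health-check endpoint), not values confirmed by this diff.

    process.env["HTTP_TEST_HOST"] = "web"
    request = require "./request"   # baseUrl now resolves to http://web:3000

    request.get "/status", (error, response, body) ->
      console.log "status check:", response?.statusCode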
BIN
services/web/test/acceptance/files/1pixel.png
Normal file
Size: 3.6 KiB
BIN
services/web/test/acceptance/files/2pixel.png
Normal file
Size: 3.6 KiB
1
services/web/test/acceptance/files/test.tex
Normal file
|
@ -0,0 +1 @@
|
|||
Test
|
BIN
services/web/test/acceptance/files/test_project.zip
Normal file
|
@ -3,7 +3,7 @@
|
|||
# If you're running on OS X, you probably need to rebuild
|
||||
# some dependencies in the docker container, before it will start.
|
||||
#
|
||||
# npm rebuild --update-binary
|
||||
#npm rebuild --update-binary
|
||||
|
||||
echo ">> Starting server..."
|
||||
|
||||
|
|
|
@ -390,26 +390,13 @@ describe 'DocumentUpdaterHandler', ->
|
|||
describe "updateProjectStructure ", ->
|
||||
beforeEach ->
|
||||
@user_id = 1234
|
||||
@docIdA = new ObjectId()
|
||||
@docIdB = new ObjectId()
|
||||
@oldDocs = [
|
||||
{ path: '/old_a', doc: _id: @docIdA }
|
||||
{ path: '/old_b', doc: _id: @docIdB }
|
||||
]
|
||||
# create new instances of the same ObjectIds so that == doesn't pass
|
||||
@newDocs = [
|
||||
{ path: '/old_a', doc: _id: new ObjectId(@docIdA.toString()) }
|
||||
{ path: '/new_b', doc: _id: new ObjectId(@docIdB.toString()) }
|
||||
]
|
||||
@oldFiles = []
|
||||
@newFiles = []
|
||||
|
||||
describe "with project history disabled", ->
|
||||
beforeEach ->
|
||||
@settings.apis.project_history.enabled = false
|
||||
@request.post = sinon.stub()
|
||||
|
||||
@handler.updateProjectStructure @project_id, @user_id, @oldDocs, @newDocs, @oldFiles, @newFiles, @callback
|
||||
@handler.updateProjectStructure @project_id, @user_id, {}, @callback
|
||||
|
||||
it 'does not make a web request', ->
|
||||
@request.post.called.should.equal false
|
||||
|
@ -420,20 +407,85 @@ describe 'DocumentUpdaterHandler', ->
|
|||
describe "with project history enabled", ->
|
||||
beforeEach ->
|
||||
@settings.apis.project_history.enabled = true
|
||||
@url = "#{@settings.apis.documentupdater.url}/project/#{@project_id}"
|
||||
@request.post = sinon.stub().callsArgWith(1, null, {statusCode: 204}, "")
|
||||
@handler.updateProjectStructure @project_id, @user_id, @oldDocs, @newDocs, @oldFiles, @newFiles, @callback
|
||||
|
||||
it 'should send the structure update to the document updater', ->
|
||||
docUpdates = [
|
||||
id: @docIdB,
|
||||
pathname: "/old_b"
|
||||
newPathname: "/new_b"
|
||||
]
|
||||
describe "when an entity has changed name", ->
|
||||
it 'should send the structure update to the document updater', (done) ->
|
||||
@docIdA = new ObjectId()
|
||||
@docIdB = new ObjectId()
|
||||
@changes = {
|
||||
oldDocs: [
|
||||
{ path: '/old_a', doc: _id: @docIdA }
|
||||
{ path: '/old_b', doc: _id: @docIdB }
|
||||
]
|
||||
# create new instances of the same ObjectIds so that == doesn't pass
|
||||
newDocs: [
|
||||
{ path: '/old_a', doc: _id: new ObjectId(@docIdA.toString()) }
|
||||
{ path: '/new_b', doc: _id: new ObjectId(@docIdB.toString()) }
|
||||
]
|
||||
}
|
||||
|
||||
url = "#{@settings.apis.documentupdater.url}/project/#{@project_id}"
|
||||
@request.post
|
||||
.calledWith(url: url, json: {docUpdates, fileUpdates: [], userId: @user_id})
|
||||
.should.equal true
|
||||
docUpdates = [
|
||||
id: @docIdB.toString(),
|
||||
pathname: "/old_b"
|
||||
newPathname: "/new_b"
|
||||
]
|
||||
|
||||
@handler.updateProjectStructure @project_id, @user_id, @changes, () =>
|
||||
@request.post
|
||||
.calledWith(url: @url, json: {docUpdates, fileUpdates: [], userId: @user_id})
|
||||
.should.equal true
|
||||
done()
|
||||
|
||||
describe "when a doc has been added", ->
|
||||
it 'should send the structure update to the document updater', (done) ->
|
||||
@docId = new ObjectId()
|
||||
@changes = newDocs: [
|
||||
{ path: '/foo', docLines: 'a\nb', doc: _id: @docId }
|
||||
]
|
||||
|
||||
docUpdates = [
|
||||
id: @docId.toString(),
|
||||
pathname: "/foo"
|
||||
docLines: 'a\nb'
|
||||
url: undefined
|
||||
]
|
||||
|
||||
@handler.updateProjectStructure @project_id, @user_id, @changes, () =>
|
||||
@request.post
|
||||
.calledWith(url: @url, json: {docUpdates, fileUpdates: [], userId: @user_id})
|
||||
.should.equal true
|
||||
done()
|
||||
|
||||
describe "when a file has been added", ->
|
||||
it 'should send the structure update to the document updater', (done) ->
|
||||
@fileId = new ObjectId()
|
||||
@changes = newFiles: [
|
||||
{ path: '/bar', url: 'filestore.example.com/file', file: _id: @fileId }
|
||||
]
|
||||
|
||||
fileUpdates = [
|
||||
id: @fileId.toString(),
|
||||
pathname: "/bar"
|
||||
url: 'filestore.example.com/file'
|
||||
docLines: undefined
|
||||
]
|
||||
|
||||
@handler.updateProjectStructure @project_id, @user_id, @changes, () =>
|
||||
@request.post
|
||||
.calledWith(url: @url, json: {docUpdates: [], fileUpdates, userId: @user_id})
|
||||
.should.equal true
|
||||
done()
|
||||
|
||||
describe "when a doc has been deleted", ->
|
||||
it 'should do nothing', (done) ->
|
||||
@docId = new ObjectId()
|
||||
@changes = oldDocs: [
|
||||
{ path: '/foo', docLines: 'a\nb', doc: _id: @docId }
|
||||
]
|
||||
|
||||
@handler.updateProjectStructure @project_id, @user_id, @changes, () =>
|
||||
@request.post.called.should.equal false
|
||||
done()
|
||||
|
||||
it "should call the callback with no error", ->
|
||||
@callback.calledWith(null).should.equal true
|
||||
|
|
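These tests pin down the handler's new shape: updateProjectStructure now takes a single changes object ({oldDocs, newDocs, oldFiles, newFiles}, any subset of which may be present) plus the acting user's id, instead of four positional arrays. A hedged sketch of a call, with placeholder ids and an assumed require path:

    DocumentUpdaterHandler = require "../../../../app/js/Features/DocumentUpdater/DocumentUpdaterHandler"

    changes =
      newDocs: [
        { path: "/main.tex", docLines: "\\documentclass{article}", doc: _id: "doc-id-1" }
      ]
      newFiles: [
        { path: "/universe.jpg", url: "filestore.example.com/file", file: _id: "file-id-1" }
      ]

    DocumentUpdaterHandler.updateProjectStructure "project-id-1", "user-id-1", changes, (error) ->
      throw error if error?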
|
@ -131,24 +131,24 @@ describe "EditorController", ->
|
|||
@docLines = ["1234","dskl"]
|
||||
|
||||
it 'should add the doc using the project entity handler', (done)->
|
||||
mock = sinon.mock(@ProjectEntityHandler).expects("addDoc").withArgs(@project_id, @folder_id, @docName, @docLines).callsArg(4)
|
||||
mock = sinon.mock(@ProjectEntityHandler).expects("addDoc").withArgs(@project_id, @folder_id, @docName, @docLines).callsArg(5)
|
||||
|
||||
@EditorController.addDocWithoutLock @project_id, @folder_id, @docName, @docLines, @source, ->
|
||||
@EditorController.addDocWithoutLock @project_id, @folder_id, @docName, @docLines, @source, @user_id, ->
|
||||
mock.verify()
|
||||
done()
|
||||
|
||||
it 'should send the update out to the users in the project', (done)->
|
||||
@ProjectEntityHandler.addDoc = sinon.stub().callsArgWith(4, null, @doc, @folder_id)
|
||||
@ProjectEntityHandler.addDoc = sinon.stub().callsArgWith(5, null, @doc, @folder_id)
|
||||
|
||||
@EditorController.addDocWithoutLock @project_id, @folder_id, @docName, @docLines, @source, =>
|
||||
@EditorController.addDocWithoutLock @project_id, @folder_id, @docName, @docLines, @source, @user_id, =>
|
||||
@EditorRealTimeController.emitToRoom
|
||||
.calledWith(@project_id, "reciveNewDoc", @folder_id, @doc, @source)
|
||||
.should.equal true
|
||||
done()
|
||||
|
||||
it 'should return the doc to the callback', (done) ->
|
||||
@ProjectEntityHandler.addDoc = sinon.stub().callsArgWith(4, null, @doc, @folder_id)
|
||||
@EditorController.addDocWithoutLock @project_id, @folder_id, @docName, @docLines, @source, (error, doc) =>
|
||||
@ProjectEntityHandler.addDoc = sinon.stub().callsArgWith(5, null, @doc, @folder_id)
|
||||
@EditorController.addDocWithoutLock @project_id, @folder_id, @docName, @docLines, @source, @user_id, (error, doc) =>
|
||||
doc.should.equal @doc
|
||||
done()
|
||||
|
||||
|
@ -157,32 +157,29 @@ describe "EditorController", ->
|
|||
beforeEach ->
|
||||
@LockManager.getLock.callsArgWith(1)
|
||||
@LockManager.releaseLock.callsArgWith(1)
|
||||
@EditorController.addDocWithoutLock = sinon.stub().callsArgWith(5)
|
||||
@EditorController.addDocWithoutLock = sinon.stub().callsArgWith(6)
|
||||
|
||||
it "should call addDocWithoutLock", (done)->
|
||||
@EditorController.addDoc @project_id, @folder_id, @docName, @docLines, @source, =>
|
||||
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @docName, @docLines, @source).should.equal true
|
||||
@EditorController.addDoc @project_id, @folder_id, @docName, @docLines, @source, @user_id, =>
|
||||
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @docName, @docLines, @source, @user_id).should.equal true
|
||||
done()
|
||||
|
||||
it "should take the lock", (done)->
|
||||
@EditorController.addDoc @project_id, @folder_id, @docName, @docLines, @source, =>
|
||||
@EditorController.addDoc @project_id, @folder_id, @docName, @docLines, @source, @user_id, =>
|
||||
@LockManager.getLock.calledWith(@project_id).should.equal true
|
||||
done()
|
||||
|
||||
it "should release the lock", (done)->
|
||||
@EditorController.addDoc @project_id, @folder_id, @docName, @docLines, @source, =>
|
||||
@EditorController.addDoc @project_id, @folder_id, @docName, @docLines, @source, @user_id, =>
|
||||
@LockManager.releaseLock.calledWith(@project_id).should.equal true
|
||||
done()
|
||||
|
||||
it "should error if it can't cat the lock", (done)->
|
||||
@LockManager.getLock = sinon.stub().callsArgWith(1, "timed out")
|
||||
@EditorController.addDoc @project_id, @folder_id, @docName, @docLines, @source, (err)=>
|
||||
@EditorController.addDoc @project_id, @folder_id, @docName, @docLines, @source, @user_id, (err)=>
|
||||
expect(err).to.exist
|
||||
err.should.equal "timed out"
|
||||
done()
|
||||
|
||||
|
||||
|
||||
done()
|
||||
|
||||
describe 'addFileWithoutLock:', ->
|
||||
beforeEach ->
|
||||
|
@ -196,23 +193,23 @@ describe "EditorController", ->
|
|||
@stream = new ArrayBuffer()
|
||||
|
||||
it 'should add the folder using the project entity handler', (done)->
|
||||
@ProjectEntityHandler.addFile = sinon.stub().callsArgWith(4)
|
||||
@EditorController.addFileWithoutLock @project_id, @folder_id, @fileName, @stream, @source, =>
|
||||
@ProjectEntityHandler.addFile.calledWith(@project_id, @folder_id).should.equal true
|
||||
@ProjectEntityHandler.addFile = sinon.stub().callsArgWith(5)
|
||||
@EditorController.addFileWithoutLock @project_id, @folder_id, @fileName, @stream, @source, @user_id, =>
|
||||
@ProjectEntityHandler.addFile.calledWith(@project_id, @folder_id, @fileName, @stream, @user_id).should.equal true
|
||||
done()
|
||||
|
||||
it 'should send the update of a new folder out to the users in the project', (done)->
|
||||
@ProjectEntityHandler.addFile = sinon.stub().callsArgWith(4, null, @file, @folder_id)
|
||||
@ProjectEntityHandler.addFile = sinon.stub().callsArgWith(5, null, @file, @folder_id)
|
||||
|
||||
@EditorController.addFileWithoutLock @project_id, @folder_id, @fileName, @stream, @source, =>
|
||||
@EditorController.addFileWithoutLock @project_id, @folder_id, @fileName, @stream, @source, @user_id, =>
|
||||
@EditorRealTimeController.emitToRoom
|
||||
.calledWith(@project_id, "reciveNewFile", @folder_id, @file, @source)
|
||||
.should.equal true
|
||||
done()
|
||||
|
||||
it "should return the file in the callback", (done) ->
|
||||
@ProjectEntityHandler.addFile = sinon.stub().callsArgWith(4, null, @file, @folder_id)
|
||||
@EditorController.addFileWithoutLock @project_id, @folder_id, @fileName, @stream, @source, (error, file) =>
|
||||
@ProjectEntityHandler.addFile = sinon.stub().callsArgWith(5, null, @file, @folder_id)
|
||||
@EditorController.addFileWithoutLock @project_id, @folder_id, @fileName, @stream, @source, @user_id, (error, file) =>
|
||||
file.should.equal @file
|
||||
done()
|
||||
|
||||
|
@ -222,28 +219,28 @@ describe "EditorController", ->
|
|||
beforeEach ->
|
||||
@LockManager.getLock.callsArgWith(1)
|
||||
@LockManager.releaseLock.callsArgWith(1)
|
||||
@EditorController.addFileWithoutLock = sinon.stub().callsArgWith(5)
|
||||
@EditorController.addFileWithoutLock = sinon.stub().callsArgWith(6)
|
||||
|
||||
it "should call addFileWithoutLock", (done)->
|
||||
@EditorController.addFile @project_id, @folder_id, @fileName, @stream, @source, (error, file) =>
|
||||
@EditorController.addFileWithoutLock.calledWith(@project_id, @folder_id, @fileName, @stream, @source).should.equal true
|
||||
@EditorController.addFile @project_id, @folder_id, @fileName, @stream, @source, @user_id, (error, file) =>
|
||||
@EditorController.addFileWithoutLock.calledWith(@project_id, @folder_id, @fileName, @stream, @source, @user_id).should.equal true
|
||||
done()
|
||||
|
||||
it "should take the lock", (done)->
|
||||
@EditorController.addFile @project_id, @folder_id, @fileName, @stream, @source, (error, file) =>
|
||||
@EditorController.addFile @project_id, @folder_id, @fileName, @stream, @source, @user_id, (error, file) =>
|
||||
@LockManager.getLock.calledWith(@project_id).should.equal true
|
||||
done()
|
||||
|
||||
it "should release the lock", (done)->
|
||||
@EditorController.addFile @project_id, @folder_id, @fileName, @stream, @source, (error, file) =>
|
||||
@EditorController.addFile @project_id, @folder_id, @fileName, @stream, @source, @user_id, (error, file) =>
|
||||
@LockManager.releaseLock.calledWith(@project_id).should.equal true
|
||||
done()
|
||||
|
||||
it "should error if it can't cat the lock", (done)->
|
||||
@LockManager.getLock = sinon.stub().callsArgWith(1, "timed out")
|
||||
@EditorController.addFile @project_id, @folder_id, @fileName, @stream, @source, (err, file) =>
|
||||
expect(err).to.exist
|
||||
err.should.equal "timed out"
|
||||
@EditorController.addFile @project_id, @folder_id, @fileName, @stream, @source, @user_id, (error, file) =>
|
||||
expect(error).to.exist
|
||||
error.should.equal "timed out"
|
||||
done()
|
||||
|
||||
|
||||
|
@ -256,9 +253,9 @@ describe "EditorController", ->
|
|||
@fsPath = "/folder/file.png"
|
||||
|
||||
it 'should send the replace file message to the editor controller', (done)->
|
||||
@ProjectEntityHandler.replaceFile = sinon.stub().callsArgWith(3)
|
||||
@EditorController.replaceFile @project_id, @file_id, @fsPath, @source, =>
|
||||
@ProjectEntityHandler.replaceFile.calledWith(@project_id, @file_id, @fsPath).should.equal true
|
||||
@ProjectEntityHandler.replaceFile = sinon.stub().callsArgWith(4)
|
||||
@EditorController.replaceFile @project_id, @file_id, @fsPath, @source, @user_id, =>
|
||||
@ProjectEntityHandler.replaceFile.calledWith(@project_id, @file_id, @fsPath, @user_id).should.equal true
|
||||
done()
|
||||
|
||||
describe 'addFolderWithoutLock :', ->
|
||||
|
|
|
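The EditorController entry points exercised above now take the acting user's id immediately before the callback. A minimal sketch of the new addDoc call, with placeholder ids and an assumed require path:

    EditorController = require "../../../../app/js/Features/Editor/EditorController"

    EditorController.addDoc "project-id-1", "folder-id-1", "notes.tex", ["% new doc"], "editor", "user-id-1", (error, doc) ->
      throw error if error?
      console.log "added", doc._id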
@ -201,7 +201,7 @@ describe "EditorHttpController", ->
|
|||
@req.body =
|
||||
name: @name = "doc-name"
|
||||
parent_folder_id: @parent_folder_id
|
||||
@EditorController.addDoc = sinon.stub().callsArgWith(5, null, @doc)
|
||||
@EditorController.addDoc = sinon.stub().callsArgWith(6, null, @doc)
|
||||
|
||||
describe "successfully", ->
|
||||
beforeEach ->
|
||||
|
@ -209,7 +209,7 @@ describe "EditorHttpController", ->
|
|||
|
||||
it "should call EditorController.addDoc", ->
|
||||
@EditorController.addDoc
|
||||
.calledWith(@project_id, @parent_folder_id, @name, [], "editor")
|
||||
.calledWith(@project_id, @parent_folder_id, @name, [], "editor", @userId)
|
||||
.should.equal true
|
||||
|
||||
it "should send the doc back as JSON", ->
|
||||
|
|
|
@ -16,7 +16,7 @@ describe "FileStoreHandler", ->
|
|||
})
|
||||
@writeStream =
|
||||
my:"writeStream"
|
||||
on: (type, cb)->
|
||||
on: (type, cb)->
|
||||
if type == "response"
|
||||
cb({statusCode: 200})
|
||||
@readStream = {my:"readStream", on: sinon.stub()}
|
||||
|
@ -38,7 +38,7 @@ describe "FileStoreHandler", ->
|
|||
@isSafeOnFileSystem = true
|
||||
|
||||
it "should create read stream", (done)->
|
||||
@fs.createReadStream.returns
|
||||
@fs.createReadStream.returns
|
||||
pipe:->
|
||||
on: (type, cb)->
|
||||
if type == "open"
|
||||
|
@ -49,8 +49,8 @@ describe "FileStoreHandler", ->
|
|||
|
||||
it "should pipe the read stream to request", (done)->
|
||||
@request.returns(@writeStream)
|
||||
@fs.createReadStream.returns
|
||||
on: (type, cb)->
|
||||
@fs.createReadStream.returns
|
||||
on: (type, cb)->
|
||||
if type == "open"
|
||||
cb()
|
||||
pipe:(o)=>
|
||||
|
@ -59,9 +59,9 @@ describe "FileStoreHandler", ->
|
|||
@handler.uploadFileFromDisk @project_id, @file_id, @fsPath, =>
|
||||
|
||||
it "should pass the correct options to request", (done)->
|
||||
@fs.createReadStream.returns
|
||||
@fs.createReadStream.returns
|
||||
pipe:->
|
||||
on: (type, cb)->
|
||||
on: (type, cb)->
|
||||
if type == "open"
|
||||
cb()
|
||||
@handler.uploadFileFromDisk @project_id, @file_id, @fsPath, =>
|
||||
|
@ -70,23 +70,24 @@ describe "FileStoreHandler", ->
|
|||
done()
|
||||
|
||||
it "builds the correct url", (done)->
|
||||
@fs.createReadStream.returns
|
||||
@fs.createReadStream.returns
|
||||
pipe:->
|
||||
on: (type, cb)->
|
||||
on: (type, cb)->
|
||||
if type == "open"
|
||||
cb()
|
||||
@handler.uploadFileFromDisk @project_id, @file_id, @fsPath, =>
|
||||
@handler._buildUrl.calledWith(@project_id, @file_id).should.equal true
|
||||
done()
|
||||
|
||||
it 'should callback with null', (done) ->
|
||||
it 'should callback with the url', (done) ->
|
||||
@fs.createReadStream.returns
|
||||
pipe:->
|
||||
on: (type, cb)->
|
||||
if type == "open"
|
||||
cb()
|
||||
@handler.uploadFileFromDisk @project_id, @file_id, @fsPath, (err) =>
|
||||
@handler.uploadFileFromDisk @project_id, @file_id, @fsPath, (err, url) =>
|
||||
expect(err).to.not.exist
|
||||
expect(url).to.equal(@handler._buildUrl())
|
||||
done()
|
||||
|
||||
describe "symlink", ->
|
||||
|
@ -218,6 +219,11 @@ describe "FileStoreHandler", ->
|
|||
@handler._buildUrl.calledWith(@newProject_id, @newFile_id).should.equal true
|
||||
done()
|
||||
|
||||
it "returns the url", (done)->
|
||||
@request.callsArgWith(1, null)
|
||||
@handler.copyFile @project_id, @file_id, @newProject_id, @newFile_id, (err, url) =>
|
||||
url.should.equal "http://filestore.stubbedBuilder.com"
|
||||
done()
|
||||
|
||||
it "should return the err", (done)->
|
||||
error = "errrror"
|
||||
|
|
|
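Per the updated expectations, uploadFileFromDisk and copyFile now call back with the filestore URL of the stored file, so callers can record it in the project structure update. A rough sketch, with assumed ids, path, and require path:

    FileStoreHandler = require "../../../../app/js/Features/FileStore/FileStoreHandler"

    FileStoreHandler.uploadFileFromDisk "project-id-1", "file-id-1", "/tmp/universe.jpg", (error, url) ->
      throw error if error?
      console.log "stored at", url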
@ -33,8 +33,8 @@ describe 'ProjectCreationHandler', ->
|
|||
constructor:(options)->
|
||||
{@name} = options
|
||||
@ProjectEntityHandler =
|
||||
addDoc: sinon.stub().callsArgWith(4, null, {_id: docId})
|
||||
addFile: sinon.stub().callsArg(4)
|
||||
addDoc: sinon.stub().callsArgWith(5, null, {_id: docId})
|
||||
addFile: sinon.stub().callsArg(5)
|
||||
setRootDoc: sinon.stub().callsArg(2)
|
||||
@ProjectDetailsHandler =
|
||||
validateProjectName: sinon.stub().yields()
|
||||
|
@ -149,7 +149,7 @@ describe 'ProjectCreationHandler', ->
|
|||
.should.equal true
|
||||
|
||||
it 'should insert main.tex', ->
|
||||
@ProjectEntityHandler.addDoc.calledWith(project_id, rootFolderId, "main.tex", ["mainbasic.tex", "lines"])
|
||||
@ProjectEntityHandler.addDoc.calledWith(project_id, rootFolderId, "main.tex", ["mainbasic.tex", "lines"], ownerId)
|
||||
.should.equal true
|
||||
|
||||
it 'should set the main doc id', ->
|
||||
|
@ -180,19 +180,20 @@ describe 'ProjectCreationHandler', ->
|
|||
|
||||
it 'should insert main.tex', ->
|
||||
@ProjectEntityHandler.addDoc
|
||||
.calledWith(project_id, rootFolderId, "main.tex", ["main.tex", "lines"])
|
||||
.calledWith(project_id, rootFolderId, "main.tex", ["main.tex", "lines"], ownerId)
|
||||
.should.equal true
|
||||
|
||||
it 'should insert references.bib', ->
|
||||
@ProjectEntityHandler.addDoc
|
||||
.calledWith(project_id, rootFolderId, "references.bib", ["references.bib", "lines"])
|
||||
.calledWith(project_id, rootFolderId, "references.bib", ["references.bib", "lines"], ownerId)
|
||||
.should.equal true
|
||||
|
||||
it 'should insert universe.jpg', ->
|
||||
@ProjectEntityHandler.addFile
|
||||
.calledWith(
|
||||
project_id, rootFolderId, "universe.jpg",
|
||||
Path.resolve(__dirname + "/../../../../app/templates/project_files/universe.jpg")
|
||||
Path.resolve(__dirname + "/../../../../app/templates/project_files/universe.jpg"),
|
||||
ownerId
|
||||
)
|
||||
.should.equal true
|
||||
|
||||
|
|
|
@ -64,8 +64,8 @@ describe 'ProjectDuplicator', ->
|
|||
@projectOptionsHandler =
|
||||
setCompiler : sinon.stub()
|
||||
@entityHandler =
|
||||
addDocWithProject: sinon.stub().callsArgWith(4, null, {name:"somDoc"})
|
||||
copyFileFromExistingProjectWithProject: sinon.stub().callsArgWith(4)
|
||||
addDocWithProject: sinon.stub().callsArgWith(5, null, {name:"somDoc"})
|
||||
copyFileFromExistingProjectWithProject: sinon.stub().callsArgWith(5)
|
||||
setRootDoc: sinon.stub()
|
||||
addFolderWithProject: sinon.stub().callsArgWith(3, null, @newFolder)
|
||||
|
||||
|
@ -112,13 +112,13 @@ describe 'ProjectDuplicator', ->
|
|||
done()
|
||||
|
||||
it 'should use the same compiler', (done)->
|
||||
@entityHandler.addDocWithProject.callsArgWith(4, null, @rootFolder.docs[0])
|
||||
@entityHandler.addDocWithProject.callsArgWith(5, null, @rootFolder.docs[0], @owner._id)
|
||||
@duplicator.duplicate @owner, @old_project_id, "", (err, newProject)=>
|
||||
@projectOptionsHandler.setCompiler.calledWith(@stubbedNewProject._id, @project.compiler).should.equal true
|
||||
done()
|
||||
|
||||
it 'should use the same root doc', (done)->
|
||||
@entityHandler.addDocWithProject.callsArgWith(4, null, @rootFolder.docs[0])
|
||||
@entityHandler.addDocWithProject.callsArgWith(5, null, @rootFolder.docs[0], @owner._id)
|
||||
@duplicator.duplicate @owner, @old_project_id, "", (err, newProject)=>
|
||||
@entityHandler.setRootDoc.calledWith(@stubbedNewProject._id, @rootFolder.docs[0]._id).should.equal true
|
||||
done()
|
||||
|
@ -139,14 +139,26 @@ describe 'ProjectDuplicator', ->
|
|||
it 'should copy all the docs', (done)->
|
||||
@duplicator.duplicate @owner, @old_project_id, "", (err, newProject)=>
|
||||
@DocstoreManager.getAllDocs.calledWith(@old_project_id).should.equal true
|
||||
@entityHandler.addDocWithProject.calledWith(@stubbedNewProject, @stubbedNewProject.rootFolder[0]._id, @doc0.name, @doc0_lines).should.equal true
|
||||
@entityHandler.addDocWithProject.calledWith(@stubbedNewProject, @newFolder._id, @doc1.name, @doc1_lines).should.equal true
|
||||
@entityHandler.addDocWithProject.calledWith(@stubbedNewProject, @newFolder._id, @doc2.name, @doc2_lines).should.equal true
|
||||
@entityHandler.addDocWithProject
|
||||
.calledWith(@stubbedNewProject, @stubbedNewProject.rootFolder[0]._id, @doc0.name, @doc0_lines, @owner._id)
|
||||
.should.equal true
|
||||
@entityHandler.addDocWithProject
|
||||
.calledWith(@stubbedNewProject, @newFolder._id, @doc1.name, @doc1_lines, @owner._id)
|
||||
.should.equal true
|
||||
@entityHandler.addDocWithProject
|
||||
.calledWith(@stubbedNewProject, @newFolder._id, @doc2.name, @doc2_lines, @owner._id)
|
||||
.should.equal true
|
||||
done()
|
||||
|
||||
it 'should copy all the files', (done)->
|
||||
@duplicator.duplicate @owner, @old_project_id, "", (err, newProject)=>
|
||||
@entityHandler.copyFileFromExistingProjectWithProject.calledWith(@stubbedNewProject, @stubbedNewProject.rootFolder[0]._id, @project._id, @rootFolder.fileRefs[0]).should.equal true
|
||||
@entityHandler.copyFileFromExistingProjectWithProject.calledWith(@stubbedNewProject, @newFolder._id, @project._id, @level1folder.fileRefs[0]).should.equal true
|
||||
@entityHandler.copyFileFromExistingProjectWithProject.calledWith(@stubbedNewProject, @newFolder._id, @project._id, @level2folder.fileRefs[0]).should.equal true
|
||||
@entityHandler.copyFileFromExistingProjectWithProject
|
||||
.calledWith(@stubbedNewProject, @stubbedNewProject.rootFolder[0]._id, @project._id, @rootFolder.fileRefs[0], @owner._id)
|
||||
.should.equal true
|
||||
@entityHandler.copyFileFromExistingProjectWithProject
|
||||
.calledWith(@stubbedNewProject, @newFolder._id, @project._id, @level1folder.fileRefs[0], @owner._id)
|
||||
.should.equal true
|
||||
@entityHandler.copyFileFromExistingProjectWithProject
|
||||
.calledWith(@stubbedNewProject, @newFolder._id, @project._id, @level2folder.fileRefs[0], @owner._id)
|
||||
.should.equal true
|
||||
done()
|
||||
|
|
|
@ -17,9 +17,10 @@ describe 'ProjectEntityHandler', ->
|
|||
userId = 1234
|
||||
|
||||
beforeEach ->
|
||||
@fileUrl = 'filestore.example.com/file'
|
||||
@FileStoreHandler =
|
||||
uploadFileFromDisk:(project_id, fileRef, localImagePath, callback)->callback()
|
||||
copyFile: sinon.stub().callsArgWith(4, null)
|
||||
uploadFileFromDisk: sinon.stub().callsArgWith(3, null, @fileUrl)
|
||||
copyFile: sinon.stub().callsArgWith(4, null, @fileUrl)
|
||||
@tpdsUpdateSender =
|
||||
addDoc:sinon.stub().callsArg(1)
|
||||
addFile:sinon.stub().callsArg(1)
|
||||
|
@ -67,6 +68,9 @@ describe 'ProjectEntityHandler', ->
|
|||
findElement : sinon.stub()
|
||||
@settings =
|
||||
maxEntitiesPerProject:200
|
||||
@documentUpdaterHandler =
|
||||
updateProjectStructure: sinon.stub().yields()
|
||||
deleteDoc: sinon.stub().callsArg(2)
|
||||
@ProjectEntityHandler = SandboxedModule.require modulePath, requires:
|
||||
'../../models/Project': Project:@ProjectModel
|
||||
'../../models/Doc': Doc:@DocModel
|
||||
|
@ -75,7 +79,7 @@ describe 'ProjectEntityHandler', ->
|
|||
'../FileStore/FileStoreHandler':@FileStoreHandler
|
||||
'../ThirdPartyDataStore/TpdsUpdateSender':@tpdsUpdateSender
|
||||
'./ProjectLocator': @projectLocator
|
||||
'../../Features/DocumentUpdater/DocumentUpdaterHandler':@documentUpdaterHandler = {}
|
||||
'../../Features/DocumentUpdater/DocumentUpdaterHandler':@documentUpdaterHandler
|
||||
'../Docstore/DocstoreManager': @DocstoreManager = {}
|
||||
'logger-sharelatex': @logger = {log:sinon.stub(), error: sinon.stub(), err:->}
|
||||
'./ProjectUpdateHandler': @projectUpdater
|
||||
|
@ -184,7 +188,6 @@ describe 'ProjectEntityHandler', ->
|
|||
describe "_cleanUpEntity", ->
|
||||
beforeEach ->
|
||||
@entity_id = "4eecaffcbffa66588e000009"
|
||||
@documentUpdaterHandler.deleteDoc = sinon.stub().callsArg(2)
|
||||
@FileStoreHandler.deleteFile = sinon.stub().callsArg(2)
|
||||
@ProjectEntityHandler.unsetRootDoc = sinon.stub().callsArg(1)
|
||||
|
||||
|
@ -240,7 +243,6 @@ describe 'ProjectEntityHandler', ->
|
|||
@ProjectEntityHandler._putElement = sinon.stub().callsArgWith(4, null, path: @pathAfterMove)
|
||||
@ProjectGetter.getProject.callsArgWith(2, null, @project)
|
||||
@tpdsUpdateSender.moveEntity = sinon.stub()
|
||||
@documentUpdaterHandler.updateProjectStructure = sinon.stub().callsArg(6)
|
||||
@ProjectEntityHandler.getAllEntitiesFromProject = sinon.stub()
|
||||
@ProjectEntityHandler.getAllEntitiesFromProject
|
||||
.onFirstCall()
|
||||
|
@ -272,7 +274,7 @@ describe 'ProjectEntityHandler', ->
|
|||
|
||||
it "should should send the update to the doc updater", ->
|
||||
@documentUpdaterHandler.updateProjectStructure
|
||||
.calledWith(project_id, userId, @oldDocs, @newDocs, @oldFiles, @newFiles)
|
||||
.calledWith(project_id, userId, {@oldDocs, @newDocs, @oldFiles, @newFiles})
|
||||
.should.equal true
|
||||
|
||||
it 'should remove the element from its current position', ->
|
||||
|
@ -324,7 +326,7 @@ describe 'ProjectEntityHandler', ->
|
|||
|
||||
it "should should send the update to the doc updater", ->
|
||||
@documentUpdaterHandler.updateProjectStructure
|
||||
.calledWith(project_id, userId, @oldDocs, @newDocs, @oldFiles, @newFiles)
|
||||
.calledWith(project_id, userId, {@oldDocs, @newDocs, @oldFiles, @newFiles})
|
||||
.should.equal true
|
||||
|
||||
it 'should remove the element from its current position', ->
|
||||
|
@ -455,7 +457,7 @@ describe 'ProjectEntityHandler', ->
|
|||
@tpdsUpdateSender.addDoc = sinon.stub().callsArg(1)
|
||||
@DocstoreManager.updateDoc = sinon.stub().yields(null, true, 0)
|
||||
|
||||
@ProjectEntityHandler.addDoc project_id, folder_id, @name, @lines, @callback
|
||||
@ProjectEntityHandler.addDoc project_id, folder_id, @name, @lines, userId, @callback
|
||||
|
||||
# Created doc
|
||||
@doc = @ProjectEntityHandler._putElement.args[0][2]
|
||||
|
@ -484,6 +486,16 @@ describe 'ProjectEntityHandler', ->
|
|||
.calledWith(project_id, @doc._id.toString(), @lines)
|
||||
.should.equal true
|
||||
|
||||
it "should should send the change in project structure to the doc updater", () ->
|
||||
newDocs = [
|
||||
doc: @doc
|
||||
path: @path
|
||||
docLines: @lines.join('\n')
|
||||
]
|
||||
@documentUpdaterHandler.updateProjectStructure
|
||||
.calledWith(project_id, userId, {newDocs})
|
||||
.should.equal true
|
||||
|
||||
describe "restoreDoc", ->
|
||||
beforeEach ->
|
||||
@name = "doc-name"
|
||||
|
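A hedged sketch of the new ProjectEntityHandler.addDoc arity covered by these tests: the user id is threaded through so the resulting project-structure change can be attributed when it is sent to the document updater. The require path and ids are placeholders.

    ProjectEntityHandler = require "../../../../app/js/Features/Project/ProjectEntityHandler"

    ProjectEntityHandler.addDoc "project-id-1", "folder-id-1", "main.tex", ["\\documentclass{article}"], "user-id-1", (error, doc, folder_id) ->
      throw error if error?
      console.log "added", doc._id, "in folder", folder_id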
@ -512,7 +524,10 @@ describe 'ProjectEntityHandler', ->
|
|||
describe 'addFile', ->
|
||||
fileName = "something.jpg"
|
||||
beforeEach ->
|
||||
@fileSystemPath = "somehintg"
|
||||
@ProjectEntityHandler._putElement = sinon.stub().callsArgWith(4, null, {path:{fileSystem: @fileSystemPath}})
|
||||
@filePath = "somewhere"
|
||||
|
||||
it 'should upload it via the FileStoreHandler', (done)->
|
||||
@FileStoreHandler.uploadFileFromDisk = (passedProject_id, file_id, filePath, callback)=>
|
||||
file_id.should.equal "file_id"
|
||||
|
@ -520,7 +535,7 @@ describe 'ProjectEntityHandler', ->
|
|||
filePath.should.equal @filePath
|
||||
done()
|
||||
|
||||
@ProjectEntityHandler.addFile project_id, folder_id, fileName, @filePath, (err, fileRef, parentFolder)->
|
||||
@ProjectEntityHandler.addFile project_id, folder_id, fileName, @filePath, userId, (err, fileRef, parentFolder)->
|
||||
|
||||
it 'should put file into folder by calling put element', (done)->
|
||||
@ProjectEntityHandler._putElement = (passedProject, passedFolder_id, passedFileRef, passedType, callback)->
|
||||
|
@ -530,11 +545,10 @@ describe 'ProjectEntityHandler', ->
|
|||
passedType.should.equal 'file'
|
||||
done()
|
||||
|
||||
@ProjectEntityHandler.addFile project_id, folder_id, fileName, {}, (err, fileRef, parentFolder)->
|
||||
@ProjectEntityHandler.addFile project_id, folder_id, fileName, {}, userId, (err, fileRef, parentFolder)->
|
||||
|
||||
it 'should return doc and parent folder', (done)->
|
||||
@ProjectEntityHandler._putElement = sinon.stub().callsArgWith(4, null, {path:{fileSystem:"somehintg"}})
|
||||
@ProjectEntityHandler.addFile project_id, folder_id, fileName, {}, (err, fileRef, parentFolder)->
|
||||
@ProjectEntityHandler.addFile project_id, folder_id, fileName, {}, userId, (err, fileRef, parentFolder)->
|
||||
parentFolder.should.equal folder_id
|
||||
fileRef.name.should.equal fileName
|
||||
done()
|
||||
|
@ -554,33 +568,45 @@ describe 'ProjectEntityHandler', ->
|
|||
options.rev.should.equal 0
|
||||
done()
|
||||
|
||||
@ProjectEntityHandler.addFile project_id, folder_id, fileName, {}, (err, fileRef, parentFolder)->
|
||||
@ProjectEntityHandler.addFile project_id, folder_id, fileName, {}, userId, (err, fileRef, parentFolder)->
|
||||
|
||||
describe 'replacing a file', ->
|
||||
it "should should send the change in project structure to the doc updater", (done) ->
|
||||
@documentUpdaterHandler.updateProjectStructure = (passed_project_id, passed_user_id, changes) =>
|
||||
passed_project_id.should.equal project_id
|
||||
passed_user_id.should.equal userId
|
||||
{ newFiles } = changes
|
||||
newFiles.length.should.equal 1
|
||||
newFile = newFiles[0]
|
||||
newFile.file.name.should.equal fileName
|
||||
newFile.path.should.equal @fileSystemPath
|
||||
newFile.url.should.equal @fileUrl
|
||||
done()
|
||||
|
||||
@ProjectEntityHandler.addFile project_id, folder_id, fileName, {}, userId, () ->
|
||||
|
||||
describe 'replaceFile', ->
|
||||
beforeEach ->
|
||||
@projectLocator
|
||||
@file_id = "file_id_here"
|
||||
@fsPath = "fs_path_here.png"
|
||||
@fileRef = {rev:3, _id:@file_id}
|
||||
@filePaths = {fileSystem:"/folder1/file.png", mongo:"folder.1.files.somewhere"}
|
||||
@fileRef = {rev:3, _id: @file_id, name: @fileName = "fileName"}
|
||||
@filePaths = {fileSystem: @fileSystemPath="/folder1/file.png", mongo:"folder.1.files.somewhere"}
|
||||
@projectLocator.findElement = sinon.stub().callsArgWith(1, null, @fileRef, @filePaths)
|
||||
@ProjectModel.update = (_, __, ___, cb)-> cb()
|
||||
@ProjectModel.findOneAndUpdate = sinon.stub().callsArgWith(3)
|
||||
@ProjectGetter.getProject = sinon.stub().callsArgWith(2, null, @project)
|
||||
|
||||
it 'should find the file', (done)->
|
||||
|
||||
@ProjectEntityHandler.replaceFile project_id, @file_id, @fsPath, =>
|
||||
@projectLocator.findElement.calledWith({element_id:@file_id, type:"file", project_id:project_id}).should.equal true
|
||||
@ProjectEntityHandler.replaceFile project_id, @file_id, @fsPath, userId, =>
|
||||
@projectLocator.findElement
|
||||
.calledWith({element_id:@file_id, type:"file", project: @project})
|
||||
.should.equal true
|
||||
done()
|
||||
|
||||
it 'should tell the file store handler to upload the file from disk', (done)->
|
||||
@FileStoreHandler.uploadFileFromDisk = sinon.stub().callsArgWith(3)
|
||||
@ProjectEntityHandler.replaceFile project_id, @file_id, @fsPath, =>
|
||||
@ProjectEntityHandler.replaceFile project_id, @file_id, @fsPath, userId, =>
|
||||
@FileStoreHandler.uploadFileFromDisk.calledWith(project_id, @file_id, @fsPath).should.equal true
|
||||
done()
|
||||
|
||||
|
||||
it 'should send the file to the tpds with an incremented rev', (done)->
|
||||
@tpdsUpdateSender.addFile = (options)=>
|
||||
options.project_id.should.equal project_id
|
||||
|
@ -590,26 +616,39 @@ describe 'ProjectEntityHandler', ->
|
|||
options.rev.should.equal @fileRef.rev + 1
|
||||
done()
|
||||
|
||||
@ProjectEntityHandler.replaceFile project_id, @file_id, @fsPath, =>
|
||||
@ProjectEntityHandler.replaceFile project_id, @file_id, @fsPath, userId, =>
|
||||
|
||||
it 'should inc the rev id', (done)->
|
||||
@ProjectModel.update = (conditions, update, options, callback)=>
|
||||
@ProjectModel.findOneAndUpdate = (conditions, update, options, callback)=>
|
||||
conditions._id.should.equal project_id
|
||||
update.$inc["#{@filePaths.mongo}.rev"].should.equal 1
|
||||
done()
|
||||
|
||||
@ProjectEntityHandler.replaceFile project_id, @file_id, @fsPath, =>
|
||||
@ProjectEntityHandler.replaceFile project_id, @file_id, @fsPath, userId, =>
|
||||
|
||||
it 'should update the created at date', (done)->
|
||||
d = new Date()
|
||||
@ProjectModel.update = (conditions, update, options, callback)=>
|
||||
@ProjectModel.findOneAndUpdate = (conditions, update, options, callback)=>
|
||||
conditions._id.should.equal project_id
|
||||
differenceInMs = update.$set["#{@filePaths.mongo}.created"].getTime() - d.getTime()
|
||||
differenceInMs.should.be.below(20)
|
||||
done()
|
||||
|
||||
@ProjectEntityHandler.replaceFile project_id, @file_id, @fsPath, =>
|
||||
@ProjectEntityHandler.replaceFile project_id, @file_id, @fsPath, userId, =>
|
||||
|
||||
it "should should send the old and new project structure to the doc updater", (done) ->
|
||||
@documentUpdaterHandler.updateProjectStructure = (passed_project_id, passed_user_id, changes) =>
|
||||
passed_project_id.should.equal project_id
|
||||
passed_user_id.should.equal userId
|
||||
{ newFiles } = changes
|
||||
newFiles.length.should.equal 1
|
||||
newFile = newFiles[0]
|
||||
newFile.file.name.should.equal @fileName
|
||||
newFile.path.should.equal @fileSystemPath
|
||||
newFile.url.should.equal @fileUrl
|
||||
done()
|
||||
|
||||
@ProjectEntityHandler.replaceFile project_id, @file_id, @fsPath, userId, =>
|
||||
|
||||
describe 'addFolder', ->
|
||||
folderName = "folder1234"
|
||||
|
@ -943,19 +982,19 @@ describe 'ProjectEntityHandler', ->
|
|||
@ProjectModel.update.calledWith({_id : @project_id}, {$unset : {rootDoc_id: true}})
|
||||
.should.equal true
|
||||
|
||||
describe 'copyFileFromExistingProject', ->
|
||||
describe 'copyFileFromExistingProjectWithProject', ->
|
||||
fileName = "something.jpg"
|
||||
filePath = "dumpFolder/somewhere/image.jpeg"
|
||||
oldProject_id = "123kljadas"
|
||||
oldFileRef = {name:fileName, _id:"oldFileRef"}
|
||||
beforeEach ->
|
||||
@ProjectGetter.getProject = (project_id, fields, callback)=> callback(null, {name:@project.name, _id:@project._id})
|
||||
@ProjectEntityHandler._putElement = sinon.stub().callsArgWith(4, null, {path:{fileSystem:"somehintg"}})
|
||||
|
||||
beforeEach ->
|
||||
@fileSystemPath = "somehintg"
|
||||
@ProjectEntityHandler._putElement = sinon.stub().callsArgWith(4, null, {path:{fileSystem: @fileSystemPath}})
|
||||
|
||||
it 'should copy the file in FileStoreHandler', (done)->
|
||||
@ProjectEntityHandler._putElement = sinon.stub().callsArgWith(4, null, {path:{fileSystem:"somehintg"}})
|
||||
@ProjectEntityHandler.copyFileFromExistingProject project_id, folder_id, oldProject_id, oldFileRef, (err, fileRef, parentFolder)=>
|
||||
@ProjectEntityHandler.copyFileFromExistingProjectWithProject @project, folder_id, oldProject_id, oldFileRef, userId, (err, fileRef, parentFolder)=>
|
||||
@FileStoreHandler.copyFile.calledWith(oldProject_id, oldFileRef._id, project_id, fileRef._id).should.equal true
|
||||
done()
|
||||
|
||||
|
@ -967,10 +1006,10 @@ describe 'ProjectEntityHandler', ->
|
|||
passedType.should.equal 'file'
|
||||
done()
|
||||
|
||||
@ProjectEntityHandler.copyFileFromExistingProject project_id, folder_id, oldProject_id, oldFileRef, (err, fileRef, parentFolder)->
|
||||
@ProjectEntityHandler.copyFileFromExistingProjectWithProject @project, folder_id, oldProject_id, oldFileRef, userId, (err, fileRef, parentFolder)->
|
||||
|
||||
it 'should return doc and parent folder', (done)->
|
||||
@ProjectEntityHandler.copyFileFromExistingProject project_id, folder_id, oldProject_id, oldFileRef, (err, fileRef, parentFolder)->
|
||||
@ProjectEntityHandler.copyFileFromExistingProjectWithProject @project, folder_id, oldProject_id, oldFileRef, userId, (err, fileRef, parentFolder)->
|
||||
parentFolder.should.equal folder_id
|
||||
fileRef.name.should.equal fileName
|
||||
done()
|
||||
|
@ -990,8 +1029,21 @@ describe 'ProjectEntityHandler', ->
|
|||
options.rev.should.equal 0
|
||||
done()
|
||||
|
||||
@ProjectEntityHandler.copyFileFromExistingProject project_id, folder_id, oldProject_id, oldFileRef, (err, fileRef, parentFolder)->
|
||||
@ProjectEntityHandler.copyFileFromExistingProjectWithProject @project, folder_id, oldProject_id, oldFileRef, userId, (err, fileRef, parentFolder)->
|
||||
|
||||
it "should should send the change in project structure to the doc updater", (done) ->
|
||||
@documentUpdaterHandler.updateProjectStructure = (passed_project_id, passed_user_id, changes) =>
|
||||
passed_project_id.should.equal project_id
|
||||
passed_user_id.should.equal userId
|
||||
{ newFiles } = changes
|
||||
newFiles.length.should.equal 1
|
||||
newFile = newFiles[0]
|
||||
newFile.file.name.should.equal fileName
|
||||
newFile.path.should.equal @fileSystemPath
|
||||
newFile.url.should.equal @fileUrl
|
||||
done()
|
||||
|
||||
@ProjectEntityHandler.copyFileFromExistingProjectWithProject @project, folder_id, oldProject_id, oldFileRef, userId, (err, fileRef, parentFolder)->
|
||||
|
||||
describe "renameEntity", ->
|
||||
beforeEach ->
|
||||
|
@ -1012,12 +1064,12 @@ describe 'ProjectEntityHandler', ->
|
|||
@projectLocator.findElement = sinon.stub().callsArgWith(1, null, @entity = { _id: @entity_id, name:"old.tex", rev:4 }, @path)
|
||||
@tpdsUpdateSender.moveEntity = sinon.stub()
|
||||
@ProjectModel.findOneAndUpdate = sinon.stub().callsArgWith(3, null, @project)
|
||||
@documentUpdaterHandler.updateProjectStructure = sinon.stub().callsArg(6)
|
||||
@documentUpdaterHandler.updateProjectStructure = sinon.stub().yields()
|
||||
|
||||
it "should should send the old and new project structure to the doc updater", (done) ->
|
||||
@ProjectEntityHandler.renameEntity project_id, @entity_id, @entityType, @newName, userId, =>
|
||||
@documentUpdaterHandler.updateProjectStructure
|
||||
.calledWith(project_id, userId, @oldDocs, @newDocs, @oldFiles, @newFiles)
|
||||
.calledWith(project_id, userId, {@oldDocs, @newDocs, @oldFiles, @newFiles})
|
||||
.should.equal true
|
||||
done()
|
||||
|
||||
|
|
|
@ -63,11 +63,11 @@ describe 'UpdateMerger :', ->
|
|||
file_id = "1231"
|
||||
@projectLocator.findElementByPath = (_, __, cb)->cb(null, {_id:file_id})
|
||||
@FileTypeManager.isBinary.callsArgWith(2, null, true)
|
||||
@updateMerger.p.processFile = sinon.stub().callsArgWith(5)
|
||||
@updateMerger.p.processFile = sinon.stub().callsArgWith(6)
|
||||
filePath = "/folder/file1.png"
|
||||
|
||||
@updateMerger.mergeUpdate @user_id, @project_id, filePath, @update, @source, =>
|
||||
@updateMerger.p.processFile.calledWith(@project_id, file_id, @fsPath, filePath, @source).should.equal true
|
||||
@updateMerger.p.processFile.calledWith(@project_id, file_id, @fsPath, filePath, @source, @user_id).should.equal true
|
||||
@FileTypeManager.isBinary.calledWith(filePath, @fsPath).should.equal true
|
||||
@fs.unlink.calledWith(@fsPath).should.equal true
|
||||
done()
|
||||
|
@ -97,7 +97,7 @@ describe 'UpdateMerger :', ->
|
|||
path = "folder1/folder2/#{docName}"
|
||||
@editorController.mkdirp = sinon.stub().withArgs(@project_id).callsArgWith(2, null, [folder], folder)
|
||||
@editorController.addDoc = ->
|
||||
mock = sinon.mock(@editorController).expects("addDoc").withArgs(@project_id, folder._id, docName, @splitDocLines, @source).callsArg(5)
|
||||
mock = sinon.mock(@editorController).expects("addDoc").withArgs(@project_id, folder._id, docName, @splitDocLines, @source, @user_id).callsArg(6)
|
||||
|
||||
@update.write(@docLines)
|
||||
@update.end()
|
||||
|
@ -114,22 +114,22 @@ describe 'UpdateMerger :', ->
|
|||
@folder = _id: @folder_id
|
||||
@fileName = "file.png"
|
||||
@fsPath = "fs/path.tex"
|
||||
@editorController.addFile = sinon.stub().callsArg(5)
|
||||
@editorController.replaceFile = sinon.stub().callsArg(4)
|
||||
@editorController.addFile = sinon.stub().callsArg(6)
|
||||
@editorController.replaceFile = sinon.stub().callsArg(5)
|
||||
@editorController.deleteEntity = sinon.stub()
|
||||
@editorController.mkdirp = sinon.stub().withArgs(@project_id).callsArgWith(2, null, [@folder], @folder)
|
||||
@updateMerger.p.writeStreamToDisk = sinon.stub().withArgs(@project_id, @file_id, @update).callsArgWith(3, null, @fsPath)
|
||||
|
||||
it 'should replace file if the file already exists', (done)->
|
||||
@updateMerger.p.processFile @project_id, @file_id, @fsPath, @path, @source, =>
|
||||
@updateMerger.p.processFile @project_id, @file_id, @fsPath, @path, @source, @user_id, =>
|
||||
@editorController.addFile.called.should.equal false
|
||||
@editorController.replaceFile.calledWith(@project_id, @file_id, @fsPath, @source).should.equal true
|
||||
@editorController.replaceFile.calledWith(@project_id, @file_id, @fsPath, @source, @user_id).should.equal true
|
||||
done()
|
||||
|
||||
it 'should call add file if the file does not exist', (done)->
|
||||
@updateMerger.p.processFile @project_id, undefined, @fsPath, @path, @source, =>
|
||||
@updateMerger.p.processFile @project_id, undefined, @fsPath, @path, @source, @user_id, =>
|
||||
@editorController.mkdirp.calledWith(@project_id, "folder/").should.equal true
|
||||
@editorController.addFile.calledWith(@project_id, @folder_id, @fileName, @fsPath, @source).should.equal true
|
||||
@editorController.addFile.calledWith(@project_id, @folder_id, @fileName, @fsPath, @source, @user_id).should.equal true
|
||||
@editorController.replaceFile.called.should.equal false
|
||||
done()
|
||||
|
||||
|
|
|
@ -44,14 +44,14 @@ describe "FileSystemImportManager", ->
|
|||
|
||||
describe "with replace set to false", ->
|
||||
beforeEach ->
|
||||
@EditorController.addDocWithoutLock = sinon.stub().callsArg(5)
|
||||
@EditorController.addDocWithoutLock = sinon.stub().callsArg(6)
|
||||
@FileSystemImportManager.addDoc @user_id, @project_id, @folder_id, @name, @path_on_disk, false, @callback
|
||||
|
||||
it "should read the file from disk", ->
|
||||
@fs.readFile.calledWith(@path_on_disk, "utf8").should.equal true
|
||||
|
||||
it "should insert the doc", ->
|
||||
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @name, @docLines, "upload")
|
||||
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @name, @docLines, "upload", @user_id)
|
||||
.should.equal true
|
||||
|
||||
describe "with windows line ending", ->
|
||||
|
@ -59,11 +59,11 @@ describe "FileSystemImportManager", ->
|
|||
@docContent = "one\r\ntwo\r\nthree"
|
||||
@docLines = ["one", "two", "three"]
|
||||
@fs.readFile = sinon.stub().callsArgWith(2, null, @docContent)
|
||||
@EditorController.addDocWithoutLock = sinon.stub().callsArg(5)
|
||||
@EditorController.addDocWithoutLock = sinon.stub().callsArg(6)
|
||||
@FileSystemImportManager.addDoc @user_id, @project_id, @folder_id, @name, @path_on_disk, false, @callback
|
||||
|
||||
it "should strip the \\r characters before adding", ->
|
||||
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @name, @docLines, "upload")
|
||||
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @name, @docLines, "upload", @user_id)
|
||||
.should.equal true
|
||||
|
||||
describe "with replace set to true", ->
|
||||
|
@ -76,7 +76,7 @@ describe "FileSystemImportManager", ->
|
|||
}]
|
||||
}
|
||||
@ProjectLocator.findElement = sinon.stub().callsArgWith(1, null, @folder)
|
||||
@EditorController.addDocWithoutLock = sinon.stub().callsArg(5)
|
||||
@EditorController.addDocWithoutLock = sinon.stub().callsArg(6)
|
||||
@FileSystemImportManager.addDoc @user_id, @project_id, @folder_id, @name, @path_on_disk, true, @callback
|
||||
|
||||
it "should look up the folder", ->
|
||||
|
@ -85,7 +85,7 @@ describe "FileSystemImportManager", ->
|
|||
.should.equal true
|
||||
|
||||
it "should insert the doc", ->
|
||||
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @name, @docLines, "upload")
|
||||
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @name, @docLines, "upload", @user_id)
|
||||
.should.equal true
|
||||
|
||||
describe "when the doc does exist", ->
|
||||
|
@ -114,12 +114,12 @@ describe "FileSystemImportManager", ->
|
|||
|
||||
describe "addFile with replace set to false", ->
|
||||
beforeEach ->
|
||||
@EditorController.addFileWithoutLock = sinon.stub().callsArg(5)
|
||||
@EditorController.addFileWithoutLock = sinon.stub().callsArg(6)
|
||||
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, true)
|
||||
@FileSystemImportManager.addFile @user_id, @project_id, @folder_id, @name, @path_on_disk, false, @callback
|
||||
|
||||
it "should add the file", ->
|
||||
@EditorController.addFileWithoutLock.calledWith(@project_id, @folder_id, @name, @path_on_disk, "upload")
|
||||
@EditorController.addFileWithoutLock.calledWith(@project_id, @folder_id, @name, @path_on_disk, "upload", @user_id)
|
||||
.should.equal true
|
||||
|
||||
describe "addFile with symlink", ->
|
||||
|
@ -144,7 +144,7 @@ describe "FileSystemImportManager", ->
|
|||
}
|
||||
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, true)
|
||||
@ProjectLocator.findElement = sinon.stub().callsArgWith(1, null, @folder)
|
||||
@EditorController.addFileWithoutLock = sinon.stub().callsArg(5)
|
||||
@EditorController.addFileWithoutLock = sinon.stub().callsArg(6)
|
||||
@FileSystemImportManager.addFile @user_id, @project_id, @folder_id, @name, @path_on_disk, true, @callback
|
||||
|
||||
it "should look up the folder", ->
|
||||
|
@ -153,7 +153,7 @@ describe "FileSystemImportManager", ->
|
|||
.should.equal true
|
||||
|
||||
it "should add the file", ->
|
||||
@EditorController.addFileWithoutLock.calledWith(@project_id, @folder_id, @name, @path_on_disk, "upload")
|
||||
@EditorController.addFileWithoutLock.calledWith(@project_id, @folder_id, @name, @path_on_disk, "upload", @user_id)
|
||||
.should.equal true
|
||||
|
||||
describe "when the file does exist", ->
|
||||
|
@ -169,7 +169,7 @@ describe "FileSystemImportManager", ->
|
|||
}
|
||||
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, true)
|
||||
@ProjectLocator.findElement = sinon.stub().callsArgWith(1, null, @folder)
|
||||
@EditorController.replaceFile = sinon.stub().callsArg(4)
|
||||
@EditorController.replaceFile = sinon.stub().callsArg(5)
|
||||
@FileSystemImportManager.addFile @user_id, @project_id, @folder_id, @name, @path_on_disk, true, @callback
|
||||
|
||||
it "should look up the folder", ->
|
||||
|
@ -178,7 +178,7 @@ describe "FileSystemImportManager", ->
|
|||
.should.equal true
|
||||
|
||||
it "should replace the file", ->
|
||||
@EditorController.replaceFile.calledWith(@project_id, @file_id, @path_on_disk, "upload")
|
||||
@EditorController.replaceFile.calledWith(@project_id, @file_id, @path_on_disk, "upload", @user_id)
|
||||
.should.equal true
|
||||
|
||||
describe "addFolder", ->
|
||||
|
|