mirror of
https://github.com/overleaf/overleaf.git
synced 2024-11-14 20:40:17 -05:00
Intial open source comment
This commit is contained in:
commit
8715690ce9
1694 changed files with 426896 additions and 0 deletions
69
services/web/.gitignore
vendored
Normal file
69
services/web/.gitignore
vendored
Normal file
|
@ -0,0 +1,69 @@
|
||||||
|
Compiled source #
|
||||||
|
###################
|
||||||
|
*.com
|
||||||
|
*.class
|
||||||
|
*.dll
|
||||||
|
*.exe
|
||||||
|
*.o
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Packages #
|
||||||
|
############
|
||||||
|
# it's better to unpack these files and commit the raw source
|
||||||
|
# git has its own built in compression methods
|
||||||
|
*.7z
|
||||||
|
*.dmg
|
||||||
|
*.gz
|
||||||
|
*.iso
|
||||||
|
*.jar
|
||||||
|
*.rar
|
||||||
|
*.tar
|
||||||
|
*.zip
|
||||||
|
|
||||||
|
# Logs and databases #
|
||||||
|
######################
|
||||||
|
*.log
|
||||||
|
*.sql
|
||||||
|
*.sqlite
|
||||||
|
|
||||||
|
# OS generated files #
|
||||||
|
######################
|
||||||
|
.DS_Store?
|
||||||
|
ehthumbs.db
|
||||||
|
Icon?
|
||||||
|
Thumbs.db
|
||||||
|
|
||||||
|
node_modules/*
|
||||||
|
data/*
|
||||||
|
|
||||||
|
app.js
|
||||||
|
app/js/*
|
||||||
|
test/UnitTests/js/*
|
||||||
|
test/smoke/js/*
|
||||||
|
cookies.txt
|
||||||
|
requestQueueWorker.js
|
||||||
|
TpdsWorker.js
|
||||||
|
BackgroundJobsWorker.js
|
||||||
|
|
||||||
|
public/js/history/versiondetail.js
|
||||||
|
!public/js/libs/*
|
||||||
|
public/js/*
|
||||||
|
!public/js/ace/*
|
||||||
|
!public/js/libs/*
|
||||||
|
public/js/editor.js
|
||||||
|
public/js/home.js
|
||||||
|
public/js/forms.js
|
||||||
|
public/js/gui.js
|
||||||
|
public/js/admin.js
|
||||||
|
public/js/history/*
|
||||||
|
public/stylesheets/mainStyle.css
|
||||||
|
public/stylesheets/plans.css
|
||||||
|
public/minjs/
|
||||||
|
|
||||||
|
public/js/main.js
|
||||||
|
Gemfile.lock
|
||||||
|
|
||||||
|
*.swp
|
||||||
|
.DS_Store
|
||||||
|
|
||||||
|
|
4
services/web/.npmignore
Normal file
4
services/web/.npmignore
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
node_modules
|
||||||
|
data
|
||||||
|
log
|
||||||
|
public/minjs
|
27
services/web/BackgroundJobsWorker.coffee
Normal file
27
services/web/BackgroundJobsWorker.coffee
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
settings = require('settings-sharelatex')
|
||||||
|
SubscriptionBackgroundTasks = require("./app/js/Features/Subscription/SubscriptionBackgroundTasks")
|
||||||
|
TpdsPollingBackgroundTasks = require("./app/js/Features/ThirdPartyDataStore/TpdsPollingBackgroundTasks")
|
||||||
|
AutomaticSnapshotManager = require("./app/js/Features/Versioning/AutomaticSnapshotManager")
|
||||||
|
|
||||||
|
time =
|
||||||
|
oneHour : 60 * 60 * 1000
|
||||||
|
fifteenMinutes : 15 * 60 * 1000
|
||||||
|
thirtySeconds : 30 * 1000
|
||||||
|
betweenThirtyAndFiveHundredSeconds: =>
|
||||||
|
random = Math.floor(Math.random() * 500) * 1000
|
||||||
|
if random < time.thirtySeconds
|
||||||
|
return time.betweenThirtyAndFiveHundredSeconds()
|
||||||
|
else
|
||||||
|
return random
|
||||||
|
|
||||||
|
runPeriodically = (funcToRun, periodLength)->
|
||||||
|
recursiveReference = ->
|
||||||
|
funcToRun ->
|
||||||
|
setTimeout recursiveReference, periodLength
|
||||||
|
setTimeout recursiveReference, 0
|
||||||
|
|
||||||
|
# TODO: Remove this one month after the ability to start free trials was removed
|
||||||
|
runPeriodically ((cb) -> SubscriptionBackgroundTasks.downgradeExpiredFreeTrials(cb)), time.oneHour
|
||||||
|
|
||||||
|
runPeriodically ((cb) -> TpdsPollingBackgroundTasks.pollUsersWithDropbox(cb)), time.fifteenMinutes
|
||||||
|
runPeriodically ((cb) -> AutomaticSnapshotManager.takeAutomaticSnapshots(cb)), time.thirtySeconds
|
197
services/web/Gruntfile.coffee
Normal file
197
services/web/Gruntfile.coffee
Normal file
|
@ -0,0 +1,197 @@
|
||||||
|
fs = require "fs"
|
||||||
|
|
||||||
|
module.exports = (grunt) ->
|
||||||
|
grunt.loadNpmTasks 'grunt-contrib-coffee'
|
||||||
|
grunt.loadNpmTasks 'grunt-contrib-less'
|
||||||
|
grunt.loadNpmTasks 'grunt-contrib-clean'
|
||||||
|
grunt.loadNpmTasks 'grunt-mocha-test'
|
||||||
|
grunt.loadNpmTasks 'grunt-available-tasks'
|
||||||
|
grunt.loadNpmTasks 'grunt-contrib-requirejs'
|
||||||
|
grunt.loadNpmTasks 'grunt-execute'
|
||||||
|
grunt.loadNpmTasks 'grunt-bunyan'
|
||||||
|
|
||||||
|
grunt.initConfig
|
||||||
|
execute:
|
||||||
|
app:
|
||||||
|
src: "app.js"
|
||||||
|
|
||||||
|
coffee:
|
||||||
|
app_dir:
|
||||||
|
expand: true,
|
||||||
|
flatten: false,
|
||||||
|
cwd: 'app/coffee',
|
||||||
|
src: ['**/*.coffee'],
|
||||||
|
dest: 'app/js/',
|
||||||
|
ext: '.js'
|
||||||
|
|
||||||
|
app:
|
||||||
|
src: 'app.coffee'
|
||||||
|
dest: 'app.js'
|
||||||
|
|
||||||
|
sharejs:
|
||||||
|
options:
|
||||||
|
join: true
|
||||||
|
files:
|
||||||
|
"public/js/libs/sharejs.js": [
|
||||||
|
"public/coffee/editor/ShareJSHeader.coffee"
|
||||||
|
"public/coffee/editor/sharejs/types/helpers.coffee"
|
||||||
|
"public/coffee/editor/sharejs/types/text.coffee"
|
||||||
|
"public/coffee/editor/sharejs/types/text-api.coffee"
|
||||||
|
"public/coffee/editor/sharejs/types/json.coffee"
|
||||||
|
"public/coffee/editor/sharejs/types/json-api.coffee"
|
||||||
|
"public/coffee/editor/sharejs/client/microevent.coffee"
|
||||||
|
"public/coffee/editor/sharejs/client/doc.coffee"
|
||||||
|
"public/coffee/editor/sharejs/client/ace.coffee"
|
||||||
|
]
|
||||||
|
|
||||||
|
client:
|
||||||
|
expand: true,
|
||||||
|
flatten: false,
|
||||||
|
cwd: 'public/coffee',
|
||||||
|
src: ['**/*.coffee'],
|
||||||
|
dest: 'public/js/',
|
||||||
|
ext: '.js'
|
||||||
|
|
||||||
|
smoke_tests:
|
||||||
|
expand: true,
|
||||||
|
flatten: false,
|
||||||
|
cwd: 'test/smoke/coffee',
|
||||||
|
src: ['**/*.coffee'],
|
||||||
|
dest: 'test/smoke/js/',
|
||||||
|
ext: '.js'
|
||||||
|
|
||||||
|
unit_tests:
|
||||||
|
expand: true,
|
||||||
|
flatten: false,
|
||||||
|
cwd: 'test/UnitTests/coffee',
|
||||||
|
src: ['**/*.coffee'],
|
||||||
|
dest: 'test/UnitTests/js/',
|
||||||
|
ext: '.js'
|
||||||
|
|
||||||
|
less:
|
||||||
|
app:
|
||||||
|
files:
|
||||||
|
"public/stylesheets/mainStyle.css": "public/stylesheets/mainStyle.less"
|
||||||
|
plans:
|
||||||
|
files:
|
||||||
|
"public/stylesheets/plans.css": "public/stylesheets/less/plans.less"
|
||||||
|
|
||||||
|
requirejs:
|
||||||
|
compile:
|
||||||
|
options:
|
||||||
|
appDir: "public/js"
|
||||||
|
baseUrl: "./"
|
||||||
|
dir: "public/minjs"
|
||||||
|
inlineText: false
|
||||||
|
preserveLicenseComments: false
|
||||||
|
paths:
|
||||||
|
"underscore": "libs/underscore"
|
||||||
|
"jquery": "libs/jquery"
|
||||||
|
shim:
|
||||||
|
"libs/backbone":
|
||||||
|
deps: ["libs/underscore"]
|
||||||
|
"libs/pdfListView/PdfListView":
|
||||||
|
deps: ["libs/pdf"]
|
||||||
|
"libs/pdf":
|
||||||
|
deps: ["libs/compatibility"]
|
||||||
|
|
||||||
|
skipDirOptimize: true
|
||||||
|
modules: [
|
||||||
|
{
|
||||||
|
name: "main",
|
||||||
|
exclude: ["jquery"]
|
||||||
|
}, {
|
||||||
|
name: "ide",
|
||||||
|
exclude: ["jquery"]
|
||||||
|
}, {
|
||||||
|
name: "home",
|
||||||
|
exclude: ["jquery"]
|
||||||
|
}, {
|
||||||
|
name: "list",
|
||||||
|
exclude: ["jquery"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
clean:
|
||||||
|
app: ["app/js"]
|
||||||
|
unit_tests: ["test/UnitTests/js"]
|
||||||
|
|
||||||
|
mochaTest:
|
||||||
|
unit:
|
||||||
|
src: ["test/UnitTests/js/#{grunt.option('feature') or '**'}/*.js"]
|
||||||
|
options:
|
||||||
|
reporter: grunt.option('reporter') or 'spec'
|
||||||
|
grep: grunt.option("grep")
|
||||||
|
smoke:
|
||||||
|
src: ['test/smoke/js/**/*.js']
|
||||||
|
options:
|
||||||
|
reporter: grunt.option('reporter') or 'spec'
|
||||||
|
grep: grunt.option("grep")
|
||||||
|
|
||||||
|
|
||||||
|
availabletasks:
|
||||||
|
tasks:
|
||||||
|
options:
|
||||||
|
filter: 'exclude',
|
||||||
|
tasks: [
|
||||||
|
'coffee'
|
||||||
|
'less'
|
||||||
|
'clean'
|
||||||
|
'mochaTest'
|
||||||
|
'availabletasks'
|
||||||
|
'wrap_sharejs'
|
||||||
|
'requirejs'
|
||||||
|
'execute'
|
||||||
|
'bunyan'
|
||||||
|
]
|
||||||
|
groups:
|
||||||
|
"Compile tasks": [
|
||||||
|
"compile:server"
|
||||||
|
"compile:client"
|
||||||
|
"compile:tests"
|
||||||
|
"compile"
|
||||||
|
"compile:unit_tests"
|
||||||
|
"compile:smoke_tests"
|
||||||
|
"compile:css"
|
||||||
|
"compile:minify"
|
||||||
|
"install"
|
||||||
|
]
|
||||||
|
"Test tasks": [
|
||||||
|
"test:unit"
|
||||||
|
]
|
||||||
|
"Run tasks": [
|
||||||
|
"run"
|
||||||
|
"default"
|
||||||
|
]
|
||||||
|
"Misc": [
|
||||||
|
"help"
|
||||||
|
]
|
||||||
|
|
||||||
|
grunt.registerTask 'wrap_sharejs', 'Wrap the compiled ShareJS code for AMD module loading', () ->
|
||||||
|
content = fs.readFileSync "public/js/libs/sharejs.js"
|
||||||
|
fs.writeFileSync "public/js/libs/sharejs.js", """
|
||||||
|
define(["ace/range"], function() {
|
||||||
|
#{content}
|
||||||
|
return window.sharejs;
|
||||||
|
});
|
||||||
|
"""
|
||||||
|
|
||||||
|
grunt.registerTask 'help', 'Display this help list', 'availabletasks'
|
||||||
|
|
||||||
|
grunt.registerTask 'compile:server', 'Compile the server side coffee script', ['clean:app', 'coffee:app', 'coffee:app_dir']
|
||||||
|
grunt.registerTask 'compile:client', 'Compile the client side coffee script', ['coffee:client', 'coffee:sharejs', 'wrap_sharejs']
|
||||||
|
grunt.registerTask 'compile:css', 'Compile the less files to css', ['less']
|
||||||
|
grunt.registerTask 'compile:minify', 'Concat and minify the client side js', ['requirejs']
|
||||||
|
grunt.registerTask 'compile:unit_tests', 'Compile the unit tests', ['clean:unit_tests', 'coffee:unit_tests']
|
||||||
|
grunt.registerTask 'compile:smoke_tests', 'Compile the smoke tests', ['coffee:smoke_tests']
|
||||||
|
grunt.registerTask 'compile:tests', 'Compile all the tests', ['compile:smoke_tests', 'compile:unit_tests']
|
||||||
|
grunt.registerTask 'compile', 'Compiles everything need to run web-sharelatex', ['compile:server', 'compile:client', 'compile:css']
|
||||||
|
|
||||||
|
grunt.registerTask 'install', "Compile everything when installing as an npm module", ['compile']
|
||||||
|
|
||||||
|
grunt.registerTask 'test:unit', 'Run the unit tests (use --grep=<regex> or --feature=<feature> for individual tests)', ['compile:server', 'compile:unit_tests', 'mochaTest:unit']
|
||||||
|
grunt.registerTask 'test:smoke', 'Run the smoke tests', ['compile:smoke_tests', 'mochaTest:smoke']
|
||||||
|
|
||||||
|
grunt.registerTask 'run', "Compile and run the web-sharelatex server", ['compile', 'bunyan', 'execute']
|
||||||
|
grunt.registerTask 'default', 'run'
|
||||||
|
|
98
services/web/TpdsWorker.coffee
Normal file
98
services/web/TpdsWorker.coffee
Normal file
|
@ -0,0 +1,98 @@
|
||||||
|
async = require('async')
|
||||||
|
request = require('request')
|
||||||
|
keys = require('./app/js/infrastructure/Keys')
|
||||||
|
settings = require('settings-sharelatex')
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
_ = require('underscore')
|
||||||
|
childProcess = require("child_process")
|
||||||
|
metrics = require("./app/js/infrastructure/Metrics")
|
||||||
|
|
||||||
|
fiveMinutes = 5 * 60 * 1000
|
||||||
|
|
||||||
|
|
||||||
|
processingFuncs =
|
||||||
|
|
||||||
|
sendDoc : (options, callback)->
|
||||||
|
if !options.docLines? || options.docLines.length == 0
|
||||||
|
logger.err options:options, "doc lines not added to options for processing"
|
||||||
|
return callback()
|
||||||
|
docLines = options.docLines.reduce (singleLine, line)-> "#{singleLine}\n#{line}"
|
||||||
|
post = request(options)
|
||||||
|
post.on 'error', (err)->
|
||||||
|
if err?
|
||||||
|
callback(err)
|
||||||
|
else
|
||||||
|
callback()
|
||||||
|
post.on 'end', callback
|
||||||
|
post.write(docLines, 'utf-8')
|
||||||
|
|
||||||
|
standardHttpRequest: (options, callback)->
|
||||||
|
request options, (err, reponse, body)->
|
||||||
|
if err?
|
||||||
|
callback(err)
|
||||||
|
else
|
||||||
|
callback()
|
||||||
|
|
||||||
|
pipeStreamFrom: (options, callback)->
|
||||||
|
if options.filePath == "/droppy/main.tex"
|
||||||
|
request options.streamOrigin, (err,res, body)->
|
||||||
|
logger.log options:options, body:body
|
||||||
|
origin = request(options.streamOrigin)
|
||||||
|
origin.on 'error', (err)->
|
||||||
|
logger.error err:err, options:options, "something went wrong in pipeStreamFrom origin"
|
||||||
|
if err?
|
||||||
|
callback(err)
|
||||||
|
else
|
||||||
|
callback()
|
||||||
|
dest = request(options)
|
||||||
|
origin.pipe(dest)
|
||||||
|
dest.on "error", (err)->
|
||||||
|
logger.error err:err, options:options, "something went wrong in pipeStreamFrom dest"
|
||||||
|
if err?
|
||||||
|
callback(err)
|
||||||
|
else
|
||||||
|
callback()
|
||||||
|
dest.on 'end', callback
|
||||||
|
|
||||||
|
|
||||||
|
workerRegistration = (groupKey, method, options, callback)->
|
||||||
|
callback = _.once callback
|
||||||
|
setTimeout callback, fiveMinutes
|
||||||
|
metrics.inc "tpds-worker-processing"
|
||||||
|
logger.log groupKey:groupKey, method:method, options:options, "processing http request from queue"
|
||||||
|
processingFuncs[method] options, (err)->
|
||||||
|
if err?
|
||||||
|
logger.err err:err, user_id:groupKey, method:method, options:options, "something went wrong processing tpdsUpdateSender update"
|
||||||
|
return callback("skip-after-retry")
|
||||||
|
callback()
|
||||||
|
|
||||||
|
|
||||||
|
setupWebToTpdsWorkers = (queueName)->
|
||||||
|
logger.log worker_count:worker_count, queueName:queueName, "fairy workers"
|
||||||
|
worker_count = 4
|
||||||
|
while worker_count-- > 0
|
||||||
|
workerQueueRef = require('fairy').connect(settings.redis.fairy).queue(queueName)
|
||||||
|
workerQueueRef.polling_interval = 100
|
||||||
|
workerQueueRef.regist workerRegistration
|
||||||
|
|
||||||
|
|
||||||
|
cleanupPreviousQueues = (queueName, callback)->
|
||||||
|
#cleanup queues then setup workers
|
||||||
|
fairy = require('fairy').connect(settings.redis.fairy)
|
||||||
|
queuePrefix = "FAIRY:QUEUED:#{queueName}:"
|
||||||
|
fairy.redis.keys "#{queuePrefix}*", (err, keys)->
|
||||||
|
logger.log "#{keys.length} fairy queues need cleanup"
|
||||||
|
queueNames = keys.map (key)->
|
||||||
|
key.replace queuePrefix, ""
|
||||||
|
cleanupJobs = queueNames.map (projectQueueName)->
|
||||||
|
return (cb)->
|
||||||
|
cleanup = childProcess.fork(__dirname + '/cleanup.js', [queueName, projectQueueName])
|
||||||
|
cleanup.on 'exit', cb
|
||||||
|
async.series cleanupJobs, callback
|
||||||
|
|
||||||
|
|
||||||
|
cleanupPreviousQueues keys.queue.web_to_tpds_http_requests, ->
|
||||||
|
setupWebToTpdsWorkers keys.queue.web_to_tpds_http_requests
|
||||||
|
|
||||||
|
cleanupPreviousQueues keys.queue.tpds_to_web_http_requests, ->
|
||||||
|
setupWebToTpdsWorkers keys.queue.tpds_to_web_http_requests
|
43
services/web/app.coffee
Normal file
43
services/web/app.coffee
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
Settings = require('settings-sharelatex')
|
||||||
|
logger = require 'logger-sharelatex'
|
||||||
|
logger.initialize("web-sharelatex")
|
||||||
|
logger.logger.serializers.user = require("./app/js/infrastructure/LoggerSerializers").user
|
||||||
|
logger.logger.serializers.project = require("./app/js/infrastructure/LoggerSerializers").project
|
||||||
|
Server = require("./app/js/infrastructure/Server")
|
||||||
|
BackgroundTasks = require("./app/js/infrastructure/BackgroundTasks")
|
||||||
|
Errors = require "./app/js/errors"
|
||||||
|
|
||||||
|
argv = require("optimist")
|
||||||
|
.options("user", {alias : "u", description : "Run the server with permissions of the specified user"})
|
||||||
|
.options("group", {alias : "g", description : "Run the server with permissions of the specified group"})
|
||||||
|
.usage("Usage: $0")
|
||||||
|
.argv
|
||||||
|
|
||||||
|
Server.app.use (error, req, res, next) ->
|
||||||
|
logger.error err: error
|
||||||
|
res.statusCode = error.status or 500
|
||||||
|
if res.statusCode == 500
|
||||||
|
res.end("Oops, something went wrong with your request, sorry. If this continues, please contact us at team@sharelatex.com")
|
||||||
|
else
|
||||||
|
res.end()
|
||||||
|
|
||||||
|
if Settings.catchErrors
|
||||||
|
# fairy cleans then exits on an uncaughtError, but we don't want
|
||||||
|
# to exit so it doesn't need to do this.
|
||||||
|
require "fairy"
|
||||||
|
process.removeAllListeners "uncaughtException"
|
||||||
|
process.on "uncaughtException", (error) ->
|
||||||
|
logger.error err: error, "uncaughtException"
|
||||||
|
|
||||||
|
BackgroundTasks.run()
|
||||||
|
|
||||||
|
port = Settings.port or Settings.internal?.web?.port or 3000
|
||||||
|
Server.server.listen port, ->
|
||||||
|
logger.info("web-sharelatex listening on port #{port}")
|
||||||
|
logger.info("#{require('http').globalAgent.maxSockets} sockets enabled")
|
||||||
|
if argv.user
|
||||||
|
process.setuid argv.user
|
||||||
|
logger.info "Running as user: #{argv.user}"
|
||||||
|
if argv.group
|
||||||
|
process.setgid argv.group
|
||||||
|
logger.info "Running as group: #{argv.group}"
|
|
@ -0,0 +1,69 @@
|
||||||
|
Settings = require 'settings-sharelatex'
|
||||||
|
if Settings.analytics?.mixpanel?
|
||||||
|
Mixpanel = require("mixpanel").init(Settings.analytics.mixpanel.token)
|
||||||
|
else
|
||||||
|
Mixpanel = null
|
||||||
|
logger = require "logger-sharelatex"
|
||||||
|
async = require 'async'
|
||||||
|
|
||||||
|
module.exports = AnalyticsManager =
|
||||||
|
|
||||||
|
track: (user, event, properties, callback = (error)->) ->
|
||||||
|
properties.distinct_id = @getDistinctId user
|
||||||
|
properties.mp_name_tag = user.email if user.email?
|
||||||
|
logger.log user_id: properties.distinct_id, event: event, properties: properties, "tracking event"
|
||||||
|
Mixpanel?.track event, properties
|
||||||
|
callback()
|
||||||
|
|
||||||
|
set: (user, properties, callback = (error)->) ->
|
||||||
|
properties["$first_name"] = user.first_name if user.first_name?
|
||||||
|
properties["$last_name"] = user.last_name if user.last_name?
|
||||||
|
properties["$email"] = user.email if user.email?
|
||||||
|
Mixpanel?.people.set @getDistinctId(user), properties
|
||||||
|
callback()
|
||||||
|
|
||||||
|
increment: (user, property, amount, callback = (error)->) ->
|
||||||
|
Mixpanel?.people.increment @getDistinctId(user), property, amount
|
||||||
|
callback()
|
||||||
|
|
||||||
|
# TODO: Remove this one month after the ability to start free trials was removed
|
||||||
|
trackFreeTrialExpired: (user, callback = (error)->) ->
|
||||||
|
async.series [
|
||||||
|
(callback) => @track user, "free trial expired", {}, callback
|
||||||
|
(callback) => @set user, { free_trial_expired_at: new Date() }, callback
|
||||||
|
], callback
|
||||||
|
|
||||||
|
trackSubscriptionStarted: (user, plan_code, callback = (error)->) ->
|
||||||
|
async.series [
|
||||||
|
(callback) => @track user, "subscribed", plan_code: plan_code, callback
|
||||||
|
(callback) => @set user, { plan_code: plan_code, subscribed_at: new Date() }, callback
|
||||||
|
], callback
|
||||||
|
|
||||||
|
trackSubscriptionCancelled: (user, callback = (error)->) ->
|
||||||
|
async.series [
|
||||||
|
(callback) => @track user, "cancelled", callback
|
||||||
|
(callback) => @set user, { cancelled_at: new Date() }, callback
|
||||||
|
], callback
|
||||||
|
|
||||||
|
trackLogIn: (user, callback = (error)->) ->
|
||||||
|
async.series [
|
||||||
|
(callback) => @track user, "logged in", {}, callback
|
||||||
|
(callback) => @set user, { last_logged_id: new Date() }, callback
|
||||||
|
], callback
|
||||||
|
|
||||||
|
trackOpenEditor: (user, project, callback = (error)->) ->
|
||||||
|
async.series [
|
||||||
|
(callback) => @set user, { last_opened_editor: new Date() }, callback
|
||||||
|
(callback) => @increment user, "editor_opens", 1, callback
|
||||||
|
], callback
|
||||||
|
|
||||||
|
trackReferral: (user, referal_source, referal_medium, callback = (error) ->) ->
|
||||||
|
async.series [
|
||||||
|
(callback) =>
|
||||||
|
@track user, "Referred another user", { source: referal_source, medium: referal_medium }, callback
|
||||||
|
(callback) =>
|
||||||
|
@track user, "Referred another user via #{referal_source}", { medium: referal_medium }, callback
|
||||||
|
], callback
|
||||||
|
|
||||||
|
getDistinctId: (user) -> user.id || user._id || user
|
||||||
|
|
|
@ -0,0 +1,113 @@
|
||||||
|
AuthenticationManager = require ("./AuthenticationManager")
|
||||||
|
LoginRateLimiter = require("../Security/LoginRateLimiter")
|
||||||
|
UserGetter = require "../User/UserGetter"
|
||||||
|
UserUpdater = require "../User/UserUpdater"
|
||||||
|
Metrics = require('../../infrastructure/Metrics')
|
||||||
|
logger = require("logger-sharelatex")
|
||||||
|
querystring = require('querystring')
|
||||||
|
Url = require("url")
|
||||||
|
|
||||||
|
module.exports = AuthenticationController =
|
||||||
|
login: (req, res, next = (error) ->) ->
|
||||||
|
email = req.body?.email?.toLowerCase()
|
||||||
|
password = req.body?.password
|
||||||
|
redir = Url.parse(req.body?.redir or "/project").path
|
||||||
|
LoginRateLimiter.processLoginRequest email, (err, isAllowed)->
|
||||||
|
if !isAllowed
|
||||||
|
logger.log email:email, "too many login requests"
|
||||||
|
res.statusCode = 429
|
||||||
|
return res.send
|
||||||
|
message:
|
||||||
|
text: 'This account has had too many login requests, <br> please wait 2 minutes before trying to log in again',
|
||||||
|
type: 'error'
|
||||||
|
AuthenticationManager.authenticate email: email, password, (error, user) ->
|
||||||
|
return next(error) if error?
|
||||||
|
if user?
|
||||||
|
LoginRateLimiter.recordSuccessfulLogin email
|
||||||
|
AuthenticationController._recordSuccessfulLogin user._id
|
||||||
|
AuthenticationController._establishUserSession req, user, (error) ->
|
||||||
|
return next(error) if error?
|
||||||
|
logger.log email: email, user_id: user._id.toString(), "successful log in"
|
||||||
|
res.send redir: redir
|
||||||
|
else
|
||||||
|
AuthenticationController._recordFailedLogin()
|
||||||
|
logger.log email: email, "failed log in"
|
||||||
|
res.send message:
|
||||||
|
text: 'Your email or password were incorrect. Please try again',
|
||||||
|
type: 'error'
|
||||||
|
|
||||||
|
getAuthToken: (req, res, next = (error) ->) ->
|
||||||
|
AuthenticationController.getLoggedInUserId req, (error, user_id) ->
|
||||||
|
return next(error) if error?
|
||||||
|
AuthenticationManager.getAuthToken user_id, (error, auth_token) ->
|
||||||
|
return next(error) if error?
|
||||||
|
res.send(auth_token)
|
||||||
|
|
||||||
|
getLoggedInUserId: (req, callback = (error, user_id) ->) ->
|
||||||
|
callback null, req.session.user._id.toString()
|
||||||
|
|
||||||
|
getLoggedInUser: (req, options = {allow_auth_token: false}, callback = (error, user) ->) ->
|
||||||
|
if req.session?.user?._id?
|
||||||
|
query = req.session.user._id
|
||||||
|
else if req.query?.auth_token? and options.allow_auth_token
|
||||||
|
query = { auth_token: req.query.auth_token }
|
||||||
|
else
|
||||||
|
return callback null, null
|
||||||
|
|
||||||
|
UserGetter.getUser query, callback
|
||||||
|
|
||||||
|
requireLogin: (options = {allow_auth_token: false, load_from_db: false}) ->
|
||||||
|
doRequest = (req, res, next = (error) ->) ->
|
||||||
|
load_from_db = options.load_from_db
|
||||||
|
if req.query?.auth_token? and options.allow_auth_token
|
||||||
|
load_from_db = true
|
||||||
|
if load_from_db
|
||||||
|
AuthenticationController.getLoggedInUser req, { allow_auth_token: options.allow_auth_token }, (error, user) ->
|
||||||
|
return next(error) if error?
|
||||||
|
return AuthenticationController._redirectToRegisterPage(req, res) if !user?
|
||||||
|
req.user = user
|
||||||
|
return next()
|
||||||
|
else
|
||||||
|
if !req.session.user?
|
||||||
|
return AuthenticationController._redirectToRegisterPage(req, res)
|
||||||
|
else
|
||||||
|
req.user = req.session.user
|
||||||
|
return next()
|
||||||
|
|
||||||
|
return doRequest
|
||||||
|
|
||||||
|
_redirectToRegisterPage: (req, res) ->
|
||||||
|
logger.log url: req.url, "user not logged in so redirecting to register page"
|
||||||
|
req.query.redir = req.path
|
||||||
|
url = "/register?#{querystring.stringify(req.query)}"
|
||||||
|
res.redirect url
|
||||||
|
Metrics.inc "security.login-redirect"
|
||||||
|
|
||||||
|
_recordSuccessfulLogin: (user_id, callback = (error) ->) ->
|
||||||
|
UserUpdater.updateUser user_id.toString(), {
|
||||||
|
$set: { "lastLoggedIn": new Date() },
|
||||||
|
$inc: { "loginCount": 1 }
|
||||||
|
}, (error) ->
|
||||||
|
callback(error) if error?
|
||||||
|
Metrics.inc "user.login.success"
|
||||||
|
callback()
|
||||||
|
|
||||||
|
_recordFailedLogin: (callback = (error) ->) ->
|
||||||
|
Metrics.inc "user.login.failed"
|
||||||
|
callback()
|
||||||
|
|
||||||
|
_establishUserSession: (req, user, callback = (error) ->) ->
|
||||||
|
lightUser =
|
||||||
|
_id: user._id
|
||||||
|
first_name: user.first_name
|
||||||
|
last_name: user.last_name
|
||||||
|
isAdmin: user.isAdmin
|
||||||
|
email: user.email
|
||||||
|
referal_id: user.referal_id
|
||||||
|
req.session.user = lightUser
|
||||||
|
req.session.justLoggedIn = true
|
||||||
|
req.session.save callback
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,54 @@
|
||||||
|
Settings = require 'settings-sharelatex'
|
||||||
|
User = require("../../models/User").User
|
||||||
|
{db, ObjectId} = require("../../infrastructure/mongojs")
|
||||||
|
crypto = require 'crypto'
|
||||||
|
bcrypt = require 'bcrypt'
|
||||||
|
|
||||||
|
module.exports = AuthenticationManager =
|
||||||
|
authenticate: (query, password, callback = (error, user) ->) ->
|
||||||
|
# Using Mongoose for legacy reasons here. The returned User instance
|
||||||
|
# gets serialized into the session and there may be subtle differences
|
||||||
|
# between the user returned by Mongoose vs mongojs (such as default values)
|
||||||
|
User.findOne query, (error, user) =>
|
||||||
|
return callback(error) if error?
|
||||||
|
if user?
|
||||||
|
if user.hashedPassword?
|
||||||
|
bcrypt.compare password, user.hashedPassword, (error, match) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
if match
|
||||||
|
callback null, user
|
||||||
|
else
|
||||||
|
callback null, null
|
||||||
|
else
|
||||||
|
callback null, null
|
||||||
|
else
|
||||||
|
callback null, null
|
||||||
|
|
||||||
|
setUserPassword: (user_id, password, callback = (error) ->) ->
|
||||||
|
bcrypt.genSalt 7, (error, salt) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
bcrypt.hash password, salt, (error, hash) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
db.users.update({
|
||||||
|
_id: ObjectId(user_id.toString())
|
||||||
|
}, {
|
||||||
|
$set: hashedPassword: hash
|
||||||
|
$unset: password: true
|
||||||
|
}, callback)
|
||||||
|
|
||||||
|
getAuthToken: (user_id, callback = (error, auth_token) ->) ->
|
||||||
|
db.users.findOne { _id: ObjectId(user_id.toString()) }, { auth_token : true }, (error, user) =>
|
||||||
|
return callback(error) if error?
|
||||||
|
return callback(new Error("user could not be found: #{user_id}")) if !user?
|
||||||
|
if user.auth_token?
|
||||||
|
callback null, user.auth_token
|
||||||
|
else
|
||||||
|
@_createSecureToken (error, auth_token) ->
|
||||||
|
db.users.update { _id: ObjectId(user_id.toString()) }, { $set : auth_token: auth_token }, (error) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
callback null, auth_token
|
||||||
|
|
||||||
|
_createSecureToken: (callback = (error, token) ->) ->
|
||||||
|
crypto.randomBytes 48, (error, buffer) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
callback null, buffer.toString("hex")
|
|
@ -0,0 +1,49 @@
|
||||||
|
ProjectGetter = require "../Project/ProjectGetter"
|
||||||
|
ProjectHandler = require "../../handlers/ProjectHandler"
|
||||||
|
|
||||||
|
|
||||||
|
module.exports = CollaboratorsController =
|
||||||
|
getCollaborators: (req, res, next = (error) ->) ->
|
||||||
|
req.session.destroy()
|
||||||
|
ProjectGetter.getProject req.params.Project_id, { owner_ref: true, collaberator_refs: true, readOnly_refs: true}, (error, project) ->
|
||||||
|
return next(error) if error?
|
||||||
|
ProjectGetter.populateProjectWithUsers project, (error, project) ->
|
||||||
|
return next(error) if error?
|
||||||
|
CollaboratorsController._formatCollaborators project, (error, collaborators) ->
|
||||||
|
return next(error) if error?
|
||||||
|
res.send(JSON.stringify(collaborators))
|
||||||
|
|
||||||
|
removeSelfFromProject: (req, res, next = (error) ->) ->
|
||||||
|
user_id = req.session?.user?._id
|
||||||
|
if !user_id?
|
||||||
|
return next(new Error("User should be logged in"))
|
||||||
|
ProjectHandler::removeUserFromProject req.params.project_id, user_id, (error) ->
|
||||||
|
return next(error) if error?
|
||||||
|
res.redirect "/project"
|
||||||
|
|
||||||
|
_formatCollaborators: (project, callback = (error, collaborators) ->) ->
|
||||||
|
collaborators = []
|
||||||
|
|
||||||
|
pushCollaborator = (user, permissions, owner) ->
|
||||||
|
collaborators.push {
|
||||||
|
id: user._id.toString()
|
||||||
|
first_name: user.first_name
|
||||||
|
last_name: user.last_name
|
||||||
|
email: user.email
|
||||||
|
permissions: permissions
|
||||||
|
owner: owner
|
||||||
|
}
|
||||||
|
|
||||||
|
if project.owner_ref?
|
||||||
|
pushCollaborator(project.owner_ref, ["read", "write", "admin"], true)
|
||||||
|
|
||||||
|
if project.collaberator_refs? and project.collaberator_refs.length > 0
|
||||||
|
for user in project.collaberator_refs
|
||||||
|
pushCollaborator(user, ["read", "write"], false)
|
||||||
|
|
||||||
|
if project.readOnly_refs? and project.readOnly_refs.length > 0
|
||||||
|
for user in project.readOnly_refs
|
||||||
|
pushCollaborator(user, ["read"], false)
|
||||||
|
|
||||||
|
callback null, collaborators
|
||||||
|
|
97
services/web/app/coffee/Features/Compile/ClsiManager.coffee
Normal file
97
services/web/app/coffee/Features/Compile/ClsiManager.coffee
Normal file
|
@ -0,0 +1,97 @@
|
||||||
|
Path = require "path"
|
||||||
|
async = require "async"
|
||||||
|
Settings = require "settings-sharelatex"
|
||||||
|
request = require('request')
|
||||||
|
Project = require("../../models/Project").Project
|
||||||
|
logger = require "logger-sharelatex"
|
||||||
|
url = require("url")
|
||||||
|
|
||||||
|
module.exports = ClsiManager =
|
||||||
|
sendRequest: (project_id, callback = (error, success) ->) ->
|
||||||
|
Project.findById project_id, (error, project) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
ClsiManager._buildRequest project, (error, req) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
logger.log project_id: project_id, "sending compile to CLSI"
|
||||||
|
ClsiManager._postToClsi project_id, req, (error, response) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
logger.log project_id: project_id, response: response, "received compile response from CLSI"
|
||||||
|
callback(
|
||||||
|
null
|
||||||
|
(response?.compile?.status == "success")
|
||||||
|
ClsiManager._parseOutputFiles(project_id, response?.compile?.outputFiles)
|
||||||
|
)
|
||||||
|
|
||||||
|
getLogLines: (project_id, callback = (error, lines) ->) ->
|
||||||
|
request "#{Settings.apis.clsi.url}/project/#{project_id}/output/output.log", (error, response, body) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
callback null, body?.split("\n") or []
|
||||||
|
|
||||||
|
_postToClsi: (project_id, req, callback = (error, response) ->) ->
|
||||||
|
request.post {
|
||||||
|
url: "#{Settings.apis.clsi.url}/project/#{project_id}/compile"
|
||||||
|
json: req
|
||||||
|
jar: false
|
||||||
|
}, (error, response, body) ->
|
||||||
|
callback error, body
|
||||||
|
|
||||||
|
_parseOutputFiles: (project_id, rawOutputFiles = []) ->
|
||||||
|
outputFiles = []
|
||||||
|
for file in rawOutputFiles
|
||||||
|
outputFiles.push
|
||||||
|
path: url.parse(file.url).path.replace("/project/#{project_id}/output/", "")
|
||||||
|
type: file.type
|
||||||
|
return outputFiles
|
||||||
|
|
||||||
|
VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"]
|
||||||
|
_buildRequest: (project, callback = (error, request) ->) ->
|
||||||
|
if project.compiler not in @VALID_COMPILERS
|
||||||
|
project.compiler = "pdflatex"
|
||||||
|
|
||||||
|
resources = []
|
||||||
|
rootResourcePath = null
|
||||||
|
|
||||||
|
addDoc = (basePath, doc, callback = (error) ->) ->
|
||||||
|
path = Path.join(basePath, doc.name)
|
||||||
|
resources.push
|
||||||
|
path: path
|
||||||
|
content: doc.lines.join("\n")
|
||||||
|
if doc._id.toString() == project.rootDoc_id.toString()
|
||||||
|
rootResourcePath = path
|
||||||
|
callback()
|
||||||
|
|
||||||
|
addFile = (basePath, file, callback = (error) ->) ->
|
||||||
|
resources.push
|
||||||
|
path: Path.join(basePath, file.name)
|
||||||
|
url: "#{Settings.apis.filestore.url}/project/#{project._id}/file/#{file._id}"
|
||||||
|
modified: file.created?.getTime()
|
||||||
|
callback()
|
||||||
|
|
||||||
|
addFolder = (basePath, folder, callback = (error) ->) ->
|
||||||
|
jobs = []
|
||||||
|
for doc in folder.docs
|
||||||
|
do (doc) ->
|
||||||
|
jobs.push (callback) -> addDoc(basePath, doc, callback)
|
||||||
|
|
||||||
|
for file in folder.fileRefs
|
||||||
|
do (file) ->
|
||||||
|
jobs.push (callback) -> addFile(basePath, file, callback)
|
||||||
|
|
||||||
|
for childFolder in folder.folders
|
||||||
|
do (childFolder) ->
|
||||||
|
jobs.push (callback) -> addFolder(Path.join(basePath, childFolder.name), childFolder, callback)
|
||||||
|
|
||||||
|
async.series jobs, callback
|
||||||
|
|
||||||
|
addFolder "", project.rootFolder[0], (error) ->
|
||||||
|
if !rootResourcePath?
|
||||||
|
callback new Error("no root document exists")
|
||||||
|
else
|
||||||
|
callback null, {
|
||||||
|
compile:
|
||||||
|
options:
|
||||||
|
compiler: project.compiler
|
||||||
|
rootResourcePath: rootResourcePath
|
||||||
|
resources: resources
|
||||||
|
}
|
||||||
|
|
|
@ -0,0 +1,45 @@
|
||||||
|
Metrics = require "../../infrastructure/Metrics"
|
||||||
|
Project = require("../../models/Project").Project
|
||||||
|
CompileManager = require("./CompileManager")
|
||||||
|
logger = require "logger-sharelatex"
|
||||||
|
request = require "request"
|
||||||
|
Settings = require "settings-sharelatex"
|
||||||
|
|
||||||
|
module.exports = CompileController =
|
||||||
|
downloadPdf: (req, res, next = (error) ->)->
|
||||||
|
Metrics.inc "pdf-downloads"
|
||||||
|
project_id = req.params.Project_id
|
||||||
|
Project.findById project_id, {name: 1}, (err, project)->
|
||||||
|
res.contentType("application/pdf")
|
||||||
|
if !!req.query.popupDownload
|
||||||
|
logger.log project_id: project_id, "download pdf as popup download"
|
||||||
|
res.header('Content-Disposition', "attachment; filename=#{project.getSafeProjectName()}.pdf")
|
||||||
|
else
|
||||||
|
logger.log project_id: project_id, "download pdf to embed in browser"
|
||||||
|
res.header('Content-Disposition', "filename=#{project.getSafeProjectName()}.pdf")
|
||||||
|
CompileController.proxyToClsi("/project/#{project_id}/output/output.pdf", req, res, next)
|
||||||
|
|
||||||
|
|
||||||
|
compileAndDownloadPdf: (req, res, next)->
|
||||||
|
project_id = req.params.project_id
|
||||||
|
CompileManager.compile project_id, null, {}, (err)->
|
||||||
|
if err?
|
||||||
|
logger.err err:err, project_id:project_id, "something went wrong compile and downloading pdf"
|
||||||
|
res.send 500
|
||||||
|
url = "/project/#{project_id}/output/output.pdf"
|
||||||
|
CompileController.proxyToClsi url, req, res, next
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
getFileFromClsi: (req, res, next = (error) ->) ->
|
||||||
|
CompileController.proxyToClsi("/project/#{req.params.Project_id}/output/#{req.params.file}", req, res, next)
|
||||||
|
|
||||||
|
proxyToClsi: (url, req, res, next = (error) ->) ->
|
||||||
|
logger.log url: url, "proxying to CLSI"
|
||||||
|
url = "#{Settings.apis.clsi.url}#{url}"
|
||||||
|
oneMinute = 60 * 1000
|
||||||
|
proxy = request.get(url: url, timeout: oneMinute)
|
||||||
|
proxy.pipe(res)
|
||||||
|
proxy.on "error", (error) ->
|
||||||
|
logger.error err: error, url: url, "CLSI proxy error"
|
||||||
|
|
|
@ -0,0 +1,82 @@
|
||||||
|
Settings = require('settings-sharelatex')
|
||||||
|
redis = require('redis')
|
||||||
|
rclient = redis.createClient(Settings.redis.web.port, Settings.redis.web.host)
|
||||||
|
rclient.auth(Settings.redis.web.password)
|
||||||
|
DocumentUpdaterHandler = require "../DocumentUpdater/DocumentUpdaterHandler"
|
||||||
|
Project = require("../../models/Project").Project
|
||||||
|
ProjectRootDocManager = require "../Project/ProjectRootDocManager"
|
||||||
|
ClsiManager = require "./ClsiManager"
|
||||||
|
Metrics = require('../../infrastructure/Metrics')
|
||||||
|
logger = require("logger-sharelatex")
|
||||||
|
RateLimiter = require("ratelimiter")
|
||||||
|
|
||||||
|
|
||||||
|
module.exports = CompileManager =
|
||||||
|
compile: (project_id, user_id, opt = {}, _callback = (error) ->) ->
|
||||||
|
timer = new Metrics.Timer("editor.compile")
|
||||||
|
callback = (args...) ->
|
||||||
|
timer.done()
|
||||||
|
_callback(args...)
|
||||||
|
|
||||||
|
@_checkIfAutoCompileLimitHasBeenHit opt.isAutoCompile, (err, canCompile)->
|
||||||
|
if !canCompile
|
||||||
|
err = {rateLimitHit:true}
|
||||||
|
return callback(err)
|
||||||
|
logger.log project_id: project_id, user_id: user_id, "compiling project"
|
||||||
|
CompileManager._checkIfRecentlyCompiled project_id, user_id, (error, recentlyCompiled) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
if recentlyCompiled
|
||||||
|
return callback new Error("project was recently compiled so not continuing")
|
||||||
|
|
||||||
|
CompileManager._ensureRootDocumentIsSet project_id, (error) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
DocumentUpdaterHandler.flushProjectToMongo project_id, (error) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
ClsiManager.sendRequest project_id, (error, success, outputFiles) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
logger.log files: outputFiles, "output files"
|
||||||
|
callback(null, success, outputFiles)
|
||||||
|
|
||||||
|
getLogLines: (project_id, callback)->
|
||||||
|
Metrics.inc "editor.raw-logs"
|
||||||
|
ClsiManager.getLogLines project_id, (error, logLines)->
|
||||||
|
return callback(error) if error?
|
||||||
|
callback null, logLines
|
||||||
|
|
||||||
|
COMPILE_DELAY: 1 # seconds
|
||||||
|
_checkIfRecentlyCompiled: (project_id, user_id, callback = (error, recentlyCompiled) ->) ->
|
||||||
|
key = "compile:#{project_id}:#{user_id}"
|
||||||
|
rclient.set key, true, "EX", @COMPILE_DELAY, "NX", (error, ok) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
if ok == "OK"
|
||||||
|
return callback null, false
|
||||||
|
else
|
||||||
|
return callback null, true
|
||||||
|
|
||||||
|
_checkIfAutoCompileLimitHasBeenHit: (isAutoCompile, callback = (err, canCompile)->)->
|
||||||
|
if !isAutoCompile
|
||||||
|
return callback(null, true)
|
||||||
|
key = "auto_compile_rate_limit"
|
||||||
|
ten_seconds = (10 * 1000)
|
||||||
|
limit = new RateLimiter(db:rclient, id:key, max:7, duration:ten_seconds)
|
||||||
|
limit.get (err, limit)->
|
||||||
|
Metrics.inc("compile.autocompile.rateLimitCheck")
|
||||||
|
if limit.remaining > 0 and !err?
|
||||||
|
canCompile = true
|
||||||
|
else
|
||||||
|
canCompile = false
|
||||||
|
Metrics.inc("compile.autocompile.rateLimitHit")
|
||||||
|
logger.log canCompile:canCompile, limit:limit, "checking if auto compile limit has been hit"
|
||||||
|
callback err, canCompile
|
||||||
|
|
||||||
|
_ensureRootDocumentIsSet: (project_id, callback = (error) ->) ->
|
||||||
|
Project.findById project_id, 'rootDoc_id', (error, project)=>
|
||||||
|
return callback(error) if error?
|
||||||
|
if !project?
|
||||||
|
return callback new Error("project not found")
|
||||||
|
|
||||||
|
if project.rootDoc_id?
|
||||||
|
callback()
|
||||||
|
else
|
||||||
|
ProjectRootDocManager.setRootDocAutomatically project_id, callback
|
||||||
|
|
|
@ -0,0 +1,141 @@
|
||||||
|
request = require 'request'
|
||||||
|
request = request.defaults()
|
||||||
|
async = require 'async'
|
||||||
|
settings = require 'settings-sharelatex'
|
||||||
|
_ = require 'underscore'
|
||||||
|
async = require 'async'
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
metrics = require('../../infrastructure/Metrics')
|
||||||
|
slReqIdHelper = require('soa-req-id')
|
||||||
|
redis = require('redis')
|
||||||
|
rclient = redis.createClient(settings.redis.web.port, settings.redis.web.host)
|
||||||
|
rclient.auth(settings.redis.web.password)
|
||||||
|
Project = require("../../models/Project").Project
|
||||||
|
ProjectLocator = require('../../Features/Project/ProjectLocator')
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
|
||||||
|
queueChange : (project_id, doc_id, change, sl_req_id, callback = ()->)->
|
||||||
|
{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
|
||||||
|
jsonChange = JSON.stringify change
|
||||||
|
rclient.rpush keys.pendingUpdates(doc_id:doc_id), jsonChange, (error)->
|
||||||
|
return callback(error) if error?
|
||||||
|
doc_key = keys.combineProjectIdAndDocId(project_id, doc_id)
|
||||||
|
rclient.sadd keys.docsWithPendingUpdates, doc_key, (error) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
rclient.publish "pending-updates", doc_key, callback
|
||||||
|
|
||||||
|
flushProjectToMongo: (project_id, sl_req_id, callback = (error) ->)->
|
||||||
|
{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
|
||||||
|
logger.log project_id:project_id, sl_req_id:sl_req_id, "flushing project from document updater"
|
||||||
|
timer = new metrics.Timer("flushing.mongo.project")
|
||||||
|
url = "#{settings.apis.documentupdater.url}/project/#{project_id}/flush"
|
||||||
|
request.post url, (error, res, body)->
|
||||||
|
if error?
|
||||||
|
logger.error err: error, project_id: project_id, sl_req_id: sl_req_id, "error flushing project from document updater"
|
||||||
|
return callback(error)
|
||||||
|
else if res.statusCode >= 200 and res.statusCode < 300
|
||||||
|
logger.log project_id: project_id, sl_req_id: sl_req_id, "flushed project from document updater"
|
||||||
|
return callback(null)
|
||||||
|
else
|
||||||
|
error = new Error("document updater returned a failure status code: #{res.statusCode}")
|
||||||
|
logger.error err: error, project_id: project_id, sl_req_id: sl_req_id, "document updater returned failure status code: #{res.statusCode}"
|
||||||
|
return callback(error)
|
||||||
|
|
||||||
|
flushProjectToMongoAndDelete: (project_id, sl_req_id, callback = ()->) ->
|
||||||
|
{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
|
||||||
|
logger.log project_id:project_id, sl_req_id:sl_req_id, "deleting project from document updater"
|
||||||
|
timer = new metrics.Timer("delete.mongo.project")
|
||||||
|
url = "#{settings.apis.documentupdater.url}/project/#{project_id}"
|
||||||
|
request.del url, (error, res, body)->
|
||||||
|
if error?
|
||||||
|
logger.error err: error, project_id: project_id, sl_req_id: sl_req_id, "error deleting project from document updater"
|
||||||
|
return callback(error)
|
||||||
|
else if res.statusCode >= 200 and res.statusCode < 300
|
||||||
|
logger.log project_id: project_id, sl_req_id: sl_req_id, "deleted project from document updater"
|
||||||
|
return callback(null)
|
||||||
|
else
|
||||||
|
error = new Error("document updater returned a failure status code: #{res.statusCode}")
|
||||||
|
logger.error err: error, project_id: project_id, sl_req_id: sl_req_id, "document updater returned failure status code: #{res.statusCode}"
|
||||||
|
return callback(error)
|
||||||
|
|
||||||
|
deleteDoc : (project_id, doc_id, sl_req_id, callback = ()->)->
|
||||||
|
{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
|
||||||
|
logger.log project_id:project_id, doc_id: doc_id, sl_req_id:sl_req_id, "deleting doc from document updater"
|
||||||
|
timer = new metrics.Timer("delete.mongo.doc")
|
||||||
|
url = "#{settings.apis.documentupdater.url}/project/#{project_id}/doc/#{doc_id}"
|
||||||
|
request.del url, (error, res, body)->
|
||||||
|
if error?
|
||||||
|
logger.error err: error, project_id: project_id, doc_id: doc_id, sl_req_id: sl_req_id, "error deleting doc from document updater"
|
||||||
|
return callback(error)
|
||||||
|
else if res.statusCode >= 200 and res.statusCode < 300
|
||||||
|
logger.log project_id: project_id, doc_id: doc_id, sl_req_id: sl_req_id, "deleted doc from document updater"
|
||||||
|
return callback(null)
|
||||||
|
else
|
||||||
|
error = new Error("document updater returned a failure status code: #{res.statusCode}")
|
||||||
|
logger.error err: error, project_id: project_id, doc_id: doc_id, sl_req_id: sl_req_id, "document updater returned failure status code: #{res.statusCode}"
|
||||||
|
return callback(error)
|
||||||
|
|
||||||
|
getDocument: (project_id, doc_id, fromVersion, sl_req_id, callback = (error, exists, doclines, version) ->) ->
|
||||||
|
{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
|
||||||
|
timer = new metrics.Timer("get-document")
|
||||||
|
url = "#{settings.apis.documentupdater.url}/project/#{project_id}/doc/#{doc_id}?fromVersion=#{fromVersion}"
|
||||||
|
logger.log project_id:project_id, doc_id: doc_id, sl_req_id:sl_req_id, "getting doc from document updater"
|
||||||
|
request.get url, (error, res, body)->
|
||||||
|
timer.done()
|
||||||
|
if error?
|
||||||
|
logger.error err:error, url:url, project_id:project_id, doc_id:doc_id, "error getting doc from doc updater"
|
||||||
|
return callback(error)
|
||||||
|
if res.statusCode >= 200 and res.statusCode < 300
|
||||||
|
logger.log project_id:project_id, doc_id:doc_id, "got doc from document document updater"
|
||||||
|
try
|
||||||
|
body = JSON.parse(body)
|
||||||
|
catch error
|
||||||
|
return callback(error)
|
||||||
|
callback null, body.lines, body.version, body.ops
|
||||||
|
else
|
||||||
|
logger.error project_id:project_id, doc_id:doc_id, url: url, "doc updater returned a non-success status code: #{res.statusCode}"
|
||||||
|
callback new Error("doc updater returned a non-success status code: #{res.statusCode}")
|
||||||
|
|
||||||
|
setDocument : (project_id, doc_id, docLines, sl_req_id, callback = (error) ->)->
|
||||||
|
{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
|
||||||
|
timer = new metrics.Timer("set-document")
|
||||||
|
url = "#{settings.apis.documentupdater.url}/project/#{project_id}/doc/#{doc_id}"
|
||||||
|
body =
|
||||||
|
url: url
|
||||||
|
json:
|
||||||
|
lines: docLines
|
||||||
|
logger.log project_id:project_id, doc_id: doc_id, sl_req_id:sl_req_id, "setting doc in document updater"
|
||||||
|
request.post body, (error, res, body)->
|
||||||
|
timer.done()
|
||||||
|
if error?
|
||||||
|
logger.error err:error, url:url, project_id:project_id, doc_id:doc_id, "error setting doc in doc updater"
|
||||||
|
return callback(error)
|
||||||
|
if res.statusCode >= 200 and res.statusCode < 300
|
||||||
|
logger.log project_id: project_id, doc_id: doc_id, sl_req_id: sl_req_id, "set doc in document updater"
|
||||||
|
return callback(null)
|
||||||
|
else
|
||||||
|
logger.error project_id:project_id, doc_id:doc_id, url: url, "doc updater returned a non-success status code: #{res.statusCode}"
|
||||||
|
callback new Error("doc updater returned a non-success status code: #{res.statusCode}")
|
||||||
|
|
||||||
|
getNumberOfDocsInMemory : (callback)->
|
||||||
|
request.get "#{settings.apis.documentupdater.url}/total", (err, req, body)->
|
||||||
|
try
|
||||||
|
body = JSON.parse body
|
||||||
|
catch err
|
||||||
|
logger.err err:err, "error parsing response from doc updater about the total number of docs"
|
||||||
|
callback(err, body?.total)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
PENDINGUPDATESKEY = "PendingUpdates"
|
||||||
|
DOCLINESKEY = "doclines"
|
||||||
|
DOCIDSWITHPENDINGUPDATES = "DocsWithPendingUpdates"
|
||||||
|
|
||||||
|
keys =
|
||||||
|
pendingUpdates : (op) -> "#{PENDINGUPDATESKEY}:#{op.doc_id}"
|
||||||
|
docsWithPendingUpdates: DOCIDSWITHPENDINGUPDATES
|
||||||
|
docLines : (op) -> "#{DOCLINESKEY}:#{op.doc_id}"
|
||||||
|
combineProjectIdAndDocId: (project_id, doc_id) -> "#{project_id}:#{doc_id}"
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,36 @@
|
||||||
|
ProjectLocator = require "../Project/ProjectLocator"
|
||||||
|
ProjectEntityHandler = require "../Project/ProjectEntityHandler"
|
||||||
|
Errors = require "../../errors"
|
||||||
|
logger = require("logger-sharelatex")
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
|
||||||
|
getDocument: (req, res, next = (error) ->) ->
|
||||||
|
project_id = req.params.Project_id
|
||||||
|
doc_id = req.params.doc_id
|
||||||
|
logger.log doc_id:doc_id, project_id:project_id, "receiving get document request from api (docupdater)"
|
||||||
|
ProjectLocator.findElement project_id: project_id, element_id: doc_id, type: "doc", (error, doc) ->
|
||||||
|
if error?
|
||||||
|
logger.err err:error, doc_id:doc_id, project_id:project_id, "error finding element for getDocument"
|
||||||
|
return next(error)
|
||||||
|
res.type "json"
|
||||||
|
res.send JSON.stringify {
|
||||||
|
lines: doc.lines
|
||||||
|
}
|
||||||
|
req.session.destroy()
|
||||||
|
|
||||||
|
|
||||||
|
setDocument: (req, res, next = (error) ->) ->
|
||||||
|
project_id = req.params.Project_id
|
||||||
|
doc_id = req.params.doc_id
|
||||||
|
lines = req.body.lines
|
||||||
|
logger.log doc_id:doc_id, project_id:project_id, "receiving set document request from api (docupdater)"
|
||||||
|
ProjectEntityHandler.updateDocLines project_id, doc_id, lines, (error) ->
|
||||||
|
if error?
|
||||||
|
logger.err err:error, doc_id:doc_id, project_id:project_id, "error finding element for getDocument"
|
||||||
|
return next(error)
|
||||||
|
res.send 200
|
||||||
|
req.session.destroy()
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,25 @@
|
||||||
|
logger = require "logger-sharelatex"
|
||||||
|
Metrics = require "../../infrastructure/Metrics"
|
||||||
|
Project = require("../../models/Project").Project
|
||||||
|
ProjectZipStreamManager = require "./ProjectZipStreamManager"
|
||||||
|
DocumentUpdaterHandler = require "../DocumentUpdater/DocumentUpdaterHandler"
|
||||||
|
|
||||||
|
module.exports = ProjectDownloadsController =
|
||||||
|
downloadProject: (req, res, next) ->
|
||||||
|
project_id = req.params.Project_id
|
||||||
|
Metrics.inc "zip-downloads"
|
||||||
|
logger.log project_id: project_id, "downloading project"
|
||||||
|
DocumentUpdaterHandler.flushProjectToMongo project_id, (error)->
|
||||||
|
return next(error) if error?
|
||||||
|
Project.findById project_id, "name", (error, project) ->
|
||||||
|
return next(error) if error?
|
||||||
|
ProjectZipStreamManager.createZipStreamForProject project_id, (error, stream) ->
|
||||||
|
return next(error) if error?
|
||||||
|
res.header(
|
||||||
|
"Content-Disposition",
|
||||||
|
"attachment; filename=#{encodeURIComponent(project.name)}.zip"
|
||||||
|
)
|
||||||
|
res.contentType('application/zip')
|
||||||
|
stream.pipe(res)
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,46 @@
|
||||||
|
archiver = require "archiver"
|
||||||
|
async = require "async"
|
||||||
|
logger = require "logger-sharelatex"
|
||||||
|
ProjectEntityHandler = require "../Project/ProjectEntityHandler"
|
||||||
|
FileStoreHandler = require("../FileStore/FileStoreHandler")
|
||||||
|
|
||||||
|
module.exports = ProjectZipStreamManager =
|
||||||
|
createZipStreamForProject: (project_id, callback = (error, stream) ->) ->
|
||||||
|
archive = archiver("zip")
|
||||||
|
# return stream immediately before we start adding things to it
|
||||||
|
callback(null, archive)
|
||||||
|
@addAllDocsToArchive project_id, archive, (error) =>
|
||||||
|
if error?
|
||||||
|
logger.error err: error, project_id: project_id, "error adding docs to zip stream"
|
||||||
|
@addAllFilesToArchive project_id, archive, (error) =>
|
||||||
|
if error?
|
||||||
|
logger.error err: error, project_id: project_id, "error adding files to zip stream"
|
||||||
|
archive.finalize()
|
||||||
|
|
||||||
|
|
||||||
|
addAllDocsToArchive: (project_id, archive, callback = (error) ->) ->
|
||||||
|
ProjectEntityHandler.getAllDocs project_id, (error, docs) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
jobs = []
|
||||||
|
for path, doc of docs
|
||||||
|
do (path, doc) ->
|
||||||
|
path = path.slice(1) if path[0] == "/"
|
||||||
|
jobs.push (callback) ->
|
||||||
|
logger.log project_id: project_id, "Adding doc"
|
||||||
|
archive.append doc.lines.join("\n"), name: path, callback
|
||||||
|
async.series jobs, callback
|
||||||
|
|
||||||
|
addAllFilesToArchive: (project_id, archive, callback = (error) ->) ->
|
||||||
|
ProjectEntityHandler.getAllFiles project_id, (error, files) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
jobs = []
|
||||||
|
for path, file of files
|
||||||
|
do (path, file) ->
|
||||||
|
jobs.push (callback) ->
|
||||||
|
FileStoreHandler.getFileStream project_id, file._id, {}, (error, stream) ->
|
||||||
|
if error?
|
||||||
|
logger.err err:error, project_id:project_id, file_id:file._id, "something went wrong adding file to zip archive"
|
||||||
|
return callback(err)
|
||||||
|
path = path.slice(1) if path[0] == "/"
|
||||||
|
archive.append stream, name: path, callback
|
||||||
|
async.series jobs, callback
|
|
@ -0,0 +1,81 @@
|
||||||
|
request = require('request')
|
||||||
|
settings = require('settings-sharelatex')
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
Project = require('../../models/Project').Project
|
||||||
|
projectEntityHandler = require '../Project/ProjectEntityHandler'
|
||||||
|
_ = require('underscore')
|
||||||
|
async = require('async')
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
|
||||||
|
getUserRegistrationStatus: (user_id, callback)->
|
||||||
|
logger.log user_id:user_id, "getting dropbox registration status from tpds"
|
||||||
|
opts =
|
||||||
|
url : "#{settings.apis.thirdPartyDataStore.url}/user/#{user_id}/dropbox/status"
|
||||||
|
timeout: 5000
|
||||||
|
request.get opts, (err, response, body)->
|
||||||
|
safelyGetResponse err, response, body, (err, body)->
|
||||||
|
if err?
|
||||||
|
logger.err err:err, response:response, "getUserRegistrationStatus problem"
|
||||||
|
return callback err
|
||||||
|
logger.log status:body, "getting dropbox registration status for user #{user_id}"
|
||||||
|
callback err, body
|
||||||
|
|
||||||
|
getDropboxRegisterUrl: (user_id, callback)->
|
||||||
|
opts =
|
||||||
|
url: "#{settings.apis.thirdPartyDataStore.url}/user/#{user_id}/dropbox/register"
|
||||||
|
timeout: 5000
|
||||||
|
request.get opts, (err, response, body)->
|
||||||
|
safelyGetResponse err, response, body, (err, body)->
|
||||||
|
if err?
|
||||||
|
logger.err err:err, response:response, "getUserRegistrationStatus problem"
|
||||||
|
return callback err
|
||||||
|
url = "#{body.authorize_url}&oauth_callback=#{settings.siteUrl}/dropbox/completeRegistration"
|
||||||
|
logger.log user_id:user_id, url:url, "starting dropbox register"
|
||||||
|
callback err, url
|
||||||
|
|
||||||
|
completeRegistration: (user_id, callback)->
|
||||||
|
opts =
|
||||||
|
url: "#{settings.apis.thirdPartyDataStore.url}/user/#{user_id}/dropbox/getaccesstoken"
|
||||||
|
timeout: 5000
|
||||||
|
request.get opts, (err, response, body)=>
|
||||||
|
safelyGetResponse err, response, body, (err, body)=>
|
||||||
|
if err?
|
||||||
|
logger.err err:err, response:response, "getUserRegistrationStatus problem"
|
||||||
|
return callback err
|
||||||
|
success = body.success
|
||||||
|
logger.log user_id:user_id, success:body.success, "completing dropbox register"
|
||||||
|
if success
|
||||||
|
@flushUsersProjectToDropbox user_id
|
||||||
|
callback err, body.success
|
||||||
|
|
||||||
|
|
||||||
|
unlinkAccount: (user_id, callback)->
|
||||||
|
opts =
|
||||||
|
url: "#{settings.apis.thirdPartyDataStore.url}/user/#{user_id}/dropbox"
|
||||||
|
timeout: 5000
|
||||||
|
request.del opts, (err, response, body)=>
|
||||||
|
callback(err)
|
||||||
|
|
||||||
|
flushUsersProjectToDropbox: (user_id, callback)->
|
||||||
|
Project.findAllUsersProjects user_id, '_id', (projects = [], collabertions = [], readOnlyProjects = [])->
|
||||||
|
projectList = []
|
||||||
|
projectList = projectList.concat(projects)
|
||||||
|
projectList = projectList.concat(collabertions)
|
||||||
|
projectList = projectList.concat(readOnlyProjects)
|
||||||
|
projectIds = _.pluck(projectList, "_id")
|
||||||
|
logger.log projectIds:projectIds, user_id:user_id, "flushing all a users projects to tpds"
|
||||||
|
jobs = projectIds.map (project_id)->
|
||||||
|
return (cb)->
|
||||||
|
projectEntityHandler.flushProjectToThirdPartyDataStore project_id, cb
|
||||||
|
async.series jobs, callback
|
||||||
|
|
||||||
|
safelyGetResponse = (err, res, body, callback)->
|
||||||
|
statusCode = if res? then res.statusCode else 500
|
||||||
|
if err? or statusCode != 200
|
||||||
|
e = new Error("something went wrong getting response from dropbox, #{err}, #{statusCode}")
|
||||||
|
logger.err err:err
|
||||||
|
callback(e, [])
|
||||||
|
else
|
||||||
|
body = JSON.parse body
|
||||||
|
callback(null, body)
|
245
services/web/app/coffee/Features/Editor/EditorController.coffee
Normal file
245
services/web/app/coffee/Features/Editor/EditorController.coffee
Normal file
|
@ -0,0 +1,245 @@
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
Metrics = require('../../infrastructure/Metrics')
|
||||||
|
sanitize = require('validator').sanitize
|
||||||
|
ProjectEditorHandler = require('../Project/ProjectEditorHandler')
|
||||||
|
ProjectEntityHandler = require('../Project/ProjectEntityHandler')
|
||||||
|
ProjectOptionsHandler = require('../Project/ProjectOptionsHandler')
|
||||||
|
ProjectDetailsHandler = require('../Project/ProjectDetailsHandler')
|
||||||
|
ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
ProjectHandler = new (require('../../handlers/ProjectHandler'))()
|
||||||
|
DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
|
||||||
|
LimitationsManager = require("../Subscription/LimitationsManager")
|
||||||
|
AuthorizationManager = require("../Security/AuthorizationManager")
|
||||||
|
AutomaticSnapshotManager = require("../Versioning/AutomaticSnapshotManager")
|
||||||
|
VersioningApiHandler = require("../Versioning/VersioningApiHandler")
|
||||||
|
AnalyticsManager = require("../Analytics/AnalyticsManager")
|
||||||
|
EditorRealTimeController = require("./EditorRealTimeController")
|
||||||
|
settings = require('settings-sharelatex')
|
||||||
|
slReqIdHelper = require('soa-req-id')
|
||||||
|
tpdsPollingBackgroundTasks = require('../ThirdPartyDataStore/TpdsPollingBackgroundTasks')
|
||||||
|
async = require('async')
|
||||||
|
_ = require('underscore')
|
||||||
|
rclientPub = require("redis").createClient(settings.redis.web.port, settings.redis.web.host)
|
||||||
|
rclientPub.auth(settings.redis.web.password)
|
||||||
|
rclientSub = require("redis").createClient(settings.redis.web.port, settings.redis.web.host)
|
||||||
|
rclientSub.auth(settings.redis.web.password)
|
||||||
|
|
||||||
|
module.exports = EditorController =
|
||||||
|
protocolVersion: 2
|
||||||
|
|
||||||
|
reportError: (client, clientError, callback = () ->) ->
|
||||||
|
client.get "project_id", (error, project_id) ->
|
||||||
|
client.get "user_id", (error, user_id) ->
|
||||||
|
logger.error err: clientError, project_id: project_id, user_id: user_id, "client error"
|
||||||
|
callback()
|
||||||
|
|
||||||
|
joinProject: (client, user, project_id, callback) ->
|
||||||
|
logger.log user_id:user._id, project_id:project_id, "user joining project"
|
||||||
|
Metrics.inc "editor.join-project"
|
||||||
|
ProjectGetter.getProjectWithoutDocLines project_id, (error, project) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
ProjectGetter.populateProjectWithUsers project, (error, project) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
VersioningApiHandler.enableVersioning project, (error) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
AuthorizationManager.getPrivilegeLevelForProject project, user,
|
||||||
|
(error, canAccess, privilegeLevel) ->
|
||||||
|
if error? or !canAccess
|
||||||
|
callback new Error("Not authorized")
|
||||||
|
else
|
||||||
|
AnalyticsManager.trackOpenEditor user, project
|
||||||
|
client.join(project_id)
|
||||||
|
client.set("project_id", project_id)
|
||||||
|
client.set("owner_id", project.owner_ref._id)
|
||||||
|
client.set("user_id", user._id)
|
||||||
|
client.set("first_name", user.first_name)
|
||||||
|
client.set("last_name", user.last_name)
|
||||||
|
client.set("email", user.email)
|
||||||
|
client.set("connected_time", new Date())
|
||||||
|
client.set("signup_date", user.signUpDate)
|
||||||
|
client.set("login_count", user.loginCount)
|
||||||
|
client.set("take_snapshots", project.existsInVersioningApi)
|
||||||
|
AuthorizationManager.setPrivilegeLevelOnClient client, privilegeLevel
|
||||||
|
callback null, ProjectEditorHandler.buildProjectModelView(project), privilegeLevel, EditorController.protocolVersion
|
||||||
|
|
||||||
|
leaveProject: (client, user) ->
|
||||||
|
self = @
|
||||||
|
client.get "project_id", (error, project_id) ->
|
||||||
|
return if error? or !project_id?
|
||||||
|
EditorRealTimeController.emitToRoom(project_id, "clientTracking.clientDisconnected", client.id)
|
||||||
|
logger.log user_id:user._id, project_id:project_id, "user leaving project"
|
||||||
|
self.flushProjectIfEmpty(project_id)
|
||||||
|
|
||||||
|
joinDoc: (client, project_id, doc_id, fromVersion, callback = (error, docLines, version) ->) ->
|
||||||
|
# fromVersion is optional
|
||||||
|
if typeof fromVersion == "function"
|
||||||
|
callback = fromVersion
|
||||||
|
fromVersion = -1
|
||||||
|
|
||||||
|
client.get "user_id", (error, user_id) ->
|
||||||
|
logger.log user_id: user_id, project_id: project_id, doc_id: doc_id, "user joining doc"
|
||||||
|
Metrics.inc "editor.join-doc"
|
||||||
|
client.join doc_id
|
||||||
|
DocumentUpdaterHandler.getDocument project_id, doc_id, fromVersion, (err, docLines, version, ops)->
|
||||||
|
# Encode any binary bits of data so it can go via WebSockets
|
||||||
|
# See http://ecmanaut.blogspot.co.uk/2006/07/encoding-decoding-utf8-in-javascript.html
|
||||||
|
if docLines?
|
||||||
|
docLines = for line in docLines
|
||||||
|
if line.text?
|
||||||
|
line.text = unescape(encodeURIComponent(line.text))
|
||||||
|
else
|
||||||
|
line = unescape(encodeURIComponent(line))
|
||||||
|
line
|
||||||
|
callback(err, docLines, version, ops)
|
||||||
|
|
||||||
|
leaveDoc: (client, project_id, doc_id, callback = (error) ->) ->
|
||||||
|
client.get "user_id", (error, user_id) ->
|
||||||
|
logger.log user_id: user_id, project_id: project_id, doc_id: doc_id, "user leaving doc"
|
||||||
|
Metrics.inc "editor.leave-doc"
|
||||||
|
client.leave doc_id
|
||||||
|
callback()
|
||||||
|
|
||||||
|
flushProjectIfEmpty: (project_id, callback = ->)->
|
||||||
|
setTimeout (->
|
||||||
|
io = require('../../infrastructure/Server').io
|
||||||
|
peopleStillInProject = io.sockets.clients(project_id).length
|
||||||
|
logger.log project_id: project_id, connectedCount: peopleStillInProject, "flushing if empty"
|
||||||
|
if peopleStillInProject == 0
|
||||||
|
DocumentUpdaterHandler.flushProjectToMongoAndDelete(project_id)
|
||||||
|
callback()
|
||||||
|
), 500
|
||||||
|
|
||||||
|
updateClientPosition: (client, cursorData, callback = (error) ->) ->
|
||||||
|
client.get "project_id", (error, project_id) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
client.get "first_name", (error, first_name) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
client.get "last_name", (error, last_name) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
cursorData.id = client.id
|
||||||
|
if first_name? and last_name?
|
||||||
|
cursorData.name = first_name + " " + last_name
|
||||||
|
else
|
||||||
|
cursorData.name = "Anonymous"
|
||||||
|
EditorRealTimeController.emitToRoom(project_id, "clientTracking.clientUpdated", cursorData)
|
||||||
|
|
||||||
|
addUserToProject: (project_id, email, privlages, callback = (error, collaborator_added)->)->
|
||||||
|
email = email.toLowerCase()
|
||||||
|
LimitationsManager.isCollaboratorLimitReached project_id, (error, limit_reached) =>
|
||||||
|
if error?
|
||||||
|
logger.error err:error, "error adding user to to project when checking if collaborator limit has been reached"
|
||||||
|
return callback(new Error("Something went wrong"))
|
||||||
|
|
||||||
|
if limit_reached
|
||||||
|
callback null, false
|
||||||
|
else
|
||||||
|
ProjectHandler.addUserToProject project_id, email, privlages, (user)=>
|
||||||
|
EditorRealTimeController.emitToRoom(project_id, 'userAddedToProject', user, privlages)
|
||||||
|
callback null, true
|
||||||
|
|
||||||
|
removeUserFromProject: (project_id, user_id, callback)->
|
||||||
|
ProjectHandler.removeUserFromProject project_id, user_id, =>
|
||||||
|
EditorRealTimeController.emitToRoom(project_id, 'userRemovedFromProject', user_id)
|
||||||
|
if callback?
|
||||||
|
callback()
|
||||||
|
|
||||||
|
setCompiler : (project_id, compiler, callback = ()->)->
|
||||||
|
ProjectOptionsHandler.setCompiler project_id, compiler, (err)->
|
||||||
|
logger.log compiler:compiler, project_id:project_id, "setting compiler"
|
||||||
|
callback()
|
||||||
|
|
||||||
|
setSpellCheckLanguage : (project_id, languageCode, callback = ()->)->
|
||||||
|
ProjectOptionsHandler.setSpellCheckLanguage project_id, languageCode, (err)->
|
||||||
|
logger.log languageCode:languageCode, project_id:project_id, "setting languageCode for spell check"
|
||||||
|
callback()
|
||||||
|
|
||||||
|
setDoc: (project_id, doc_id, docLines, sl_req_id, callback = (err)->)->
|
||||||
|
{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
|
||||||
|
DocumentUpdaterHandler.setDocument project_id, doc_id, docLines, (err)=>
|
||||||
|
logger.log project_id:project_id, doc_id:doc_id, "notifying users that the document has been updated"
|
||||||
|
EditorRealTimeController.emitToRoom(project_id, "entireDocUpdate", doc_id)
|
||||||
|
ProjectEntityHandler.updateDocLines project_id, doc_id, docLines, sl_req_id, (err)->
|
||||||
|
callback(err)
|
||||||
|
|
||||||
|
addDoc: (project_id, folder_id, docName, docLines, sl_req_id, callback = (error, doc)->)->
|
||||||
|
{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
|
||||||
|
docName = sanitize(docName).xss()
|
||||||
|
logger.log sl_req_id:sl_req_id, "sending new doc to project #{project_id}"
|
||||||
|
Metrics.inc "editor.add-doc"
|
||||||
|
ProjectEntityHandler.addDoc project_id, folder_id, docName, docLines, sl_req_id, (err, doc, folder_id)=>
|
||||||
|
EditorRealTimeController.emitToRoom(project_id, 'reciveNewDoc', folder_id, doc)
|
||||||
|
callback(err, doc)
|
||||||
|
|
||||||
|
addFile: (project_id, folder_id, fileName, path, sl_req_id, callback = (error, file)->)->
|
||||||
|
{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
|
||||||
|
fileName = sanitize(fileName).xss()
|
||||||
|
logger.log sl_req_id:sl_req_id, "sending new file to project #{project_id} with folderid: #{folder_id}"
|
||||||
|
Metrics.inc "editor.add-file"
|
||||||
|
ProjectEntityHandler.addFile project_id, folder_id, fileName, path, (err, fileRef, folder_id)=>
|
||||||
|
EditorRealTimeController.emitToRoom(project_id, 'reciveNewFile', folder_id, fileRef)
|
||||||
|
callback(err, fileRef)
|
||||||
|
|
||||||
|
replaceFile: (project_id, file_id, fsPath, callback)->
|
||||||
|
ProjectEntityHandler.replaceFile project_id, file_id, fsPath, (err) ->
|
||||||
|
callback()
|
||||||
|
|
||||||
|
addFolder: (project_id, folder_id, folderName, sl_req_id, callback = (error, folder)->)->
|
||||||
|
{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
|
||||||
|
folderName = sanitize(folderName).xss()
|
||||||
|
logger.log "sending new folder to project #{project_id}"
|
||||||
|
Metrics.inc "editor.add-folder"
|
||||||
|
ProjectEntityHandler.addFolder project_id, folder_id, folderName, (err, folder, folder_id)=>
|
||||||
|
@p.notifyProjectUsersOfNewFolder project_id, folder_id, folder, (error) ->
|
||||||
|
callback error, folder
|
||||||
|
|
||||||
|
mkdirp: (project_id, path, callback)->
|
||||||
|
logger.log project_id:project_id, path:path, "making directories if they don't exist"
|
||||||
|
ProjectEntityHandler.mkdirp project_id, path, (err, newFolders, lastFolder)=>
|
||||||
|
self = @
|
||||||
|
jobs = _.map newFolders, (folder, index)->
|
||||||
|
return (cb)->
|
||||||
|
self.p.notifyProjectUsersOfNewFolder project_id, folder.parentFolder_id, folder, cb
|
||||||
|
async.series jobs, (err)->
|
||||||
|
callback err, newFolders, lastFolder
|
||||||
|
|
||||||
|
deleteEntity: (project_id, entity_id, entityType, sl_req_id, callback)->
|
||||||
|
{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
|
||||||
|
logger.log project_id:project_id, entity_id:entity_id, entityType:entityType, "start delete process of entity"
|
||||||
|
Metrics.inc "editor.delete-entity"
|
||||||
|
ProjectEntityHandler.deleteEntity project_id, entity_id, entityType, =>
|
||||||
|
logger.log sl_req_id: sl_req_id, project_id:project_id, entity_id:entity_id, entityType:entityType, "telling users entity has been deleted"
|
||||||
|
EditorRealTimeController.emitToRoom(project_id, 'removeEntity', entity_id)
|
||||||
|
if callback?
|
||||||
|
callback()
|
||||||
|
|
||||||
|
getListOfDocPaths: (project_id, callback)->
|
||||||
|
ProjectEntityHandler.getAllDocs project_id, (err, docs)->
|
||||||
|
docList = _.map docs, (doc, path)->
|
||||||
|
return {_id:doc._id, path:path.substring(1)}
|
||||||
|
callback(null, docList)
|
||||||
|
|
||||||
|
forceResyncOfDropbox: (project_id, callback)->
|
||||||
|
ProjectEntityHandler.flushProjectToThirdPartyDataStore project_id, callback
|
||||||
|
|
||||||
|
notifyUsersProjectHasBeenDeletedOrRenamed: (project_id, callback)->
|
||||||
|
EditorRealTimeController.emitToRoom(project_id, 'projectRenamedOrDeletedByExternalSource')
|
||||||
|
callback()
|
||||||
|
|
||||||
|
getLastTimePollHappned: (callback)->
|
||||||
|
tpdsPollingBackgroundTasks.getLastTimePollHappned callback
|
||||||
|
|
||||||
|
updateProjectDescription: (project_id, description, callback = ->)->
|
||||||
|
logger.log project_id:project_id, description:description, "updating project description"
|
||||||
|
ProjectDetailsHandler.setProjectDescription project_id, description, (err)->
|
||||||
|
if err?
|
||||||
|
logger.err err:err, project_id:project_id, description:description, "something went wrong setting the project description"
|
||||||
|
return callback(err)
|
||||||
|
EditorRealTimeController.emitToRoom(project_id, 'projectDescriptionUpdated', description)
|
||||||
|
callback()
|
||||||
|
|
||||||
|
p:
|
||||||
|
notifyProjectUsersOfNewFolder: (project_id, folder_id, folder, callback = (error)->)->
|
||||||
|
logger.log project_id:project_id, folder:folder, parentFolder_id:folder_id, "sending newly created folder out to users"
|
||||||
|
EditorRealTimeController.emitToRoom(project_id, "reciveNewFolder", folder_id, folder)
|
||||||
|
callback()
|
||||||
|
|
|
@ -0,0 +1,31 @@
|
||||||
|
settings = require 'settings-sharelatex'
|
||||||
|
rclientPub = require("redis").createClient(settings.redis.web.port, settings.redis.web.host)
|
||||||
|
rclientPub.auth(settings.redis.web.password)
|
||||||
|
rclientSub = require("redis").createClient(settings.redis.web.port, settings.redis.web.host)
|
||||||
|
rclientSub.auth(settings.redis.web.password)
|
||||||
|
|
||||||
|
module.exports = EditorRealTimeController =
|
||||||
|
rclientPub: rclientPub
|
||||||
|
rclientSub: rclientSub
|
||||||
|
|
||||||
|
emitToRoom: (room_id, message, payload...) ->
|
||||||
|
@rclientPub.publish "editor-events", JSON.stringify
|
||||||
|
room_id: room_id
|
||||||
|
message: message
|
||||||
|
payload: payload
|
||||||
|
|
||||||
|
emitToAll: (message, payload...) ->
|
||||||
|
@emitToRoom "all", message, payload...
|
||||||
|
|
||||||
|
listenForEditorEvents: () ->
|
||||||
|
@rclientSub.subscribe "editor-events"
|
||||||
|
@rclientSub.on "message", @_processEditorEvent.bind(@)
|
||||||
|
|
||||||
|
_processEditorEvent: (channel, message) ->
|
||||||
|
io = require('../../infrastructure/Server').io
|
||||||
|
message = JSON.parse(message)
|
||||||
|
if message.room_id == "all"
|
||||||
|
io.sockets.emit(message.message, message.payload...)
|
||||||
|
else
|
||||||
|
io.sockets.in(message.room_id).emit(message.message, message.payload...)
|
||||||
|
|
|
@ -0,0 +1,68 @@
|
||||||
|
logger = require "logger-sharelatex"
|
||||||
|
metrics = require('../../infrastructure/Metrics')
|
||||||
|
Settings = require 'settings-sharelatex'
|
||||||
|
rclient = require("redis").createClient(Settings.redis.web.port, Settings.redis.web.host)
|
||||||
|
rclient.auth(Settings.redis.web.password)
|
||||||
|
DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
|
||||||
|
AutomaticSnapshotManager = require("../Versioning/AutomaticSnapshotManager")
|
||||||
|
EditorRealTimeController = require("./EditorRealTimeController")
|
||||||
|
|
||||||
|
module.exports = EditorUpdatesController =
|
||||||
|
_applyUpdate: (client, project_id, doc_id, update, callback = (error) ->) ->
|
||||||
|
metrics.inc "editor.doc-update", 0.3
|
||||||
|
metrics.set "editor.active-projects", project_id, 0.3
|
||||||
|
client.get "user_id", (error, user_id) ->
|
||||||
|
metrics.set "editor.active-users", user_id, 0.3
|
||||||
|
|
||||||
|
client.get "take_snapshots", (error, takeSnapshot) ->
|
||||||
|
if takeSnapshot
|
||||||
|
AutomaticSnapshotManager.markProjectAsUpdated(project_id)
|
||||||
|
|
||||||
|
DocumentUpdaterHandler.queueChange project_id, doc_id, update, (error) ->
|
||||||
|
if error?
|
||||||
|
logger.error err:error, project_id: project_id, "document was not available for update"
|
||||||
|
client.disconnect()
|
||||||
|
callback(error)
|
||||||
|
|
||||||
|
applyOtUpdate: (client, project_id, doc_id, update) ->
|
||||||
|
update.meta ||= {}
|
||||||
|
update.meta.source = client.id
|
||||||
|
client.get "user_id", (error, user_id) ->
|
||||||
|
update.meta.user_id = user_id
|
||||||
|
EditorUpdatesController._applyUpdate client, project_id, doc_id, update
|
||||||
|
|
||||||
|
applyAceUpdate: (client, project_id, doc_id, window_name, update) ->
|
||||||
|
# This is deprecated now and should never be used. Kick the client off if they call it.
|
||||||
|
# After the initial deploy this can be removed safely
|
||||||
|
logger.err project_id: project_id, doc_id: doc_id, "client using old Ace Update method"
|
||||||
|
client.disconnect()
|
||||||
|
|
||||||
|
listenForUpdatesFromDocumentUpdater: () ->
|
||||||
|
rclient.subscribe "applied-ops"
|
||||||
|
rclient.on "message", @_processMessageFromDocumentUpdater.bind(@)
|
||||||
|
|
||||||
|
_processMessageFromDocumentUpdater: (channel, message) ->
|
||||||
|
message = JSON.parse message
|
||||||
|
if message.op?
|
||||||
|
@_applyUpdateFromDocumentUpdater(message.doc_id, message.op)
|
||||||
|
else if message.error?
|
||||||
|
@_processErrorFromDocumentUpdater(message.doc_id, message.error, message)
|
||||||
|
|
||||||
|
_applyUpdateFromDocumentUpdater: (doc_id, update) ->
|
||||||
|
io = require('../../infrastructure/Server').io
|
||||||
|
for client in io.sockets.clients(doc_id)
|
||||||
|
if client.id == update.meta.source
|
||||||
|
client.emit "otUpdateApplied", v: update.v, doc: update.doc
|
||||||
|
else
|
||||||
|
client.emit "otUpdateApplied", update
|
||||||
|
|
||||||
|
_processErrorFromDocumentUpdater: (doc_id, error, message) ->
|
||||||
|
io = require('../../infrastructure/Server').io
|
||||||
|
logger.error err: error, doc_id: doc_id, "error from document updater"
|
||||||
|
for client in io.sockets.clients(doc_id)
|
||||||
|
client.emit "otUpdateError", error, message
|
||||||
|
client.disconnect()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,61 @@
|
||||||
|
logger = require("logger-sharelatex")
|
||||||
|
fs = require("fs")
|
||||||
|
request = require("request")
|
||||||
|
settings = require("settings-sharelatex")
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
|
||||||
|
uploadFileFromDisk: (project_id, file_id, fsPath, callback)->
|
||||||
|
logger.log project_id:project_id, file_id:file_id, fsPath:fsPath, "uploading file from disk"
|
||||||
|
readStream = fs.createReadStream(fsPath)
|
||||||
|
opts =
|
||||||
|
method: "post"
|
||||||
|
uri: @_buildUrl(project_id, file_id)
|
||||||
|
writeStream = request(opts)
|
||||||
|
readStream.pipe writeStream
|
||||||
|
readStream.on "end", callback
|
||||||
|
readStream.on "error", (err)->
|
||||||
|
logger.err err:err, project_id:project_id, file_id:file_id, fsPath:fsPath, "something went wrong on the read stream of uploadFileFromDisk"
|
||||||
|
callback err
|
||||||
|
writeStream.on "error", (err)->
|
||||||
|
logger.err err:err, project_id:project_id, file_id:file_id, fsPath:fsPath, "something went wrong on the write stream of uploadFileFromDisk"
|
||||||
|
callback err
|
||||||
|
|
||||||
|
getFileStream: (project_id, file_id, query, callback)->
|
||||||
|
logger.log project_id:project_id, file_id:file_id, query:query, "getting file stream from file store"
|
||||||
|
queryString = ""
|
||||||
|
if query? and query["format"]?
|
||||||
|
queryString = "?format=#{query['format']}"
|
||||||
|
opts =
|
||||||
|
method : "get"
|
||||||
|
uri: "#{@_buildUrl(project_id, file_id)}#{queryString}"
|
||||||
|
readStream = request(opts)
|
||||||
|
callback(null, readStream)
|
||||||
|
|
||||||
|
deleteFile: (project_id, file_id, callback)->
|
||||||
|
logger.log project_id:project_id, file_id:file_id, "telling file store to delete file"
|
||||||
|
opts =
|
||||||
|
method : "delete"
|
||||||
|
uri: @_buildUrl(project_id, file_id)
|
||||||
|
request opts, (err, response)->
|
||||||
|
if err?
|
||||||
|
logger.err err:err, project_id:project_id, file_id:file_id, "something went wrong deleting file from filestore"
|
||||||
|
callback(err)
|
||||||
|
|
||||||
|
copyFile: (oldProject_id, oldFile_id, newProject_id, newFile_id, callback)->
|
||||||
|
logger.log oldProject_id:oldProject_id, oldFile_id:oldFile_id, newProject_id:newProject_id, newFile_id:newFile_id, "telling filestore to copy a file"
|
||||||
|
opts =
|
||||||
|
method : "put"
|
||||||
|
json:
|
||||||
|
source:
|
||||||
|
project_id:oldProject_id
|
||||||
|
file_id:oldFile_id
|
||||||
|
uri: @_buildUrl(newProject_id, newFile_id)
|
||||||
|
|
||||||
|
request opts, (err)->
|
||||||
|
if err?
|
||||||
|
logger.err err:err, oldProject_id:oldProject_id, oldFile_id:oldFile_id, newProject_id:newProject_id, newFile_id:newFile_id, "something went wrong telling filestore api to copy file"
|
||||||
|
callback(err)
|
||||||
|
|
||||||
|
_buildUrl: (project_id, file_id)->
|
||||||
|
return "#{settings.apis.filestore.url}/project/#{project_id}/file/#{file_id}"
|
|
@ -0,0 +1,42 @@
|
||||||
|
Mocha = require "mocha"
|
||||||
|
Base = require("mocha/lib/reporters/base")
|
||||||
|
|
||||||
|
module.exports = HealthCheckController =
|
||||||
|
check: (req, res, next = (error) ->) ->
|
||||||
|
mocha = new Mocha(reporter: Reporter(res), timeout: 10000)
|
||||||
|
mocha.addFile("test/smoke/js/SmokeTests.js")
|
||||||
|
mocha.run () ->
|
||||||
|
path = require.resolve(__dirname + "/../../../../test/smoke/js/SmokeTests.js")
|
||||||
|
delete require.cache[path]
|
||||||
|
|
||||||
|
Reporter = (res) ->
|
||||||
|
(runner) ->
|
||||||
|
Base.call(this, runner)
|
||||||
|
|
||||||
|
tests = []
|
||||||
|
passes = []
|
||||||
|
failures = []
|
||||||
|
|
||||||
|
runner.on 'test end', (test) -> tests.push(test)
|
||||||
|
runner.on 'pass', (test) -> passes.push(test)
|
||||||
|
runner.on 'fail', (test) -> failures.push(test)
|
||||||
|
|
||||||
|
runner.on 'end', () =>
|
||||||
|
clean = (test) ->
|
||||||
|
title: test.fullTitle()
|
||||||
|
duration: test.duration
|
||||||
|
err: test.err
|
||||||
|
timedOut: test.timedOut
|
||||||
|
|
||||||
|
results = {
|
||||||
|
stats: @stats
|
||||||
|
failures: failures.map(clean)
|
||||||
|
passes: passes.map(clean)
|
||||||
|
}
|
||||||
|
|
||||||
|
res.contentType("application/json")
|
||||||
|
if failures.length > 0
|
||||||
|
res.send 500, JSON.stringify(results, null, 2)
|
||||||
|
else
|
||||||
|
res.send 200, JSON.stringify(results, null, 2)
|
||||||
|
|
|
@ -0,0 +1,10 @@
|
||||||
|
_ = require "underscore"
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
|
||||||
|
areSame: (lines1, lines2)->
|
||||||
|
if !Array.isArray(lines1) or !Array.isArray(lines2)
|
||||||
|
return false
|
||||||
|
|
||||||
|
return _.isEqual(lines1, lines2)
|
||||||
|
|
|
@ -0,0 +1,13 @@
|
||||||
|
ProjectDetailsHandler = require("./ProjectDetailsHandler")
|
||||||
|
logger = require("logger-sharelatex")
|
||||||
|
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
|
||||||
|
getProjectDetails : (req, res)->
|
||||||
|
{project_id} = req.params
|
||||||
|
ProjectDetailsHandler.getDetails project_id, (err, projDetails)->
|
||||||
|
if err?
|
||||||
|
logger.log err:err, project_id:project_id, "something went wrong getting project details"
|
||||||
|
return res.send 500
|
||||||
|
res.json(projDetails)
|
|
@ -0,0 +1,82 @@
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
async = require("async")
|
||||||
|
metrics = require('../../infrastructure/Metrics')
|
||||||
|
Settings = require('settings-sharelatex')
|
||||||
|
ObjectId = require('mongoose').Types.ObjectId
|
||||||
|
Project = require('../../models/Project').Project
|
||||||
|
Folder = require('../../models/Folder').Folder
|
||||||
|
VersioningApiHandler = require('../Versioning/VersioningApiHandler')
|
||||||
|
ProjectEntityHandler = require('./ProjectEntityHandler')
|
||||||
|
User = require('../../models/User').User
|
||||||
|
fs = require('fs')
|
||||||
|
Path = require "path"
|
||||||
|
_ = require "underscore"
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
createBlankProject : (owner_id, projectName, callback = (error, project) ->)->
|
||||||
|
metrics.inc("project-creation")
|
||||||
|
logger.log owner_id:owner_id, projectName:projectName, "creating blank project"
|
||||||
|
rootFolder = new Folder {'name':'rootFolder'}
|
||||||
|
project = new Project
|
||||||
|
owner_ref : new ObjectId(owner_id)
|
||||||
|
name : projectName
|
||||||
|
useClsi2 : true
|
||||||
|
project.rootFolder[0] = rootFolder
|
||||||
|
User.findById owner_id, "ace.spellCheckLanguage", (err, user)->
|
||||||
|
project.spellCheckLanguage = user.ace.spellCheckLanguage
|
||||||
|
project.save (err)->
|
||||||
|
return callback(err) if err?
|
||||||
|
VersioningApiHandler.enableVersioning project._id, (err) ->
|
||||||
|
callback err, project
|
||||||
|
|
||||||
|
createBasicProject : (owner_id, projectName, callback = (error, project) ->)->
|
||||||
|
self = @
|
||||||
|
@createBlankProject owner_id, projectName, (error, project)->
|
||||||
|
return callback(error) if error?
|
||||||
|
self._buildTemplate "mainbasic.tex", owner_id, projectName, (error, docLines)->
|
||||||
|
return callback(error) if error?
|
||||||
|
ProjectEntityHandler.addDoc project._id, project.rootFolder[0]._id, "main.tex", docLines, "", (error, doc)->
|
||||||
|
return callback(error) if error?
|
||||||
|
ProjectEntityHandler.setRootDoc project._id, doc._id, (error) ->
|
||||||
|
callback(error, project)
|
||||||
|
|
||||||
|
createExampleProject: (owner_id, projectName, callback = (error, project) ->)->
|
||||||
|
self = @
|
||||||
|
@createBlankProject owner_id, projectName, (error, project)->
|
||||||
|
return callback(error) if error?
|
||||||
|
async.series [
|
||||||
|
(callback) ->
|
||||||
|
self._buildTemplate "main.tex", owner_id, projectName, (error, docLines)->
|
||||||
|
return callback(error) if error?
|
||||||
|
ProjectEntityHandler.addDoc project._id, project.rootFolder[0]._id, "main.tex", docLines, "", (error, doc)->
|
||||||
|
return callback(error) if error?
|
||||||
|
ProjectEntityHandler.setRootDoc project._id, doc._id, callback
|
||||||
|
(callback) ->
|
||||||
|
self._buildTemplate "references.bib", owner_id, projectName, (error, docLines)->
|
||||||
|
return callback(error) if error?
|
||||||
|
ProjectEntityHandler.addDoc project._id, project.rootFolder[0]._id, "references.bib", docLines, "", (error, doc)->
|
||||||
|
callback(error)
|
||||||
|
(callback) ->
|
||||||
|
universePath = Path.resolve(__dirname + "/../../../templates/project_files/universe.jpg")
|
||||||
|
ProjectEntityHandler.addFile project._id, project.rootFolder[0]._id, "universe.jpg", universePath, callback
|
||||||
|
], (error) ->
|
||||||
|
callback(error, project)
|
||||||
|
|
||||||
|
_buildTemplate: (template_name, user_id, project_name, callback = (error, output) ->)->
|
||||||
|
User.findById user_id, "first_name last_name", (error, user)->
|
||||||
|
return callback(error) if error?
|
||||||
|
monthNames = [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ]
|
||||||
|
|
||||||
|
templatePath = Path.resolve(__dirname + "/../../../templates/project_files/#{template_name}")
|
||||||
|
fs.readFile templatePath, (error, template) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
data =
|
||||||
|
project_name: project_name
|
||||||
|
user: user
|
||||||
|
year: new Date().getUTCFullYear()
|
||||||
|
month: monthNames[new Date().getUTCMonth()]
|
||||||
|
output = _.template(template.toString(), data)
|
||||||
|
callback null, output.split("\n")
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,19 @@
|
||||||
|
Project = require('../../models/Project').Project
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
editorController = require('../Editor/EditorController')
|
||||||
|
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
|
||||||
|
markAsDeletedByExternalSource : (project_id, callback)->
|
||||||
|
logger.log project_id:project_id, "marking project as deleted by external data source"
|
||||||
|
conditions = {_id:project_id}
|
||||||
|
update = {deletedByExternalDataSource:true}
|
||||||
|
|
||||||
|
Project.update conditions, update, {}, (err)->
|
||||||
|
editorController.notifyUsersProjectHasBeenDeletedOrRenamed project_id, ->
|
||||||
|
callback()
|
||||||
|
|
||||||
|
deleteUsersProjects: (owner_id, callback)->
|
||||||
|
logger.log owner_id:owner_id, "deleting users projects"
|
||||||
|
Project.remove owner_ref:owner_id, callback
|
|
@ -0,0 +1,26 @@
|
||||||
|
ProjectGetter = require("./ProjectGetter")
|
||||||
|
Project = require('../../models/Project').Project
|
||||||
|
logger = require("logger-sharelatex")
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
|
||||||
|
getDetails: (project_id, callback)->
|
||||||
|
ProjectGetter.getProjectWithoutDocLines project_id, (err, project)->
|
||||||
|
if err?
|
||||||
|
logger.err err:err, project_id:project_id, "error getting project"
|
||||||
|
return callback(err)
|
||||||
|
details =
|
||||||
|
name : project.name
|
||||||
|
description: project.description
|
||||||
|
compiler: project.compiler
|
||||||
|
logger.log project_id:project_id, details:details, "getting project details"
|
||||||
|
callback(err, details)
|
||||||
|
|
||||||
|
setProjectDescription: (project_id, description, callback)->
|
||||||
|
conditions = _id:project_id
|
||||||
|
update = description:description
|
||||||
|
logger.log conditions:conditions, update:update, project_id:project_id, description:description, "setting project description"
|
||||||
|
Project.update conditions, update, (err)->
|
||||||
|
if err?
|
||||||
|
logger.err err:err, "something went wrong setting project description"
|
||||||
|
callback(err)
|
|
@ -0,0 +1,53 @@
|
||||||
|
projectCreationHandler = require('./ProjectCreationHandler')
|
||||||
|
projectEntityHandler = require('./ProjectEntityHandler')
|
||||||
|
projectLocator = require('./ProjectLocator')
|
||||||
|
projectOptionsHandler = require('./ProjectOptionsHandler')
|
||||||
|
DocumentUpdaterHandler = require("../DocumentUpdater/DocumentUpdaterHandler")
|
||||||
|
Project = require("../../models/Project").Project
|
||||||
|
_ = require('underscore')
|
||||||
|
async = require('async')
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
duplicate: (owner, originalProjectId, newProjectName, callback)->
|
||||||
|
DocumentUpdaterHandler.flushProjectToMongo originalProjectId, (err) ->
|
||||||
|
return callback(err) if err?
|
||||||
|
Project.findById originalProjectId, (err, originalProject) ->
|
||||||
|
return callback(err) if err?
|
||||||
|
projectCreationHandler.createBlankProject owner._id, newProjectName, (err, newProject)->
|
||||||
|
return callback(err) if err?
|
||||||
|
projectLocator.findRootDoc {project:originalProject}, (err, originalRootDoc)->
|
||||||
|
projectOptionsHandler.setCompiler newProject._id, originalProject.compiler
|
||||||
|
|
||||||
|
setRootDoc = _.once (doc_id)->
|
||||||
|
projectEntityHandler.setRootDoc newProject, doc_id
|
||||||
|
|
||||||
|
copyDocs = (originalFolder, newParentFolder, callback)->
|
||||||
|
jobs = originalFolder.docs.map (doc)->
|
||||||
|
return (callback)->
|
||||||
|
projectEntityHandler.addDoc newProject, newParentFolder._id, doc.name, doc.lines, (err, newDoc)->
|
||||||
|
if originalRootDoc? and newDoc.name == originalRootDoc.name
|
||||||
|
setRootDoc newDoc._id
|
||||||
|
callback()
|
||||||
|
async.series jobs, callback
|
||||||
|
|
||||||
|
copyFiles = (originalFolder, newParentFolder, callback)->
|
||||||
|
jobs = originalFolder.fileRefs.map (file)->
|
||||||
|
return (callback)->
|
||||||
|
projectEntityHandler.copyFileFromExistingProject newProject, newParentFolder._id, originalProject._id, file, callback
|
||||||
|
async.parallelLimit jobs, 5, callback
|
||||||
|
|
||||||
|
copyFolder = (folder, desFolder, callback)->
|
||||||
|
jobs = folder.folders.map (childFolder)->
|
||||||
|
return (callback)->
|
||||||
|
projectEntityHandler.addFolder newProject, desFolder._id, childFolder.name, (err, newFolder)->
|
||||||
|
copyFolder childFolder, newFolder, callback
|
||||||
|
jobs.push (cb)->
|
||||||
|
copyDocs folder, desFolder, cb
|
||||||
|
jobs.push (cb)->
|
||||||
|
copyFiles folder, desFolder, cb
|
||||||
|
|
||||||
|
async.series jobs, callback
|
||||||
|
|
||||||
|
copyFolder originalProject.rootFolder[0], newProject.rootFolder[0], ->
|
||||||
|
callback(err, newProject)
|
||||||
|
|
|
@ -0,0 +1,63 @@
|
||||||
|
# Transforms mongoose Project/User/Folder documents into the plain JSON
# view-models consumed by the editor client. Pure data mapping - no I/O.
module.exports = ProjectEditorHandler =

	# Build the top-level project view.
	# options.includeUsers defaults to true; when set, owner, members and
	# account features are attached. NOTE: the default is written onto the
	# caller's options object (same as the original implementation).
	buildProjectModelView: (project, options) ->
		options ||= {}
		options.includeUsers = true if !options.includeUsers?

		view =
			_id                : project._id
			name               : project.name
			rootDoc_id         : project.rootDoc_id
			rootFolder         : [@buildFolderModelView(project.rootFolder[0])]
			publicAccesLevel   : project.publicAccesLevel
			versioningVisible  : !!project.existsInVersioningApi
			dropboxEnabled     : !!project.existsInDropbox
			compiler           : project.compiler
			description        : project.description
			spellCheckLanguage : project.spellCheckLanguage
			deletedByExternalDataSource : project.deletedByExternalDataSource || false

		if options.includeUsers
			# Defaults used when the owner has no features record.
			view.features =
				collaborators: -1 # Infinite
				versioning: false
				dropbox: false

			ownerFeatures = project.owner_ref.features
			if ownerFeatures?
				view.features.collaborators = ownerFeatures.collaborators if ownerFeatures.collaborators?
				view.features.versioning = ownerFeatures.versioning if ownerFeatures.versioning?
				view.features.dropbox = ownerFeatures.dropbox if ownerFeatures.dropbox?

			view.owner = @buildUserModelView(project.owner_ref, "owner")
			view.members = []
			for readOnlyUser in project.readOnly_refs
				view.members.push @buildUserModelView(readOnlyUser, "readOnly")
			for collaborator in project.collaberator_refs
				view.members.push @buildUserModelView(collaborator, "readAndWrite")

		return view

	# Reduce a user document to the fields the client needs, tagged with the
	# privilege level ("owner" / "readOnly" / "readAndWrite").
	buildUserModelView: (user, privileges) ->
		_id        : user._id
		first_name : user.first_name
		last_name  : user.last_name
		email      : user.email
		privileges : privileges
		signUpDate : user.signUpDate

	# Recursively map a folder, its sub-folders, file refs and docs.
	buildFolderModelView: (folder) ->
		_id      : folder._id
		name     : folder.name
		folders  : (@buildFolderModelView(childFolder) for childFolder in folder.folders)
		fileRefs : (@buildFileModelView(file) for file in folder.fileRefs)
		docs     : (@buildDocModelView(doc) for doc in folder.docs)

	buildFileModelView: (file) ->
		_id  : file._id
		name : file.name

	buildDocModelView: (doc) ->
		_id  : doc._id
		name : doc.name
|
|
@ -0,0 +1,355 @@
|
||||||
|
Project = require('../../models/Project').Project
|
||||||
|
Doc = require('../../models/Doc').Doc
|
||||||
|
Folder = require('../../models/Folder').Folder
|
||||||
|
File = require('../../models/File').File
|
||||||
|
FileStoreHandler = require("../FileStore/FileStoreHandler")
|
||||||
|
Errors = require "../../errors"
|
||||||
|
tpdsUpdateSender = require('../ThirdPartyDataStore/TpdsUpdateSender')
|
||||||
|
projectLocator = require('./ProjectLocator')
|
||||||
|
path = require "path"
|
||||||
|
async = require "async"
|
||||||
|
_ = require('underscore')
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
slReqIdHelper = require('soa-req-id')
|
||||||
|
docComparitor = require('./DocLinesComparitor')
|
||||||
|
projectUpdateHandler = require('./ProjectUpdateHandler')
|
||||||
|
|
||||||
|
# ProjectEntityHandler
#
# CRUD operations on the entities (docs, files, folders) of a project's
# folder tree, keeping mongo, the filestore and the third-party datastore
# (TPDS) in sync.
#
# Review fixes applied:
#  * deleteEntity / moveEntity / flushProjectToThirdPartyDataStore tested an
#    undefined `error` variable after `Project.findById (err, ...)`, so DB
#    errors were silently ignored; errors now propagate to the callback.
#  * addDoc / addFile / copyFileFromExistingProject now return putElement /
#    filestore errors instead of crashing on `result.path` (or logging and
#    carrying on).
#  * addFile now forwards the real sl_req_id instead of the "sl_req_id_here"
#    placeholder.
#  * updateDocLines: removed two unreachable branches (err / missing project
#    were already handled by the early returns above them).
module.exports = ProjectEntityHandler =

	# Walk the project's folder tree and call back with a map of
	# filesystem-style path -> folder document (the root folder is "/").
	getAllFolders: (project_id, sl_req_id, callback) ->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		logger.log sl_req_id: sl_req_id, project_id:project_id, "getting all folders for project"
		folders = {}
		processFolder = (basePath, folder) ->
			folders[basePath] = folder
			processFolder path.join(basePath, childFolder.name), childFolder for childFolder in folder.folders

		Project.findById project_id, (err, project) ->
			return callback(err) if err?
			return callback("no project") if !project?
			processFolder "/", project.rootFolder[0]
			callback null, folders

	# Call back with a map of filesystem path -> doc for every doc in the project.
	getAllDocs: (project_id, sl_req_id, callback) ->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		logger.log project_id:project_id, "getting all docs for project"
		@getAllFolders project_id, sl_req_id, (err, folders) ->
			return callback(err) if err?
			docs = {}
			for folderPath, folder of folders
				for doc in folder.docs
					docs[path.join(folderPath, doc.name)] = doc
			logger.log count:_.keys(docs).length, project_id:project_id, "returning docs for project"
			callback null, docs

	# Call back with a map of filesystem path -> file ref for every file in the project.
	getAllFiles: (project_id, sl_req_id, callback) ->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		logger.log project_id:project_id, "getting all files for project"
		@getAllFolders project_id, sl_req_id, (err, folders) ->
			return callback(err) if err?
			files = {}
			for folderPath, folder of folders
				for file in folder.fileRefs
					files[path.join(folderPath, file.name)] = file
			callback null, files

	# Push every doc and file of the project to the third-party datastore.
	# Docs are flushed from the document updater to mongo first so the doc
	# lines we send are current.
	flushProjectToThirdPartyDataStore: (project_id, sl_req_id, callback) ->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		self = @
		logger.log sl_req_id: sl_req_id, project_id:project_id, "flushing project to tpds"
		# Required lazily to avoid a circular dependency at module load time.
		documentUpdaterHandler = require('../../Features/DocumentUpdater/DocumentUpdaterHandler')
		documentUpdaterHandler.flushProjectToMongo project_id, undefined, (error) ->
			return callback(error) if error?
			Project.findById project_id, (err, project) ->
				return callback(err) if err? # FIX: was ignored (checked undefined `error`)
				requests = []
				self.getAllDocs project_id, (err, docs) ->
					return callback(err) if err? # FIX: was ignored
					for docPath, doc of docs
						do (docPath, doc) ->
							requests.push (callback) ->
								tpdsUpdateSender.addDoc {project_id:project_id, docLines:doc.lines, path:docPath, project_name:project.name, rev:doc.rev||0}, sl_req_id, callback
					self.getAllFiles project_id, (err, files) ->
						return callback(err) if err? # FIX: was ignored
						for filePath, file of files
							do (filePath, file) ->
								requests.push (callback) ->
									tpdsUpdateSender.addFile {project_id:project_id, file_id:file._id, path:filePath, project_name:project.name, rev:file.rev}, sl_req_id, callback
						async.series requests, (err) ->
							logger.log sl_req_id: sl_req_id, project_id:project_id, "finished flushing project to tpds"
							callback(err)

	setRootDoc: (project_id, newRootDocID, sl_req_id, callback = (error) ->)->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		logger.log sl_req_id: sl_req_id, project_id: project_id, rootDocId: newRootDocID, "setting root doc"
		Project.update {_id:project_id}, {rootDoc_id:newRootDocID}, {}, callback

	unsetRootDoc: (project_id, sl_req_id, callback = (error) ->) ->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		logger.log sl_req_id: sl_req_id, project_id: project_id, "removing root doc"
		Project.update {_id:project_id}, {$unset: {rootDoc_id: true}}, {}, callback

	# Create a new doc in the given folder (root folder when folder_id is
	# null/undefined) and mirror it to the TPDS.
	addDoc: (project_or_id, folder_id, docName, docLines, sl_req_id, callback = (error, doc, folder_id) ->)=>
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		Project.getProject project_or_id, "", (err, project) ->
			return callback(err) if err?
			logger.log sl_req_id: sl_req_id, project: project._id, folder_id: folder_id, doc_name: docName, "adding doc"
			confirmFolder project, folder_id, (folder_id)=>
				doc = new Doc name: docName, lines: docLines
				Project.putElement project._id, folder_id, doc, "doc", (err, result)=>
					return callback(err) if err? # FIX: previously crashed on result.path below
					tpdsUpdateSender.addDoc {project_id:project._id, docLines:docLines, path:result.path.fileSystem, project_name:project.name, rev:doc.rev}, sl_req_id, ->
						callback(null, doc, folder_id)

	# Upload a file from disk into the filestore and register it in the folder.
	addFile: (project_or_id, folder_id, fileName, path, sl_req_id, callback = (error, fileRef, folder_id) ->)->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		Project.getProject project_or_id, "", (err, project) ->
			return callback(err) if err?
			logger.log sl_req_id: sl_req_id, project_id: project._id, folder_id: folder_id, file_name: fileName, path:path, "adding file"
			confirmFolder project, folder_id, (folder_id)->
				fileRef = new File name : fileName
				FileStoreHandler.uploadFileFromDisk project._id, fileRef._id, path, (err)->
					if err?
						logger.err err:err, project_id: project._id, folder_id: folder_id, file_name: fileName, fileRef:fileRef, "error uploading image to s3"
						return callback(err)
					Project.putElement project._id, folder_id, fileRef, "file", (err, result)=>
						return callback(err) if err? # FIX: previously crashed on result.path below
						# FIX: forward the real sl_req_id (was the "sl_req_id_here" placeholder)
						tpdsUpdateSender.addFile {project_id:project._id, file_id:fileRef._id, path:result.path.fileSystem, project_name:project.name, rev:fileRef.rev}, sl_req_id, ->
							callback(null, fileRef, folder_id)

	# Overwrite the filestore content of an existing file and bump its rev.
	replaceFile: (project_or_id, file_id, fsPath, callback)->
		Project.getProject project_or_id, "", (err, project) ->
			return callback(err) if err?
			findOpts =
				project_id:project._id
				element_id:file_id
				type:"file"
			projectLocator.findElement findOpts, (err, fileRef, path)=>
				return callback(err) if err?
				FileStoreHandler.uploadFileFromDisk project._id, fileRef._id, fsPath, (err)->
					return callback(err) if err?
					tpdsUpdateSender.addFile {project_id:project._id, file_id:fileRef._id, path:path.fileSystem, rev:fileRef.rev+1, project_name:project.name}, "sl_req_id_here", (error) ->
						conditons = _id:project._id
						inc = {}
						inc["#{path.mongo}.rev"] = 1
						set = {}
						set["#{path.mongo}.created"] = new Date()
						update =
							"$inc": inc
							"$set": set
						Project.update conditons, update, {}, (err, second)->
							callback()

	# Copy a file (by filestore copy, no re-upload) from another project into
	# the given folder of this project.
	copyFileFromExistingProject: (project_or_id, folder_id, originalProject_id, origonalFileRef, sl_req_id, callback = (error, fileRef, folder_id) ->)->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		Project.getProject project_or_id, "", (err, project) ->
			return callback(err) if err?
			logger.log sl_req_id: sl_req_id, project_id:project._id, folder_id:folder_id, originalProject_id:originalProject_id, origonalFileRef:origonalFileRef, "copying file in s3"
			confirmFolder project, folder_id, (folder_id)=>
				fileRef = new File name : origonalFileRef.name
				FileStoreHandler.copyFile originalProject_id, origonalFileRef._id, project._id, fileRef._id, (err)->
					if err?
						logger.err err:err, project_id:project._id, folder_id:folder_id, originalProject_id:originalProject_id, origonalFileRef:origonalFileRef, "error coping file in s3"
						return callback(err) # FIX: was logged but execution continued
					Project.putElement project._id, folder_id, fileRef, "file", (err, result)=>
						return callback(err) if err? # FIX: previously crashed on result.path below
						tpdsUpdateSender.addFile {project_id:project._id, file_id:fileRef._id, path:result.path.fileSystem, rev:fileRef.rev, project_name:project.name}, sl_req_id, (error) ->
							callback(error, fileRef, folder_id)

	# Ensure every folder in `path` exists (like shell mkdir -p). Calls back
	# with (err, newlyCreatedFolders, lastFolderInPath); pre-existing folders
	# are filtered out of the first list.
	mkdirp: (project_or_id, path, sl_req_id, callback = (err, newlyCreatedFolders, lastFolderInPath)->)->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		self = @
		folders = path.split('/')
		folders = _.select folders, (folder)->
			return folder.length != 0

		Project.getProject project_or_id, "", (err, project)=>
			if path == '/'
				logger.log project_id: project._id, "mkdir is only trying to make path of / so sending back root folder"
				return callback(null, [], project.rootFolder[0])
			logger.log project_id: project._id, path:path, folders:folders, "running mkdirp"

			builtUpPath = ''
			processFolder = (previousFolders, folderName, callback)=>
				previousFolders = previousFolders || []
				parentFolder = previousFolders[previousFolders.length-1]
				if parentFolder?
					parentFolder_id = parentFolder._id
				builtUpPath = "#{builtUpPath}/#{folderName}"
				projectLocator.findElementByPath project_or_id, builtUpPath, (err, foundFolder)=>
					if !foundFolder?
						logger.log sl_req_id: sl_req_id, path:path, project_id:project._id, folderName:folderName, "making folder from mkdirp"
						@addFolder project_or_id, parentFolder_id, folderName, sl_req_id, (err, newFolder, parentFolder_id)->
							newFolder.parentFolder_id = parentFolder_id
							previousFolders.push newFolder
							callback null, previousFolders
					else
						# Folder already existed; mark it so it is excluded from
						# the "newly created" list returned to the caller.
						foundFolder.filterOut = true
						previousFolders.push foundFolder
						callback null, previousFolders

			async.reduce folders, [], processFolder, (err, folders)->
				lastFolder = folders[folders.length-1]
				folders = _.select folders, (folder)->
					!folder.filterOut
				callback(null, folders, lastFolder)

	# Create a single folder under parentFolder_id (root when null/undefined).
	addFolder: (project_or_id, parentFolder_id, folderName, sl_req_id, callback)->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		folder = new Folder name: folderName
		Project.getProject project_or_id, "", (err, project) ->
			return callback(err) if err?
			confirmFolder project, parentFolder_id, (parentFolder_id)=>
				logger.log sl_req_id: sl_req_id, project: project_or_id, parentFolder_id:parentFolder_id, folderName:folderName, "new folder added"
				Project.putElement project._id, parentFolder_id, folder, "folder", (err, result)=>
					if callback?
						callback(err, folder, parentFolder_id)

	# Persist new doc lines to mongo (bumping rev) and mirror them to the
	# TPDS. A no-op when the lines are unchanged.
	updateDocLines : (project_or_id, doc_id, docLines, sl_req_id, callback = (error) ->)->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		Project.getProject project_or_id, "", (err, project)->
			if err?
				logger.err err:err, "error finding project"
				return callback(err)
			return callback(new Errors.NotFoundError("project not found")) if !project?
			project_id = project._id
			projectLocator.findElement {project:project, element_id:doc_id, type:"docs"}, (err, doc, path)->
				if err?
					logger.err "error putting doc #{doc_id} in project #{project_id} #{err}"
					callback err
				else if docComparitor.areSame docLines, doc.lines
					logger.log sl_req_id: sl_req_id, docLines:docLines, project_id:project_id, doc_id:doc_id, rev:doc.rev, "old doc lines are same as the new doc lines, not updating them"
					callback()
				else
					logger.log sl_req_id: sl_req_id, project_id:project_id, doc_id:doc_id, docLines: docLines, oldDocLines: doc.lines, rev:doc.rev, "updating doc lines"
					conditons = _id:project_id
					update = {$set:{}, $inc:{}}
					changeLines = {}
					changeLines["#{path.mongo}.lines"] = docLines
					inc = {}
					inc["#{path.mongo}.rev"] = 1
					update["$set"] = changeLines
					update["$inc"] = inc
					Project.update conditons, update, {}, (err, second)->
						if(err)
							logger.err(sl_req_id:sl_req_id, doc_id:doc_id, project_id:project_id, err:err, "error saving doc to mongo")
							callback(err)
						else
							logger.log sl_req_id:sl_req_id, doc_id:doc_id, project_id:project_id, newDocLines:docLines, oldDocLines:doc.lines, "doc saved to mongo"
							rev = doc.rev+1
							projectUpdateHandler.markAsUpdated project_id
							tpdsUpdateSender.addDoc {project_id:project_id, path:path.fileSystem, docLines:docLines, project_name:project.name, rev:rev}, sl_req_id, callback

	# Move a doc/file/folder into another folder, updating mongo and the TPDS.
	moveEntity: (project_id, entity_id, folder_id, entityType, sl_req_id, callback = (error) ->)->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		self = @
		destinationFolder_id = folder_id
		logger.log sl_req_id: sl_req_id, entityType:entityType, entity_id:entity_id, project_id:project_id, folder_id:folder_id, "moving entity"
		if !entityType?
			logger.err err: "No entityType set", project_id: project_id, entity_id: entity_id
			return callback("No entityType set")
		entityType = entityType.toLowerCase()
		Project.findById project_id, (err, project)=>
			return callback(err) if err? # FIX: was ignored
			projectLocator.findElement {project:project, element_id:entity_id, type:entityType}, (err, entity, path)->
				return callback(err) if err?
				self._removeElementFromMongoArray Project, project_id, path.mongo, (err)->
					return callback(err) if err?
					Project.putElement project_id, destinationFolder_id, entity, entityType, (err, result)->
						return callback(err) if err?
						opts =
							project_id:project_id
							project_name:project.name
							startPath:path.fileSystem
							endPath:result.path.fileSystem
							rev:entity.rev
						tpdsUpdateSender.moveEntity opts, sl_req_id, callback

	# Delete a doc/file/folder: clean up its external resources, notify the
	# TPDS, then remove it from the project document.
	deleteEntity: (project_id, entity_id, entityType, sl_req_id, callback = (error) ->)->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		self = @
		logger.log entity_id:entity_id, type:entityType, project_id:project_id, "deleting project entity"
		if !entityType?
			logger.err err: "No entityType set", project_id: project_id, entity_id: entity_id
			return callback("No entityType set")
		entityType = entityType.toLowerCase()
		Project.findById project_id, (err, project)=>
			return callback(err) if err? # FIX: checked an undefined `error` variable
			projectLocator.findElement {project: project, element_id: entity_id, type: entityType}, (error, entity, path)=>
				return callback(error) if error?
				ProjectEntityHandler._cleanUpEntity project, entity, entityType, (error) ->
					return callback(error) if error?
					tpdsUpdateSender.deleteEntity project_id:project_id, path:path.fileSystem, project_name:project.name, sl_req_id, (error) ->
						return callback(error) if error?
						self._removeElementFromMongoArray Project, project_id, path.mongo, (error) ->
							return callback(error) if error?
							callback null

	# Dispatch to the appropriate clean-up routine for the entity type.
	_cleanUpEntity: (project, entity, entityType, sl_req_id, callback = (error) ->) ->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)

		if(entityType.indexOf("file") != -1)
			ProjectEntityHandler._cleanUpFile project, entity, sl_req_id, callback
		else if (entityType.indexOf("doc") != -1)
			ProjectEntityHandler._cleanUpDoc project, entity, sl_req_id, callback
		else if (entityType.indexOf("folder") != -1)
			ProjectEntityHandler._cleanUpFolder project, entity, sl_req_id, callback
		else
			callback()

	# Unset the project root doc if it pointed at this doc, then remove the
	# doc from the document updater.
	_cleanUpDoc: (project, doc, sl_req_id, callback = (error) ->) ->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		project_id = project._id.toString()
		doc_id = doc._id.toString()
		unsetRootDocIfRequired = (callback) =>
			if project.rootDoc_id? and project.rootDoc_id.toString() == doc_id
				@unsetRootDoc project_id, callback
			else
				callback()

		unsetRootDocIfRequired (error) ->
			return callback(error) if error?
			require('../../Features/DocumentUpdater/DocumentUpdaterHandler').deleteDoc project_id, doc_id, callback

	# Delete the file's content from the filestore.
	_cleanUpFile: (project, file, sl_req_id, callback = (error) ->) ->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		project_id = project._id.toString()
		file_id = file._id.toString()
		FileStoreHandler.deleteFile project_id, file_id, callback

	# Recursively clean up every doc, file and sub-folder of a folder.
	_cleanUpFolder: (project, folder, sl_req_id, callback = (error) ->) ->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)

		jobs = []
		for doc in folder.docs
			do (doc) ->
				jobs.push (callback) -> ProjectEntityHandler._cleanUpDoc project, doc, sl_req_id, callback

		for file in folder.fileRefs
			do (file) ->
				jobs.push (callback) -> ProjectEntityHandler._cleanUpFile project, file, sl_req_id, callback

		for childFolder in folder.folders
			do (childFolder) ->
				jobs.push (callback) -> ProjectEntityHandler._cleanUpFolder project, childFolder, sl_req_id, callback

		async.series jobs, callback

	# Unset the element at `path` then $pull the resulting null out of the
	# containing array (mongo cannot remove an array element by index directly).
	_removeElementFromMongoArray : (model, model_id, path, callback)->
		conditons = {_id:model_id}
		update = {"$unset":{}}
		update["$unset"][path] = 1
		model.update conditons, update, {}, (err)->
			pullUpdate = {"$pull":{}}
			nonArrayPath = path.slice(0, path.lastIndexOf("."))
			pullUpdate["$pull"][nonArrayPath] = null
			model.update conditons, pullUpdate, {}, (err)->
				if callback?
					callback(err)

# Resolve a possibly-missing folder_id to a concrete id, falling back to the
# project's root folder when it is null or undefined.
confirmFolder = (project, folder_id, callback)->
	logger.log folder_id:folder_id, project_id:project._id, "confirming folder in project"
	if folder_id+'' == 'undefined'
		callback(project.rootFolder[0]._id)
	else if folder_id != null
		callback folder_id
	else
		callback(project.rootFolder[0]._id)
|
|
@ -0,0 +1,61 @@
|
||||||
|
mongojs = require("../../infrastructure/mongojs")
|
||||||
|
db = mongojs.db
|
||||||
|
ObjectId = mongojs.ObjectId
|
||||||
|
async = require "async"
|
||||||
|
|
||||||
|
module.exports = ProjectGetter =
	# Number of nested-folder levels for which doc-line exclusion paths are built.
	EXCLUDE_DEPTH: 8

	# Fetch a project with doc lines excluded at every folder depth, keeping
	# the returned document small.
	getProjectWithoutDocLines: (project_id, callback=(error, project) ->) ->
		excludes = {}
		for i in [1..@EXCLUDE_DEPTH]
			# Builds "rootFolder.docs.lines", "rootFolder.folders.docs.lines", ...
			# FIX: the original joined with ".folder" (singular), which does not
			# match the ".folders" array path used elsewhere (see ProjectLocator's
			# mongo paths), so doc lines in nested folders were never excluded.
			excludes["rootFolder#{Array(i).join(".folders")}.docs.lines"] = 0
		db.projects.find _id: ObjectId(project_id), excludes, (error, projects = []) ->
			callback error, projects[0]

	# Fetch a single project. `query` may be a string id, an ObjectId, or a
	# ready-made mongo query object.
	getProject: (query, projection, callback = (error, project) ->) ->
		if typeof query == "string"
			query = _id: ObjectId(query)
		else if query instanceof ObjectId
			query = _id: query
		db.projects.findOne query, projection, callback

	# Replace owner_ref / readOnly_refs / collaberator_refs ids on the project
	# with full user documents. Users that cannot be found are silently
	# dropped from the member lists; the owner_ref is left as the raw id.
	populateProjectWithUsers: (project, callback=(error, project) ->) ->
		# eventually this should be in a UserGetter.getUser module
		getUser = (user_id, callback=(error, user) ->) ->
			unless user_id instanceof ObjectId
				user_id = ObjectId(user_id)
			db.users.find _id: user_id, (error, users = []) ->
				callback error, users[0]

		jobs = []
		jobs.push (callback) ->
			getUser project.owner_ref, (error, user) ->
				return callback(error) if error?
				if user?
					project.owner_ref = user
				callback null, project

		readOnly_refs = project.readOnly_refs
		project.readOnly_refs = []
		for readOnly_ref in readOnly_refs
			do (readOnly_ref) ->
				jobs.push (callback) ->
					getUser readOnly_ref, (error, user) ->
						return callback(error) if error?
						if user?
							project.readOnly_refs.push user
						callback null, project

		collaberator_refs = project.collaberator_refs
		project.collaberator_refs = []
		for collaberator_ref in collaberator_refs
			do (collaberator_ref) ->
				jobs.push (callback) ->
					getUser collaberator_ref, (error, user) ->
						return callback(error) if error?
						if user?
							project.collaberator_refs.push user
						callback null, project

		async.series jobs, (error) -> callback error, project
|
141
services/web/app/coffee/Features/Project/ProjectLocator.coffee
Normal file
141
services/web/app/coffee/Features/Project/ProjectLocator.coffee
Normal file
|
@ -0,0 +1,141 @@
|
||||||
|
Project = require('../../models/Project').Project
|
||||||
|
Errors = require "../../errors"
|
||||||
|
_ = require('underscore')
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
async = require('async')
|
||||||
|
|
||||||
|
module.exports =
	# Locate an element (doc, fileRef or folder) by id anywhere in the
	# project's folder tree. Calls back with (err, element, path, parentFolder)
	# where `path` carries both a filesystem form ("/a/b.tex") and a mongo
	# form ("rootFolder.0.folders.1.docs.0"). Pass either `project` (a loaded
	# document) or `project_id` in options.
	findElement: (options, callback = (err, element, path, parentFolder)->)->
		{project, project_id, element_id, type} = options
		elementType = sanitizeTypeOfElement type

		# Depth-first search with a branch counter: endOfBranch fires the
		# not-found callback only once every outstanding branch has finished.
		count = 0
		endOfBranch = ->
			if --count == 0
				logger.warn "element #{element_id} could not be found for project #{project_id || project._id}"
				return callback(new Errors.NotFoundError("entity not found"))

		search = (searchFolder, path)->
			count++
			element = _.find searchFolder[elementType], (el)-> el._id+'' == element_id+'' #need to ToString both id's for robustness
			if !element? && searchFolder.folders? && searchFolder.folders.length != 0
				_.each searchFolder.folders, (folder, index)->
					newPath = {}
					newPath[key] = value for own key,value of path #make a value copy of the string
					newPath.fileSystem += "/#{folder.name}"
					newPath.mongo += ".folders.#{index}"
					search folder, newPath
				endOfBranch()
				return
			else if element?
				elementPlaceInArray = getIndexOf(searchFolder[elementType], element_id)
				path.fileSystem += "/#{element.name}"
				path.mongo +=".#{elementType}.#{elementPlaceInArray}"
				callback(null, element, path, searchFolder)
			else if !element?
				return endOfBranch()

		path = {fileSystem:'',mongo:'rootFolder.0'}

		startSearch = (project)->
			if element_id+'' == project.rootFolder[0]._id+''
				callback(null, project.rootFolder[0], path, null)
			else
				search project.rootFolder[0], path

		if project?
			startSearch(project)
		else
			Project.findById project_id, (err, project)->
				return callback(err) if err?
				if !project?
					return callback(new Errors.NotFoundError("project not found"))
				startSearch project

	# Find the project's root doc (by its rootDoc_id) via findElement.
	findRootDoc : (opts, callback)->
		getRootDoc = (project)=>
			@findElement {project:project, element_id:project.rootDoc_id, type:"docs"}, callback
		{project, project_id} = opts
		if project?
			getRootDoc project
		else
			Project.findById project_id, (err, project)->
				# FIX: lookup errors / missing projects were ignored and
				# crashed inside getRootDoc; report them instead.
				return callback(err) if err?
				return callback(new Errors.NotFoundError("project not found")) if !project?
				getRootDoc project

	# Resolve a filesystem-style path ("/folder/sub/name") to the entity at
	# that path. Name matching is case-insensitive; "" or "/" resolves to the
	# root folder.
	findElementByPath: (project_or_id, needlePath, callback = (err, foundEntity)->)->

		getParentFolder = (haystackFolder, foldersList, level, cb)->
			if foldersList.length == 0
				return cb null, haystackFolder
			needleFolderName = foldersList[level]
			found = false
			_.each haystackFolder.folders, (folder)->
				if folder.name.toLowerCase() == needleFolderName.toLowerCase()
					found = true
					if level == foldersList.length-1
						cb null, folder
					else
						getParentFolder(folder, foldersList, ++level, cb)
			if !found
				cb("not found project_or_id: #{project_or_id} search path: #{needlePath}, folder #{foldersList[level]} could not be found")

		getEntity = (folder, entityName, cb)->
			if !entityName?
				return cb null, folder
			enteties = _.union folder.fileRefs, folder.docs, folder.folders
			result = _.find enteties, (entity)->
				entity.name.toLowerCase() == entityName.toLowerCase()
			if result?
				cb null, result
			else
				cb("not found project_or_id: #{project_or_id} search path: #{needlePath}, entity #{entityName} could not be found")

		Project.getProject project_or_id, "", (err, project)->
			if needlePath == '' || needlePath == '/'
				return callback(null, project.rootFolder[0])

			if needlePath.indexOf('/') == 0
				needlePath = needlePath.substring(1)
			foldersList = needlePath.split('/')
			needleName = foldersList.pop()
			rootFolder = project.rootFolder[0]

			logger.log project_id:project._id, path:needlePath, foldersList:foldersList, "looking for element by path"
			jobs = new Array()
			jobs.push(
				(cb)->
					getParentFolder rootFolder, foldersList, 0, cb
			)
			jobs.push(
				(folder, cb)->
					getEntity folder, needleName, cb
			)
			async.waterfall jobs, callback

	# Case-insensitive lookup of one of the user's projects (owned or
	# collaborated on) by name. Calls back with (null, project) where project
	# is undefined when no match exists.
	findUsersProjectByName: (user_id, projectName, callback)->
		Project.findAllUsersProjects user_id, 'name', (projects, collabertions=[])->
			projects = projects.concat(collabertions)
			projectName = projectName.toLowerCase()
			project = _.find projects, (project)->
				project.name.toLowerCase() == projectName
			logger.log user_id:user_id, projectName:projectName, totalProjects:projects.length, project:project, "looking for project by name"
			callback(null, project)

# Normalise an element type to the plural array-field name used in the
# project schema ("doc" -> "docs", "file"/"files" -> "fileRefs").
sanitizeTypeOfElement = (elementType)->
	lastChar = elementType.slice -1
	if lastChar != "s"
		elementType +="s"
	if elementType == "files"
		elementType = "fileRefs"
	return elementType

# Index of the entity with the given id in an array of entities (compared as
# strings); undefined when absent.
getIndexOf = (searchEntity, id)->
	length = searchEntity.length
	count = 0
	while(count < length)
		if searchEntity[count]._id+"" == id+""
			return count
		count++
|
|
@ -0,0 +1,38 @@
|
||||||
|
Project = require('../../models/Project').Project
logger = require('logger-sharelatex')
_ = require('underscore')
settings = require("settings-sharelatex")

# Compilers a project may be switched to; anything else is silently ignored.
safeCompilers = ["xelatex", "pdflatex", "latex", "lualatex"]

module.exports =
	# Set the LaTeX compiler for a project.
	# Unrecognised compilers are ignored (callback still fires, without error).
	# Calls back with (err) from the database update — previously any update
	# error was swallowed and the callback always fired with no arguments.
	setCompiler : (project_id, compiler, callback = ()->)->
		logger.log project_id:project_id, compiler:compiler, "setting the compiler"
		compiler = compiler.toLowerCase()
		if !_.contains safeCompilers, compiler
			return callback()
		conditions = {_id:project_id}
		update = {compiler:compiler}
		Project.update conditions, update, {}, (err)->
			# Propagate database errors; callers that ignore arguments are
			# unaffected (backward compatible).
			callback(err)

	# Set the spell check language for a project.
	# Only language codes listed in settings.languages are accepted; the empty
	# string is also allowed and means "spell checking off". Unsafe codes are
	# logged and ignored. Calls back with (err) from the database update.
	setSpellCheckLanguage: (project_id, languageCode, callback = ()->)->
		logger.log project_id:project_id, languageCode:languageCode, "setting the spell check language"
		languageIsSafe = false
		settings.languages.forEach (safeLang)->
			if safeLang.code == languageCode
				languageIsSafe = true

		# Empty code disables spell checking, so it is always permitted.
		if languageCode == ""
			languageIsSafe = true

		if languageIsSafe
			conditions = {_id:project_id}
			update = {spellCheckLanguage:languageCode}
			Project.update conditions, update, {}, (err)->
				callback(err)
		else
			logger.err project_id:project_id, languageCode:languageCode, "tryed to set unsafe language"
			callback()
|
|
@ -0,0 +1,19 @@
|
||||||
|
slReqIdHelper = require('soa-req-id')
ProjectEntityHandler = require "./ProjectEntityHandler"
Path = require "path"

module.exports = ProjectRootDocManager =
	# Scan every doc in the project for a \documentclass line inside a .tex
	# (or .Rtex) file and set that doc as the project's root doc.
	# Calls back with no arguments when no candidate is found.
	setRootDocAutomatically: (project_id, sl_req_id, callback = (error) ->) ->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		ProjectEntityHandler.getAllDocs project_id, sl_req_id, (error, docs) ->
			return callback(error) if error?
			root_doc_id = null
			for path, doc of docs
				for line in doc.lines || []
					# NOTE(review): no break after a match — if several docs
					# contain \documentclass, the last one scanned wins;
					# confirm this is intended.
					if Path.extname(path).match(/\.R?tex$/) and line.match(/\\documentclass/)
						root_doc_id = doc._id
			if root_doc_id?
				ProjectEntityHandler.setRootDoc project_id, root_doc_id, sl_req_id, callback
			else
				callback()
|
||||||
|
|
|
@ -0,0 +1,10 @@
|
||||||
|
Project = require('../../models/Project').Project
logger = require('logger-sharelatex')

module.exports =
	# Stamp the project's lastUpdated field with the current time.
	# The callback is optional; when supplied it is invoked with no arguments
	# once the update completes (database errors are ignored, as before).
	markAsUpdated : (project_id, callback)->
		Project.update {_id:project_id}, {lastUpdated:Date.now()}, {}, (err)->
			callback() if callback?
|
|
@ -0,0 +1,59 @@
|
||||||
|
logger = require('logger-sharelatex')
User = require('../../models/User').User
AnalyticsManager = require("../Analytics/AnalyticsManager")
SubscriptionLocator = require "../Subscription/SubscriptionLocator"
Settings = require "settings-sharelatex"

module.exports = ReferalAllocator =
	# Credit the user identified by referal_id with the signup of new_user_id.
	# For "bonus" referrals the new user is pushed onto the referrer's
	# refered_users list and assignBonus is triggered; other sources only get
	# analytics tracking.
	allocate: (referal_id, new_user_id, referal_source, referal_medium, callback = ->)->
		if !referal_id?
			# NOTE(review): returns without ever invoking the callback when
			# there is no referal id — confirm callers do not wait on it.
			return logger.log new_user_id:new_user_id, "no referal for user"
		logger.log referal_id:referal_id, new_user_id:new_user_id, "allocating users referal"

		query = {"referal_id":referal_id}
		User.findOne query, (error, user) ->
			return callback(error) if error?
			return callback(new Error("user not found")) if !user? or !user._id?

			# Can be backgrounded
			AnalyticsManager.trackReferral user, referal_source, referal_medium

			if referal_source == "bonus"
				User.update query, {
					$push:
						refered_users: new_user_id
					$inc:
						refered_user_count: 1
				}, {}, (err)->
					if err?
						logger.err err:err, referal_id:referal_id, new_user_id:new_user_id, "something went wrong allocating referal"
						return callback(err)
					ReferalAllocator.assignBonus user._id, callback
			else
				callback()

	# Upgrade the referrer's features according to Settings.bonus_features,
	# keyed by how many users they have referred. Users with an active paid
	# plan (subscription.planCode set) are skipped.
	assignBonus: (user_id, callback = (error) ->) ->
		SubscriptionLocator.getUsersSubscription user_id, (error, subscription) ->
			return callback(error) if error?
			logger.log
				subscription: subscription,
				user_id: user_id,
				"checking user doesn't have a subsciption before assigning bonus"
			if !subscription? or !subscription.planCode?
				query = _id: user_id
				User.findOne query, (error, user) ->
					return callback(error) if error
					return callback(new Error("user not found")) if !user?
					logger.log
						user_id: user_id,
						refered_user_count: user.refered_user_count,
						bonus_features: Settings.bonus_features[user.refered_user_count],
						"assigning bonus"
					# Only upgrade when a bonus tier exists for this exact count.
					if user.refered_user_count? and Settings.bonus_features[user.refered_user_count]?
						User.update query, { $set: features: Settings.bonus_features[user.refered_user_count] }, callback
					else
						callback()
			else
				callback()
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,34 @@
|
||||||
|
# Express middleware that records referral attribution data from query string
# parameters into the session.

# Short ?rm= codes mapped to the medium names stored in the session.
MEDIUM_BY_CODE =
	"fb": "facebook"
	"t" : "twitter"
	"gp": "google_plus"
	"e" : "email"
	"d" : "direct"

# Short ?rs= codes mapped to the referral source names stored in the session.
SOURCE_BY_CODE =
	"b" : "bonus"
	"ps": "public_share"
	"ci": "collaborator_invite"

module.exports =

	use: (req, res, next)->
		if req.query?
			# The referral id may arrive under several parameter names.
			if req.query.referal?
				req.session.referal_id = req.query.referal
			else if req.query.r? # Short hand for referal
				req.session.referal_id = req.query.r
			else if req.query.fb_ref?
				req.session.referal_id = req.query.fb_ref

			if req.query.rm? # referal medium e.g. twitter, facebook, email
				# Unknown codes leave the session untouched, matching the
				# original switch with no default branch.
				medium = MEDIUM_BY_CODE[req.query.rm]
				req.session.referal_medium = medium if medium?

			if req.query.rs? # referal source e.g. project share, bonus
				source = SOURCE_BY_CODE[req.query.rs]
				req.session.referal_source = source if source?

		next()
|
|
@ -0,0 +1,10 @@
|
||||||
|
logger = require('logger-sharelatex')
ReferalHandler = require('./ReferalHandler')

module.exports =
	# Render the referral bonus page for the logged in user, listing the users
	# they have referred and the count.
	bonus: (req, res)->
		ReferalHandler.getReferedUserIds req.session.user._id, (err, refered_users)->
			# NOTE(review): err is ignored; on failure refered_users may be
			# undefined — the (refered_users or []) guard covers the count but
			# the raw value is still handed to the template.
			res.render "referal/bonus",
				title: "Bonus - Please recommend us"
				refered_users: refered_users
				refered_user_count: (refered_users or []).length
|
|
@ -0,0 +1,7 @@
|
||||||
|
User = require('../../models/User').User

module.exports =
	# Look up the ids of the users referred by user_id.
	# Calls back with (error, refered_users); refered_users is always an array
	# (empty when the user has referred nobody).
	getReferedUserIds: (user_id, callback)->
		User.findById user_id, (err, user)->
			# Propagate lookup failures instead of crashing when user is null.
			return callback(err) if err?
			return callback(new Error("user not found"), []) if !user?
			refered_users = user.refered_users || []
			# Bug fix: this previously called back with the *string* "null" as
			# the error argument, which is truthy and tripped error checks in
			# every caller.
			callback null, refered_users
|
|
@ -0,0 +1,11 @@
|
||||||
|
User = require("../../models/User").User

module.exports = RefererMiddleware =
	# Express middleware: copy the logged in user's referal_id from the
	# database onto the session user, so views can build referral links.
	getUserReferalId: (req, res, next) ->
		# Nothing to do for anonymous requests.
		unless req.session? and req.session.user?
			return next()
		User.findById req.session.user._id, (error, user) ->
			return next(error) if error?
			req.session.user.referal_id = user.referal_id
			next()
|
|
@ -0,0 +1,38 @@
|
||||||
|
SecurityManager = require '../../managers/SecurityManager'

module.exports = AuthorizationManager =
	# Resolve the privilege level a user has on a project via SecurityManager.
	# Calls back with (null, true, privilegeLevel) or (null, false).
	getPrivilegeLevelForProject: (
		project, user,
		callback = (error, canAccess, privilegeLevel)->
	) ->
		# This is not tested because eventually this function should be brought into
		# this module.
		SecurityManager.userCanAccessProject user, project, (canAccess, privilegeLevel) ->
			if canAccess
				callback null, true, privilegeLevel
			else
				callback null, false

	# Cache the resolved privilege level on the (socket) client for later checks.
	setPrivilegeLevelOnClient: (client, privilegeLevel) ->
		client.set("privilege_level", privilegeLevel)

	# Convenience wrappers around ensureClientHasPrivilegeLevelForProject for
	# the three access tiers.
	ensureClientCanViewProject: (client, callback = (error, project_id)->) ->
		@ensureClientHasPrivilegeLevelForProject client, ["owner", "readAndWrite", "readOnly"], callback

	ensureClientCanEditProject: (client, callback = (error, project_id)->) ->
		@ensureClientHasPrivilegeLevelForProject client, ["owner", "readAndWrite"], callback

	ensureClientCanAdminProject: (client, callback = (error, project_id)->) ->
		@ensureClientHasPrivilegeLevelForProject client, ["owner"], callback

	# Check the privilege level previously stored on the client against the
	# accepted levels and call back with the client's project_id on success.
	# NOTE(review): the callback is never invoked when the stored level or
	# project_id is missing, or when the level is not in `levels` — the
	# request silently hangs; confirm callers expect this.
	ensureClientHasPrivilegeLevelForProject: (client, levels, callback = (error, project_id)->) ->
		client.get "privilege_level", (error, level) ->
			return callback(error) if error?
			if level?
				client.get "project_id", (error, project_id) ->
					return callback(error) if error?
					if project_id?
						if levels.indexOf(level) > -1
							callback null, project_id
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,24 @@
|
||||||
|
Settings = require('settings-sharelatex')
redis = require('redis')
rclient = redis.createClient(Settings.redis.web.port, Settings.redis.web.host)
rclient.auth(Settings.redis.web.password)

# Redis key holding the rolling attempt count for an email address.
buildKey = (k)->
	return "LoginRateLimit:#{k}"

ONE_MIN = 60
# Maximum login attempts allowed within the key's 2 minute expiry window.
ATTEMPT_LIMIT = 10

module.exports =
	# Record a login attempt for this email and report whether it is allowed.
	# Calls back with (err, allow); allow is false once the attempt count in
	# the current window exceeds ATTEMPT_LIMIT.
	processLoginRequest: (email, callback)->
		multi = rclient.multi()
		multi.incr(buildKey(email))
		multi.get(buildKey(email))
		multi.expire(buildKey(email), ONE_MIN * 2)
		multi.exec (err, results)->
			# results[1] is the post-increment count returned by GET (a string;
			# the <= comparison coerces it numerically).
			# NOTE(review): if err is set, results may be undefined and this
			# line would throw — confirm redis multi error handling upstream.
			loginCount = results[1]
			allow = loginCount <= ATTEMPT_LIMIT
			callback err, allow

	# Clear the attempt counter after a successful login.
	recordSuccessfulLogin: (email, callback = ->)->
		rclient.del buildKey(email), callback
|
|
@ -0,0 +1,14 @@
|
||||||
|
request = require 'request'
Settings = require 'settings-sharelatex'
logger = require 'logger-sharelatex'

module.exports = SpellingController =
	# Proxy a /spelling/* request to the spelling service, rewriting the path
	# to be scoped to the logged in user, and stream the response back.
	proxyRequestToSpellingApi: (req, res, next) ->
		url = req.url.slice("/spelling".length)
		url = "/user/#{req.session.user._id}#{url}"
		req.headers["Host"] = Settings.apis.spelling.host
		getReq = request(url: Settings.apis.spelling.url + url, method: req.method, headers: req.headers, json: req.body)
		getReq.pipe(res)
		getReq.on "error", (error) ->
			logger.error err: error, "Spelling API error"
			# NOTE(review): if the error fires after the pipe has started
			# writing, headers are already sent and this res.send may throw.
			res.send 500
|
|
@ -0,0 +1,62 @@
|
||||||
|
logger = require("logger-sharelatex")
Project = require("../../models/Project").Project
User = require("../../models/User").User
SubscriptionLocator = require("./SubscriptionLocator")
Settings = require("settings-sharelatex")

module.exports =

	# How many collaborators the project owner's plan allows.
	# Falls back to the default plan's allowance when the owner has no
	# explicit features. A negative number means unlimited (see below).
	allowedNumberOfCollaboratorsInProject: (project_id, callback) ->
		getOwnerOfProject project_id, (error, owner)->
			return callback(error) if error?
			if owner.features? and owner.features.collaborators?
				callback null, owner.features.collaborators
			else
				callback null, Settings.defaultPlanCode.collaborators

	# Current collaborator count: read/write plus read-only members.
	currentNumberOfCollaboratorsInProject: (project_id, callback) ->
		Project.findById project_id, 'collaberator_refs readOnly_refs', (error, project) ->
			return callback(error) if error?
			callback null, (project.collaberator_refs.length + project.readOnly_refs.length)

	# True when the project already has as many collaborators as the owner's
	# plan allows; a negative allowance is treated as unlimited.
	isCollaboratorLimitReached: (project_id, callback = (error, limit_reached)->) ->
		@allowedNumberOfCollaboratorsInProject project_id, (error, allowed_number) =>
			return callback(error) if error?
			@currentNumberOfCollaboratorsInProject project_id, (error, current_number) =>
				return callback(error) if error?
				if current_number < allowed_number or allowed_number < 0
					callback null, false
				else
					callback null, true

	# Combined check used by the subscription dashboard.
	# NOTE(review): the errs from the two inner calls are shadowed and never
	# passed to the callback (it always gets null) — confirm intended.
	userHasSubscriptionOrFreeTrial: (user, callback = (err, hasSubscriptionOrTrial, subscription)->) ->
		@userHasSubscription user, (err, hasSubscription, subscription)=>
			@userHasFreeTrial user, (err, hasFreeTrial)=>
				logger.log user_id:user._id, subscription:subscription, hasFreeTrial:hasFreeTrial, hasSubscription:hasSubscription, "checking if user has subscription or free trial"
				callback null, hasFreeTrial or hasSubscription, subscription

	# True when the user has a non-expired Recurly-backed subscription.
	userHasSubscription: (user, callback = (err, hasSubscription, subscription)->) ->
		logger.log user_id:user._id, "checking if user has subscription"
		SubscriptionLocator.getUsersSubscription user._id, (err, subscription)->
			logger.log user:user, subscription:subscription, "checking if user has subscription"
			hasValidSubscription = subscription? and subscription.recurlySubscription_id? and subscription?.state != "expired"
			callback err, hasValidSubscription, subscription

	# True when the user has a free trial record with an expiry date.
	# NOTE(review): passes the whole user object where userHasSubscription
	# passes user._id — confirm SubscriptionLocator accepts both.
	userHasFreeTrial: (user, callback = (err, hasFreeTrial, subscription)->) ->
		logger.log user_id:user._id, "checking if user has free trial"
		SubscriptionLocator.getUsersSubscription user, (err, subscription)->
			callback err, subscription? and subscription.freeTrial? and subscription.freeTrial.expiresAt?, subscription

	# True when a group subscription already holds membersLimit members.
	# NOTE(review): throws if the user has no subscription (subscription is
	# null) — callers appear to guarantee a group plan exists; confirm.
	hasGroupMembersLimitReached: (user_id, callback)->
		SubscriptionLocator.getUsersSubscription user_id, (err, subscription)->
			limitReached = subscription.member_ids.length >= subscription.membersLimit
			logger.log user_id:user_id, limitReached:limitReached, currentTotal: subscription.member_ids.length, membersLimit: subscription.membersLimit, "checking if subscription members limit has been reached"

			callback(null, limitReached)

# File-private helper: resolve the owning User document of a project.
getOwnerOfProject = (project_id, callback)->
	Project.findById project_id, 'owner_ref', (error, project) ->
		return callback(error) if error?
		User.findById project.owner_ref, (error, owner) ->
			callback(error, owner)
|
||||||
|
|
|
@ -0,0 +1,9 @@
|
||||||
|
Settings = require("settings-sharelatex")

module.exports =

	# Look up a plan by planCode in the locally configured Settings.plans.
	# Returns the plan object, or null when no plan matches.
	findLocalPlanInSettings: (planCode) ->
		matching = (plan for plan in Settings.plans when plan.planCode == planCode)
		return if matching.length > 0 then matching[0] else null
|
||||||
|
|
|
@ -0,0 +1,181 @@
|
||||||
|
querystring = require 'querystring'
crypto = require 'crypto'
request = require 'request'
Settings = require "settings-sharelatex"
xml2js = require "xml2js"
logger = require("logger-sharelatex")

# Thin wrapper around the Recurly v2 REST API: signing for recurly.js forms,
# subscription/account lookups, and XML response parsing.
module.exports = RecurlyWrapper =
	apiUrl : "https://api.recurly.com/v2"

	# Perform an authenticated request against the Recurly API.
	# Any status other than 200/201/204 is converted into a string error;
	# the raw response and body are always passed through.
	apiRequest : (options, callback) ->
		options.url = @apiUrl + "/" + options.url
		options.headers =
			"Authorization" : "Basic " + new Buffer(Settings.apis.recurly.apiKey).toString("base64")
			"Accept" : "application/xml"
			"Content-Type" : "application/xml; charset=utf-8"
		request options, (error, response, body) ->
			unless error? or response.statusCode == 200 or response.statusCode == 201 or response.statusCode == 204
				error = "Recurly API returned with status code: #{response.statusCode}"
			callback(error, response, body)

	# Produce a signed parameter string for recurly.js:
	# "<hmac-sha1 hex>|<query string>" over the flattened parameters plus a
	# random nonce and a unix timestamp.
	sign : (parameters, callback) ->
		# Flatten nested objects into rails-style bracketed keys,
		# e.g. {subscription: {plan_code: x}} -> "subscription[plan_code]".
		nestAttributesForQueryString = (attributes, base) ->
			newAttributes = {}
			for key, value of attributes
				if base?
					newKey = "#{base}[#{key}]"
				else
					newKey = key

				if typeof value == "object"
					for key, value of nestAttributesForQueryString(value, newKey)
						newAttributes[key] = value
				else
					newAttributes[newKey] = value

			return newAttributes

		crypto.randomBytes 32, (error, buffer) ->
			return callback error if error?
			parameters.nonce = buffer.toString "base64"
			parameters.timestamp = Math.round((new Date()).getTime() / 1000)

			unsignedQuery = querystring.stringify nestAttributesForQueryString(parameters)

			signed = crypto.createHmac("sha1", Settings.apis.recurly.privateKey).update(unsignedQuery).digest("hex")
			signature = "#{signed}|#{unsignedQuery}"

			callback null, signature

	# Fetch a subscription by id. options.recurlyJsResult switches to the
	# recurly_js/result endpoint (token lookup); options.includeAccount also
	# resolves the owning account via a second request.
	getSubscription: (subscriptionId, options, callback) ->
		# Support the two-argument form getSubscription(id, callback).
		callback = options unless callback?
		options ||= {}

		if options.recurlyJsResult
			url = "recurly_js/result/#{subscriptionId}"
		else
			url = "subscriptions/#{subscriptionId}"

		@apiRequest({
			url: url
		}, (error, response, body) =>
			return callback(error) if error?
			@_parseSubscriptionXml body, (error, recurlySubscription) =>
				return callback(error) if error?
				if options.includeAccount
					if recurlySubscription.account? and recurlySubscription.account.url?
						# The account id is the tail of the account href.
						accountId = recurlySubscription.account.url.match(/accounts\/(.*)/)[1]
					else
						return callback "I don't understand the response from Recurly"

					@getAccount accountId, (error, account) ->
						return callback(error) if error?
						recurlySubscription.account = account
						callback null, recurlySubscription

				else
					callback null, recurlySubscription
		)

	# Fetch and parse a Recurly account record.
	getAccount: (accountId, callback) ->
		@apiRequest({
			url: "accounts/#{accountId}"
		}, (error, response, body) =>
			return callback(error) if error?
			@_parseAccountXml body, callback
		)

	# Change a subscription's plan and/or timeframe via PUT.
	updateSubscription: (subscriptionId, options, callback) ->
		logger.log subscriptionId:subscriptionId, options:options, "telling recurly to update subscription"
		requestBody = """
			<subscription>
			  <plan_code>#{options.plan_code}</plan_code>
			  <timeframe>#{options.timeframe}</timeframe>
			</subscription>
		"""
		@apiRequest({
			url : "subscriptions/#{subscriptionId}"
			method : "put"
			body : requestBody
		}, (error, response, responseBody) =>
			return callback(error) if error?
			@_parseSubscriptionXml responseBody, callback
		)

	# Cancel a subscription (stops renewal at period end).
	cancelSubscription: (subscriptionId, callback) ->
		logger.log subscriptionId:subscriptionId, "telling recurly to cancel subscription"
		@apiRequest({
			url: "subscriptions/#{subscriptionId}/cancel",
			method: "put"
		}, (error, response, body) ->
			callback(error)
		)

	# Undo a pending cancellation.
	reactivateSubscription: (subscriptionId, callback) ->
		logger.log subscriptionId:subscriptionId, "telling recurly to reactivating subscription"
		@apiRequest({
			url: "subscriptions/#{subscriptionId}/reactivate",
			method: "put"
		}, (error, response, body) ->
			callback(error)
		)

	# Parse an XML body and extract the <subscription> element.
	_parseSubscriptionXml: (xml, callback) ->
		@_parseXml xml, (error, data) ->
			return callback(error) if error?
			if data? and data.subscription?
				recurlySubscription = data.subscription
			else
				return callback "I don't understand the response from Recurly"
			callback null, recurlySubscription

	# Parse an XML body and extract the <account> element.
	_parseAccountXml: (xml, callback) ->
		@_parseXml xml, (error, data) ->
			return callback(error) if error?
			if data? and data.account?
				account = data.account
			else
				return callback "I don't understand the response from Recurly"
			callback null, account

	# Parse Recurly XML and post-process xml2js output using the type
	# annotations Recurly puts in attributes: nil -> null, href -> url,
	# integer/datetime -> native values, array -> flattened JS array.
	_parseXml: (xml, callback) ->
		convertDataTypes = (data) ->
			if data? and data["$"]?
				if data["$"]["nil"] == "nil"
					data = null
				else if data["$"].href?
					data.url = data["$"].href
					delete data["$"]
				else if data["$"]["type"] == "integer"
					data = parseInt(data["_"], 10)
				else if data["$"]["type"] == "datetime"
					data = new Date(data["_"])
				else if data["$"]["type"] == "array"
					delete data["$"]
					array = []
					for key, value of data
						if value instanceof Array
							array = array.concat(convertDataTypes(value))
						else
							array.push(convertDataTypes(value))
					data = array

			# Recurse into arrays and plain objects.
			if data instanceof Array
				data = (convertDataTypes(entry) for entry in data)
			else if typeof data == "object"
				for key, value of data
					data[key] = convertDataTypes(value)
			return data

		parser = new xml2js.Parser(
			explicitRoot : true
			explicitArray : false
		)
		parser.parseString xml, (error, data) ->
			return callback(error) if error?
			result = convertDataTypes(data)
			callback null, result
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,21 @@
|
||||||
|
async = require 'async'
logger = require 'logger-sharelatex'
SubscriptionUpdater = require("./SubscriptionUpdater")
SubscriptionLocator = require("./SubscriptionLocator")
AnalyticsManager = require("../Analytics/AnalyticsManager")

module.exports = SubscriptionBackgroundJobs =
	# TODO: Remove this one month after the ability to start free trials was removed
	# Find every expired free trial and downgrade it, one at a time
	# (async.series), tracking an analytics event for each.
	# Calls back with (error, subscriptions) — the list that was processed.
	downgradeExpiredFreeTrials: (callback = (error, subscriptions)->) ->
		SubscriptionLocator.expiredFreeTrials (error, subscriptions) =>
			return callback(error) if error?
			logger.log total_subscriptions:subscriptions.length, "downgraging subscriptions"
			downgrades = []
			for subscription in subscriptions
				# do(...) captures the loop variable for the deferred task.
				do (subscription) =>
					downgrades.push (cb) =>
						logger.log subscription: subscription, "downgrading free trial"
						AnalyticsManager.trackFreeTrialExpired subscription.admin_id
						SubscriptionUpdater.downgradeFreeTrial(subscription, cb)
			async.series downgrades, (error) -> callback(error, subscriptions)
|
||||||
|
|
|
@ -0,0 +1,169 @@
|
||||||
|
SecurityManager = require '../../managers/SecurityManager'
SubscriptionHandler = require './SubscriptionHandler'
PlansLocator = require("./PlansLocator")
SubscriptionFormatters = require("./SubscriptionFormatters")
SubscriptionViewModelBuilder = require('./SubscriptionViewModelBuilder')
LimitationsManager = require("./LimitationsManager")
RecurlyWrapper = require './RecurlyWrapper'
Settings = require 'settings-sharelatex'
logger = require('logger-sharelatex')

module.exports = SubscriptionController =

	# Render the plans/pricing page. Logged out visitors get plan links
	# redirected through registration; ?variant=X selects an A/B view.
	plansPage: (req, res, next) ->
		plans = SubscriptionViewModelBuilder.buildViewModel()
		if !req.session.user?
			for plan in plans
				plan.href = "/register?redir=#{plan.href}"
		viewName = "subscriptions/plans"
		if req.query.variant?
			viewName += req.query.variant
		logger.log viewName:viewName, "showing plans page"
		res.render viewName,
			title: "Plans and Pricing"
			plans: plans

	#get to show the recurly.js page
	# Users who already have a subscription, or ask for an unknown plan,
	# are bounced back to the subscription dashboard.
	paymentPage: (req, res, next) ->
		SecurityManager.getCurrentUser req, (error, user) =>
			return next(error) if error?
			plan = PlansLocator.findLocalPlanInSettings(req.query.planCode)
			LimitationsManager.userHasSubscription user, (err, hasSubscription)->
				if hasSubscription or !plan?
					res.redirect "/user/subscription"
				else
					RecurlyWrapper.sign {
						subscription:
							plan_code : req.query.planCode
							account_code: user.id
					}, (error, signature) ->
						return next(error) if error?
						res.render "subscriptions/new",
							title : "Subscribe"
							plan_code: req.query.planCode
							recurlyConfig: JSON.stringify
								currency: "USD"
								subdomain: Settings.apis.recurly.subdomain
							subscriptionFormOptions: JSON.stringify
								acceptedCards: ['discover', 'mastercard', 'visa']
								target : "#subscribeForm"
								signature : signature
								planCode : req.query.planCode
								successURL : "#{Settings.siteUrl}/user/subscription/create?_csrf=#{req.session._csrf}"
								accountCode : user.id
								enableCoupons: true
								acceptPaypal: true
								account :
									firstName : user.first_name
									lastName : user.last_name
									email : user.email

	# Subscription dashboard; users with neither a subscription nor a free
	# trial are redirected to the plans page.
	userSubscriptionPage: (req, res, next) ->
		SecurityManager.getCurrentUser req, (error, user) =>
			return next(error) if error?
			LimitationsManager.userHasSubscriptionOrFreeTrial user, (err, hasSubOrFreeTrial)->
				if !hasSubOrFreeTrial
					logger.log user: user, "redirecting to plans"
					res.redirect "/user/subscription/plans"
				else
					SubscriptionViewModelBuilder.buildUsersSubscriptionViewModel user, (error, subscription) ->
						return next(error) if error?
						logger.log user: user, subscription:subscription, hasSubOrFreeTrial:hasSubOrFreeTrial, "showing subscription dashboard"
						plans = SubscriptionViewModelBuilder.buildViewModel()
						res.render "subscriptions/dashboard",
							title: "Your Subscription"
							plans: plans
							subscription: subscription
							subscriptionTabActive: true

	# Recurly billing details form; only meaningful with an active subscription.
	editBillingDetailsPage: (req, res, next) ->
		SecurityManager.getCurrentUser req, (error, user) ->
			return next(error) if error?
			LimitationsManager.userHasSubscription user, (err, hasSubscription)->
				if !hasSubscription
					res.redirect "/user/subscription"
				else
					RecurlyWrapper.sign {
						account_code: user.id
					}, (error, signature) ->
						return next(error) if error?
						res.render "subscriptions/edit-billing-details",
							title : "Update Billing Details"
							recurlyConfig: JSON.stringify
								currency: "USD"
								subdomain: Settings.apis.recurly.subdomain
							signature : signature
							successURL : "#{Settings.siteUrl}/user/subscription/update"
							user :
								id : user.id

	# Recurly.js success callback: create our subscription record from the
	# posted recurly token and show the thank-you page (even on handler error,
	# which is only logged — as before).
	createSubscription: (req, res, next)->
		SecurityManager.getCurrentUser req, (error, user) ->
			# Bug fix: this previously called the undefined name `callback`,
			# which threw instead of reporting the error.
			return next(error) if error?
			subscriptionId = req.body.recurly_token
			logger.log subscription_id: subscriptionId, user_id:user._id, "creating subscription"
			SubscriptionHandler.createSubscription user, subscriptionId, (err)->
				if err?
					logger.err err:err, user_id:user._id, "something went wrong creating subscription"
				res.redirect "/user/subscription/thank-you"

	# Thank-you page rendered after a successful subscription.
	successful_subscription: (req, res)->
		SecurityManager.getCurrentUser req, (error, user) =>
			SubscriptionViewModelBuilder.buildUsersSubscriptionViewModel user, (error, subscription) ->
				res.render "subscriptions/successful_subscription",
					title: "Thank you!"
					subscription:subscription

	# Cancel the current user's subscription, then return to the dashboard.
	cancelSubscription: (req, res, next) ->
		SecurityManager.getCurrentUser req, (error, user) ->
			logger.log user_id:user._id, "canceling subscription"
			return next(error) if error?
			SubscriptionHandler.cancelSubscription user, (err)->
				if err?
					logger.err err:err, user_id:user._id, "something went wrong canceling subscription"
				res.redirect "/user/subscription"

	# Switch the current user's subscription to the posted plan_code.
	# Bug fix: the body calls next(error) but the signature previously
	# omitted `next`; Express passes it as the third argument, so adding the
	# parameter is backward compatible.
	updateSubscription: (req, res, next)->
		SecurityManager.getCurrentUser req, (error, user) ->
			return next(error) if error?
			planCode = req.body.plan_code
			logger.log planCode: planCode, user_id:user._id, "updating subscription"
			SubscriptionHandler.updateSubscription user, planCode, (err)->
				if err?
					logger.err err:err, user_id:user._id, "something went wrong updating subscription"
				res.redirect "/user/subscription"

	# Undo a pending cancellation. Same missing-`next` fix as above.
	reactivateSubscription: (req, res, next)->
		SecurityManager.getCurrentUser req, (error, user) ->
			logger.log user_id:user._id, "reactivating subscription"
			return next(error) if error?
			SubscriptionHandler.reactivateSubscription user, (err)->
				if err?
					logger.err err:err, user_id:user._id, "something went wrong reactivating subscription"
				res.redirect "/user/subscription"

	# Webhook endpoint for Recurly push notifications.
	recurlyCallback: (req, res)->
		logger.log data: req.body, "received recurly callback"
		# we only care if a subscription has exipired
		if req.body? and req.body["expired_subscription_notification"]?
			recurlySubscription = req.body["expired_subscription_notification"].subscription
			SubscriptionHandler.recurlyCallback recurlySubscription, ->
				res.send 200
		else
			res.send 200

	# Body parser for the webhook: buffer the raw XML and parse it with
	# RecurlyWrapper before the route handler runs.
	recurlyNotificationParser: (req, res, next) ->
		xml = ""
		req.on "data", (chunk) ->
			xml += chunk
		req.on "end", () ->
			RecurlyWrapper._parseXml xml, (error, body) ->
				return next(error) if error?
				req.body = body
				next()
|
|
@ -0,0 +1,15 @@
|
||||||
|
dateformat = require 'dateformat'

module.exports =

	# Format an integer number of cents as a dollar string,
	# e.g. 1050 -> "$10.50", 5 -> "$0.05", 0 -> "$0.00".
	formatPrice: (priceInCents) ->
		digits = priceInCents + ""
		# Pad to at least three characters so there is always at least one
		# dollars digit in front of the two cents digits.
		digits = "0" + digits while digits.length < 3
		cents = digits.slice(-2)
		dollars = digits.slice(0, -2)
		return "$#{dollars}.#{cents}"

	# Format a date like "1st January 2014".
	formatDate: (date) ->
		dateformat date, "dS mmmm yyyy"
|
|
@ -0,0 +1,34 @@
|
||||||
|
SubscriptionGroupHandler = require("./SubscriptionGroupHandler")
logger = require("logger-sharelatex")
SubscriptionLocator = require("./SubscriptionLocator")

module.exports =

	# POST /subscription/group/user
	# Add the user identified by req.body.email to the group subscription
	# administered by the logged-in user. Responds with JSON describing the
	# (possibly newly created) user; result.limitReached is set when the
	# group is already full.
	addUserToGroup: (req, res)->
		adminUserId = req.session.user._id
		newEmail = req.body.email
		logger.log adminUserId:adminUserId, newEmail:newEmail, "adding user to group subscription"
		SubscriptionGroupHandler.addUserToGroup adminUserId, newEmail, (err, user)->
			result =
				user:user
			if err and err.limitReached
				result.limitReached = true
			res.json(result)

	# DELETE /subscription/group/user/:user_id
	# Remove a member from the logged-in admin's group subscription.
	removeUserFromGroup: (req, res)->
		adminUserId = req.session.user._id
		userToRemove_id = req.params.user_id
		logger.log adminUserId:adminUserId, userToRemove_id:userToRemove_id, "removing user from group subscription"
		SubscriptionGroupHandler.removeUserFromGroup adminUserId, userToRemove_id, ->
			res.send()

	# GET /subscription/group
	# Render the group admin page; users without a group plan are sent home.
	# Fixes: errors from the subscription lookup were ignored and a user
	# with no subscription at all crashed reading `.groupPlan` of null.
	renderSubscriptionGroupAdminPage: (req, res, next)->
		user_id = req.session.user._id
		SubscriptionLocator.getUsersSubscription user_id, (err, subscription)->
			return next(err) if err?
			if !subscription? or !subscription.groupPlan
				return res.redirect("/")
			SubscriptionGroupHandler.getPopulatedListOfMembers user_id, (err, users)->
				return next(err) if err?
				res.render "subscriptions/group_admin",
					title: 'Group Admin'
					users: users
					subscription: subscription
|
@ -0,0 +1,45 @@
|
||||||
|
async = require("async")
_ = require("underscore")
UserCreator = require("../User/UserCreator")
SubscriptionUpdater = require("./SubscriptionUpdater")
SubscriptionLocator = require("./SubscriptionLocator")
UserLocator = require("../User/UserLocator")
LimitationsManager = require("./LimitationsManager")

module.exports =

	# Add a user (created as a holding account if the email is unknown) to
	# the admin's group subscription. Calls back with a lightweight view
	# model of the user, or with {limitReached:true} when the group is full.
	# Fixes: errors from the user lookup/creation and the limit check were
	# silently ignored, crashing later on an undefined `user`.
	addUserToGroup: (adminUser_id, newEmail, callback)->
		UserCreator.getUserOrCreateHoldingAccount newEmail, (err, user)->
			return callback(err) if err?
			LimitationsManager.hasGroupMembersLimitReached adminUser_id, (err, limitReached)->
				return callback(err) if err?
				if limitReached
					# callers inspect err.limitReached - keep this error shape
					return callback(limitReached:limitReached)
				SubscriptionUpdater.addUserToGroup adminUser_id, user._id, (err)->
					return callback(err) if err?
					callback(null, buildUserViewModel(user))

	# Remove a member from the admin's group subscription.
	removeUserFromGroup: (adminUser_id, userToRemove_id, callback)->
		SubscriptionUpdater.removeUserFromGroup adminUser_id, userToRemove_id, callback

	# Look up every member of the admin's subscription and call back with a
	# list of user view models (email, name, holding-account flag, id).
	# Fixes: lookup errors were previously ignored.
	getPopulatedListOfMembers: (adminUser_id, callback)->
		SubscriptionLocator.getUsersSubscription adminUser_id, (err, subscription)->
			return callback(err) if err?
			users = []
			jobs = _.map subscription.member_ids, (user_id)->
				return (cb)->
					UserLocator.findById user_id, (err, user)->
						return cb(err) if err?
						users.push(buildUserViewModel(user))
						cb()
			async.series jobs, (err)->
				callback(err, users)

# Project a full user document onto the fields the group admin page needs.
buildUserViewModel = (user)->
	email: user.email
	first_name: user.first_name
	last_name: user.last_name
	holdingAccount: user.holdingAccount
	_id: user._id
|
@ -0,0 +1,58 @@
|
||||||
|
RecurlyWrapper = require("./RecurlyWrapper")
Settings = require "settings-sharelatex"
User = require('../../models/User').User
logger = require('logger-sharelatex')
AnalyticsManager = require '../../Features/Analytics/AnalyticsManager'
SubscriptionUpdater = require("./SubscriptionUpdater")
LimitationsManager = require('./LimitationsManager')

module.exports =

	# Fetch the freshly created Recurly subscription and mirror it into our
	# own Subscription collection, then record the analytics event.
	createSubscription: (user, recurlySubscriptionId, callback)->
		RecurlyWrapper.getSubscription recurlySubscriptionId, {recurlyJsResult: true}, (error, recurlySubscription) ->
			return callback(error) if error?
			SubscriptionUpdater.syncSubscription recurlySubscription, user._id, (error) ->
				return callback(error) if error?
				AnalyticsManager.trackSubscriptionStarted user, recurlySubscription?.plan?.plan_code
				callback()

	# Switch an existing subscription to a new plan immediately; a no-op
	# (successful callback) when the user has no subscription.
	# Fixes: `err` from userHasSubscription was silently ignored.
	updateSubscription: (user, plan_code, callback)->
		logger.log user:user, plan_code:plan_code, "updating subscription"
		LimitationsManager.userHasSubscription user, (err, hasSubscription, subscription)->
			return callback(err) if err?
			if hasSubscription
				RecurlyWrapper.updateSubscription subscription.recurlySubscription_id, {plan_code: plan_code, timeframe: "now"}, (error, recurlySubscription) ->
					return callback(error) if error?
					SubscriptionUpdater.syncSubscription recurlySubscription, user._id, callback
			else
				callback()

	# Cancel the user's Recurly subscription (no-op without one) and track
	# the cancellation.
	cancelSubscription: (user, callback) ->
		LimitationsManager.userHasSubscription user, (err, hasSubscription, subscription)->
			return callback(err) if err?
			if hasSubscription
				RecurlyWrapper.cancelSubscription subscription.recurlySubscription_id, (error) ->
					return callback(error) if error?
					AnalyticsManager.trackSubscriptionCancelled user
					callback()
			else
				callback()

	# Reactivate a canceled Recurly subscription (no-op without one).
	reactivateSubscription: (user, callback) ->
		LimitationsManager.userHasSubscription user, (err, hasSubscription, subscription)->
			return callback(err) if err?
			if hasSubscription
				RecurlyWrapper.reactivateSubscription subscription.recurlySubscription_id, (error) ->
					return callback(error) if error?
					callback()
			else
				callback()

	# Handle an expired-subscription webhook: re-fetch the authoritative
	# state from Recurly and sync it onto the owning user's subscription.
	# The Recurly account_code is our user id.
	recurlyCallback: (recurlySubscription, callback) ->
		RecurlyWrapper.getSubscription recurlySubscription.uuid, includeAccount: true, (error, recurlySubscription) ->
			return callback(error) if error?
			User.findById recurlySubscription.account.account_code, (error, user) ->
				return callback(error) if error?
				# guard against stale account codes - previously this crashed
				# reading `user._id` when no user was found
				if !user?
					return callback(new Error("no user found for recurly callback"))
				SubscriptionUpdater.syncSubscription recurlySubscription, user._id, callback
|
@ -0,0 +1,20 @@
|
||||||
|
Subscription = require('../../models/Subscription').Subscription
logger = require("logger-sharelatex")
ObjectId = require('mongoose').Types.ObjectId

module.exports =

	# Find the subscription administered by the given user. Accepts either
	# a full user document or a bare user id.
	getUsersSubscription: (user_or_id, callback)->
		if user_or_id? and user_or_id._id?
			user_id = user_or_id._id
		else if user_or_id?
			user_id = user_or_id
		logger.log user_id:user_id, "getting users subscription"
		Subscription.findOne admin_id:user_id, callback

	# TODO: Remove this one month after the ability to start free trials was removed
	# Find subscriptions whose free trial has lapsed but which have not yet
	# been downgraded.
	expiredFreeTrials: (callback = (error, subscriptions)->) ->
		Subscription.find {
			"freeTrial.expiresAt": "$lt": new Date()
			"freeTrial.downgraded": "$ne": true
		}, callback
|
@ -0,0 +1,36 @@
|
||||||
|
AuthenticationController = require('../Authentication/AuthenticationController')
SubscriptionController = require('./SubscriptionController')
SubscriptionGroupController = require './SubscriptionGroupController'
Settings = require "settings-sharelatex"

module.exports =
	# Wire up every subscription-related route. Does nothing unless
	# subscriptions are enabled in the settings.
	apply: (app) ->
		return unless Settings.enableSubscriptions

		# public plan listing
		app.get '/user/subscription/plans', SubscriptionController.plansPage

		# the logged-in user's subscription dashboard
		app.get '/user/subscription', AuthenticationController.requireLogin(), SubscriptionController.userSubscriptionPage

		# payment and billing pages
		app.get '/user/subscription/new', AuthenticationController.requireLogin(), SubscriptionController.paymentPage
		app.get '/user/subscription/billing-details/edit', AuthenticationController.requireLogin(), SubscriptionController.editBillingDetailsPage
		app.get '/user/subscription/thank-you', AuthenticationController.requireLogin(), SubscriptionController.successful_subscription

		# group subscription administration
		app.get '/subscription/group', AuthenticationController.requireLogin(), SubscriptionGroupController.renderSubscriptionGroupAdminPage
		app.post '/subscription/group/user', AuthenticationController.requireLogin(), SubscriptionGroupController.addUserToGroup
		app.del '/subscription/group/user/:user_id', AuthenticationController.requireLogin(), SubscriptionGroupController.removeUserFromGroup

		# recurly webhook (recurly cannot send a CSRF token)
		app.post '/user/subscription/callback', SubscriptionController.recurlyNotificationParser, SubscriptionController.recurlyCallback
		app.ignoreCsrf("post", '/user/subscription/callback')

		# user-initiated changes to their subscription state
		app.post '/user/subscription/create', AuthenticationController.requireLogin(), SubscriptionController.createSubscription
		app.post '/user/subscription/update', AuthenticationController.requireLogin(), SubscriptionController.updateSubscription
		app.post '/user/subscription/cancel', AuthenticationController.requireLogin(), SubscriptionController.cancelSubscription
		app.post '/user/subscription/reactivate', AuthenticationController.requireLogin(), SubscriptionController.reactivateSubscription
|
@ -0,0 +1,80 @@
|
||||||
|
async = require("async")
_ = require("underscore")
Subscription = require('../../models/Subscription').Subscription
SubscriptionLocator = require("./SubscriptionLocator")
UserFeaturesUpdater = require("./UserFeaturesUpdater")
PlansLocator = require("./PlansLocator")
Settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
ObjectId = require('mongoose').Types.ObjectId

oneMonthInSeconds = 60 * 60 * 24 * 30

module.exports =

	# Mirror the state of a Recurly subscription into our Subscription
	# collection, creating the local record first if it does not exist yet.
	syncSubscription: (recurlySubscription, adminUser_id, callback) ->
		self = @
		logger.log adminUser_id:adminUser_id, recurlySubscription:recurlySubscription, "syncSubscription, creating new if subscription does not exist"
		SubscriptionLocator.getUsersSubscription adminUser_id, (err, subscription)->
			return callback(err) if err?
			if subscription?
				logger.log adminUser_id:adminUser_id, recurlySubscription:recurlySubscription, "subscription does exist"
				self._updateSubscription recurlySubscription, subscription, callback
			else
				logger.log adminUser_id:adminUser_id, recurlySubscription:recurlySubscription, "subscription does not exist, creating a new one"
				self._createNewSubscription adminUser_id, (err, subscription)->
					return callback(err) if err?
					self._updateSubscription recurlySubscription, subscription, callback

	# TODO: Remove this one month after the ability to start free trials was removed
	# Drop an expired free trial back to the default plan and flag it so we
	# never downgrade it twice.
	downgradeFreeTrial: (subscription, callback = (error)->) ->
		UserFeaturesUpdater.updateFeatures subscription.admin_id, Settings.defaultPlanCode, ->
			subscription.freeTrial.downgraded = true
			subscription.save callback

	# Add a user to the admin's group subscription ($addToSet keeps the
	# member list duplicate free) and grant them the group plan's features.
	addUserToGroup: (adminUser_id, user_id, callback)->
		logger.log adminUser_id:adminUser_id, user_id:user_id, "adding user into mongo subscription"
		searchOps =
			admin_id: adminUser_id
		insertOperation =
			"$addToSet": {member_ids:user_id}
		Subscription.findAndModify searchOps, insertOperation, (err, subscription)->
			# previously a failed/empty findAndModify crashed on subscription.planCode
			return callback(err) if err?
			UserFeaturesUpdater.updateFeatures user_id, subscription.planCode, callback

	# Remove a user from the group and drop them back to the default plan.
	removeUserFromGroup: (adminUser_id, user_id, callback)->
		searchOps =
			admin_id: adminUser_id
		removeOperation =
			"$pull": {member_ids:user_id}
		Subscription.update searchOps, removeOperation, (err)->
			return callback(err) if err?
			UserFeaturesUpdater.updateFeatures user_id, Settings.defaultPlanCode, callback

	# Create a fresh Subscription document for the admin; free trials are
	# not allowed on subscriptions created this way.
	_createNewSubscription: (adminUser_id, callback)->
		logger.log adminUser_id:adminUser_id, "creating new subscription"
		subscription = new Subscription(admin_id:adminUser_id)
		subscription.freeTrial.allowed = false
		subscription.save (err)->
			callback err, subscription

	# Copy plan and state details from the Recurly record onto our local
	# subscription, then refresh the features of the admin and every member.
	_updateSubscription: (recurlySubscription, subscription, callback)->
		logger.log recurlySubscription:recurlySubscription, subscription:subscription, "updaing subscription"
		plan = PlansLocator.findLocalPlanInSettings(recurlySubscription.plan.plan_code)
		if recurlySubscription.state == "expired"
			subscription.recurlySubscription_id = undefined
			subscription.planCode = Settings.defaultPlanCode
		else
			subscription.recurlySubscription_id = recurlySubscription.uuid
			subscription.freeTrial.expiresAt = undefined
			subscription.freeTrial.planCode = undefined
			subscription.freeTrial.allowed = true
			subscription.planCode = recurlySubscription.plan.plan_code
			if plan.groupPlan
				subscription.groupPlan = true
				subscription.membersLimit = plan.membersLimit
		subscription.save (err)->
			return callback(err) if err?
			# FIX: this previously read `subscription.members_id`, which does
			# not exist (the field used everywhere else is `member_ids`), so
			# group members never had their features refreshed.
			allIds = _.union subscription.member_ids, [subscription.admin_id]
			jobs = allIds.map (user_id)->
				return (cb)->
					UserFeaturesUpdater.updateFeatures user_id, subscription.planCode, cb
			async.parallel jobs, callback
|
@ -0,0 +1,63 @@
|
||||||
|
Settings = require('settings-sharelatex')
RecurlyWrapper = require("./RecurlyWrapper")
PlansLocator = require("./PlansLocator")
SubscriptionFormatters = require("./SubscriptionFormatters")
LimitationsManager = require("./LimitationsManager")
SubscriptionLocator = require("./SubscriptionLocator")
_ = require("underscore")

module.exports =

	# Build the view model for the user's subscription dashboard: either a
	# live Recurly-backed subscription, a legacy free trial, or an error
	# ("User has no subscription") when they have neither.
	# Fixes: the original checked `error?` - an undefined variable - instead
	# of `err`, so lookup and Recurly errors were never propagated and
	# crashed later when `recurlySubscription` was missing.
	buildUsersSubscriptionViewModel: (user, callback) ->
		SubscriptionLocator.getUsersSubscription user, (err, subscription)->
			return callback(err) if err?
			LimitationsManager.userHasFreeTrial user, (err, hasFreeTrial)->
				return callback(err) if err?
				LimitationsManager.userHasSubscription user, (err, hasSubscription)->
					return callback(err) if err?
					if hasSubscription
						plan = PlansLocator.findLocalPlanInSettings(subscription.planCode)
						RecurlyWrapper.getSubscription subscription.recurlySubscription_id, (err, recurlySubscription)->
							return callback(err) if err?
							callback null,
								name: plan.name
								nextPaymentDueAt: SubscriptionFormatters.formatDate(recurlySubscription.current_period_ends_at)
								state: recurlySubscription.state
								price: SubscriptionFormatters.formatPrice recurlySubscription.unit_amount_in_cents
								planCode: subscription.planCode
								groupPlan: subscription.groupPlan
					else if hasFreeTrial
						plan = PlansLocator.findLocalPlanInSettings(subscription.freeTrial.planCode)
						callback null,
							name: plan.name
							state: "free-trial"
							planCode: plan.planCode
							groupPlan: subscription.groupPlan
							expiresAt: SubscriptionFormatters.formatDate(subscription.freeTrial.expiresAt)
					else
						callback "User has no subscription"

	# Group the configured plans into the buckets the plans page renders:
	# personal, student, and group/individual split by billing period.
	buildViewModel : ->
		plans = Settings.plans

		result =
			allPlans: plans

		result.personalAccount = _.find plans, (plan)->
			plan.planCode == "personal"

		result.studentAccounts = _.filter plans, (plan)->
			plan.planCode.indexOf("student") != -1

		result.groupMonthlyPlans = _.filter plans, (plan)->
			plan.groupPlan and !plan.annual

		result.groupAnnualPlans = _.filter plans, (plan)->
			plan.groupPlan and plan.annual

		result.individualMonthlyPlans = _.filter plans, (plan)->
			!plan.groupPlan and !plan.annual and plan.planCode != "personal" and plan.planCode.indexOf("student") == -1

		result.individualAnnualPlans = _.filter plans, (plan)->
			!plan.groupPlan and plan.annual and plan.planCode.indexOf("student") == -1

		return result
|
@ -0,0 +1,16 @@
|
||||||
|
Settings = require "settings-sharelatex"
logger = require("logger-sharelatex")
User = require('../../models/User').User
PlansLocator = require("./PlansLocator")

module.exports =

	# Copy the feature flags of the given plan onto the user document and
	# call back with the feature set that was applied.
	# Fixes: an unknown plan_code previously crashed reading `plan.features`
	# of undefined; it now calls back with an Error instead.
	updateFeatures: (user_id, plan_code, callback = (err, features)->)->
		conditions = _id:user_id
		update = {}
		plan = PlansLocator.findLocalPlanInSettings(plan_code)
		logger.log user_id:user_id, plan:plan, plan_code:plan_code, "updating users features"
		if !plan?
			return callback(new Error("no plan found for plan_code: #{plan_code}"))
		update["features.#{key}"] = value for key, value of plan.features
		User.update conditions, update, (err)->
			callback err, plan.features
21
services/web/app/coffee/Features/Tags/TagsController.coffee
Normal file
21
services/web/app/coffee/Features/Tags/TagsController.coffee
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
TagsHandler = require("./TagsHandler")
logger = require("logger-sharelatex")

module.exports =

	# POST handler for tag changes on a project: deletes the tag named in
	# req.body.deletedTag if present, otherwise adds req.body.tag.
	processTagsUpdate: (req, res)->
		user_id = req.session.user._id
		project_id = req.params.project_id
		# log before the async handlers respond, not after
		logger.log user_id:user_id, project_id:project_id, body:req.body, "processing tag update"
		if req.body.deletedTag?
			tag = req.body.deletedTag
			TagsHandler.deleteTag user_id, project_id, tag, ->
				res.send()
		else
			tag = req.body.tag
			TagsHandler.addTag user_id, project_id, tag, ->
				res.send()

	# Return every tag belonging to the logged-in user. Kept best-effort
	# (an empty list is returned on failure) but failures are now logged
	# instead of being silently discarded.
	getAllTags: (req, res)->
		TagsHandler.getAllTags req.session.user._id, (err, allTags)->
			if err?
				logger.err err:err, user_id:req.session.user._id, "error getting all tags"
			res.send(allTags)
71
services/web/app/coffee/Features/Tags/TagsHandler.coffee
Normal file
71
services/web/app/coffee/Features/Tags/TagsHandler.coffee
Normal file
|
@ -0,0 +1,71 @@
|
||||||
|
_ = require('underscore')
settings = require("settings-sharelatex")
request = require("request")
logger = require("logger-sharelatex")

module.exports =

	# Ask the tags api to remove `tag` from a project.
	deleteTag: (user_id, project_id, tag, callback)->
		opts =
			uri: buildUri(user_id, project_id)
			json:
				name:tag
		logger.log user_id:user_id, project_id:project_id, tag:tag, "send delete tag to tags api"
		request.del opts, callback

	# Ask the tags api to add `tag` to a project.
	addTag: (user_id, project_id, tag, callback)->
		opts =
			uri: buildUri(user_id, project_id)
			json:
				name:tag
		logger.log user_id:user_id, project_id:project_id, tag:tag, "send add tag to tags api"
		request.post opts, callback

	# Fetch the raw tag list for a user. On any failure (network error or
	# non-200 status) calls back with an Error AND an empty list so callers
	# can degrade gracefully.
	requestTags: (user_id, callback)->
		opts =
			uri: "#{settings.apis.tags.url}/user/#{user_id}/tag"
			json: true
			timeout: 2000
		request.get opts, (err, res, body)->
			statusCode = if res? then res.statusCode else 500
			if err? or statusCode != 200
				e = new Error("something went wrong getting tags, #{err}, #{statusCode}")
				logger.err err:err
				callback(e, [])
			else
				callback(null, body)

	# Fetch all tags for a user, additionally grouped by project id.
	# Fixes: the inner groupTagsByProject callback shadowed `err`, so the
	# fetch error was always replaced by null and never reached callers.
	getAllTags: (user_id, callback)->
		@requestTags user_id, (err, allTags)=>
			allTags ?= []
			@groupTagsByProject allTags, (_groupErr, groupedByProject)->
				logger.log allTags:allTags, user_id:user_id, groupedByProject:groupedByProject, "getting all tags from tags api"
				callback err, allTags, groupedByProject

	# Delete every tag association a project has (e.g. when it is deleted).
	# The dead `buildUri` call the original made here has been removed - it
	# built the wrong (entity-level) path and was never used.
	removeProjectFromAllTags: (user_id, project_id, callback)->
		opts =
			uri:"#{settings.apis.tags.url}/user/#{user_id}/project/#{project_id}"
		logger.log user_id:user_id, project_id:project_id, "removing project_id from tags"
		request.del opts, callback

	# Invert a tag list into {project_id: [tags...]}; each tag is cloned
	# with its project_ids list stripped.
	groupTagsByProject: (tags, callback)->
		result = {}
		_.each tags, (tag)->
			_.each tag.project_ids, (project_id)->
				result[project_id] = []
		_.each tags, (tag)->
			_.each tag.project_ids, (project_id)->
				clonedTag = _.clone(tag)
				delete clonedTag.project_ids
				result[project_id].push(clonedTag)
		callback null, result

# Tag endpoint for a single user/project pair on the tags api.
buildUri = (user_id, project_id)->
	uri = "#{settings.apis.tags.url}/user/#{user_id}/project/#{project_id}/tag"
|
@ -0,0 +1,44 @@
|
||||||
|
path = require('path')
ProjectUploadManager = require('../Uploads/ProjectUploadManager')
ProjectOptionsHandler = require("../Project/ProjectOptionsHandler")
TemplatesPublisher = require("./TemplatesPublisher")
settings = require('settings-sharelatex')
fs = require('fs')
request = require('request')
uuid = require('node-uuid')
logger = require('logger-sharelatex')

module.exports =

	# Download the template zip referenced in the session, create a project
	# from it and redirect the user into the new project.
	# Fixes: a failed project creation previously crashed reading
	# `project._id` of undefined; it now responds with a 500.
	createProjectFromZipTemplate: (req, res)->
		logger.log body:req.session.templateData, "creating project from zip"
		if !req.session.templateData?
			return res.redirect "/project"

		dumpPath = "#{settings.path.dumpFolder}/#{uuid.v4()}"
		writeStream = fs.createWriteStream(dumpPath)
		zipUrl = req.session.templateData.zipUrl
		if zipUrl.indexOf("www") == -1
			zipUrl = "www.sharelatex.com#{zipUrl}"
		request("http://#{zipUrl}").pipe(writeStream)
		writeStream.on 'close', ->
			ProjectUploadManager.createProjectFromZipArchive req.session.user._id, req.session.templateData.templateName, dumpPath, (err, project)->
				if err?
					logger.err err:err, "error creating project from zip template"
					return res.send 500
				setCompiler project._id, req.session.templateData.compiler, ->
					# best-effort cleanup of the downloaded zip
					fs.unlink dumpPath, ->
						delete req.session.templateData
						res.redirect "/project/#{project._id}"

	# Publish a project as a public template via the templates api.
	publishProject: (user_id, project_id, callback)->
		logger.log user_id:user_id, project_id:project_id, "reciving request to publish project as template"
		TemplatesPublisher.publish user_id, project_id, callback

	# Remove a project from the public templates.
	unPublishProject: (user_id, project_id, callback)->
		logger.log user_id:user_id, project_id:project_id, "reciving request to unpublish project as template"
		TemplatesPublisher.unpublish user_id, project_id, callback

# Set the project's compiler if the template specified one, otherwise no-op.
setCompiler = (project_id, compiler, callback)->
	if compiler?
		ProjectOptionsHandler.setCompiler project_id, compiler, callback
	else
		callback()
|
@ -0,0 +1,7 @@
|
||||||
|
module.exports =
	# Stash the template query parameters (templateName, zipUrl, compiler,
	# ...) in the session so they survive the login redirect.
	saveTemplateDataInSession: (req, res, next)->
		req.session.templateData = req.query if req.query.templateName
		next()
|
@ -0,0 +1,21 @@
|
||||||
|
request = require("request")
settings = require("settings-sharelatex")
logger = require("logger-sharelatex")

module.exports =

	# Mark a project as a published template on the templates api.
	publish : (user_id, project_id, callback)->
		url = buildUrl(user_id, project_id)
		request.post url, (err)->
			if err?
				logger.err err:err, "something went wrong publishing project as template"
			callback err

	# Remove a project from the published templates.
	# Fixes: errors from the DELETE request were silently swallowed and the
	# callback always reported success; they are now logged and propagated.
	unpublish: (user_id, project_id, callback)->
		url = buildUrl(user_id, project_id)
		request.del url, (err)->
			if err?
				logger.err err:err, "something went wrong unpublishing project as template"
			callback err

# Templates-api endpoint for a single user/project pair.
buildUrl = (user_id, project_id)->
	url = "#{settings.apis.templates_api.url}/templates-api/user/#{user_id}/project/#{project_id}"
|
@ -0,0 +1,49 @@
|
||||||
|
tpdsUpdateHandler = require('./TpdsUpdateHandler')
logger = require('logger-sharelatex')
Path = require('path')
metrics = require("../../infrastructure/Metrics")

module.exports =
	# Handle an add/change pushed from the third party datastore.
	mergeUpdate: (req, res)->
		metrics.inc("tpds.merge-update")
		{filePath, user_id, projectName} = parseParams(req)
		logger.log user_id:user_id, filePath:filePath, fullPath:req.params[0], projectName:projectName, sl_req_id:req.sl_req_id, "reciving update request from tpds"
		tpdsUpdateHandler.newUpdate user_id, projectName, filePath, req, req.sl_req_id, (err)->
			logger.log user_id:user_id, filePath:filePath, fullPath:req.params[0], sl_req_id:req.sl_req_id, "sending response that tpdsUpdate has been completed"
			if err?
				logger.err err:err, user_id:user_id, filePath:filePath, "error reciving update from tpds"
				res.send(500)
			else
				logger.log user_id:user_id, filePath:filePath, projectName:projectName, "telling tpds update has been processed"
				res.send 200
			req.session.destroy()

	# Handle a delete pushed from the third party datastore.
	deleteUpdate: (req, res)->
		metrics.inc("tpds.delete-update")
		{filePath, user_id, projectName} = parseParams(req)
		logger.log user_id:user_id, filePath:filePath, sl_req_id:req.sl_req_id, projectName:projectName, fullPath:req.params[0], "reciving delete request from tpds"
		tpdsUpdateHandler.deleteUpdate user_id, projectName, filePath, req.sl_req_id, (err)->
			if err?
				logger.err err:err, user_id:user_id, filePath:filePath, "error reciving update from tpds"
				res.send(500)
			else
				logger.log user_id:user_id, filePath:filePath, projectName:projectName, "telling tpds delete has been processed"
				res.send 200
			req.session.destroy()

	# Split the wildcard path "/<project name>/<file path>" into its parts.
	# A bare "/<project name>" maps to the project root ("/").
	parseParams: parseParams = (req)->
		fullPath = Path.join("/", req.params[0])
		user_id = req.params.user_id
		separatorIndex = fullPath.indexOf('/', 1)
		if separatorIndex == -1
			filePath = "/"
			projectName = fullPath.substring(1)
		else
			filePath = fullPath.substring(separatorIndex)
			projectName = fullPath.substring(0, separatorIndex).replace("/","")
		return filePath:filePath, user_id:user_id, projectName:projectName
|
@ -0,0 +1,35 @@
|
||||||
|
User = require('../../models/User').User
settings = require('settings-sharelatex')
request = require "request"
logger = require('logger-sharelatex')
redis = require('redis')
rclient = redis.createClient(settings.redis.web.port, settings.redis.web.host)
rclient.auth(settings.redis.web.password)

LAST_TIME_POLL_HAPPEND_KEY = "LAST_TIME_POLL_HAPPEND_KEY"

self = module.exports =

	# Ask the third party datastore to poll every user who has linked
	# Dropbox, recording in redis when the poll was triggered.
	pollUsersWithDropbox: (callback)->
		self._getUserIdsWithDropbox (err, user_ids)->
			logger.log user_ids:user_ids, userCount:user_ids.length, "telling tpds to poll users with dropbox"
			self._markPollHappened()
			self._sendToTpds user_ids, callback

	# POST the id list to tpds; skipped entirely when nobody needs polling.
	_sendToTpds : (user_ids, callback)->
		if user_ids.length > 0
			request.post {uri:"#{settings.apis.thirdPartyDataStore.url}/user/poll", json:{user_ids:user_ids}}, callback
		else if callback?
			callback()

	# Find the ids (as strings) of every user with a Dropbox access token.
	_getUserIdsWithDropbox: (callback)->
		User.find {"dropbox.access_token.oauth_token_secret":{"$exists":true}}, "_id", (err, users)->
			ids = (user._id + "" for user in users)
			callback err, ids

	# Remember (in redis) when we last kicked off a Dropbox poll.
	_markPollHappened: (callback)->
		rclient.set LAST_TIME_POLL_HAPPEND_KEY, new Date().getTime(), callback

	getLastTimePollHappned: (callback = (err, lastTimePollHappened)->)->
		rclient.get LAST_TIME_POLL_HAPPEND_KEY, callback
|
|
@ -0,0 +1,46 @@
|
||||||
|
versioningApiHandler = require('../Versioning/VersioningApiHandler')
updateMerger = require('./UpdateMerger')
logger = require('logger-sharelatex')
projectLocator = require('../Project/ProjectLocator')
projectCreationHandler = require('../Project/ProjectCreationHandler')
projectDeleter = require('../Project/ProjectDeleter')
ProjectRootDocManager = require "../Project/ProjectRootDocManager"

commitMessage = "Before update from Dropbox"

module.exports =

	# Apply an incoming add/change from the third party datastore. The
	# project is created on the fly if this is the first file we see for it.
	# Fixes: lookup/creation errors were previously ignored, crashing later
	# on `project._id`.
	newUpdate: (user_id, projectName, path, updateRequest, sl_req_id, callback)->
		getOrCreateProject = (cb)=>
			projectLocator.findUsersProjectByName user_id, projectName, (err, project)=>
				return cb(err) if err?
				logger.log user_id:user_id, filePath:path, projectName:projectName, "handling new update from tpds"
				if !project?
					projectCreationHandler.createBlankProject user_id, projectName, (err, project)=>
						return cb(err) if err?
						# have a crack at setting the root doc after a while, on creation we won't have it yet, but should have
						# been sent it it within 30 seconds
						setTimeout (-> ProjectRootDocManager.setRootDocAutomatically project._id, sl_req_id ), @_rootDocTimeoutLength
						cb err, project
				else
					cb err, project
		getOrCreateProject (err, project)->
			return callback(err) if err?
			versioningApiHandler.takeSnapshot project._id, commitMessage, sl_req_id, ->
				updateMerger.mergeUpdate project._id, path, updateRequest, sl_req_id, (err)->
					callback(err)

	# Apply a delete from tpds. Deleting the project root ("/") marks the
	# whole project as deleted; anything else is merged as an entity delete.
	# Fixes: the not-found branch logged `project._id` while `project` was
	# known to be null, throwing a TypeError on every delete for an unknown
	# project.
	deleteUpdate: (user_id, projectName, path, sl_req_id, callback)->
		logger.log user_id:user_id, filePath:path, "handling delete update from tpds"
		projectLocator.findUsersProjectByName user_id, projectName, (err, project)->
			return callback(err) if err?
			if !project?
				logger.log user_id:user_id, filePath:path, projectName:projectName, "project not found from tpds update, ignoring folder or project"
				return callback()
			if path == "/"
				logger.log user_id:user_id, filePath:path, projectName:projectName, project_id:project._id, "project found for delete update, path is root so marking project as deleted"
				return projectDeleter.markAsDeletedByExternalSource project._id, callback
			else
				versioningApiHandler.takeSnapshot project._id, commitMessage, sl_req_id, ->
					updateMerger.deleteUpdate project._id, path, sl_req_id, (err)->
						callback(err)

	# How long to wait after creating a project before guessing its root doc.
	_rootDocTimeoutLength : 30 * 1000
|
|
@ -0,0 +1,109 @@
|
||||||
|
settings = require('settings-sharelatex')
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
slReqIdHelper = require('soa-req-id')
|
||||||
|
path = require('path')
|
||||||
|
Project = require('../../models/Project').Project
|
||||||
|
keys = require('../../infrastructure/Keys')
|
||||||
|
metrics = require("../../infrastructure/Metrics")
|
||||||
|
|
||||||
|
# Build the tpds URL path for an entity: /user/<id>/entity/<encoded path>,
# where the encoded part is the project name joined with the file path.
buildPath = (user_id, project_name, filePath)->
	entityPath = encodeURIComponent(path.join(project_name, "/", filePath))
	return path.join("/user/", "#{user_id}", "/entity/", entityPath)
|
||||||
|
|
||||||
|
queue = require('fairy').connect(settings.redis.fairy).queue(keys.queue.web_to_tpds_http_requests)
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
|
||||||
|
# Queue a request that streams a file's content from the filestore to the
# third party data store. Calls back once the job is enqueued, not done.
addFile : (options, sl_req_id, callback = (err)->)->
	metrics.inc("tpds.add-file")
	{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
	getProjectsUsersIds options.project_id, (err, user_id, allUserIds)->
		# BUGFIX: bail out on lookup failure (consistent with addDoc).
		return callback(err) if err?
		logger.log project_id: options.project_id, user_id:user_id, path: options.path, uri:options.uri, sl_req_id:sl_req_id, rev:options.rev, "sending file to third party data store"
		postOptions =
			method : "post"
			headers:
				"sl_req_id":sl_req_id
				sl_entity_rev:options.rev
				sl_project_id:options.project_id
				sl_all_user_ids:JSON.stringify(allUserIds)
			uri : "#{settings.apis.thirdPartyDataStore.url}#{buildPath(user_id, options.project_name, options.path)}"
			title:"addFile"
			streamOrigin : settings.apis.filestore.url + path.join("/project/#{options.project_id}/file/","#{options.file_id}")
		queue.enqueue options.project_id, "pipeStreamFrom", postOptions, ->
			logger.log project_id: options.project_id, user_id:user_id, path: options.path, uri:options.uri, sl_req_id:sl_req_id, rev:options.rev, "sending file to third party data store queued up for processing"
			callback()
|
||||||
|
|
||||||
|
# Queue a request that pushes a document's lines to the third party data store.
addDoc : (options, sl_req_id, callback = (err)->)->
	metrics.inc("tpds.add-doc")
	{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
	getProjectsUsersIds options.project_id, (err, user_id, allUserIds)->
		return callback(err) if err?
		logger.log project_id: options.project_id, user_id:user_id, path: options.path, rev:options.rev, uri:options.uri, project_name:options.project_name, docLines:options.docLines, sl_req_id:sl_req_id, "sending doc to third party data store"
		# Assemble the request description consumed by the queue worker.
		requestHeaders =
			"sl_req_id":sl_req_id
			sl_entity_rev:options.rev
			sl_project_id:options.project_id
			sl_all_user_ids:JSON.stringify(allUserIds)
		postOptions =
			method : "post"
			headers: requestHeaders
			uri : "#{settings.apis.thirdPartyDataStore.url}#{buildPath(user_id, options.project_name, options.path)}"
			title: "addDoc"
			docLines: options.docLines
		queue.enqueue options.project_id, "sendDoc", postOptions, callback
|
||||||
|
|
||||||
|
|
||||||
|
# Queue a rename/move in the third party data store. When newProjectName is
# present the whole project is being renamed; otherwise a single entity
# moves between two paths within the same project.
moveEntity : (options, sl_req_id, callback = (err)->)->
	metrics.inc("tpds.move-entity")
	{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
	if options.newProjectName?
		startPath = path.join("/#{options.project_name}/")
		endPath = path.join("/#{options.newProjectName}/")
	else
		startPath = mergeProjectNameAndPath(options.project_name, options.startPath)
		endPath = mergeProjectNameAndPath(options.project_name, options.endPath)
	getProjectsUsersIds options.project_id, (err, user_id, allUserIds)->
		# BUGFIX: bail out on lookup failure (consistent with addDoc).
		return callback(err) if err?
		logger.log project_id: options.project_id, user_id:user_id, startPath:startPath, endPath:endPath, uri:options.uri, sl_req_id:sl_req_id, "moving entity in third party data store"
		moveOptions =
			method : "put"
			title:"moveEntity"
			uri : "#{settings.apis.thirdPartyDataStore.url}/user/#{user_id}/entity"
			headers:
				"sl_req_id":sl_req_id,
				sl_project_id:options.project_id,
				sl_entity_rev:options.rev
				sl_all_user_ids:JSON.stringify(allUserIds)
			json :
				user_id : user_id
				endPath: endPath
				startPath: startPath
		queue.enqueue options.project_id, "standardHttpRequest", moveOptions, callback
|
||||||
|
|
||||||
|
# Queue a delete of an entity in the third party data store.
deleteEntity : (options, sl_req_id, callback = (err)->)->
	metrics.inc("tpds.delete-entity")
	{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
	getProjectsUsersIds options.project_id, (err, user_id, allUserIds)->
		# BUGFIX: bail out on lookup failure (consistent with addDoc).
		return callback(err) if err?
		logger.log project_id: options.project_id, user_id:user_id, path: options.path, uri:options.uri, sl_req_id:sl_req_id, "deleting entity in third party data store"
		deleteOptions =
			method : "DELETE"
			headers:
				"sl_req_id":sl_req_id,
				sl_project_id:options.project_id
				# sl_all_user_ids belongs in the headers like the other
				# endpoints; the accidental duplicate at the top level of
				# deleteOptions has been removed.
				sl_all_user_ids:JSON.stringify(allUserIds)
			uri : "#{settings.apis.thirdPartyDataStore.url}#{buildPath(user_id, options.project_name, options.path)}"
			title:"deleteEntity"
		queue.enqueue options.project_id, "standardHttpRequest", deleteOptions, callback
|
||||||
|
|
||||||
|
|
||||||
|
# Look up the project's owner plus every user with access (owner,
# collaborators, read-only users). Calls back (err, owner_id, allUserIds).
getProjectsUsersIds = (project_id, callback = (err, owner_id, allUserIds)->)->
	Project.findById project_id, "_id owner_ref readOnly_refs collaberator_refs", (err, project)->
		# BUGFIX: previously err/!project fell through and crashed on
		# project.collaberator_refs.
		return callback(err) if err?
		return callback(new Error("project not found: #{project_id}")) if !project?
		allUserIds = [].concat(project.collaberator_refs).concat(project.readOnly_refs).concat(project.owner_ref)
		callback err, project.owner_ref, allUserIds
|
||||||
|
|
||||||
|
# Join a project name and a project-relative path into "/<name>/<path>",
# avoiding a doubled slash when the path already starts with one.
mergeProjectNameAndPath = (project_name, path)->
	relative = if path.indexOf('/') == 0 then path.substring(1) else path
	return "/#{project_name}/#{relative}"
|
|
@ -0,0 +1,116 @@
|
||||||
|
_ = require('underscore')
|
||||||
|
projectLocator = require('../Project/ProjectLocator')
|
||||||
|
editorController = require('../Editor/EditorController')
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
Settings = require('settings-sharelatex')
|
||||||
|
slReqIdHelper = require('soa-req-id')
|
||||||
|
FileTypeManager = require('../Uploads/FileTypeManager')
|
||||||
|
GuidManager = require '../../managers/GuidManager'
|
||||||
|
fs = require('fs')
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
# Merge an incoming tpds update into a project: stream the request body to
# disk, then apply it as a binary file or a text doc depending on its type.
mergeUpdate: (project_id, path, updateRequest, sl_req_id, callback)->
	{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
	self = @
	logger.log sl_req_id: sl_req_id, project_id:project_id, path:path, "merging update from tpds"
	projectLocator.findElementByPath project_id, path, (err, element)=>
		# A missing element is expected (it will be created); err from the
		# locator is deliberately tolerated here for the same reason.
		logger.log sl_req_id: sl_req_id, project_id:project_id, path:path, "found element by path for merging update from tpds"
		elementId = undefined
		if element?
			elementId = element._id
		self.p.writeStreamToDisk project_id, elementId, updateRequest, (err, fsPath)->
			# BUGFIX: errors from the async steps below were silently dropped.
			return callback(err) if err?
			FileTypeManager.shouldIgnore path, (err, shouldIgnore)->
				return callback(err) if err?
				if shouldIgnore
					return callback()
				FileTypeManager.isBinary path, (err, isFile)->
					return callback(err) if err?
					if isFile
						self.p.processFile project_id, elementId, fsPath, path, callback #TODO clean up the stream written to disk here
					else
						self.p.processDoc project_id, elementId, fsPath, path, sl_req_id, callback
|
||||||
|
|
||||||
|
# Delete the entity at `path` in response to a tpds update. A missing
# entity is treated as already deleted and succeeds silently.
deleteUpdate: (project_id, path, sl_req_id, callback)->
	{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
	projectLocator.findElementByPath project_id, path, (err, element)->
		type = 'file'
		if err? || !element?
			logger.log sl_req_id: sl_req_id, element:element, project_id:project_id, path:path, "could not find entity for deleting, assuming it was already deleted"
			return callback()
		# Infer the entity type from its shape: docs have lines, folders have folders.
		if element.lines?
			type = 'doc'
		else if element.folders?
			type = 'folder'
		logger.log sl_req_id: sl_req_id, project_id:project_id, path:path, type:type, element:element, "processing update to delete entity from tpds"
		editorController.deleteEntity project_id, element._id, type, sl_req_id, (err)->
			logger.log sl_req_id: sl_req_id, project_id:project_id, path:path, "finished processing update to delete entity from tpds"
			# BUGFIX: the delete error was previously swallowed.
			callback(err)
|
||||||
|
|
||||||
|
p:
|
||||||
|
|
||||||
|
# Apply a text update: read the dumped file into lines, then either update
# the existing doc (doc_id given) or create a new doc at `path`.
processDoc: (project_id, doc_id, fsPath, path, sl_req_id, callback)->
	{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
	readFileIntoTextArray fsPath, (err, docLines)->
		if err?
			logger.err project_id:project_id, doc_id:doc_id, fsPath:fsPath, "error reading file into text array for process doc update"
			return callback(err)
		logger.log docLines:docLines, doc_id:doc_id, project_id:project_id, sl_req_id:sl_req_id, "processing doc update from tpds"
		if doc_id?
			editorController.setDoc project_id, doc_id, docLines, sl_req_id, (err)->
				# BUGFIX: propagate the error instead of swallowing it.
				callback(err)
		else
			setupNewEntity project_id, path, (err, folder, fileName)->
				# BUGFIX: previously an error here crashed on folder._id below.
				return callback(err) if err?
				editorController.addDoc project_id, folder._id, fileName, docLines, sl_req_id, (err)->
					callback(err)
|
||||||
|
|
||||||
|
# Apply a binary update: replace the existing file (file_id given) or add a
# new file after creating any missing parent folders.
processFile: (project_id, file_id, fsPath, path, sl_req_id, callback)->
	{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
	finish = (err)->
		logger.log sl_req_id: sl_req_id, project_id:project_id, file_id:file_id, path:path, "completed processing file update from tpds"
		callback(err)
	logger.log sl_req_id: sl_req_id, project_id:project_id, file_id:file_id, path:path, "processing file update from tpds"
	setupNewEntity project_id, path, (err, folder, fileName) =>
		# BUGFIX: previously an error here crashed on folder._id below.
		return finish(err) if err?
		if file_id?
			editorController.replaceFile project_id, file_id, fsPath, finish
		else
			editorController.addFile project_id, folder._id, fileName, fsPath, finish
|
||||||
|
|
||||||
|
# Pipe an incoming update stream to a scratch file under the dump folder
# and call back with its path once fully written.
writeStreamToDisk: (project_id, file_id, stream, callback = (err, fsPath)->)->
	if !file_id?
		file_id = GuidManager.newGuid()
	dumpPath = "#{Settings.path.dumpFolder}/#{project_id}_#{file_id}"

	writeStream = fs.createWriteStream(dumpPath)
	stream.pipe(writeStream)

	# BUGFIX: failures were only logged, never reported, so callers hung
	# forever. Guard so the callback fires at most once.
	callbackOnce = (err, fsPath)->
		cb = callback
		callback = ->
		cb(err, fsPath)

	stream.on 'error', (err)->
		logger.err err:err, project_id:project_id, file_id:file_id, dumpPath:dumpPath,
			"something went wrong with incoming tpds update stream"
		callbackOnce(err)
	writeStream.on 'error', (err)->
		logger.err err:err, project_id:project_id, file_id:file_id, dumpPath:dumpPath,
			"something went wrong with writing tpds update to disk"
		callbackOnce(err)

	stream.on 'end', ->
		logger.log project_id:project_id, file_id:file_id, dumpPath:dumpPath, "incoming tpds update stream ended"
	writeStream.on "finish", ->
		logger.log project_id:project_id, file_id:file_id, dumpPath:dumpPath, "tpds update write stream finished"
		callbackOnce null, dumpPath

	# Older node streams buffer until explicitly resumed.
	if stream.emitBufferedData?
		stream.emitBufferedData()
	stream.resume()
|
||||||
|
|
||||||
|
|
||||||
|
# Read a UTF-8 file and call back with its content split into lines.
readFileIntoTextArray = (path, callback)->
	fs.readFile path, "utf8", (error, content = "") ->
		if error?
			logger.err path:path, "error reading file into text array"
			# BUGFIX: was callback(err) - `err` is undefined here, the
			# variable is named `error`, so this threw a ReferenceError.
			return callback(error)
		lines = content.split("\n")
		callback error, lines
|
||||||
|
|
||||||
|
|
||||||
|
# Split `path` into its parent folder path and file name, create any
# missing folders, and call back (err, parentFolder, fileName).
setupNewEntity = (project_id, path, callback)->
	lastIndexOfSlash = path.lastIndexOf("/")
	fileName = path[lastIndexOfSlash+1 .. -1]
	folderPath = path[0 .. lastIndexOfSlash]
	editorController.mkdirp project_id, folderPath, (err, newFolders, lastFolder)->
		# BUGFIX: callers dereference lastFolder._id, so fail fast on error.
		return callback(err) if err?
		callback err, lastFolder, fileName
|
|
@ -0,0 +1,23 @@
|
||||||
|
child = require "child_process"
|
||||||
|
logger = require "logger-sharelatex"
|
||||||
|
metrics = require "../../infrastructure/Metrics"
|
||||||
|
|
||||||
|
module.exports = ArchiveManager =
	# Extract `source` (a zip file) into the `destination` directory using
	# the system unzip binary. Calls back with an Error on any failure.
	extractZipArchive: (source, destination, callback = (err) ->) ->
		timer = new metrics.Timer("unzipDirectory")
		logger.log source: source, destination: destination, "unzipping file"

		unzip = child.spawn("unzip", [source, "-d", destination])

		# Accumulate stderr; any output is treated as a failure.
		error = null
		unzip.stderr.on "data", (chunk) ->
			error ||= ""
			error += chunk

		# Guard so the callback fires exactly once whichever event lands first.
		callbackCalled = false
		done = (err) ->
			return if callbackCalled
			callbackCalled = true
			timer.done()
			if err?
				logger.error err:err, source: source, destination: destination, "error unzipping file"
			callback(err)

		# BUGFIX: without this handler a missing unzip binary crashed the
		# process with an unhandled 'error' event.
		unzip.on "error", (err) ->
			done(err)

		unzip.on "exit", (code) ->
			# BUGFIX: a nonzero exit with nothing on stderr was previously
			# reported as success.
			if !error? and code != 0
				error = "unzip exited with code #{code}"
			if error?
				error = new Error(error)
			done(error)
|
||||||
|
|
|
@ -0,0 +1,65 @@
|
||||||
|
async = require "async"
|
||||||
|
fs = require "fs"
|
||||||
|
_ = require "underscore"
|
||||||
|
FileTypeManager = require "./FileTypeManager"
|
||||||
|
EditorController = require "../Editor/EditorController"
|
||||||
|
ProjectLocator = require "../Project/ProjectLocator"
|
||||||
|
|
||||||
|
module.exports = FileSystemImportManager =
	# Read a text file from disk and insert it into the project as a doc.
	# Windows line endings are stripped before splitting into lines.
	addDoc: (project_id, folder_id, name, path, replace, callback = (error, doc)-> )->
		fs.readFile path, "utf8", (error, content = "") ->
			return callback(error) if error?
			lines = content.replace(/\r/g, "").split("\n")
			EditorController.addDoc project_id, folder_id, name, lines, callback

	# Insert a binary file into the project. With `replace` set, an existing
	# file of the same name in the target folder is replaced in place.
	addFile: (project_id, folder_id, name, path, replace, callback = (error, file)-> )->
		if !replace
			return EditorController.addFile project_id, folder_id, name, path, callback
		ProjectLocator.findElement project_id: project_id, element_id: folder_id, type: "folder", (error, folder) ->
			return callback(error) if error?
			return callback(new Error("Couldn't find folder")) if !folder?
			existingFile = null
			for fileRef in folder.fileRefs
				if fileRef.name == name
					existingFile = fileRef
					break
			if existingFile?
				EditorController.replaceFile project_id, existingFile._id, path, callback
			else
				EditorController.addFile project_id, folder_id, name, path, callback

	# Create a folder in the project and recursively import everything
	# below the given filesystem path into it.
	addFolder: (project_id, folder_id, name, path, replace, callback = (error)-> ) ->
		EditorController.addFolder project_id, folder_id, name, (error, new_folder) =>
			return callback(error) if error?
			@addFolderContents project_id, new_folder._id, path, replace, (error) ->
				return callback(error) if error?
				callback null, new_folder

	# Import every non-ignored directory entry, at most five concurrently.
	addFolderContents: (project_id, parent_folder_id, folderPath, replace, callback = (error)-> ) ->
		fs.readdir folderPath, (error, entries = []) =>
			return callback(error) if error?
			jobs = _.map entries, (entry) =>
				(cb) =>
					FileTypeManager.shouldIgnore entry, (error, ignore) =>
						return cb(error) if error?
						return cb() if ignore
						@addEntity project_id, parent_folder_id, entry, "#{folderPath}/#{entry}", replace, cb
			async.parallelLimit jobs, 5, callback

	# Dispatch a path to the right importer: folder, binary file or text doc.
	addEntity: (project_id, folder_id, name, path, replace, callback = (error, entity)-> ) ->
		FileTypeManager.isDirectory path, (error, isDirectory) =>
			return callback(error) if error?
			if isDirectory
				@addFolder project_id, folder_id, name, path, replace, callback
			else
				FileTypeManager.isBinary name, (error, isBinary) =>
					return callback(error) if error?
					if isBinary
						@addFile project_id, folder_id, name, path, replace, callback
					else
						@addDoc project_id, folder_id, name, path, replace, callback
|
||||||
|
|
|
@ -0,0 +1,49 @@
|
||||||
|
fs = require "fs"
|
||||||
|
Path = require("path")
|
||||||
|
|
||||||
|
module.exports = FileTypeManager =
	# Extensions treated as editable text documents.
	TEXT_EXTENSIONS : [
		"tex", "latex", "sty", "cls", "bst", "bib", "bibtex", "txt", "tikz", "rtex", "md"
	]

	# Build artifacts and other files that should never be imported.
	IGNORE_EXTENSIONS : [
		"dvi", "aux", "log", "ps", "toc", "out", "pdfsync"
		# Index and glossary files
		"nlo", "ind", "glo", "gls", "glg"
		# Bibtex
		"bbl", "blg"
		# Misc/bad
		"doc", "docx", "gz"
	]

	# Names (usually zip artifacts) that are always skipped.
	IGNORE_FILENAMES : [
		"__MACOSX"
	]

	# Calls back with whether `path` is a directory on disk.
	isDirectory: (path, callback = (error, result) ->) ->
		fs.stat path, (error, stats) ->
			# BUGFIX: stats is undefined when stat fails, so calling
			# stats.isDirectory() threw a TypeError and hid the real error.
			return callback(error) if error?
			callback(null, stats.isDirectory())

	# A path is "binary" unless its extension is a known text extension;
	# a name with no extension at all is also treated as binary.
	isBinary: (path, callback = (error, result) ->) ->
		parts = path.split(".")
		extension = parts.slice(-1)[0]
		if extension?
			extension = extension.toLowerCase()
		callback null, @TEXT_EXTENSIONS.indexOf(extension) == -1 or parts.length <= 1

	# Hidden files, ignored extensions and blacklisted names are skipped.
	shouldIgnore: (path, callback = (error, result) ->) ->
		name = Path.basename(path)
		extension = name.split(".").slice(-1)[0]
		if extension?
			extension = extension.toLowerCase()
		ignore = false
		if name[0] == "."
			ignore = true
		if @IGNORE_EXTENSIONS.indexOf(extension) != -1
			ignore = true
		if @IGNORE_FILENAMES.indexOf(name) != -1
			ignore = true
		callback null, ignore
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,49 @@
|
||||||
|
logger = require "logger-sharelatex"
|
||||||
|
metrics = require "../../infrastructure/Metrics"
|
||||||
|
fs = require "fs"
|
||||||
|
Path = require "path"
|
||||||
|
FileSystemImportManager = require "./FileSystemImportManager"
|
||||||
|
ProjectUploadManager = require "./ProjectUploadManager"
|
||||||
|
|
||||||
|
module.exports = ProjectUploadController =
	# Create a brand new project from an uploaded zip archive, then remove
	# the temporary upload before responding.
	uploadProject: (req, res, next) ->
		timer = new metrics.Timer("project-upload")
		user_id = req.session.user._id
		{name, path} = req.files.qqfile
		name = Path.basename(name, ".zip")
		ProjectUploadManager.createProjectFromZipArchive user_id, name, path, (error, project) ->
			fs.unlink path, ->
				timer.done()
				if error?
					logger.error err: error, file_path: path, file_name: name, "error uploading project"
					res.send success: false
				else
					logger.log project: project._id, file_path: path, file_name: name, "uploaded project"
					res.send success: true, project_id: project._id

	# Import a single uploaded file into an existing project, replacing any
	# same-named file, then remove the temporary upload before responding.
	uploadFile: (req, res, next) ->
		timer = new metrics.Timer("file-upload")
		{name, path} = req.files.qqfile
		project_id = req.params.Project_id
		folder_id = req.query.folder_id
		FileSystemImportManager.addEntity project_id, folder_id, name, path, true, (error, entity) ->
			fs.unlink path, ->
				timer.done()
				if error?
					logger.error err: error, project_id: project_id, file_path: path, file_name: name, folder_id: folder_id, "error uploading file"
					res.send success: false
				else
					logger.log project_id: project_id, file_path: path, file_name: name, folder_id: folder_id, "uploaded file"
					res.send success: true, entity_id: entity?._id
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,28 @@
|
||||||
|
path = require "path"
|
||||||
|
rimraf = require "rimraf"
|
||||||
|
ArchiveManager = require "./ArchiveManager"
|
||||||
|
FileSystemImportManager = require "./FileSystemImportManager"
|
||||||
|
ProjectCreationHandler = require "../Project/ProjectCreationHandler"
|
||||||
|
ProjectRootDocManager = require "../Project/ProjectRootDocManager"
|
||||||
|
|
||||||
|
module.exports = ProjectUploadHandler =
	# Create a blank project owned by owner_id and populate it from the zip
	# archive at zipPath, then pick a root doc automatically.
	createProjectFromZipArchive: (owner_id, name, zipPath, callback = (error, project) ->) ->
		ProjectCreationHandler.createBlankProject owner_id, name, (error, project) =>
			return callback(error) if error?
			@insertZipArchiveIntoFolder project._id, project.rootFolder[0]._id, zipPath, (error) ->
				return callback(error) if error?
				ProjectRootDocManager.setRootDocAutomatically project._id, (error) ->
					return callback(error) if error?
					callback(error, project)

	# Extract the archive into a scratch directory beside it, import the
	# extracted contents into the given folder, then delete the scratch dir.
	insertZipArchiveIntoFolder: (project_id, folder_id, path, callback = (error) ->) ->
		destination = @_getDestinationDirectory path
		ArchiveManager.extractZipArchive path, destination, (error) ->
			return callback(error) if error?
			FileSystemImportManager.addFolderContents project_id, folder_id, destination, false, (error) ->
				return callback(error) if error?
				rimraf(destination, callback)

	# A unique (timestamped) extraction directory next to the source archive.
	_getDestinationDirectory: (source) ->
		path.join(path.dirname(source), "#{path.basename(source, ".zip")}-#{Date.now()}")
|
||||||
|
|
|
@ -0,0 +1,13 @@
|
||||||
|
SecurityManager = require('../../managers/SecurityManager')
|
||||||
|
AuthenticationController = require('../Authentication/AuthenticationController')
|
||||||
|
ProjectUploadController = require "./ProjectUploadController"
|
||||||
|
|
||||||
|
module.exports =
	# Register the upload endpoints on the express app.
	apply: (app) ->
		# Create a whole new project from an uploaded zip archive.
		app.post '/project/new/upload',
			AuthenticationController.requireLogin(),
			ProjectUploadController.uploadProject
		# Upload a single file into an existing project; SecurityManager
		# enforces write access to the project.
		app.post '/Project/:Project_id/upload',
			SecurityManager.requestCanModifyProject,
			ProjectUploadController.uploadFile
|
||||||
|
|
35
services/web/app/coffee/Features/User/UserController.coffee
Normal file
35
services/web/app/coffee/Features/User/UserController.coffee
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
UserGetter = require "./UserGetter"
|
||||||
|
logger = require("logger-sharelatex")
|
||||||
|
|
||||||
|
module.exports = UserController =
	# Return personal info for the user identified by the request's auth
	# token (not the session - see the original comment below).
	getLoggedInUsersPersonalInfo: (req, res, next = (error) ->) ->
		# this is funcky as hell, we don't use the current session to get the user
		# we use the auth token, actually destroying session from the chat api request
		req.session.destroy()
		logger.log user: req.user, "reciving request for getting logged in users personal info"
		return next(new Error("User is not logged in")) if !req.user?
		UserController.sendFormattedPersonalInfo(req.user, res, next)

	# Return personal info for an arbitrary user id.
	getPersonalInfo: (req, res, next = (error) ->) ->
		UserGetter.getUser req.params.user_id, { _id: true, first_name: true, last_name: true, email: true }, (error, user) ->
			logger.log user: req.params.user_id, "reciving request for getting users personal info"
			return next(error) if error?
			return res.send(404) if !user?
			UserController.sendFormattedPersonalInfo(user, res, next)
		# NOTE(review): destroyed at method level here, mirroring
		# getLoggedInUsersPersonalInfo above - the original indentation is
		# ambiguous, confirm the intended nesting.
		req.session.destroy()

	# Serialize the public subset of a user's info as a JSON response.
	sendFormattedPersonalInfo: (user, res, next = (error) ->) ->
		UserController._formatPersonalInfo user, (error, info) ->
			return next(error) if error?
			res.send JSON.stringify(info)

	# Pick the publicly sharable fields from a user document.
	_formatPersonalInfo: (user, callback = (error, info) ->) ->
		callback null, {
			id: user._id.toString()
			first_name: user.first_name
			last_name: user.last_name
			email: user.email
			signUpDate: user.signUpDate
		}
|
||||||
|
|
19
services/web/app/coffee/Features/User/UserCreator.coffee
Normal file
19
services/web/app/coffee/Features/User/UserCreator.coffee
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
User = require("../../models/User").User
|
||||||
|
UserLocator = require("./UserLocator")
|
||||||
|
|
||||||
|
module.exports =

	# Find the user with this email address, creating a "holding account"
	# for them if no such user exists yet.
	getUserOrCreateHoldingAccount: (email, callback = (err, user)->)->
		self = @
		UserLocator.findByEmail email, (err, user)->
			return callback(err, user) if user?
			self.createNewUser email:email, holdingAccount:true, callback

	# Build and save a new User document from opts.email / opts.holdingAccount.
	createNewUser: (opts, callback)->
		newUser = new User()
		newUser.email = opts.email
		newUser.holdingAccount = opts.holdingAccount
		newUser.save (err)->
			callback(err, newUser)
|
22
services/web/app/coffee/Features/User/UserDeleter.coffee
Normal file
22
services/web/app/coffee/Features/User/UserDeleter.coffee
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
User = require("../../models/User").User
|
||||||
|
NewsletterManager = require "../../managers/NewsletterManager"
|
||||||
|
ProjectDeleter = require("../Project/ProjectDeleter")
|
||||||
|
logger = require("logger-sharelatex")
|
||||||
|
|
||||||
|
module.exports =

	# Remove a user and everything they own: unsubscribe them from the
	# newsletter, delete their projects, then remove the user document.
	deleteUser: (user_id, callback = ()->)->
		if !user_id?
			logger.err "user_id is null when trying to delete user"
			return callback("no user_id")
		User.findById user_id, (err, user)->
			logger.log user:user, "deleting user"
			if err?
				return callback(err)
			if !user?
				# BUGFIX: a missing user previously fell through and crashed
				# inside NewsletterManager.unsubscribe with a null user.
				logger.err user_id:user_id, "user not found when trying to delete user"
				return callback(new Error("user not found"))
			NewsletterManager.unsubscribe user, (err)->
				if err?
					return callback(err)
				ProjectDeleter.deleteUsersProjects user._id, (err)->
					if err?
						return callback(err)
					user.remove callback
|
15
services/web/app/coffee/Features/User/UserGetter.coffee
Normal file
15
services/web/app/coffee/Features/User/UserGetter.coffee
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
mongojs = require("../../infrastructure/mongojs")
|
||||||
|
db = mongojs.db
|
||||||
|
ObjectId = mongojs.ObjectId
|
||||||
|
|
||||||
|
module.exports = UserGetter =
	# Fetch a single user document.
	#
	# query      - a user id string, an ObjectId, or a raw mongo query object
	# projection - optional mongo field projection; defaults to {} (all fields)
	# callback   - (error, user)
	getUser: (query, projection, callback = (error, user) ->) ->
		# Support being called as getUser(query, callback).
		if arguments.length == 2
			callback = projection
			projection = {}
		# Normalise id-style queries into a mongo query object.
		if typeof query == "string"
			query = _id: ObjectId(query)
		else if query instanceof ObjectId
			query = _id: query

		db.users.findOne query, projection, callback
|
13
services/web/app/coffee/Features/User/UserLocator.coffee
Normal file
13
services/web/app/coffee/Features/User/UserLocator.coffee
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
mongojs = require("../../infrastructure/mongojs")
|
||||||
|
db = mongojs.db
|
||||||
|
ObjectId = mongojs.ObjectId
|
||||||
|
|
||||||
|
module.exports =

	# Look a user up by (trimmed) email address.
	findByEmail: (email, callback)->
		db.users.findOne email: email.trim(), (err, user)->
			callback(err, user)

	# Look a user up by id; accepts either a string or an ObjectId.
	findById: (_id, callback)->
		db.users.findOne _id:ObjectId(_id+""), callback
|
|
@ -0,0 +1,30 @@
|
||||||
|
sanitize = require('validator').sanitize
|
||||||
|
|
||||||
|
module.exports =
	# Loose RFC-2822-style email check used at registration time.
	validateEmail : (email) ->
		# BUGFIX: the regex contained stray whitespace ('\ "' and 'a-zA -Z',
		# an extraction/copy artifact) which broke quoted local parts and
		# corrupted the domain character class.
		re = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
		return re.test(email)

	# True if any of the given strings is empty.
	hasZeroLengths : (props) ->
		hasZeroLength = false
		props.forEach (prop) ->
			if prop.length == 0
				hasZeroLength = true
		return hasZeroLength

	# Sanitize and validate a registration request, deriving a default
	# first_name from the local part of the email address.
	validateRegisterRequest : (req, callback)->
		email = sanitize(req.body.email).xss().trim().toLowerCase()
		password = req.body.password
		username = email.match(/^[^@]*/)
		if username?
			first_name = username[0]
		else
			first_name = ""
		last_name = ""

		if @hasZeroLengths([password, email])
			callback('please fill in all the fields', null)
		else if !@validateEmail(email)
			callback('not valid email', null)
		else
			callback(null, {first_name:first_name, last_name:last_name, email:email, password:password})
|
12
services/web/app/coffee/Features/User/UserUpdater.coffee
Normal file
12
services/web/app/coffee/Features/User/UserUpdater.coffee
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
mongojs = require("../../infrastructure/mongojs")
|
||||||
|
db = mongojs.db
|
||||||
|
ObjectId = mongojs.ObjectId
|
||||||
|
|
||||||
|
module.exports = UserUpdater =
	# Apply a mongo update to a user document.
	#
	# query    - a user id string, an ObjectId, or a raw mongo query object
	# update   - mongo update document (e.g. {$set: {...}})
	# callback - (error)
	updateUser: (query, update, callback = (error) ->) ->
		# Normalise id-style queries into a mongo query object.
		if typeof query == "string"
			query = _id: ObjectId(query)
		else if query instanceof ObjectId
			query = _id: query

		db.users.update query, update, callback
|
|
@ -0,0 +1,50 @@
|
||||||
|
Keys = require("./RedisKeys")
|
||||||
|
Settings = require "settings-sharelatex"
|
||||||
|
redis = require('redis')
|
||||||
|
rclient = redis.createClient(Settings.redis.web.port, Settings.redis.web.host)
|
||||||
|
rclient.auth(Settings.redis.web.password)
|
||||||
|
VersioningApiHandler = require('../../Features/Versioning/VersioningApiHandler')
|
||||||
|
async = require('async')
|
||||||
|
metrics = require('../../infrastructure/Metrics')
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
|
||||||
|
|
||||||
|
module.exports = AutomaticSnapshotManager =
	# Record that project_id has been edited so the snapshot poller will
	# consider it for an automatic snapshot.
	markProjectAsUpdated: (project_id, callback = (error) ->) ->
		rclient.set Keys.buildLastUpdatedKey(project_id), Date.now(), (error) ->
			return callback(error) if error?
			rclient.sadd Keys.projectsToSnapshotKey, project_id, (error) ->
				return callback(error) if error?
				callback()

	# Forget a project's pending-snapshot state.
	unmarkProjectAsUpdated: (project_id, callback = (err)->)->
		# BUGFIX: DEL only takes keys - the stray Date.now() argument
		# (copy-pasted from the SET above) was treated as a second key.
		rclient.del Keys.buildLastUpdatedKey(project_id), (error) ->
			return callback(error) if error?
			rclient.srem Keys.projectsToSnapshotKey, project_id, (error) ->
				return callback(error) if error?
				callback()

	# Walk every marked project and snapshot those that qualify, in series.
	takeAutomaticSnapshots: (callback = (error) ->) ->
		rclient.smembers Keys.projectsToSnapshotKey, (error, project_ids) =>
			# BUGFIX: check the error before touching project_ids - it is
			# undefined when smembers fails.
			return callback(error) if error?
			logger.log project_ids:project_ids, "taking automatic snapshots"
			metrics.gauge "versioning.projectsToSnapshot", project_ids.length
			methods = []
			for project_id in project_ids
				do (project_id) =>
					methods.push((callback) => @takeSnapshotIfRequired(project_id, callback))
			async.series methods, callback

	# Snapshot if the project has been idle long enough since its last
	# edit, or if too long has passed since its last snapshot.
	takeSnapshotIfRequired: (project_id, callback = (error) ->) ->
		rclient.get Keys.buildLastUpdatedKey(project_id), (error, lastUpdated) ->
			return callback(error) if error?
			if lastUpdated? and lastUpdated < Date.now() - Settings.automaticSnapshots.waitTimeAfterLastEdit
				VersioningApiHandler.takeSnapshot(project_id, "Automatic snapshot", callback)
			else
				rclient.get Keys.buildLastSnapshotKey(project_id), (error, lastSnapshot) ->
					return callback(error) if error?
					if !lastSnapshot? or lastSnapshot < Date.now() - Settings.automaticSnapshots.maxTimeBetweenSnapshots
						VersioningApiHandler.takeSnapshot(project_id, "Automatic snapshot", callback)
					else
						callback()
|
||||||
|
|
|
@ -0,0 +1,5 @@
|
||||||
|
# Redis key builders and set names shared by the versioning feature.
module.exports =
	# Key holding the timestamp (ms) of the last edit to a project.
	buildLastUpdatedKey: (project_id) -> "project_last_updated:#{project_id}"
	# Key holding the timestamp (ms) of the last snapshot of a project.
	buildLastSnapshotKey: (project_id) -> "project_last_snapshot:#{project_id}"
	# Set of project ids with pending edits that need an automatic snapshot.
	projectsToSnapshotKey: "projects_to_snapshot"
	# Set of user ids whose third-party datastore should be polled for updates.
	usersToPollTpdsForUpdates: "users_with_active_projects"
|
|
@ -0,0 +1,31 @@
|
||||||
|
versioningApiHandler = require './VersioningApiHandler'
|
||||||
|
metrics = require('../../infrastructure/Metrics')
|
||||||
|
|
||||||
|
# HTTP controller for the versioning feature: thin wrappers that record a
# metric and delegate to versioningApiHandler.
module.exports =
	# Enable versioning for a project; delegates straight to the handler.
	enableVersioning: (project_id, callback)->
		metrics.inc "versioning.enableVersioning"
		versioningApiHandler.enableVersioning project_id, callback

	# List a project's versions by proxying the request to the versioning API.
	listVersions : (req, res) ->
		metrics.inc "versioning.listVersions"
		versioningApiHandler.proxyToVersioningApi(req, res)

	# Fetch a single version by proxying to the versioning API.
	getVersion : (req, res) ->
		metrics.inc "versioning.getVersion"
		versioningApiHandler.proxyToVersioningApi(req, res)

	# Fetch one file from a version by proxying to the versioning API.
	getVersionFile : (req, res) ->
		metrics.inc "versioning.getVersionFile"
		versioningApiHandler.proxyToVersioningApi(req, res)

	# Take a manual snapshot of the project in req.params.Project_id, using
	# the message from the request body when a non-empty one was supplied.
	takeSnapshot: (req, res, next) ->
		metrics.inc "versioning.takeSnapshot"
		if req.body? and req.body.message? and req.body.message.length > 0
			message = req.body.message
		else
			message = "Manual snapshot"
		versioningApiHandler.takeSnapshot req.params.Project_id, message, (error) ->
			if error?
				next(error)
			else
				# Express 3 style: (status, body)
				res.send(200, "{}")
|
|
@ -0,0 +1,75 @@
|
||||||
|
settings = require('settings-sharelatex')
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
Project = require('../../models/Project').Project
|
||||||
|
request = require('request')
|
||||||
|
DocumentUpdaterHandler = require('../../Features/DocumentUpdater/DocumentUpdaterHandler')
|
||||||
|
redis = require('redis')
|
||||||
|
rclient = redis.createClient(settings.redis.web.port, settings.redis.web.host)
|
||||||
|
rclient.auth(settings.redis.web.password)
|
||||||
|
Keys = require("./RedisKeys")
|
||||||
|
ProjectEntityHandler = require('../../Features/Project/ProjectEntityHandler')
|
||||||
|
metrics = require('../../infrastructure/Metrics')
|
||||||
|
keys = require('../../infrastructure/Keys')
|
||||||
|
queue = require('fairy').connect(settings.redis.fairy).queue(keys.queue.web_to_tpds_http_requests)
|
||||||
|
slReqIdHelper = require('soa-req-id')
|
||||||
|
|
||||||
|
# Basic-auth header sent with every request to the versioning API, built
# once at module load from the configured service credentials.
headers =
	Authorization : "Basic " + new Buffer("#{settings.apis.versioning.username}:#{settings.apis.versioning.password}").toString("base64")
|
||||||
|
|
||||||
|
module.exports =

	# Make sure a project exists in the external versioning API and record
	# that fact on the project document in Mongo. No-op if already enabled;
	# flushes the project to the third-party datastore afterwards.
	# project_or_id - a Project document or a project id
	# callback      - called with (error)
	enableVersioning: (project_or_id, callback = (err)->)->
		Project.getProject project_or_id, 'existsInVersioningApi', (error, project)=>
			return callback error if error?
			# Bug fix: this message previously interpolated project_id, which is
			# only assigned below, so it always rendered as "undefined".
			return callback new Error("project_id:#{project_or_id} does not exist") if !project?
			project_id = project._id
			if project.existsInVersioningApi
				logger.log project_id: project_id, "versioning already enabled"
				return callback()
			logger.log project_id: project_id, "enabling versioning in versioning API"
			@createProject project_id, (error) ->
				return callback error if error?
				logger.log project_id: project_id, "enabling versioning in Mongo"
				project.existsInVersioningApi = true
				update = existsInVersioningApi : true
				conditions = _id:project_id
				# NOTE(review): any error from this Mongo update is ignored; only
				# the TPDS flush result reaches the caller - confirm intentional.
				Project.update conditions, update, {}, ->
					ProjectEntityHandler.flushProjectToThirdPartyDataStore project_id, (err) ->
						callback(err)

	# Stream a GET request straight through to the versioning API, adding our
	# auth header. Responds 500 if the upstream request errors.
	proxyToVersioningApi : (req, res) ->
		metrics.inc "versioning.proxy"
		options =
			url : settings.apis.versioning.url + req.url
			headers : headers
		logger.log url: req.url, "proxying to versioning api"
		getReq = request.get(options)
		getReq.pipe(res)
		getReq.on "error", (error) ->
			logger.error err: error, "versioning API error"
			res.send 500

	# Enqueue a background HTTP request asking the versioning API to create a
	# record for project_id.
	createProject : (project_id, callback) ->
		url = "#{settings.apis.versioning.url}/project/#{project_id}"
		options = {method:"post", url:url, headers:headers, title:"createVersioningProject"}
		queue.enqueue project_id, "standardHttpRequest", options, callback

	# Flush the project's docs to Mongo, then enqueue a snapshot request to
	# the versioning API with the given commit message.
	# sl_req_id is optional; slReqIdHelper untangles it from the callback.
	takeSnapshot: (project_id, message, sl_req_id, callback = (error) ->)->
		{callback, sl_req_id} = slReqIdHelper.getCallbackAndReqId(callback, sl_req_id)
		logger.log project_id: project_id, sl_req_id: sl_req_id, "taking snapshot of project"

		# Bookkeeping only, so fire and forget: record the snapshot time and
		# drop the project from the pending-snapshot set.
		rclient.set Keys.buildLastSnapshotKey(project_id), Date.now(), () ->
			rclient.srem Keys.projectsToSnapshotKey, project_id, () ->

		DocumentUpdaterHandler.flushProjectToMongo project_id, sl_req_id, (err) ->
			return callback(err) if err?
			url = "#{settings.apis.versioning.url}/project/#{project_id}/version"
			json = version:{message:message}
			options = {method:"post", json:json, url:url, headers:headers, title:"takeVersioningSnapshot"}
			queue.enqueue project_id, "standardHttpRequest", options, ->
				# Bug fix: project_id was passed as a bare extra argument instead
				# of as an attribute of the log object.
				logger.log options:options, project_id:project_id, "take snapshot enqueued"
				callback()
|
||||||
|
|
||||||
|
|
||||||
|
|
128
services/web/app/coffee/controllers/AdminController.coffee
Executable file
128
services/web/app/coffee/controllers/AdminController.coffee
Executable file
|
@ -0,0 +1,128 @@
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
_ = require('underscore')
|
||||||
|
User = require('../models/User').User
|
||||||
|
Quote = require('../models/Quote').Quote
|
||||||
|
Project = require('../models/Project').Project
|
||||||
|
DocumentUpdaterHandler = require('../Features/DocumentUpdater/DocumentUpdaterHandler')
|
||||||
|
Settings = require('settings-sharelatex')
|
||||||
|
util = require('util')
|
||||||
|
redis = require('redis')
|
||||||
|
rclient = redis.createClient(Settings.redis.web.port, Settings.redis.web.host)
|
||||||
|
rclient.auth(Settings.redis.web.password)
|
||||||
|
RecurlyWrapper = require('../Features/Subscription/RecurlyWrapper')
|
||||||
|
SubscriptionHandler = require('../Features/Subscription/SubscriptionHandler')
|
||||||
|
projectEntityHandler = require('../Features/Project/ProjectEntityHandler')
|
||||||
|
TpdsPollingBackgroundTasks = require("../Features/ThirdPartyDataStore/TpdsPollingBackgroundTasks")
|
||||||
|
EditorRealTimeController = require("../Features/Editor/EditorRealTimeController")
|
||||||
|
|
||||||
|
module.exports = AdminController =

	# Render the system admin dashboard: connected editor users, open
	# outbound HTTP(S) sockets, in-memory doc count, user/project totals and
	# the per-day Redis usage counters.
	index : (req, res)=>
		# Collect the request paths of every open outbound socket, grouped by
		# host, for both the HTTP and HTTPS global agents.
		openSockets = {}
		for url, agents of require('http').globalAgent.sockets
			openSockets["http://#{url}"] = (agent._httpMessage.path for agent in agents)
		for url, agents of require('https').globalAgent.sockets
			openSockets["https://#{url}"] = (agent._httpMessage.path for agent in agents)

		# Build a summary of every connected websocket client from the values
		# stored on its socket session. The user.get calls are async, so each
		# field fetch is nested in the previous one and the summary is pushed
		# only once all fields have been read.
		io = require("../infrastructure/Server").io
		allUsers = io.sockets.clients()
		users = []
		allUsers.forEach (user)->
			u = {}
			user.get "email", (err, email)->
				u.email = email
				user.get "first_name", (err, first_name)->
					u.first_name = first_name
					user.get "last_name", (err, last_name)->
						u.last_name = last_name
						user.get "project_id", (err, project_id)->
							u.project_id = project_id
							user.get "user_id", (err, user_id)->
								u.user_id = user_id
								user.get "signup_date", (err, signup_date)->
									u.signup_date = signup_date
									user.get "login_count", (err, login_count)->
										u.login_count = login_count
										user.get "connected_time", (err, connected_time)->
											now = new Date()
											connected_mins = (((now - new Date(connected_time))/1000)/60).toFixed(2)
											u.connected_mins = connected_mins
											users.push u

		# Per-day Redis counter key prefixes, e.g. "14:3:2013:"
		d = new Date()
		today = d.getDate()+":"+(d.getMonth()+1)+":"+d.getFullYear()+":"
		yesterday = (d.getDate()-1)+":"+(d.getMonth()+1)+":"+d.getFullYear()+":"

		multi = rclient.multi()
		multi.get today+"docsets"
		# Bug fix: the compiles counters were read out of replys below but were
		# never queued here, so today.compiles actually received yesterday's
		# docsets and both yesterday values were undefined. Queue all four keys
		# in the order the replys mapping expects.
		multi.get today+"compiles"
		multi.get yesterday+"docsets"
		multi.get yesterday+"compiles"
		multi.exec (err, replys)->
			redisstats =
				today:
					docsets: replys[0]
					compiles: replys[1]
				yesterday:
					docsets: replys[2]
					compiles: replys[3]
			DocumentUpdaterHandler.getNumberOfDocsInMemory (err, numberOfInMemoryDocs)=>
				User.count (err, totalUsers)->
					Project.count (err, totalProjects)->
						res.render 'admin',
							title: 'System Admin'
							currentConnectedUsers:allUsers.length
							users: users
							numberOfAceDocs : numberOfInMemoryDocs
							totalUsers: totalUsers
							totalProjects: totalProjects
							openSockets: openSockets
							redisstats: redisstats

	# Force-disconnect every connected editor client (e.g. before a deploy).
	dissconectAllUsers: (req, res)=>
		logger.warn "disconecting everyone"
		EditorRealTimeController.emitToAll 'forceDisconnect', "Sorry, we are performing a quick update to the editor and need to close it down. Please refresh the page to continue."
		res.send(200)

	# Open or close the editor site-wide based on the request body's isOpen flag.
	closeEditor : (req, res)->
		logger.warn "closing editor"
		Settings.editorIsOpen = req.body.isOpen
		res.send(200)

	# Turn on write-through to Mongo and flush every in-memory document.
	writeAllToMongo : (req, res)->
		logger.log "writing all docs to mongo"
		Settings.mongo.writeAll = true
		DocumentUpdaterHandler.flushAllDocsToMongo ()->
			logger.log "all docs have been saved to mongo"
			res.send()

	# Store a new homepage quote from the request body.
	addQuote : (req, res)->
		quote = new Quote
			author: req.body.author
			quote: req.body.quote
		quote.save (err)->
			res.send 200

	# Link a user account to a Recurly subscription and sync its state.
	syncUserToSubscription: (req, res)->
		{user_id, subscription_id} = req.body
		RecurlyWrapper.getSubscription subscription_id, {}, (err, subscription)->
			User.findById user_id, (err, user)->
				SubscriptionHandler.syncSubscriptionToUser subscription, user._id, (err)->
					logger.log user_id:user_id, subscription_id:subscription_id, "linked account to subscription"
					res.send()

	# Push a whole project out to the third-party datastore (Dropbox etc).
	flushProjectToTpds: (req, res)->
		projectEntityHandler.flushProjectToThirdPartyDataStore req.body.project_id, (err)->
			res.send 200

	# Kick off a poll of all Dropbox-linked users for remote changes.
	pollUsersWithDropbox: (req, res)->
		TpdsPollingBackgroundTasks.pollUsersWithDropbox ->
			res.send 200

	# Switch a project between the old and new CLSI compilers based on
	# req.body.new being the string "new".
	updateProjectCompiler: (req, res, next = (error) ->)->
		Project.findOne _id: req.body.project_id, (error, project) ->
			return next(error) if error?
			project.useClsi2 = (req.body.new == "new")
			logger.log project_id: req.body.project_id, useClsi2: project.useClsi2, "updating project compiler"
			project.save (error) ->
				return next(error) if error?
				res.send(200)
|
53
services/web/app/coffee/controllers/HomeController.coffee
Executable file
53
services/web/app/coffee/controllers/HomeController.coffee
Executable file
|
@ -0,0 +1,53 @@
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
_ = require('underscore')
|
||||||
|
User = require('./UserController')
|
||||||
|
Quotes = require('../models/Quote').Quote
|
||||||
|
|
||||||
|
|
||||||
|
# Static/marketing page handlers, plus the logged-in redirect on the home
# page. Each handler simply renders a template with a page title.
module.exports =
	# Home page: logged-in users are sent to their project list (carrying
	# along any scribtex_path migration parameter); everyone else sees the
	# public homepage.
	index : (req,res)->
		if req.session.user
			scribtexPath = req.query.scribtex_path
			if scribtexPath?
				res.redirect "/project?scribtex_path=#{scribtexPath}"
			else
				res.redirect '/project'
		else
			res.render('homepage/home', {title: 'ShareLaTeX.com'})

	comments : (req, res)->
		res.render('homepage/comments.jade', {title: 'User Comments'})

	resources : (req, res)->
		res.render('resources.jade', {title: 'LaTeX Resources'})

	tos : (req, res) ->
		res.render('about/tos', {title: "Terms of Service"})

	privacy : (req, res) ->
		res.render('about/privacy', {title: "Privacy Policy"})

	about : (req, res) ->
		res.render('about/about', {title: "About us"})

	# 404 handler: set the status code before rendering the error page.
	notFound: (req, res)->
		res.statusCode = 404
		res.render('general/404', {title: "Page Not Found"})

	security : (req, res) ->
		res.render('about/security', {title: "Security"})

	attribution: (req, res) ->
		res.render('about/attribution', {title: "Attribution"})

	planned_maintenance: (req, res) ->
		res.render('about/planned_maintenance', {title: "Planned Maintenance"})
|
12
services/web/app/coffee/controllers/InfoController.coffee
Executable file
12
services/web/app/coffee/controllers/InfoController.coffee
Executable file
|
@ -0,0 +1,12 @@
|
||||||
|
# Informational landing pages: each handler renders a template with its
# page title. Behavior is identical to the original controller.
module.exports=
	themes : (req, res)=>
		opts = title: 'Themes'
		res.render "info/themes", opts

	dropbox: (req, res)->
		opts = title: 'Dropbox with LaTeX'
		res.render "info/dropbox", opts

	advisor: (req, res)->
		opts = title: 'Advisor Program'
		res.render "info/advisor", opts
|
218
services/web/app/coffee/controllers/ProjectController.coffee
Executable file
218
services/web/app/coffee/controllers/ProjectController.coffee
Executable file
|
@ -0,0 +1,218 @@
|
||||||
|
User = require('../models/User').User
|
||||||
|
Project = require('../models/Project').Project
|
||||||
|
sanitize = require('validator').sanitize
|
||||||
|
path = require "path"
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
_ = require('underscore')
|
||||||
|
fs = require('fs')
|
||||||
|
ProjectHandler = require '../handlers/ProjectHandler'
|
||||||
|
SecurityManager = require '../managers/SecurityManager'
|
||||||
|
GuidManager = require '../managers/GuidManager'
|
||||||
|
Settings = require('settings-sharelatex')
|
||||||
|
projectCreationHandler = require '../Features/Project/ProjectCreationHandler'
|
||||||
|
projectLocator = require '../Features/Project/ProjectLocator'
|
||||||
|
projectDuplicator = require('../Features/Project/ProjectDuplicator')
|
||||||
|
ProjectZipStreamManager = require '../Features/Downloads/ProjectZipStreamManager'
|
||||||
|
metrics = require('../infrastructure/Metrics')
|
||||||
|
TagsHandler = require('../Features/Tags/TagsHandler')
|
||||||
|
SubscriptionLocator = require("../Features/Subscription/SubscriptionLocator")
|
||||||
|
SubscriptionFormatters = require("../Features/Subscription/SubscriptionFormatters")
|
||||||
|
FileStoreHandler = require("../Features/FileStore/FileStoreHandler")
|
||||||
|
|
||||||
|
# Controller for the project list, project creation, the editor page,
# request buffering middleware, file download, cloning and deletion.
module.exports = class ProjectController
	constructor: (@collaberationManager)->
		# NOTE(review): this rebinds the module-level ProjectHandler import to
		# an instance, so later ProjectHandler.deleteProject calls go through
		# this instance - confirm that is intentional.
		ProjectHandler = new ProjectHandler()

	# Render the logged-in user's project list with access levels, tags and
	# free-trial state. Logs a timing line after each async step.
	list: (req, res, next)->
		timer = new metrics.Timer("project-list")
		user_id = req.session.user._id
		startTime = new Date()
		User.findById user_id, (error, user) ->
			logger.log user_id: user_id, duration: (new Date() - startTime), "project list timer - User.findById"
			startTime = new Date()
			# TODO: Remove this one month after the ability to start free trials was removed
			SubscriptionLocator.getUsersSubscription user._id, (err, subscription)->
				logger.log user_id: user_id, duration: (new Date() - startTime), "project list timer - Subscription.getUsersSubscription"
				startTime = new Date()
				return next(error) if error?
				# TODO: Remove this one month after the ability to start free trials was removed
				if subscription? and subscription.freeTrial? and subscription.freeTrial.expiresAt?
					freeTrial =
						expired: !!subscription.freeTrial.downgraded
						expiresAt: SubscriptionFormatters.formatDate(subscription.freeTrial.expiresAt)
				TagsHandler.getAllTags user_id, (err, tags, tagsGroupedByProject)->
					logger.log user_id: user_id, duration: (new Date() - startTime), "project list timer - TagsHandler.getAllTags"
					startTime = new Date()
					# NOTE(review): this callback takes no error argument -
					# confirm findAllUsersProjects never reports one.
					Project.findAllUsersProjects user_id, 'name lastUpdated publicAccesLevel', (projects, collabertions, readOnlyProjects)->
						logger.log user_id: user_id, duration: (new Date() - startTime), "project list timer - Project.findAllUsersProjects"
						startTime = new Date()
						# Tag each project with the viewer's access level
						for project in projects
							project.accessLevel = "owner"
						for project in collabertions
							project.accessLevel = "readWrite"
						for project in readOnlyProjects
							project.accessLevel = "readOnly"
						projects = projects.concat(collabertions).concat(readOnlyProjects)
						projects = projects.map (project)->
							project.tags = tagsGroupedByProject[project._id] || []
							return project
						# Most-used tags first
						tags = _.sortBy tags, (tag)->
							-tag.project_ids.length
						logger.log projects:projects, collabertions:collabertions, readOnlyProjects:readOnlyProjects, user_id:user_id, "rendering project list"
						# Most recently updated projects first
						sortedProjects = _.sortBy projects, (project)->
							return - project.lastUpdated
						res.render 'project/list',
							title:'Your Projects'
							priority_title: true
							projects: sortedProjects
							freeTrial: freeTrial
							tags:tags
							projectTabActive: true
						logger.log user_id: user_id, duration: (new Date() - startTime), "project list timer - Finished"
						timer.done()

	# Create a new project ("example" template or basic) for the current
	# user and respond with its id, or 500 on failure.
	apiNewProject: (req, res)->
		user = req.session.user
		projectName = sanitize(req.body.projectName).xss()
		template = sanitize(req.body.template).xss()
		logger.log user: user, type: template, name: projectName, "creating project"
		if template == 'example'
			projectCreationHandler.createExampleProject user._id, projectName, (err, project)->
				if err?
					logger.error err: err, project: project, user: user, name: projectName, type: "example", "error creating project"
					res.send 500
				else
					logger.log project: project, user: user, name: projectName, type: "example", "created project"
					res.send {project_id:project._id}
		else
			projectCreationHandler.createBasicProject user._id, projectName, (err, project)->
				if err?
					logger.error err: err, project: project, user: user, name: projectName, type: "basic", "error creating project"
					res.send 500
				else
					logger.log project: project, user: user, name: projectName, type: "basic", "created project"
					res.send {project_id:project._id}

	# Render the editor page for a project, building the user, settings and
	# sharelatex JSON blobs embedded in the page. Anonymous visitors get a
	# stubbed "openUser" with default settings. Renders the closed page when
	# the editor is disabled site-wide; renders nothing if access is denied.
	loadEditor: (req, res)->
		timer = new metrics.Timer("load-editor")
		if !Settings.editorIsOpen
			res.render("general/closed", {title:"updating site"})
		else
			if req.session.user?
				user_id = req.session.user._id
			else
				user_id = 'openUser'
			project_id = req.params.Project_id
			Project.findPopulatedById project_id, (err, project)->
				User.findById user_id, (err, user)->
					if user_id == 'openUser'
						anonymous = true
						# Default settings for anonymous access
						user =
							id : user_id
							ace:
								mode:'none'
								theme:'textmate'
								fontSize: '12'
								autoComplete: true
								spellCheckLanguage: ""
								pdfViewer: ""
							subscription:
								freeTrial:
									allowed: true
							featureSwitches:
								dropbox: false
								longPolling: false
					else
						anonymous = false
					SubscriptionLocator.getUsersSubscription user._id, (err, subscription)->
						SecurityManager.userCanAccessProject user, project, (canAccess, privlageLevel)->
							allowedFreeTrial = true
							if subscription? and subscription.freeTrial? and subscription.freeTrial.expiresAt?
								allowedFreeTrial = !!subscription.freeTrial.allowed
							if canAccess
								timer.done()
								res.render 'project/editor',
									title: project.name
									priority_title: true
									bodyClasses: ["editor"]
									project : project
									owner : project.owner_ref
									userObject : JSON.stringify({
										id : user.id
										email : user.email
										first_name : user.first_name
										last_name : user.last_name
										referal_id : user.referal_id
										subscription :
											freeTrial: {allowed: allowedFreeTrial}
									})
									userSettingsObject: JSON.stringify({
										mode : user.ace.mode
										theme : user.ace.theme
										project_id : project._id
										fontSize : user.ace.fontSize
										autoComplete: user.ace.autoComplete
										spellCheckLanguage: user.ace.spellCheckLanguage
										pdfViewer : user.ace.pdfViewer
										docPositions: {}
										longPolling: user.featureSwitches.longPolling
									})
									sharelatexObject : JSON.stringify({
										siteUrl: Settings.siteUrl,
										jsPath: res.locals.jsPath
									})
									privlageLevel: privlageLevel
									userCanSeeDropbox: user.featureSwitches.dropbox and project.owner_ref._id+"" == user._id+""
									loadPdfjs: (user.ace.pdfViewer == "pdfjs")
									chatUrl: Settings.apis.chat.url
									anonymous: anonymous
									languages: Settings.languages,

	# Middleware: buffer the request's data/end events so a later handler
	# can replay them by calling req.emitBufferedData().
	startBufferingRequest: (req, res, next) ->
		req.bufferedChunks = []
		req.endEmitted = false
		bufferChunk = (chunk) -> req.bufferedChunks.push(chunk)
		req.on "data", bufferChunk
		endCallback = () -> req.endEmitted = true
		req.on "end", endCallback
		# Replay buffered chunks (and "end" if it already fired) to the new
		# listeners, removing our own buffering listeners first.
		req.emitBufferedData = () ->
			logger.log chunks: @bufferedChunks.length, emittedEnd: @endEmitted, "emitting buffer chunks"
			@removeListener "data", bufferChunk
			while @bufferedChunks.length > 0
				@emit "data", @bufferedChunks.shift()
			@removeListener "end", endCallback
			@emit "end" if @endEmitted
		next()

	# Stream a file out of the file store as an attachment download.
	downloadImageFile : (req, res)->
		project_id = req.params.Project_id
		file_id = req.params.File_id
		queryString = req.query
		logger.log project_id: project_id, file_id: file_id, queryString:queryString, "file download"
		res.setHeader("Content-Disposition", "attachment")
		# NOTE(review): err from getFileStream is ignored; a failure here
		# would crash on stream.pipe - confirm upstream behaviour.
		FileStoreHandler.getFileStream project_id, file_id, queryString, (err, stream)->
			stream.pipe res

	# Duplicate a project under a new name for the current user. Anonymous
	# users are told to register instead.
	cloneProject: (req, res)->
		metrics.inc "cloned-project"
		project_id = req.params.Project_id
		projectName = req.body.projectName
		logger.log project_id:project_id, projectName:projectName, "cloning project"
		if !req.session.user?
			return res.send redir:"/register"
		projectDuplicator.duplicate req.session.user, project_id, projectName, (err, project)->
			if err?
				logger.error err:err, project_id: project_id, user_id: req.session.user._id, "error cloning project"
				# NOTE(review): `next` is not a parameter of this handler, so
				# this error path throws a ReferenceError; the signature
				# should presumably be (req, res, next).
				return next(err)
			res.send(project_id:project._id)

	# Delete a project and respond 200, or 500 on error.
	deleteProject: (req, res)->
		project_id = req.params.Project_id
		logger.log project_id:project_id, "deleting project"
		ProjectHandler.deleteProject project_id, (err)->
			if err?
				res.send 500
			else
				res.send 200
|
||||||
|
|
||||||
|
|
237
services/web/app/coffee/controllers/UserController.coffee
Normal file
237
services/web/app/coffee/controllers/UserController.coffee
Normal file
|
@ -0,0 +1,237 @@
|
||||||
|
User = require('../models/User').User
|
||||||
|
sanitize = require('validator').sanitize
|
||||||
|
fs = require('fs')
|
||||||
|
_ = require('underscore')
|
||||||
|
emailer = require('../managers/EmailManager')
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
Security = require('../managers/SecurityManager')
|
||||||
|
Settings = require('settings-sharelatex')
|
||||||
|
newsLetterManager = require('../managers/NewsletterManager')
|
||||||
|
dropboxHandler = require('../Features/Dropbox/DropboxHandler')
|
||||||
|
userRegistrationHandler = require('../Features/User/UserRegistrationHandler')
|
||||||
|
metrics = require('../infrastructure/Metrics')
|
||||||
|
AnalyticsManager = require('../Features/Analytics/AnalyticsManager')
|
||||||
|
ReferalAllocator = require('../Features/Referal/ReferalAllocator')
|
||||||
|
AuthenticationManager = require("../Features/Authentication/AuthenticationManager")
|
||||||
|
AuthenticationController = require("../Features/Authentication/AuthenticationController")
|
||||||
|
SubscriptionLocator = require("../Features/Subscription/SubscriptionLocator")
|
||||||
|
UserDeleter = require("../Features/User/UserDeleter")
|
||||||
|
Url = require("url")
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
|
||||||
|
registerForm : (req, res)->
|
||||||
|
|
||||||
|
sharedProjectData =
|
||||||
|
project_name:req.query.project_name
|
||||||
|
user_first_name:req.query.user_first_name
|
||||||
|
|
||||||
|
newTemplateData = {}
|
||||||
|
if req.session.templateData?
|
||||||
|
newTemplateData.templateName = req.session.templateData.templateName
|
||||||
|
|
||||||
|
res.render 'user/register',
|
||||||
|
title: 'Register'
|
||||||
|
redir: req.query.redir
|
||||||
|
sharedProjectData: sharedProjectData
|
||||||
|
newTemplateData: newTemplateData
|
||||||
|
new_email:req.query.new_email || ""
|
||||||
|
|
||||||
|
|
||||||
|
loginForm : (req, res)->
|
||||||
|
res.render 'user/login',
|
||||||
|
title: 'Login',
|
||||||
|
redir: req.query.redir
|
||||||
|
|
||||||
|
apiRegister : (req, res, next = (error) ->)->
|
||||||
|
logger.log email: req.body.email, "attempted register"
|
||||||
|
redir = Url.parse(req.body.redir or "/project").path
|
||||||
|
userRegistrationHandler.validateRegisterRequest req, (err, data)->
|
||||||
|
if err?
|
||||||
|
logger.log validation_error: err, "user validation error"
|
||||||
|
metrics.inc "user.register.validation-error"
|
||||||
|
res.send message:
|
||||||
|
text:err
|
||||||
|
type:'error'
|
||||||
|
else
|
||||||
|
User.findOne {email:data.email}, (err, foundUser)->
|
||||||
|
if foundUser? && foundUser.holdingAccount == false
|
||||||
|
AuthenticationController.login req, res
|
||||||
|
logger.log email: data.email, "email already registered"
|
||||||
|
metrics.inc "user.register.already-registered"
|
||||||
|
return AuthenticationController.login req, res
|
||||||
|
else if foundUser? && foundUser.holdingAccount == true #someone put them in as a collaberator
|
||||||
|
user = foundUser
|
||||||
|
user.holdingAccount == false
|
||||||
|
else
|
||||||
|
user = new User email: data.email
|
||||||
|
d = new Date()
|
||||||
|
user.first_name = data.first_name
|
||||||
|
user.last_name = data.last_name
|
||||||
|
user.signUpDate = new Date()
|
||||||
|
metrics.inc "user.register.success"
|
||||||
|
user.save (err)->
|
||||||
|
req.session.user = user
|
||||||
|
req.session.justRegistered = true
|
||||||
|
logger.log user: user, "registered"
|
||||||
|
AuthenticationManager.setUserPassword user._id, data.password, (error) ->
|
||||||
|
return next(error) if error?
|
||||||
|
res.send
|
||||||
|
redir:redir
|
||||||
|
id:user._id.toString()
|
||||||
|
first_name: user.first_name
|
||||||
|
last_name: user.last_name
|
||||||
|
email: user.email
|
||||||
|
created: Date.now()
|
||||||
|
#things that can be fired and forgot.
|
||||||
|
newsLetterManager.subscribe user
|
||||||
|
ReferalAllocator.allocate req.session.referal_id, user._id, req.session.referal_source, req.session.referal_medium
|
||||||
|
|
||||||
|
requestPasswordReset : (req, res)->
|
||||||
|
res.render 'user/passwordReset',
|
||||||
|
title: 'Password Reset',
|
||||||
|
|
||||||
|
doRequestPasswordReset : (req, res, next = (error) ->)->
|
||||||
|
email = sanitize(req.body.email).xss()
|
||||||
|
email = sanitize(email).trim()
|
||||||
|
email = email.toLowerCase()
|
||||||
|
logger.log email: email, "password reset requested"
|
||||||
|
User.findOne {'email':email}, (err, user)->
|
||||||
|
if(user?)
|
||||||
|
randomPassword = generateRandomString 12
|
||||||
|
AuthenticationManager.setUserPassword user._id, randomPassword, (error) ->
|
||||||
|
return next(error) if error?
|
||||||
|
emailOptions =
|
||||||
|
receiver : user.email
|
||||||
|
subject : "Password Reset - ShareLatex.com"
|
||||||
|
heading : "Password Reset"
|
||||||
|
message : " Your password has been reset, the new password is <p> #{randomPassword}
|
||||||
|
<p> please login <a href=#{Settings.siteUrl}/user/settings>click here</a>
|
||||||
|
"
|
||||||
|
emailer.sendEmail emailOptions
|
||||||
|
metrics.inc "user.password-reset"
|
||||||
|
res.send message:
|
||||||
|
text:'An email with your new password has been sent to you'
|
||||||
|
type:'success'
|
||||||
|
else
|
||||||
|
res.send message:
|
||||||
|
text:'This email address has not been registered with us'
|
||||||
|
type:'failure'
|
||||||
|
logger.info email: email, "no user found with email"
|
||||||
|
|
||||||
|
logout : (req, res)->
|
||||||
|
metrics.inc "user.logout"
|
||||||
|
if req.session? && req.session.user?
|
||||||
|
logger.log user: req.session.user, "logging out"
|
||||||
|
req.session.destroy (err)->
|
||||||
|
if err
|
||||||
|
logger.err err: err, 'error destorying session'
|
||||||
|
res.redirect '/login'
|
||||||
|
|
||||||
|
settings : (req, res)->
|
||||||
|
logger.log user: req.session.user, "loading settings page"
|
||||||
|
User.findById req.session.user._id, (err, user)->
|
||||||
|
dropboxHandler.getUserRegistrationStatus user._id, (err, status)->
|
||||||
|
userIsRegisteredWithDropbox = !err? and status.registered
|
||||||
|
res.render 'user/settings',
|
||||||
|
title:'Your settings',
|
||||||
|
userCanSeeDropbox: user.featureSwitches.dropbox
|
||||||
|
userHasDropboxFeature: user.features.dropbox
|
||||||
|
userIsRegisteredWithDropbox: userIsRegisteredWithDropbox
|
||||||
|
user: user,
|
||||||
|
themes: THEME_LIST,
|
||||||
|
editors: ['default','vim','emacs'],
|
||||||
|
fontSizes: ['10','11','12','13','14','16','20','24']
|
||||||
|
languages: Settings.languages,
|
||||||
|
accountSettingsTabActive: true
|
||||||
|
|
||||||
|
unsubscribe: (req, res)->
|
||||||
|
User.findById req.session.user._id, (err, user)->
|
||||||
|
newsLetterManager.unsubscribe user, ->
|
||||||
|
res.send()
|
||||||
|
|
||||||
|
apiUpdate : (req, res)->
|
||||||
|
logger.log user: req.session.user, "updating account settings"
|
||||||
|
metrics.inc "user.settings-update"
|
||||||
|
User.findById req.session.user._id, (err, user)->
|
||||||
|
if(user)
|
||||||
|
user.first_name = sanitize(req.body.first_name).xss().trim()
|
||||||
|
user.last_name = sanitize(req.body.last_name).xss().trim()
|
||||||
|
user.ace.mode = sanitize(req.body.mode).xss().trim()
|
||||||
|
user.ace.theme = sanitize(req.body.theme).xss().trim()
|
||||||
|
user.ace.fontSize = sanitize(req.body.fontSize).xss().trim()
|
||||||
|
user.ace.autoComplete = req.body.autoComplete == "true"
|
||||||
|
user.ace.spellCheckLanguage = req.body.spellCheckLanguage
|
||||||
|
user.ace.pdfViewer = req.body.pdfViewer
|
||||||
|
user.save()
|
||||||
|
res.send {}
|
||||||
|
|
||||||
|
changePassword : (req, res, next = (error) ->)->
|
||||||
|
metrics.inc "user.password-change"
|
||||||
|
oldPass = req.body.currentPassword
|
||||||
|
AuthenticationManager.authenticate _id: req.session.user._id, oldPass, (err, user)->
|
||||||
|
if(user)
|
||||||
|
logger.log user: req.session.user, "changing password"
|
||||||
|
newPassword1 = req.body.newPassword1
|
||||||
|
newPassword2 = req.body.newPassword2
|
||||||
|
if newPassword1 != newPassword2
|
||||||
|
logger.log user: user, "passwords do not match"
|
||||||
|
res.send
|
||||||
|
message:
|
||||||
|
type:'error'
|
||||||
|
text:'Your passwords do not match'
|
||||||
|
else
|
||||||
|
logger.log user: user, "password changed"
|
||||||
|
AuthenticationManager.setUserPassword user._id, newPassword1, (error) ->
|
||||||
|
return next(error) if error?
|
||||||
|
res.send
|
||||||
|
message:
|
||||||
|
type:'success'
|
||||||
|
text:'Your password has been changed'
|
||||||
|
else
|
||||||
|
logger.log user: user, "current password wrong"
|
||||||
|
res.send
|
||||||
|
message:
|
||||||
|
type:'error'
|
||||||
|
text:'Your old password is wrong'
|
||||||
|
|
||||||
|
redirectUserToDropboxAuth: (req, res)->
|
||||||
|
user_id = req.session.user._id
|
||||||
|
dropboxHandler.getDropboxRegisterUrl user_id, (err, url)->
|
||||||
|
logger.log url:url, "redirecting user for dropbox auth"
|
||||||
|
res.redirect url
|
||||||
|
|
||||||
|
completeDropboxRegistration: (req, res)->
|
||||||
|
user_id = req.session.user._id
|
||||||
|
dropboxHandler.completeRegistration user_id, (err, success)->
|
||||||
|
res.redirect('/user/settings#dropboxSettings')
|
||||||
|
|
||||||
|
unlinkDropbox: (req, res)->
|
||||||
|
user_id = req.session.user._id
|
||||||
|
dropboxHandler.unlinkAccount user_id, (err, success)->
|
||||||
|
res.redirect('/user/settings#dropboxSettings')
|
||||||
|
|
||||||
|
deleteUser: (req, res)->
|
||||||
|
user_id = req.session.user._id
|
||||||
|
UserDeleter.deleteUser user_id, (err)->
|
||||||
|
if !err?
|
||||||
|
req.session.destroy()
|
||||||
|
res.send(200)
|
||||||
|
|
||||||
|
|
||||||
|
generateRandomString = (len)->
|
||||||
|
chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXTZabcdefghiklmnopqrstuvwxyz"
|
||||||
|
randomString = ''
|
||||||
|
count = 0
|
||||||
|
while count++ < len
|
||||||
|
rnum = Math.floor(Math.random() * chars.length)
|
||||||
|
randomString += chars.substring(rnum,rnum+1)
|
||||||
|
return randomString
|
||||||
|
|
||||||
|
THEME_LIST = []
|
||||||
|
do generateThemeList = () ->
|
||||||
|
files = fs.readdirSync __dirname + '/../../../public/js/ace/theme'
|
||||||
|
for file in files
|
||||||
|
if file.slice(-2) == "js"
|
||||||
|
cleanName = file.slice(0,-3)
|
||||||
|
THEME_LIST.push name: cleanName
|
10
services/web/app/coffee/errors.coffee
Normal file
10
services/web/app/coffee/errors.coffee
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
NotFoundError = (message) ->
|
||||||
|
error = new Error(message)
|
||||||
|
error.name = "NotFoundError"
|
||||||
|
error.__proto__ = NotFoundError.prototype
|
||||||
|
return error
|
||||||
|
NotFoundError.prototype.__proto__ = Error.prototype
|
||||||
|
|
||||||
|
module.exports = Errors =
|
||||||
|
NotFoundError: NotFoundError
|
||||||
|
|
164
services/web/app/coffee/handlers/ProjectHandler.coffee
Executable file
164
services/web/app/coffee/handlers/ProjectHandler.coffee
Executable file
|
@ -0,0 +1,164 @@
|
||||||
|
Project = require('../models/Project').Project
|
||||||
|
Folder = require('../models/Folder').Folder
|
||||||
|
Doc = require('../models/Doc').Doc
|
||||||
|
File = require('../models/File').File
|
||||||
|
User = require('../models/User').User
|
||||||
|
logger = require('logger-sharelatex')
|
||||||
|
_ = require('underscore')
|
||||||
|
Settings = require('settings-sharelatex')
|
||||||
|
emailer = require('../managers/EmailManager')
|
||||||
|
tpdsUpdateSender = require '../Features/ThirdPartyDataStore/TpdsUpdateSender'
|
||||||
|
projectCreationHandler = require '../Features/Project/ProjectCreationHandler'
|
||||||
|
projectEntityHandler = require '../Features/Project/ProjectEntityHandler'
|
||||||
|
ProjectEditorHandler = require '../Features/Project/ProjectEditorHandler'
|
||||||
|
FileStoreHandler = require "../Features/FileStore/FileStoreHandler"
|
||||||
|
projectLocator = require '../Features/Project/ProjectLocator'
|
||||||
|
mimelib = require("mimelib")
|
||||||
|
async = require('async')
|
||||||
|
tagsHandler = require('../Features/Tags/TagsHandler')
|
||||||
|
|
||||||
|
module.exports = class ProjectHandler
|
||||||
|
getProject: (project_id, callback)->
|
||||||
|
logger.log project_id: project_id, "getting project"
|
||||||
|
Project.findById project_id, (err, project)->
|
||||||
|
callback err, ProjectEditorHandler.buildProjectModelView(project, includeUsers: false)
|
||||||
|
|
||||||
|
confirmFolder = (project_id, folder_id, callback)->
|
||||||
|
logger.log folder: folder_id, project_id: project_id, "confirming existence of folder"
|
||||||
|
if folder_id+'' == 'undefined'
|
||||||
|
Project.findById project_id, (err, project)->
|
||||||
|
callback(project.rootFolder[0]._id)
|
||||||
|
else if folder_id != null
|
||||||
|
callback folder_id
|
||||||
|
else
|
||||||
|
Project.findById project_id, (err, project)->
|
||||||
|
callback(project.rootFolder[0]._id)
|
||||||
|
|
||||||
|
renameEntity: (project_id, entity_id, entityType, newName, callback)->
|
||||||
|
logger.log(entity_id: entity_id, project_id: project_id, ('renaming '+entityType))
|
||||||
|
if !entityType?
|
||||||
|
logger.err err: "No entityType set", project_id: project_id, entity_id: entity_id
|
||||||
|
return callback("No entityType set")
|
||||||
|
entityType = entityType.toLowerCase()
|
||||||
|
Project.findById project_id, (err, project)=>
|
||||||
|
projectLocator.findElement {project:project, element_id:entity_id, type:entityType}, (err, entity, path, folder)=>
|
||||||
|
if err?
|
||||||
|
return callback err
|
||||||
|
conditons = {_id:project_id}
|
||||||
|
update = "$set":{}
|
||||||
|
namePath = path.mongo+".name"
|
||||||
|
update["$set"][namePath] = newName
|
||||||
|
endPath = path.fileSystem.replace(entity.name, newName)
|
||||||
|
tpdsUpdateSender.moveEntity({project_id:project_id, startPath:path.fileSystem, endPath:endPath, project_name:project.name, rev:entity.rev})
|
||||||
|
Project.update conditons, update, {}, (err)->
|
||||||
|
if callback?
|
||||||
|
callback err
|
||||||
|
|
||||||
|
renameProject: (project_id, window_id, newName, callback)->
|
||||||
|
logger.log project_id: project_id, "renaming project"
|
||||||
|
conditons = {_id:project_id}
|
||||||
|
Project.findOne conditons, "name", (err, project)->
|
||||||
|
oldProjectName = project.name
|
||||||
|
Project.update conditons, {name: newName}, {},(err, project)=>
|
||||||
|
tpdsUpdateSender.moveEntity {project_id:project_id, project_name:oldProjectName, newProjectName:newName}
|
||||||
|
if callback?
|
||||||
|
callback err
|
||||||
|
|
||||||
|
deleteProject: (project_id, callback = (error) ->)->
|
||||||
|
logger.log project_id:project_id, "deleting project"
|
||||||
|
Project.findById project_id, (err, project)=>
|
||||||
|
if project?
|
||||||
|
require('../Features/DocumentUpdater/DocumentUpdaterHandler').flushProjectToMongoAndDelete project_id, (error) ->
|
||||||
|
return callback(error) if error?
|
||||||
|
Project.applyToAllFilesRecursivly project.rootFolder[0], (file)=>
|
||||||
|
FileStoreHandler.deleteFile project_id, file._id, ->
|
||||||
|
Project.remove {_id:project_id}, (err)->
|
||||||
|
if callback?
|
||||||
|
callback(err)
|
||||||
|
require('../Features/Versioning/AutomaticSnapshotManager').unmarkProjectAsUpdated project_id, ->
|
||||||
|
tagsHandler.removeProjectFromAllTags project.owner_ref, project_id,->
|
||||||
|
project.collaberator_refs.forEach (collaberator_ref)->
|
||||||
|
tagsHandler.removeProjectFromAllTags collaberator_ref, project_id, ->
|
||||||
|
project.readOnly_refs.forEach (readOnly_ref)->
|
||||||
|
tagsHandler.removeProjectFromAllTags readOnly_ref, project_id,->
|
||||||
|
else
|
||||||
|
if callback?
|
||||||
|
callback(err)
|
||||||
|
|
||||||
|
setPublicAccessLevel : (project_id, newAccessLevel, callback)->
|
||||||
|
logger.log project_id: project_id, level: newAccessLevel, "set public access level"
|
||||||
|
if project_id? && newAccessLevel?
|
||||||
|
if _.include ['readOnly', 'readAndWrite', 'private'], newAccessLevel
|
||||||
|
Project.update {_id:project_id},{publicAccesLevel:newAccessLevel},{}, (err)->
|
||||||
|
if callback?
|
||||||
|
callback()
|
||||||
|
|
||||||
|
addUserToProject: (project_id, email, privlages, callback)->
|
||||||
|
if email != ''
|
||||||
|
doAdd = (user)=>
|
||||||
|
Project.findOne(_id: project_id )
|
||||||
|
.select("name owner_ref")
|
||||||
|
.populate('owner_ref')
|
||||||
|
.exec (err, project)->
|
||||||
|
emailOptions =
|
||||||
|
receiver : email
|
||||||
|
replyTo : project.owner_ref.email
|
||||||
|
subject : "#{project.owner_ref.first_name} #{project.owner_ref.last_name} wants to share '#{project.name}' with you"
|
||||||
|
heading : "#{project.name} #{project.owner_ref.last_name} wants to share '#{project.name}' with you"
|
||||||
|
message : "
|
||||||
|
"
|
||||||
|
template_name:"shared_project_email_template"
|
||||||
|
view_data:
|
||||||
|
project:
|
||||||
|
name: project.name
|
||||||
|
url: "#{Settings.siteUrl}/project/#{project._id}?" + [
|
||||||
|
"project_name=#{project.name}"
|
||||||
|
"user_first_name=#{project.owner_ref.first_name}"
|
||||||
|
"new_email=#{email}"
|
||||||
|
"r=#{project.owner_ref.referal_id}" # Referal
|
||||||
|
"rs=ci" # referral source = collaborator invite
|
||||||
|
].join("&")
|
||||||
|
owner:
|
||||||
|
first_name: project.owner_ref.first_name
|
||||||
|
email: project.owner_ref.email
|
||||||
|
sharelatex_url: Settings.siteUrl
|
||||||
|
|
||||||
|
emailer.sendEmail emailOptions
|
||||||
|
if privlages == 'readAndWrite'
|
||||||
|
level = {"collaberator_refs":user}
|
||||||
|
logger.log privileges: "readAndWrite", user: user, project: project, "adding user"
|
||||||
|
else if privlages == 'readOnly'
|
||||||
|
level = {"readOnly_refs":user}
|
||||||
|
logger.log privileges: "readOnly", user: user, project: project, "adding user"
|
||||||
|
Project.update {_id: project_id}, {$push:level},{},(err)->
|
||||||
|
projectEntityHandler.flushProjectToThirdPartyDataStore project_id, "", ->
|
||||||
|
if callback?
|
||||||
|
callback(user)
|
||||||
|
|
||||||
|
emails = mimelib.parseAddresses(email)
|
||||||
|
email = emails[0].address
|
||||||
|
User.findOne {'email':email}, (err, user)->
|
||||||
|
if(!user)
|
||||||
|
user = new User 'email':email, holdingAccount:true
|
||||||
|
user.save (err)->
|
||||||
|
logger.log user: user, 'creating new empty user'
|
||||||
|
doAdd user
|
||||||
|
else
|
||||||
|
doAdd user
|
||||||
|
|
||||||
|
removeUserFromProject: (project_id, user_id, callback)->
|
||||||
|
logger.log user_id: user_id, project_id: project_id, "removing user"
|
||||||
|
conditions = _id:project_id
|
||||||
|
update = $pull:{}
|
||||||
|
update["$pull"] = collaberator_refs:user_id, readOnly_refs:user_id
|
||||||
|
Project.update conditions, update, {}, (err)->
|
||||||
|
if err?
|
||||||
|
logger.err err: err, "problem removing user from project collaberators"
|
||||||
|
if callback?
|
||||||
|
callback()
|
||||||
|
|
||||||
|
changeUsersPrivlageLevel: (project_id, user_id, newPrivalageLevel)->
|
||||||
|
@removeUserFromProject project_id, user_id, ()=>
|
||||||
|
User.findById user_id, (err, user)=>
|
||||||
|
if user
|
||||||
|
@addUserToProject project_id, user.email, newPrivalageLevel
|
|
@ -0,0 +1,7 @@
|
||||||
|
EditorUpdatesController = require("../Features/Editor/EditorUpdatesController")
|
||||||
|
EditorRealTimeController = require("../Features/Editor/EditorRealTimeController")
|
||||||
|
|
||||||
|
module.exports = BackgroundTasks =
|
||||||
|
run: () ->
|
||||||
|
EditorUpdatesController.listenForUpdatesFromDocumentUpdater()
|
||||||
|
EditorRealTimeController.listenForEditorEvents()
|
11
services/web/app/coffee/infrastructure/CrawlerLogger.coffee
Normal file
11
services/web/app/coffee/infrastructure/CrawlerLogger.coffee
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
metrics = require('./Metrics')
|
||||||
|
module.exports =
|
||||||
|
log: (req)->
|
||||||
|
if req.headers["user-agent"]?
|
||||||
|
userAgent = req.headers["user-agent"].toLowerCase()
|
||||||
|
if userAgent.indexOf("google") != -1
|
||||||
|
metrics.inc "crawler.google"
|
||||||
|
else if userAgent.indexOf("facebook") != -1
|
||||||
|
metrics.inc "crawler.facebook"
|
||||||
|
else if userAgent.indexOf("bing") != -1
|
||||||
|
metrics.inc "crawler.bing"
|
122
services/web/app/coffee/infrastructure/ExpressLocals.coffee
Normal file
122
services/web/app/coffee/infrastructure/ExpressLocals.coffee
Normal file
|
@ -0,0 +1,122 @@
|
||||||
|
logger = require 'logger-sharelatex'
|
||||||
|
fs = require 'fs'
|
||||||
|
crypto = require 'crypto'
|
||||||
|
Settings = require('settings-sharelatex')
|
||||||
|
SubscriptionFormatters = require('../Features/Subscription/SubscriptionFormatters')
|
||||||
|
querystring = require('querystring')
|
||||||
|
|
||||||
|
fingerprints = {}
|
||||||
|
Path = require 'path'
|
||||||
|
jsPath =
|
||||||
|
if Settings.useMinifiedJs
|
||||||
|
"/minjs/"
|
||||||
|
else
|
||||||
|
"/js/"
|
||||||
|
|
||||||
|
logger.log "Generating file fingerprints..."
|
||||||
|
for path in [
|
||||||
|
"#{jsPath}libs/require.js",
|
||||||
|
"#{jsPath}ide.js",
|
||||||
|
"#{jsPath}main.js",
|
||||||
|
"#{jsPath}list.js",
|
||||||
|
"#{jsPath}libs/pdf.js",
|
||||||
|
"#{jsPath}libs/pdf.worker.js",
|
||||||
|
"/stylesheets/mainStyle.css"
|
||||||
|
]
|
||||||
|
filePath = Path.join __dirname, "../../../", "public#{path}"
|
||||||
|
content = fs.readFileSync filePath
|
||||||
|
hash = crypto.createHash("md5").update(content).digest("hex")
|
||||||
|
logger.log "#{filePath}: #{hash}"
|
||||||
|
fingerprints[path] = hash
|
||||||
|
|
||||||
|
module.exports = (app)->
|
||||||
|
app.use (req, res, next)->
|
||||||
|
res.locals.session = req.session
|
||||||
|
next()
|
||||||
|
|
||||||
|
app.use (req, res, next)->
|
||||||
|
res.locals.jsPath = jsPath
|
||||||
|
next()
|
||||||
|
|
||||||
|
app.use (req, res, next)->
|
||||||
|
res.locals.settings = Settings
|
||||||
|
next()
|
||||||
|
|
||||||
|
app.use (req, res, next)->
|
||||||
|
res.locals.getSiteHost = ->
|
||||||
|
Settings.siteUrl.substring(Settings.siteUrl.indexOf("//")+2)
|
||||||
|
next()
|
||||||
|
|
||||||
|
app.use (req, res, next)->
|
||||||
|
res.locals.formatPrivlageLevel = (privlageLevel)->
|
||||||
|
formatedPrivlages = private:"Private", readOnly:"Read Only", readAndWrite:"Read and Write"
|
||||||
|
return formatedPrivlages[privlageLevel] || "Private"
|
||||||
|
next()
|
||||||
|
|
||||||
|
app.use (req, res, next)->
|
||||||
|
res.locals.buildReferalUrl = (referal_medium) ->
|
||||||
|
url = Settings.siteUrl
|
||||||
|
if req.session? and req.session.user? and req.session.user.referal_id?
|
||||||
|
url+="?r=#{req.session.user.referal_id}&rm=#{referal_medium}&rs=b" # Referal source = bonus
|
||||||
|
return url
|
||||||
|
res.locals.getReferalId = ->
|
||||||
|
if req.session? and req.session.user? and req.session.user.referal_id
|
||||||
|
return req.session.user.referal_id
|
||||||
|
res.locals.getReferalTagLine = ->
|
||||||
|
tagLines = [
|
||||||
|
"Roar!"
|
||||||
|
"Shout about us!"
|
||||||
|
"Please recommend us"
|
||||||
|
"Tell the world!"
|
||||||
|
"Thanks for using ShareLaTeX"
|
||||||
|
]
|
||||||
|
return tagLines[Math.floor(Math.random()*tagLines.length)]
|
||||||
|
res.locals.getRedirAsQueryString = ->
|
||||||
|
if req.query.redir?
|
||||||
|
return "?#{querystring.stringify({redir:req.query.redir})}"
|
||||||
|
return ""
|
||||||
|
next()
|
||||||
|
|
||||||
|
app.use (req, res, next) ->
|
||||||
|
res.locals.csrfToken = req.session._csrf
|
||||||
|
next()
|
||||||
|
|
||||||
|
app.use (req, res, next)->
|
||||||
|
res.locals.fingerprint = (path) ->
|
||||||
|
if fingerprints[path]?
|
||||||
|
return fingerprints[path]
|
||||||
|
else
|
||||||
|
logger.err "No fingerprint for file: #{path}"
|
||||||
|
return ""
|
||||||
|
next()
|
||||||
|
app.use (req, res, next)->
|
||||||
|
res.locals.formatPrice = SubscriptionFormatters.formatPrice
|
||||||
|
next()
|
||||||
|
|
||||||
|
app.use (req, res, next)->
|
||||||
|
if req.session.user?
|
||||||
|
res.locals.mixpanelId = req.session.user._id
|
||||||
|
res.locals.user =
|
||||||
|
email: req.session.user.email
|
||||||
|
first_name: req.session.user.first_name
|
||||||
|
last_name: req.session.user.last_name
|
||||||
|
if req.session.justRegistered
|
||||||
|
res.locals.justRegistered = true
|
||||||
|
delete req.session.justRegistered
|
||||||
|
if req.session.justLoggedIn
|
||||||
|
res.locals.justLoggedIn = true
|
||||||
|
delete req.session.justLoggedIn
|
||||||
|
res.locals.mixpanelToken = Settings.analytics?.mixpanel?.token
|
||||||
|
res.locals.gaToken = Settings.analytics?.ga?.token
|
||||||
|
res.locals.heapToken = Settings.analytics?.heap?.token
|
||||||
|
res.locals.tenderUrl = Settings.tenderUrl
|
||||||
|
next()
|
||||||
|
|
||||||
|
app.use (req, res, next) ->
|
||||||
|
if req.query? and req.query.scribtex_path?
|
||||||
|
res.locals.lookingForScribtex = true
|
||||||
|
res.locals.scribtexPath = req.query.scribtex_path
|
||||||
|
next()
|
||||||
|
|
||||||
|
|
||||||
|
|
5
services/web/app/coffee/infrastructure/Keys.coffee
Normal file
5
services/web/app/coffee/infrastructure/Keys.coffee
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
module.exports =
|
||||||
|
|
||||||
|
queue:
|
||||||
|
web_to_tpds_http_requests: "web_to_tpds_http_requests"
|
||||||
|
tpds_to_web_http_requests: "tpds_to_web_http_requests"
|
|
@ -0,0 +1,18 @@
|
||||||
|
module.exports =
|
||||||
|
user: (user) ->
|
||||||
|
if !user._id?
|
||||||
|
user = {_id : user}
|
||||||
|
return {
|
||||||
|
id: user._id
|
||||||
|
email: user.email
|
||||||
|
first_name: user.name
|
||||||
|
last_name: user.name
|
||||||
|
}
|
||||||
|
|
||||||
|
project: (project) ->
|
||||||
|
if !project._id?
|
||||||
|
project = {_id: project}
|
||||||
|
return {
|
||||||
|
id: project._id
|
||||||
|
name: project.name
|
||||||
|
}
|
24
services/web/app/coffee/infrastructure/Metrics.coffee
Normal file
24
services/web/app/coffee/infrastructure/Metrics.coffee
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
StatsD = require('lynx')
|
||||||
|
settings = require('settings-sharelatex')
|
||||||
|
statsd = new StatsD('localhost', 8125, {on_error:->})
|
||||||
|
|
||||||
|
buildKey = (key)-> "web.#{process.env.NODE_ENV}.#{key}"
|
||||||
|
|
||||||
|
module.exports =
|
||||||
|
set : (key, value, sampleRate = 1)->
|
||||||
|
statsd.set buildKey(key), value, sampleRate
|
||||||
|
|
||||||
|
inc : (key, sampleRate = 1)->
|
||||||
|
statsd.increment buildKey(key), sampleRate
|
||||||
|
|
||||||
|
Timer : class
|
||||||
|
constructor :(key, sampleRate = 1)->
|
||||||
|
this.start = new Date()
|
||||||
|
this.key = buildKey(key)
|
||||||
|
done:->
|
||||||
|
timeSpan = new Date - this.start
|
||||||
|
statsd.timing(this.key, timeSpan, this.sampleRate)
|
||||||
|
|
||||||
|
gauge : (key, value, sampleRate = 1)->
|
||||||
|
statsd.gauge key, value, sampleRate
|
||||||
|
|
5
services/web/app/coffee/infrastructure/Monitor.coffee
Normal file
5
services/web/app/coffee/infrastructure/Monitor.coffee
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
require("./Monitor/MongoDB").monitor()
|
||||||
|
|
||||||
|
exports.logger = require("./Monitor/HTTP").logger
|
||||||
|
|
||||||
|
|
21
services/web/app/coffee/infrastructure/Monitor/HTTP.coffee
Normal file
21
services/web/app/coffee/infrastructure/Monitor/HTTP.coffee
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
logger = require "logger-sharelatex"
|
||||||
|
|
||||||
|
module.exports.logger = (req, res, next) ->
|
||||||
|
startTime = new Date()
|
||||||
|
end = res.end
|
||||||
|
res.end = () ->
|
||||||
|
end.apply(this, arguments)
|
||||||
|
logger.log
|
||||||
|
req:
|
||||||
|
url: req.originalUrl || req.url
|
||||||
|
method: req.method
|
||||||
|
referrer: req.headers['referer'] || req.headers['referrer']
|
||||||
|
"remote-addr": req.ip || req.socket?.socket?.remoteAddress || req.socket?.remoteAddress
|
||||||
|
"user-agent": req.headers["user-agent"]
|
||||||
|
"content-length": req.headers["content-length"]
|
||||||
|
res:
|
||||||
|
"content-length": res._headers?["content-length"]
|
||||||
|
"response-time": new Date() - startTime
|
||||||
|
"http request"
|
||||||
|
next()
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue