Mirror of https://github.com/overleaf/overleaf.git (synced 2025-04-09 01:12:35 +00:00)

Merge branch 'master' into projectsizelimit
Commit 3fc357a343
56 changed files with 1015 additions and 270 deletions
services/web
  app
    coffee/Features
      Authentication
      Blog
      DocumentUpdater
      Email
      FileStore
      Subscription
        SubscriptionController.coffee
        SubscriptionGroupController.coffee
        SubscriptionGroupHandler.coffee
        UserFeaturesUpdater.coffee
      Uploads
        ArchiveManager.coffee
        FileSystemImportManager.coffee
        ProjectUploadController.coffee
        ProjectUploadManager.coffee
      User
    views/project
  public
    coffee
      directives
      ide
        directives
        editor/directives
        file-tree
        pdf/controllers
        settings/controllers
      main
    js/libs
    stylesheets/app
  test/UnitTests/coffee
    DocumentUpdater
    Editor
    Email
    FileStore
    Project
    Security
    Subscription
    ThirdPartyDataStore
    Uploads
      ArchiveManagerTests.coffee
      FileSystemImportManagerTests.coffee
      ProjectUploadControllerTests.coffee
      ProjectUploadManagerTests.coffee
    User
    infrastructure/LockManager
@@ -1,4 +1,3 @@
Settings = require 'settings-sharelatex'
User = require("../../models/User").User
{db, ObjectId} = require("../../infrastructure/mongojs")
crypto = require 'crypto'

@@ -4,15 +4,13 @@ logger = require("logger-sharelatex")
_ = require("underscore")
ErrorController = require "../Errors/ErrorController"

extensionsToProxy = [".png", ".xml", ".jpeg", ".json", ".zip", ".eps"]

module.exports = BlogController =

getPage: (req, res, next)->
url = req.url?.toLowerCase()
blogUrl = "#{settings.apis.blog.url}#{url}"

extensionsToProxy = [".png", ".xml", ".jpeg", ".json", ".zip", ".eps"]
extensionsToProxy = [".png", ".xml", ".jpeg", ".json", ".zip", ".eps", ".gif"]

shouldProxy = _.find extensionsToProxy, (extension)->
url.indexOf(extension) != -1

@@ -1,6 +1,5 @@
request = require 'request'
request = request.defaults()
async = require 'async'
settings = require 'settings-sharelatex'
_ = require 'underscore'
async = require 'async'

@@ -1,5 +1,4 @@
_ = require('underscore')

PersonalEmailLayout = require("./Layouts/PersonalEmailLayout")
NotificationEmailLayout = require("./Layouts/NotificationEmailLayout")
settings = require("settings-sharelatex")

@@ -1,7 +1,6 @@
logger = require('logger-sharelatex')
metrics = require('../../infrastructure/Metrics')
Settings = require('settings-sharelatex')
metrics = require("../../infrastructure/Metrics")
nodemailer = require("nodemailer")
sesTransport = require('nodemailer-ses-transport')
_ = require("underscore")

@@ -25,7 +24,6 @@ else if Settings?.email?.parameters?

logger.log "using smtp for email"
console.log smtp
nm_client = nodemailer.createTransport(smtp)
else
nm_client = client

@@ -6,24 +6,32 @@ settings = require("settings-sharelatex")
oneMinInMs = 60 * 1000
fiveMinsInMs = oneMinInMs * 5

module.exports =
module.exports = FileStoreHandler =

uploadFileFromDisk: (project_id, file_id, fsPath, callback)->
logger.log project_id:project_id, file_id:file_id, fsPath:fsPath, "uploading file from disk"
readStream = fs.createReadStream(fsPath)
opts =
method: "post"
uri: @_buildUrl(project_id, file_id)
timeout:fiveMinsInMs
writeStream = request(opts)
readStream.pipe writeStream
writeStream.on "end", callback
readStream.on "error", (err)->
logger.err err:err, project_id:project_id, file_id:file_id, fsPath:fsPath, "something went wrong on the read stream of uploadFileFromDisk"
callback err
writeStream.on "error", (err)->
logger.err err:err, project_id:project_id, file_id:file_id, fsPath:fsPath, "something went wrong on the write stream of uploadFileFromDisk"
callback err
fs.lstat fsPath, (err, stat)->
if err?
logger.err err:err, project_id:project_id, file_id:file_id, fsPath:fsPath, "error stating file"
callback(err)
if !stat.isFile()
logger.log project_id:project_id, file_id:file_id, fsPath:fsPath, "tried to upload symlink, not contining"
return callback(new Error("can not upload symlink"))

logger.log project_id:project_id, file_id:file_id, fsPath:fsPath, "uploading file from disk"
readStream = fs.createReadStream(fsPath)
opts =
method: "post"
uri: FileStoreHandler._buildUrl(project_id, file_id)
timeout:fiveMinsInMs
writeStream = request(opts)
readStream.pipe writeStream
writeStream.on "end", callback
readStream.on "error", (err)->
logger.err err:err, project_id:project_id, file_id:file_id, fsPath:fsPath, "something went wrong on the read stream of uploadFileFromDisk"
callback err
writeStream.on "error", (err)->
logger.err err:err, project_id:project_id, file_id:file_id, fsPath:fsPath, "something went wrong on the write stream of uploadFileFromDisk"
callback err

getFileStream: (project_id, file_id, query, callback)->
logger.log project_id:project_id, file_id:file_id, query:query, "getting file stream from file store"
@@ -1,7 +1,6 @@
SecurityManager = require '../../managers/SecurityManager'
SubscriptionHandler = require './SubscriptionHandler'
PlansLocator = require("./PlansLocator")
SubscriptionFormatters = require("./SubscriptionFormatters")
SubscriptionViewModelBuilder = require('./SubscriptionViewModelBuilder')
LimitationsManager = require("./LimitationsManager")
RecurlyWrapper = require './RecurlyWrapper'

@@ -1,10 +1,7 @@
SubscriptionGroupHandler = require("./SubscriptionGroupHandler")
logger = require("logger-sharelatex")
SubscriptionLocator = require("./SubscriptionLocator")

ErrorsController = require("../Errors/ErrorController")
settings = require("settings-sharelatex")

SubscriptionDomainHandler = require("./SubscriptionDomainHandler")
_ = require("underscore")

@@ -12,7 +9,7 @@ module.exports =

addUserToGroup: (req, res)->
adminUserId = req.session.user._id
newEmail = req.body.email
newEmail = req.body?.email?.toLowerCase()?.trim()
logger.log adminUserId:adminUserId, newEmail:newEmail, "adding user to group subscription"
SubscriptionGroupHandler.addUserToGroup adminUserId, newEmail, (err, user)->
if err?

@@ -90,11 +87,12 @@ module.exports =
logger.log subscription_id:subscription_id, user_id:req?.session?.user?._id, email:email, "starting the completion of joining group"
SubscriptionGroupHandler.processGroupVerification email, subscription_id, req.query?.token, (err)->
if err? and err == "token_not_found"
res.redirect "/user/subscription/#{subscription_id}/group/invited?expired=true"
return res.redirect "/user/subscription/#{subscription_id}/group/invited?expired=true"
else if err?
res.sendStatus 500
return res.sendStatus 500
else
res.redirect "/user/subscription/#{subscription_id}/group/successful-join"
logger.log subscription_id:subscription_id, email:email, "user successful completed join of group subscription"
return res.redirect "/user/subscription/#{subscription_id}/group/successful-join"

renderSuccessfulJoinPage: (req, res)->
subscription_id = req.params.subscription_id

@@ -14,9 +14,10 @@ NotificationsBuilder = require("../Notifications/NotificationsBuilder")
module.exports = SubscriptionGroupHandler =

addUserToGroup: (adminUserId, newEmail, callback)->
logger.log adminUserId:adminUserId, newEmail:newEmail, "adding user to group"
UserCreator.getUserOrCreateHoldingAccount newEmail, (err, user)->
if err?
logger.err err:err, "error creating user for holding account"
logger.err err:err, adminUserId:adminUserId, newEmail:newEmail, "error creating user for holding account"
return callback(err)
if !user?
msg = "no user returned whenc reating holidng account or getting user"

@@ -24,8 +25,10 @@ module.exports = SubscriptionGroupHandler =
return callback(msg)
LimitationsManager.hasGroupMembersLimitReached adminUserId, (err, limitReached, subscription)->
if err?
logger.err err:err, adminUserId:adminUserId, newEmail:newEmail, "error checking if limit reached for group plan"
return callback(err)
if limitReached
logger.err adminUserId:adminUserId, newEmail:newEmail, "group subscription limit reached not adding user to group"
return callback(limitReached:limitReached)
SubscriptionUpdater.addUserToGroup adminUserId, user._id, (err)->
if err?

@@ -74,8 +77,8 @@ module.exports = SubscriptionGroupHandler =
EmailHandler.sendEmail "completeJoinGroupAccount", opts, callback

processGroupVerification: (userEmail, subscription_id, token, callback)->
logger.log userEmail:userEmail, subscription_id:subscription_id, "processing group verification for user"
OneTimeTokenHandler.getValueFromTokenAndExpire token, (err, token_subscription_id)->

if err? or subscription_id != token_subscription_id
logger.err userEmail:userEmail, token:token, "token value not found for processing group verification"
return callback("token_not_found")

@@ -84,7 +87,7 @@ module.exports = SubscriptionGroupHandler =
logger.err err:err, subscription:subscription, userEmail:userEmail, subscription_id:subscription_id, "error getting subscription"
return callback(err)
if !subscription?
logger.warn subscription_id:subscription_id, "no subscription found"
logger.warn subscription_id:subscription_id, userEmail:userEmail, "no subscription found"
return callback()
SubscriptionGroupHandler.addUserToGroup subscription?.admin_id, userEmail, callback

@@ -1,4 +1,3 @@
Settings = require "settings-sharelatex"
logger = require("logger-sharelatex")
User = require('../../models/User').User
PlansLocator = require("./PlansLocator")
@@ -1,21 +1,23 @@
child = require "child_process"
logger = require "logger-sharelatex"
metrics = require "../../infrastructure/Metrics"
fs = require "fs"
Path = require "path"
_ = require("underscore")

ONE_MEG = 1024 * 1024

module.exports = ArchiveManager =
extractZipArchive: (source, destination, _callback = (err) ->) ->
callback = (args...) ->
_callback(args...)
_callback = () ->

timer = new metrics.Timer("unzipDirectory")
logger.log source: source, destination: destination, "unzipping file"

unzip = child.spawn("unzip", [source, "-d", destination])
_isZipTooLarge: (source, callback = (err, isTooLarge)->)->
callback = _.once callback

# don't remove this line, some zips need
# us to listen on this for some unknow reason
unzip = child.spawn("unzip", ["-l", source])

output = ""
unzip.stdout.on "data", (d)->
output += d

error = null
unzip.stderr.on "data", (chunk) ->

@@ -29,9 +31,80 @@ module.exports = ArchiveManager =
callback(err)

unzip.on "exit", () ->
timer.done()
if error?
error = new Error(error)
logger.error err:error, source: source, destination: destination, "error unzipping file"
callback(error)
logger.error err:error, source: source, destination: destination, "error checking zip size"

lines = output.split("\n")
lastLine = lines[lines.length - 2]?.trim()
totalSizeInBytes = lastLine?.split(" ")?[0]

totalSizeInBytes = parseInt(totalSizeInBytes)

if !totalSizeInBytes? or isNaN(totalSizeInBytes)
logger.err source:source, "error getting bytes of zip"
return callback(new Error("something went wrong"))

isTooLarge = totalSizeInBytes > (ONE_MEG * 300)

callback(error, isTooLarge)


extractZipArchive: (source, destination, _callback = (err) ->) ->
callback = (args...) ->
_callback(args...)
_callback = () ->

ArchiveManager._isZipTooLarge source, (err, isTooLarge)->
if err?
logger.err err:err, "error checking size of zip file"
return callback(err)

if isTooLarge
return callback(new Error("zip_too_large"))

timer = new metrics.Timer("unzipDirectory")
logger.log source: source, destination: destination, "unzipping file"

unzip = child.spawn("unzip", [source, "-d", destination])

# don't remove this line, some zips need
# us to listen on this for some unknow reason
unzip.stdout.on "data", (d)->

error = null
unzip.stderr.on "data", (chunk) ->
error ||= ""
error += chunk

unzip.on "error", (err) ->
logger.error {err, source, destination}, "unzip failed"
if err.code == "ENOENT"
logger.error "unzip command not found. Please check the unzip command is installed"
callback(err)

unzip.on "exit", () ->
timer.done()
if error?
error = new Error(error)
logger.error err:error, source: source, destination: destination, "error unzipping file"
callback(error)

findTopLevelDirectory: (directory, callback = (error, topLevelDir) ->) ->
fs.readdir directory, (error, files) ->
return callback(error) if error?
if files.length == 1
childPath = Path.join(directory, files[0])
fs.stat childPath, (error, stat) ->
return callback(error) if error?
if stat.isDirectory()
return callback(null, childPath)
else
return callback(null, directory)
else
return callback(null, directory)
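Note on the zip-size check added above: _isZipTooLarge derives the total uncompressed size from the trailing summary line of `unzip -l` output, then rejects anything over ONE_MEG * 300. A minimal CoffeeScript sketch of that parsing step, using a made-up listing (the exact column layout varies between unzip builds), looks like this:

    # Assumed tail of `unzip -l archive.zip`:
    #
    #   ---------                     -------
    #      345678                     12 files
    #
    # The handler takes the second-to-last line (the last element after
    # splitting on "\n" is empty) and reads its first whitespace-separated
    # field as the total uncompressed size in bytes.
    parseUnzipListing = (output) ->
      lines = output.split("\n")
      lastLine = lines[lines.length - 2]?.trim()
      parseInt(lastLine?.split(" ")?[0])

    # e.g. parseUnzipListing(output) > 300 * 1024 * 1024 means the upload is refused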
@@ -4,64 +4,108 @@ _ = require "underscore"
FileTypeManager = require "./FileTypeManager"
EditorController = require "../Editor/EditorController"
ProjectLocator = require "../Project/ProjectLocator"
logger = require "logger-sharelatex"
logger = require("logger-sharelatex")

module.exports = FileSystemImportManager =
addDoc: (project_id, folder_id, name, path, replace, callback = (error, doc)-> )->
fs.readFile path, "utf8", (error, content = "") ->
return callback(error) if error?
content = content.replace(/\r/g, "")
lines = content.split("\n")
EditorController.addDocWithoutLock project_id, folder_id, name, lines, "upload", callback

addFile: (project_id, folder_id, name, path, replace, callback = (error, file)-> )->
logger.log project_id:project_id, folder_id:folder_id, name:name, path:path, replace:replace, "adding file from filesystem"
if replace
ProjectLocator.findElement project_id: project_id, element_id: folder_id, type: "folder", (error, folder) ->
addDoc: (user_id, project_id, folder_id, name, path, replace, callback = (error, doc)-> )->
FileSystemImportManager._isSafeOnFileSystem path, (err, isSafe)->
if !isSafe
logger.log user_id:user_id, project_id:project_id, folder_id:folder_id, name:name, path:path, "add doc is from symlink, stopping process"
return callback("path is symlink")
fs.readFile path, "utf8", (error, content = "") ->
return callback(error) if error?
return callback(new Error("Couldn't find folder")) if !folder?
existingFile = null
for fileRef in folder.fileRefs
if fileRef.name == name
existingFile = fileRef
break
if existingFile?
EditorController.replaceFile project_id, existingFile._id, path, "upload", callback
else
EditorController.addFileWithoutLock project_id, folder_id, name, path, "upload", callback
else
EditorController.addFileWithoutLock project_id, folder_id, name, path, "upload", callback

addFolder: (project_id, folder_id, name, path, replace, callback = (error)-> ) ->
EditorController.addFolderWithoutLock project_id, folder_id, name, "upload", (error, new_folder) =>
return callback(error) if error?
@addFolderContents project_id, new_folder._id, path, replace, (error) ->
return callback(error) if error?
callback null, new_folder

addFolderContents: (project_id, parent_folder_id, folderPath, replace, callback = (error)-> ) ->
fs.readdir folderPath, (error, entries = []) =>
return callback(error) if error?
jobs = _.map entries, (entry) =>
(callback) =>
FileTypeManager.shouldIgnore entry, (error, ignore) =>
content = content.replace(/\r/g, "")
lines = content.split("\n")
if replace
ProjectLocator.findElement project_id: project_id, element_id: folder_id, type: "folder", (error, folder) ->
return callback(error) if error?
if !ignore
@addEntity project_id, parent_folder_id, entry, "#{folderPath}/#{entry}", replace, callback
return callback(new Error("Couldn't find folder")) if !folder?
existingDoc = null
for doc in folder.docs
if doc.name == name
existingDoc = doc
break
if existingDoc?
EditorController.setDoc project_id, existingDoc._id, user_id, lines, "upload", callback
else
callback()
async.parallelLimit jobs, 5, callback
EditorController.addDocWithoutLock project_id, folder_id, name, lines, "upload", callback
else
EditorController.addDocWithoutLock project_id, folder_id, name, lines, "upload", callback

addEntity: (project_id, folder_id, name, path, replace, callback = (error, entity)-> ) ->
FileTypeManager.isDirectory path, (error, isDirectory) =>
return callback(error) if error?
if isDirectory
@addFolder project_id, folder_id, name, path, replace, callback
addFile: (user_id, project_id, folder_id, name, path, replace, callback = (error, file)-> )->
FileSystemImportManager._isSafeOnFileSystem path, (err, isSafe)->
if !isSafe
logger.log user_id:user_id, project_id:project_id, folder_id:folder_id, name:name, path:path, "add file is from symlink, stopping insert"
return callback("path is symlink")

if !replace
EditorController.addFileWithoutLock project_id, folder_id, name, path, "upload", callback
else
FileTypeManager.isBinary name, path, (error, isBinary) =>
ProjectLocator.findElement project_id: project_id, element_id: folder_id, type: "folder", (error, folder) ->
return callback(error) if error?
if isBinary
@addFile project_id, folder_id, name, path, replace, callback
return callback(new Error("Couldn't find folder")) if !folder?
existingFile = null
for fileRef in folder.fileRefs
if fileRef.name == name
existingFile = fileRef
break
if existingFile?
EditorController.replaceFile project_id, existingFile._id, path, "upload", callback
else
@addDoc project_id, folder_id, name, path, replace, callback
EditorController.addFileWithoutLock project_id, folder_id, name, path, "upload", callback

addFolder: (user_id, project_id, folder_id, name, path, replace, callback = (error)-> ) ->
FileSystemImportManager._isSafeOnFileSystem path, (err, isSafe)->
if !isSafe
logger.log user_id:user_id, project_id:project_id, folder_id:folder_id, path:path, "add folder is from symlink, stopping insert"
return callback("path is symlink")
EditorController.addFolderWithoutLock project_id, folder_id, name, "upload", (error, new_folder) =>
return callback(error) if error?
FileSystemImportManager.addFolderContents user_id, project_id, new_folder._id, path, replace, (error) ->
return callback(error) if error?
callback null, new_folder

addFolderContents: (user_id, project_id, parent_folder_id, folderPath, replace, callback = (error)-> ) ->
FileSystemImportManager._isSafeOnFileSystem folderPath, (err, isSafe)->
if !isSafe
logger.log user_id:user_id, project_id:project_id, parent_folder_id:parent_folder_id, folderPath:folderPath, "add folder contents is from symlink, stopping insert"
return callback("path is symlink")
fs.readdir folderPath, (error, entries = []) =>
return callback(error) if error?
jobs = _.map entries, (entry) =>
(callback) =>
FileTypeManager.shouldIgnore entry, (error, ignore) =>
return callback(error) if error?
if !ignore
FileSystemImportManager.addEntity user_id, project_id, parent_folder_id, entry, "#{folderPath}/#{entry}", replace, callback
else
callback()
async.parallelLimit jobs, 5, callback

addEntity: (user_id, project_id, folder_id, name, path, replace, callback = (error, entity)-> ) ->
FileSystemImportManager._isSafeOnFileSystem path, (err, isSafe)->
if !isSafe
logger.log user_id:user_id, project_id:project_id, folder_id:folder_id, path:path, "add entry is from symlink, stopping insert"
return callback("path is symlink")

FileTypeManager.isDirectory path, (error, isDirectory) =>
return callback(error) if error?
if isDirectory
FileSystemImportManager.addFolder user_id, project_id, folder_id, name, path, replace, callback
else
FileTypeManager.isBinary name, path, (error, isBinary) =>
return callback(error) if error?
if isBinary
FileSystemImportManager.addFile user_id, project_id, folder_id, name, path, replace, callback
else
FileSystemImportManager.addDoc user_id, project_id, folder_id, name, path, replace, callback


_isSafeOnFileSystem: (path, callback = (err, isSafe)->)->
fs.lstat path, (err, stat)->
if err?
logger.err err:err, "error with path symlink check"
return callback(err)
isSafe = stat.isFile() or stat.isDirectory()
callback(err, isSafe)
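Note on the symlink guard introduced above: _isSafeOnFileSystem relies on fs.lstat, which stats the link itself rather than its target, so a symlink reports neither isFile() nor isDirectory() and the import stops. A minimal sketch, with purely illustrative paths:

    fs = require "fs"

    # fs.lstat does not follow links, so a symlink is neither a regular
    # file nor a directory and fails the isFile()/isDirectory() test.
    fs.symlink "/etc/passwd", "/tmp/sneaky-link", ->
      fs.lstat "/tmp/sneaky-link", (err, stat) ->
        console.log stat.isFile() or stat.isDirectory()   # false, so the entity is rejected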
@@ -35,7 +35,8 @@ module.exports = ProjectUploadController =
logger.err project_id:project_id, name:name, "bad name when trying to upload file"
return res.send success: false
logger.log folder_id:folder_id, project_id:project_id, "getting upload file request"
FileSystemImportManager.addEntity project_id, folder_id, name, path, true, (error, entity) ->
user_id = req.session.user._id
FileSystemImportManager.addEntity user_id, project_id, folder_id, name, path, true, (error, entity) ->
fs.unlink path, ->
timer.done()
if error?

@@ -9,19 +9,21 @@ module.exports = ProjectUploadHandler =
createProjectFromZipArchive: (owner_id, name, zipPath, callback = (error, project) ->) ->
ProjectCreationHandler.createBlankProject owner_id, name, (error, project) =>
return callback(error) if error?
@insertZipArchiveIntoFolder project._id, project.rootFolder[0]._id, zipPath, (error) ->
@insertZipArchiveIntoFolder owner_id, project._id, project.rootFolder[0]._id, zipPath, (error) ->
return callback(error) if error?
ProjectRootDocManager.setRootDocAutomatically project._id, (error) ->
return callback(error) if error?
callback(error, project)

insertZipArchiveIntoFolder: (project_id, folder_id, path, callback = (error) ->) ->
insertZipArchiveIntoFolder: (owner_id, project_id, folder_id, path, callback = (error) ->) ->
destination = @_getDestinationDirectory path
ArchiveManager.extractZipArchive path, destination, (error) ->
return callback(error) if error?
FileSystemImportManager.addFolderContents project_id, folder_id, destination, false, (error) ->
ArchiveManager.findTopLevelDirectory destination, (error, topLevelDestination) ->
return callback(error) if error?
rimraf(destination, callback)
FileSystemImportManager.addFolderContents owner_id, project_id, folder_id, topLevelDestination, false, (error) ->
return callback(error) if error?
rimraf(destination, callback)

_getDestinationDirectory: (source) ->
return path.join(path.dirname(source), "#{path.basename(source, ".zip")}-#{Date.now()}")

@@ -1,3 +1,4 @@
UserHandler = require("./UserHandler")
UserDeleter = require("./UserDeleter")
UserLocator = require("./UserLocator")
User = require("../../models/User").User

@@ -10,7 +11,7 @@ AuthenticationManager = require("../Authentication/AuthenticationManager")
UserUpdater = require("./UserUpdater")
settings = require "settings-sharelatex"

module.exports =
module.exports = UserController =

deleteUser: (req, res)->
user_id = req.session.user._id

@@ -67,7 +68,14 @@ module.exports =
else
message = req.i18n.translate("problem_changing_email_address")
return res.send 500, {message:message}
res.sendStatus(200)
User.findById user_id, (err, user)->
if err?
logger.err err:err, user_id:user_id, "error getting user for email update"
return res.send 500
UserHandler.populateGroupLicenceInvite user, (err)-> #need to refresh this in the background
if err?
logger.err err:err, "error populateGroupLicenceInvite"
res.sendStatus(200)

logout : (req, res)->
metrics.inc "user.logout"

@@ -6,7 +6,8 @@ logger = require("logger-sharelatex")

module.exports = UserHandler =

_populateGroupLicenceInvite: (user, callback)->
populateGroupLicenceInvite: (user, callback)->
logger.log user_id:user._id, "populating any potential group licence invites"
licence = SubscriptionDomainHandler.getLicenceUserCanJoin user
if !licence?
return callback()

@@ -21,5 +22,5 @@ module.exports = UserHandler =
NotificationsBuilder.groupPlan(user, licence).create(callback)

setupLoginData: (user, callback = ->)->
@_populateGroupLicenceInvite user, callback
@populateGroupLicenceInvite user, callback
@@ -53,7 +53,13 @@ block content

include ./editor/share

#ide-body(ng-cloak, layout="main", ng-hide="state.loading", resize-on="layout:chat:resize")
#ide-body(
ng-cloak,
layout="main",
ng-hide="state.loading",
resize-on="layout:chat:resize",
minimum-restore-size-west="130"
)
.ui-layout-west
include ./editor/file-tree

@@ -6,6 +6,7 @@ div.full-size(
resize-on="layout:main:resize"
resize-proportionally="true"
initial-size-east="'50%'"
minimum-restore-size-east="300"
)
.ui-layout-center
.loading-panel(ng-show="!editor.sharejs_doc || editor.opening")

@@ -19,6 +20,7 @@ div.full-size(
keybindings="settings.mode",
font-size="settings.fontSize",
auto-complete="settings.autoComplete",
spell-check="true",
spell-check-language="project.spellCheckLanguage",
highlights="onlineUserCursorHighlights[editor.open_doc_id]"
show-print-margin="false",

@@ -46,26 +46,24 @@ aside#file-tree(ng-controller="FileTreeController", ng-class="{ 'multi-selected'
ng-controller="FileTreeRootFolderController",
ng-class="{ 'no-toolbar': !permissions.write }"
)

div(ng-show="ui.pdfLayout == 'flat' && (ui.view == 'editor' || ui.view == 'pdf' || ui.view == 'file')")
ul.list-unstyled.file-tree-list
li(
ng-class="{ 'selected': ui.view == 'pdf' }"
ng-controller="PdfViewToggleController"
)
.entity
.entity-name(
ng-click="togglePdfView()"
)
i.fa.fa-fw.toggle
i.fa.fa-fw.fa-file-pdf-o
| PDF

ul.list-unstyled.file-tree-list(
droppable="permissions.write"
accept=".entity-name"
on-drop-callback="onDrop"
)
li(
ng-show="ui.pdfLayout == 'flat' && (ui.view == 'editor' || ui.view == 'pdf' || ui.view == 'file')"
ng-class="{ 'selected': ui.view == 'pdf' }"
ng-controller="PdfViewToggleController"
)
.entity
.entity-name(
ng-click="togglePdfView()"
)
i.fa.fa-fw.toggle
i.fa.fa-fw.fa-file-pdf-o
| PDF

file-entity(
entity="entity",
permissions="permissions",

@@ -364,6 +362,17 @@ script(type="text/ng-template", id="uploadFileModalTemplate")
.alert.alert-warning.small.modal-alert(ng-if="tooManyFiles") #{translate("maximum_files_uploaded_together", {max:"{{max_files}}"})}
.alert.alert-warning.small.modal-alert(ng-if="rateLimitHit") #{translate("too_many_files_uploaded_throttled_short_period")}
.alert.alert-warning.small.modal-alert(ng-if="notLoggedIn") #{translate("session_expired_redirecting_to_login", {seconds:"{{secondsToRedirect}}"})}
.alert.alert-warning.small.modal-alert(ng-if="conflicts.length > 0")
p.text-center
| The following files already exist in this project:
ul.text-center.list-unstyled.row-spaced-small
li(ng-repeat="conflict in conflicts"): strong {{ conflict }}
p.text-center.row-spaced-small
| Do you want to overwrite them?
p.text-center
a(href, ng-click="doUpload()").btn.btn-primary Overwrite
a(href, ng-click="cancel()").btn.btn-default Cancel

.modal-body(
fine-upload

@@ -374,10 +383,14 @@ script(type="text/ng-template", id="uploadFileModalTemplate")
drag-area-text="{{drag_files}}"
hint-text="{{hint_press_and_hold_control_key}}"
multiple="true"
auto-upload="false"
on-complete-callback="onComplete"
on-upload-callback="onUpload"
on-validate-batch="onValidateBatch"
on-error-callback="onError"
on-submit-callback="onSubmit"
on-cancel-callback="onCancel"
control="control"
params="{'folder_id': parent_folder_id}"
)
span #{translate("upload_files")}
@@ -125,7 +125,7 @@ script(type='text/ng-template', id='shareProjectModalTemplate')
span.text-danger.error(ng-show="state.error") #{translate("generic_something_went_wrong")}
button.btn.btn-primary(
ng-click="done()"
) #{translate("done")}
) #{translate("close")}

script(type="text/ng-template", id="makePublicModalTemplate")
.modal-header

@@ -16,7 +16,11 @@ define [
onUploadCallback: "="
onValidateBatch: "="
onErrorCallback: "="
onSubmitCallback: "="
onCancelCallback: "="
autoUpload: "="
params: "="
control: "="
}
link: (scope, element, attrs) ->
multiple = scope.multiple or false

@@ -37,12 +41,19 @@ define [
onUpload = scope.onUploadCallback or () ->
onError = scope.onErrorCallback or () ->
onValidateBatch = scope.onValidateBatch or () ->
onSubmit = scope.onSubmitCallback or () ->
onCancel = scope.onCancelCallback or () ->
if !scope.autoUpload?
autoUpload = true
else
autoUpload = scope.autoUpload
params = scope.params or {}
params._csrf = window.csrfToken

q = new qq.FineUploader
element: element[0]
multiple: multiple
autoUpload: autoUpload
disabledCancelForFormUploads: true
validation: validation
maxConnections: maxConnections

@@ -56,6 +67,8 @@ define [
onUpload: onUpload
onValidateBatch: onValidateBatch
onError: onError
onSubmit: onSubmit
onCancel: onCancel
text: text
template: """
<div class="qq-uploader">

@@ -70,5 +83,7 @@ define [
<ul class="qq-upload-list"></ul>
</div>
"""
window.q = q
scope.control?.q = q
return q
}

@@ -37,8 +37,12 @@ define [

# Restore previously recorded state
if (state = ide.localStorage("layout.#{name}"))?
options.west = state.west
options.east = state.east
if state.east?
if !attrs.minimumRestoreSizeEast? or (state.east.size >= attrs.minimumRestoreSizeEast and !state.east.initClosed)
options.east = state.east
if state.west?
if !attrs.minimumRestoreSizeWest? or (state.west.size >= attrs.minimumRestoreSizeWest and !state.west.initClosed)
options.west = state.west

repositionControls = () ->
state = element.layout().readState()
@@ -18,7 +18,7 @@ define [
url = ace.config._moduleUrl(args...) + "?fingerprint=#{window.aceFingerprint}"
return url

App.directive "aceEditor", ($timeout, $compile, $rootScope, event_tracking, localStorage) ->
App.directive "aceEditor", ($timeout, $compile, $rootScope, event_tracking, localStorage, $cacheFactory) ->
monkeyPatchSearch($rootScope, $compile)

return {

@@ -29,6 +29,7 @@ define [
fontSize: "="
autoComplete: "="
sharejsDoc: "="
spellCheck: "="
spellCheckLanguage: "="
highlights: "="
text: "="

@@ -55,7 +56,9 @@ define [
scope.name = attrs.aceEditor

autoCompleteManager = new AutoCompleteManager(scope, editor, element)
spellCheckManager = new SpellCheckManager(scope, editor, element)
if scope.spellCheck # only enable spellcheck when explicitly required
spellCheckCache = $cacheFactory("spellCheck-#{scope.name}", {capacity: 1000})
spellCheckManager = new SpellCheckManager(scope, editor, element, spellCheckCache)
undoManager = new UndoManager(scope, editor, element)
highlightsManager = new HighlightsManager(scope, editor, element)
cursorPositionManager = new CursorPositionManager(scope, editor, element, localStorage)

@@ -66,12 +66,13 @@ define [
}
if references.keys and references.keys.length > 0
references.keys.forEach (key) ->
result.push({
caption: "\\#{commandName}{#{previousArgsCaption}#{key}",
value: "\\#{commandName}{#{previousArgs}#{key}",
meta: "reference",
score: 10000
})
if !(key in [null, undefined])
result.push({
caption: "\\#{commandName}{#{previousArgsCaption}#{key}",
value: "\\#{commandName}{#{previousArgs}#{key}",
meta: "reference",
score: 10000
})
callback null, result
else
callback null, result
@@ -5,7 +5,7 @@ define [
Range = ace.require("ace/range").Range

class SpellCheckManager
constructor: (@$scope, @editor, @element) ->
constructor: (@$scope, @editor, @element, @cache) ->
$(document.body).append @element.find(".spell-check-menu")

@updatedLines = []

@@ -102,6 +102,8 @@ define [
learnWord: (highlight) ->
@apiRequest "/learn", word: highlight.word
@highlightedWordManager.removeWord highlight.word
language = @$scope.spellCheckLanguage
@cache?.put("#{language}:#{highlight.word}", true)

getHighlightedWordAtCursor: () ->
cursor = @editor.getCursorPosition()

@@ -143,24 +145,67 @@ define [
runSpellCheck: (linesToProcess) ->
{words, positions} = @getWords(linesToProcess)
language = @$scope.spellCheckLanguage
@apiRequest "/check", {language: language, words: words}, (error, result) =>
if error? or !result? or !result.misspellings?
return null

highlights = []
seen = {}
newWords = []
newPositions = []

# iterate through all words, building up a list of
# newWords/newPositions not in the cache
for word, i in words
key = "#{language}:#{word}"
seen[key] ?= @cache.get(key) # avoid hitting the cache unnecessarily
cached = seen[key]
if not cached?
newWords.push words[i]
newPositions.push positions[i]
else if cached is true
# word is correct
else
highlights.push
column: positions[i].column
row: positions[i].row
word: word
suggestions: cached
words = newWords
positions = newPositions

displayResult = (highlights) =>
if linesToProcess?
for shouldProcess, row in linesToProcess
@highlightedWordManager.clearRows(row, row) if shouldProcess
else
@highlightedWordManager.clearRows()
for highlight in highlights
@highlightedWordManager.addHighlight highlight

for misspelling in result.misspellings
word = words[misspelling.index]
position = positions[misspelling.index]
@highlightedWordManager.addHighlight
column: position.column
row: position.row
word: word
suggestions: misspelling.suggestions
if not words.length
displayResult highlights
else
@apiRequest "/check", {language: language, words: words}, (error, result) =>
if error? or !result? or !result.misspellings?
return null
mispelled = []
for misspelling in result.misspellings
word = words[misspelling.index]
position = positions[misspelling.index]
mispelled[misspelling.index] = true
highlights.push
column: position.column
row: position.row
word: word
suggestions: misspelling.suggestions
key = "#{language}:#{word}"
if not seen[key]
@cache.put key, misspelling.suggestions
seen[key] = true
for word, i in words when not mispelled[i]
key = "#{language}:#{word}"
if not seen[key]
@cache.put(key, true)
seen[key] = true
displayResult highlights

getWords: (linesToProcess) ->
lines = @editor.getValue().split("\n")
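Note on the cache added above: entries are keyed "#{language}:#{word}" in an Angular $cacheFactory cache (capacity 1000). A value of true marks a word already known to be correct, an array holds the suggestions for a known misspelling, and a miss means the word still has to be sent to the /check endpoint. An illustrative sketch of that contract (the words and suggestions are made up):

    # The real cache is created per editor as
    # $cacheFactory("spellCheck-#{scope.name}", {capacity: 1000}).
    cache = $cacheFactory("spellCheck-example", {capacity: 1000})

    cache.put "en:banana", true                      # known-correct word
    cache.put "en:bananna", ["banana", "bandanna"]   # known misspelling plus suggestions

    cached = cache.get("en:bananna")
    # undefined -> the word still goes to the /check endpoint
    # true      -> the word is skipped as correct
    # an array  -> it is highlighted immediately with the cached suggestions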
@@ -135,6 +135,12 @@ define [
multiSelectSelectedEntity: () ->
@findSelectedEntity()?.multiSelected = true

existsInFolder: (folder_id, name) ->
folder = @findEntityById(folder_id)
return false if !folder?
entity = @_findEntityByPathInFolder(folder, name)
return entity?

findSelectedEntity: () ->
selected = null
@forEachEntity (entity) ->

@@ -110,7 +110,8 @@ define [
$scope.rateLimitHit = false
$scope.secondsToRedirect = 10
$scope.notLoggedIn = false

$scope.conflicts = []
$scope.control = {}

needToLogBackIn = ->
$scope.notLoggedIn = true

@@ -125,11 +126,6 @@ define [

decreseTimeout()

uploadCount = 0
$scope.onUpload = () ->
uploadCount++

$scope.max_files = 40
$scope.onComplete = (error, name, response) ->
$timeout (() ->

@@ -154,6 +150,34 @@ define [
else if reason.indexOf("403") != -1
needToLogBackIn()

_uploadTimer = null
uploadIfNoConflicts = () ->
if $scope.conflicts.length == 0
$scope.doUpload()

uploadCount = 0
$scope.onSubmit = (id, name) ->
uploadCount++
if ide.fileTreeManager.existsInFolder($scope.parent_folder_id, name)
$scope.conflicts.push name
$scope.$apply()
if !_uploadTimer?
_uploadTimer = setTimeout () ->
_uploadTimer = null
uploadIfNoConflicts()
, 0
return true

$scope.onCancel = (id, name) ->
uploadCount--
index = $scope.conflicts.indexOf(name)
if index > -1
$scope.conflicts.splice(index, 1)
$scope.$apply()
uploadIfNoConflicts()

$scope.doUpload = () ->
$scope.control?.q?.uploadStoredFiles()

$scope.cancel = () ->
$modalInstance.dismiss('cancel')
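Note on the deferred upload above: onSubmit fires once per file in a batch, so the zero-delay timer postpones the start decision until every file has had a chance to register a conflict; only a conflict-free batch uploads immediately, otherwise the modal waits for Overwrite or Cancel. A minimal sketch of the pattern (conflicts and startUpload are placeholders, not part of the diff):

    conflicts = []
    _timer = null

    onSubmit = (name, existsAlready) ->
      conflicts.push name if existsAlready
      if !_timer?
        # runs after the whole batch has passed through onSubmit
        _timer = setTimeout () ->
          _timer = null
          startUpload() if conflicts.length == 0   # otherwise wait for Overwrite/Cancel
        , 0
      return true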
@@ -1,7 +1,8 @@
define [
"base"
"libs/latex-log-parser"
], (App, LogParser) ->
"libs/bib-log-parser"
], (App, LogParser, BibLogParser) ->
App.controller "PdfController", ($scope, $http, ide, $modal, synctex, event_tracking, localStorage) ->
autoCompile = true
$scope.$on "project:joined", () ->

@@ -12,7 +13,7 @@ define [

$scope.$on "pdf:error:display", () ->
$scope.pdf.error = true

$scope.draft = localStorage("draft:#{$scope.project_id}") or false
$scope.$watch "draft", (new_value, old_value) ->
if new_value? and old_value != new_value

@@ -82,25 +83,42 @@ define [
qs = if outputFile?.build? then "?build=#{outputFile.build}" else ""
$http.get "/project/#{$scope.project_id}/output/output.log" + qs
.success (log) ->
#console.log ">>", log
$scope.pdf.rawLog = log
logEntries = LogParser.parse(log, ignoreDuplicates: true)
#console.log ">>", logEntries
$scope.pdf.logEntries = logEntries
$scope.pdf.logEntries.all = logEntries.errors.concat(logEntries.warnings).concat(logEntries.typesetting)

$scope.pdf.logEntryAnnotations = {}
for entry in logEntries.all
if entry.file?
entry.file = normalizeFilePath(entry.file)

entity = ide.fileTreeManager.findEntityByPath(entry.file)
if entity?
$scope.pdf.logEntryAnnotations[entity.id] ||= []
$scope.pdf.logEntryAnnotations[entity.id].push {
row: entry.line - 1
type: if entry.level == "error" then "error" else "warning"
text: entry.message
}

# # # #
proceed = () ->
$scope.pdf.logEntryAnnotations = {}
for entry in logEntries.all
if entry.file?
entry.file = normalizeFilePath(entry.file)
entity = ide.fileTreeManager.findEntityByPath(entry.file)
if entity?
$scope.pdf.logEntryAnnotations[entity.id] ||= []
$scope.pdf.logEntryAnnotations[entity.id].push {
row: entry.line - 1
type: if entry.level == "error" then "error" else "warning"
text: entry.message
}
# Get the biber log and parse it too
$http.get "/project/#{$scope.project_id}/output/output.blg" + qs
.success (log) ->
window._s = $scope
biberLogEntries = BibLogParser.parse(log, {})
if $scope.pdf.logEntries
entries = $scope.pdf.logEntries
all = biberLogEntries.errors.concat(biberLogEntries.warnings)
entries.all = entries.all.concat(all)
entries.errors = entries.errors.concat(biberLogEntries.errors)
entries.warnings = entries.warnings.concat(biberLogEntries.warnings)
proceed()
.error (e) ->
console.error ">> error", e
proceed()
# # # #
.error () ->
$scope.pdf.logEntries = []
$scope.pdf.rawLog = ""

@@ -127,7 +145,7 @@ define [
$scope.recompile = (options = {}) ->
return if $scope.pdf.compiling
$scope.pdf.compiling = true

ide.$scope.$broadcast("flush-changes")

options.rootDocOverride_id = getRootDocOverride_id()

@@ -140,7 +158,7 @@ define [
.error () ->
$scope.pdf.compiling = false
$scope.pdf.error = true

# This needs to be public.
ide.$scope.recompile = $scope.recompile

@@ -177,17 +195,17 @@ define [
.then (data) ->
{doc, line} = data
ide.editorManager.openDoc(doc, gotoLine: line)

$scope.switchToFlatLayout = () ->
$scope.ui.pdfLayout = 'flat'
$scope.ui.view = 'pdf'
ide.localStorage "pdf.layout", "flat"

$scope.switchToSideBySideLayout = () ->
$scope.ui.pdfLayout = 'sideBySide'
$scope.ui.view = 'editor'
localStorage "pdf.layout", "split"

if pdfLayout = localStorage("pdf.layout")
$scope.switchToSideBySideLayout() if pdfLayout == "split"
$scope.switchToFlatLayout() if pdfLayout == "flat"

@@ -216,7 +234,7 @@ define [
if !path?
deferred.reject()
return deferred.promise

# If the root file is folder/main.tex, then synctex sees the
# path as folder/./main.tex
rootDocDirname = ide.fileTreeManager.getRootDocDirname()

@@ -226,7 +244,7 @@ define [
{row, column} = cursorPosition

$http({
url: "/project/#{ide.project_id}/sync/code",
url: "/project/#{ide.project_id}/sync/code",
method: "GET",
params: {
file: path

@@ -253,7 +271,7 @@ define [
position.offset.top = position.offset.top + 80

$http({
url: "/project/#{ide.project_id}/sync/pdf",
url: "/project/#{ide.project_id}/sync/pdf",
method: "GET",
params: {
page: position.page + 1

@@ -316,4 +334,4 @@ define [

$scope.cancel = () ->
$modalInstance.dismiss('cancel')
]
]
@@ -1,6 +1,7 @@
define [
"base"
], (App) ->
MAX_PROJECT_NAME_LENGTH = 150
App.controller "ProjectNameController", ["$scope", "settings", "ide", ($scope, settings, ide) ->
$scope.state =
renaming: false

@@ -12,11 +13,14 @@ define [
$scope.$emit "project:rename:start"

$scope.finishRenaming = () ->
newName = $scope.inputs.name
if newName.length < 150
$scope.project.name = newName
settings.saveProjectSettings({name: $scope.project.name})
$scope.state.renaming = false
newName = $scope.inputs.name
if !newName? or newName.length == 0 or newName.length > MAX_PROJECT_NAME_LENGTH
return
if $scope.project.name == newName
return
$scope.project.name = newName
settings.saveProjectSettings({name: $scope.project.name})

ide.socket.on "projectNameUpdated", (name) ->
$scope.$apply () ->

@@ -257,9 +257,11 @@ define [
modalInstance.result.then (project_id) ->
window.location = "/project/#{project_id}"

MAX_PROJECT_NAME_LENGTH = 150
$scope.renameProject = (project, newName) ->
if newName.length < 150
project.name = newName
if !newName? or newName.length == 0 or newName.length > MAX_PROJECT_NAME_LENGTH
return
project.name = newName
queuedHttp.post "/project/#{project.id}/rename", {
newProjectName: project.name
_csrf: window.csrfToken

@@ -12,12 +12,13 @@ define [
console.log "email not set"
return
$scope.sending = true
ticketNumber = Math.floor((1 + Math.random()) * 0x10000).toString(32)
params =
name: $scope.form.name || $scope.form.email
email: $scope.form.email
labels: $scope.form.source
message: "Please contact me with more details"
subject: $scope.form.subject
subject: $scope.form.subject + " - [#{ticketNumber}]"
about : "#{$scope.form.position || ''} #{$scope.form.university || ''}"

Groove.createTicket params, (err, json)->
services/web/public/js/libs/bib-log-parser.js (new file, 190 lines)

@@ -0,0 +1,190 @@
// Generated by CoffeeScript 1.10.0
define(function() {
var BAD_CROSS_REFERENCE_REGEX, BibLogParser, LINE_SPLITTER_REGEX, MESSAGE_LEVELS, MULTILINE_COMMAND_ERROR_REGEX, MULTILINE_ERROR_REGEX, MULTILINE_WARNING_REGEX, SINGLELINE_WARNING_REGEX, consume, errorParsers, warningParsers;
LINE_SPLITTER_REGEX = /^\[(\d+)].*>\s(INFO|WARN|ERROR)\s-\s(.*)$/;
MESSAGE_LEVELS = {
"INFO": "info",
"WARN": "warning",
"ERROR": "error"
};
BibLogParser = function(text, options) {
if (typeof text !== 'string') {
throw new Error("BibLogParser Error: text parameter must be a string");
}
this.text = text.replace(/(\r\n)|\r/g, '\n');
this.options = options || {};
this.lines = text.split('\n');
};
consume = function(logText, regex, process) {
var iterationCount, match, newEntry, re, result, text;
text = logText;
result = [];
re = regex;
iterationCount = 0;
while (match = re.exec(text)) {
iterationCount += 1;
if (iterationCount >= 10000) {
return result;
}
newEntry = process(match);
result.push(newEntry);
text = (match.input.slice(0, match.index)) + (match.input.slice(match.index + match[0].length + 1, match.input.length));
}
return [result, text];
};
MULTILINE_WARNING_REGEX = /^Warning--(.+)\n--line (\d+) of file (.+)$/m;
SINGLELINE_WARNING_REGEX = /^Warning--(.+)$/m;
MULTILINE_ERROR_REGEX = /^(.*)---line (\d+) of file (.*)\n([^]+?)\nI'm skipping whatever remains of this entry$/m;
BAD_CROSS_REFERENCE_REGEX = /^(A bad cross reference---entry ".+?"\nrefers to entry.+?, which doesn't exist)$/m;
MULTILINE_COMMAND_ERROR_REGEX = /^(.*)\n---line (\d+) of file (.*)\n([^]+?)\nI'm skipping whatever remains of this command$/m;
warningParsers = [
[
MULTILINE_WARNING_REGEX, function(match) {
var fileName, fullMatch, lineNumber, message;
fullMatch = match[0], message = match[1], lineNumber = match[2], fileName = match[3];
return {
file: fileName,
level: "warning",
message: message,
line: lineNumber,
raw: fullMatch
};
}
], [
SINGLELINE_WARNING_REGEX, function(match) {
var fullMatch, message;
fullMatch = match[0], message = match[1];
return {
file: '',
level: "warning",
message: message,
line: '',
raw: fullMatch
};
}
]
];
errorParsers = [
[
MULTILINE_ERROR_REGEX, function(match) {
var fileName, firstMessage, fullMatch, lineNumber, secondMessage;
fullMatch = match[0], firstMessage = match[1], lineNumber = match[2], fileName = match[3], secondMessage = match[4];
return {
file: fileName,
level: "error",
message: firstMessage + '\n' + secondMessage,
line: lineNumber,
raw: fullMatch
};
}
], [
BAD_CROSS_REFERENCE_REGEX, function(match) {
var fullMatch, message;
fullMatch = match[0], message = match[1];
return {
file: '',
level: "error",
message: message,
line: '',
raw: fullMatch
};
}
], [
MULTILINE_COMMAND_ERROR_REGEX, function(match) {
var fileName, firstMessage, fullMatch, lineNumber, secondMessage;
fullMatch = match[0], firstMessage = match[1], lineNumber = match[2], fileName = match[3], secondMessage = match[4];
return {
file: fileName,
level: "error",
message: firstMessage + '\n' + secondMessage,
line: lineNumber,
raw: fullMatch
};
}
]
];
(function() {
this.parseBibtex = function() {
var allErrors, allWarnings, ref, ref1, remainingText, result;
result = {
all: [],
errors: [],
warnings: [],
files: [],
typesetting: []
};
ref = warningParsers.reduce(function(accumulator, parser) {
var _remainingText, currentWarnings, process, ref, regex, text, warnings;
currentWarnings = accumulator[0], text = accumulator[1];
regex = parser[0], process = parser[1];
ref = consume(text, regex, process), warnings = ref[0], _remainingText = ref[1];
return [currentWarnings.concat(warnings), _remainingText];
}, [[], this.text]), allWarnings = ref[0], remainingText = ref[1];
ref1 = errorParsers.reduce(function(accumulator, parser) {
var _remainingText, currentErrors, errors, process, ref1, regex, text;
currentErrors = accumulator[0], text = accumulator[1];
regex = parser[0], process = parser[1];
ref1 = consume(text, regex, process), errors = ref1[0], _remainingText = ref1[1];
return [currentErrors.concat(errors), _remainingText];
}, [[], remainingText]), allErrors = ref1[0], remainingText = ref1[1];
result.warnings = allWarnings;
result.errors = allErrors;
result.all = allWarnings.concat(allErrors);
return result;
};
this.parseBiber = function() {
var result;
result = {
all: [],
errors: [],
warnings: [],
files: [],
typesetting: []
};
this.lines.forEach(function(line) {
var _, fileName, fullLine, lineMatch, lineNumber, match, message, messageType, newEntry, realMessage;
match = line.match(LINE_SPLITTER_REGEX);
if (match) {
fullLine = match[0], lineNumber = match[1], messageType = match[2], message = match[3];
newEntry = {
file: '',
level: MESSAGE_LEVELS[messageType] || "INFO",
message: message,
line: '',
raw: fullLine
};
lineMatch = newEntry.message.match(/^BibTeX subsystem: \/.+\/(\w+\.\w+)_.+, line (\d+), (.+)$/);
if (lineMatch && lineMatch.length === 4) {
_ = lineMatch[0], fileName = lineMatch[1], lineNumber = lineMatch[2], realMessage = lineMatch[3];
newEntry.file = fileName;
newEntry.line = lineNumber;
newEntry.message = realMessage;
}
result.all.push(newEntry);
switch (newEntry.level) {
case 'error':
return result.errors.push(newEntry);
case 'warning':
return result.warnings.push(newEntry);
}
}
});
return result;
};
return this.parse = function() {
var firstLine;
firstLine = this.lines[0];
if (firstLine.match(/^.*INFO - This is Biber.*$/)) {
return this.parseBiber();
} else if (firstLine.match(/^This is BibTeX, Version.+$/)) {
return this.parseBibtex();
} else {
throw new Error("BibLogParser Error: cannot determine whether text is biber or bibtex output");
}
};
}).call(BibLogParser.prototype);
BibLogParser.parse = function(text, options) {
return new BibLogParser(text, options).parse();
};
return BibLogParser;
});
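Usage note for the new parser: PdfController above calls BibLogParser.parse(log, {}), and the parser picks parseBiber or parseBibtex based on the first log line. A small illustrative example, assuming the module has been loaded via the "libs/bib-log-parser" dependency and using a made-up biber-style fragment shaped to match LINE_SPLITTER_REGEX:

    # Made-up biber-style fragment; real input comes from output.blg.
    sampleBiberLog = """
    [2] biber-example> INFO - This is Biber 1.9
    [40] biber-example> WARN - I didn't find a database entry for 'knuth84'
    """

    entries = BibLogParser.parse(sampleBiberLog, {})
    console.log entries.warnings.length       # 1
    console.log entries.warnings[0].message   # "I didn't find a database entry for 'knuth84'"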
@@ -109,6 +109,15 @@
}
a.rename {
visibility: hidden;
display: inline-block;
color: @gray-light;
padding: 5px;
border-radius: @border-radius-small;
&:hover {
text-shadow: 0 1px 0 rgba(0, 0, 0, 0.25);
color: @gray-dark;
text-decoration: none;
}
}
&:hover {
a.rename {

@@ -243,6 +252,20 @@
margin-bottom:0px;
}

.sl_references_search_hint {
position: absolute;
bottom: -22px;
left: -1px;
right: 0px;
text-align: center;
padding: 2px;
background: rgb(202, 214, 250);
border: 1px solid lightgray;
box-shadow: 3px 3px 5px rgba(0,0,0,.2);
span {
color: black;
}
}
// -- References Search Modal --
.references-search-modal-backdrop {
// don't grey out the editor when the

@@ -291,7 +314,6 @@
}
// search result items list
.search-results {
margin-top: 14px;
font-size: 12px;
.no-results-message {
font-size: 16px;

@@ -312,6 +334,9 @@
.hit-year.small {
color: white;
}
.hit-journal.small {
color: white;
}
}
.hit-title {
font-size: 1.3em;

@@ -95,15 +95,16 @@
&.toolbar-small {
height: 32px;
> a, .toolbar-right > a {
padding: 4px 2px 2px;
padding: 2px 4px 1px 4px;
margin: 0;
margin-top: 2px;
}
> a {
margin-left: 6px;
margin-left: 2px;
}
.toolbar-right > a {
margin-left: 0;
margin-right: 6px;
margin-right: 2px;
}
}
@@ -8,7 +8,7 @@ path = require 'path'
_ = require 'underscore'
modulePath = path.join __dirname, '../../../../app/js/Features/DocumentUpdater/DocumentUpdaterHandler'

describe 'Flushing documents :', ->
describe 'DocumentUpdaterHandler - Flushing documents :', ->

beforeEach ->
@project_id = "project-id-923"

@@ -33,6 +33,9 @@ describe 'Flushing documents :', ->
"../../models/Project": Project: @Project={}
'../../Features/Project/ProjectLocator':{}
'redis-sharelatex' : createClient: () => @rclient
"../../infrastructure/Metrics":
Timer:->
done:->

describe 'queueChange', ->
beforeEach ->

@@ -10,6 +10,7 @@ describe "EditorRealTimeController", ->
createClient: () ->
auth:->
"../../infrastructure/Server" : io: @io = {}
"settings-sharelatex":{redis:{}}
@EditorRealTimeController.rclientPub = publish: sinon.stub()
@EditorRealTimeController.rclientSub =
subscribe: sinon.stub()

@@ -9,7 +9,7 @@ _ = require('underscore')
_.templateSettings =
interpolate: /\{\{(.+?)\}\}/g

describe "Email Templator ", ->
describe "EmailBuilder", ->

beforeEach ->

@@ -6,7 +6,7 @@ sinon = require('sinon')
modulePath = path.join __dirname, "../../../../app/js/Features/Email/EmailSender.js"
expect = require("chai").expect

describe "Email", ->
describe "EmailSender", ->

beforeEach ->

@@ -30,6 +30,9 @@ describe "Email", ->
log:->
warn:->
err:->
"../../infrastructure/Metrics": inc:->

@opts =
to: "bob@bob.com"
@@ -10,6 +10,10 @@ describe "FileStoreHandler", ->
beforeEach ->
@fs =
createReadStream : sinon.stub()
lstat: sinon.stub().callsArgWith(1, null, {
isFile:=> @isSafeOnFileSystem
isDirectory:-> return false
})
@writeStream =
my:"writeStream"
on: (type, cb)->

@@ -31,6 +35,7 @@ describe "FileStoreHandler", ->
describe "uploadFileFromDisk", ->
beforeEach ->
@request.returns(@writeStream)
@isSafeOnFileSystem = true

it "should create read stream", (done)->
@fs.createReadStream.returns

@@ -74,6 +79,13 @@ describe "FileStoreHandler", ->
@handler._buildUrl.calledWith(@project_id, @file_id).should.equal true
done()

describe "symlink", ->
it "should not read file if it is symlink", (done)->
@isSafeOnFileSystem = false
@handler.uploadFileFromDisk @project_id, @file_id, @fsPath, =>
@fs.createReadStream.called.should.equal false
done()

describe "deleteFile", ->

it "should send a delete request to filestore api", (done)->

@@ -55,6 +55,10 @@ describe "ProjectController", ->
"logger-sharelatex":
log:->
err:->
"../../infrastructure/Metrics":
Timer:->
done:->
inc:->
"./ProjectDeleter": @ProjectDeleter
"./ProjectDuplicator": @ProjectDuplicator
"./ProjectCreationHandler": @ProjectCreationHandler

@@ -50,6 +50,9 @@ describe 'ProjectCreationHandler', ->
'./ProjectEntityHandler':@ProjectEntityHandler
"settings-sharelatex": @Settings = {}
'logger-sharelatex': {log:->}
"../../infrastructure/Metrics": inc:->

describe 'Creating a Blank project', ->
beforeEach ->

@@ -64,6 +64,8 @@ describe 'ProjectEntityHandler', ->
@projectUpdater = markAsUpdated:sinon.stub()
@projectLocator =
findElement : sinon.stub()
@settings =
maxEntitiesPerProject:200
@ProjectEntityHandler = SandboxedModule.require modulePath, requires:
'../../models/Project': Project:@ProjectModel
'../../models/Doc': Doc:@DocModel

@@ -77,6 +79,7 @@ describe 'ProjectEntityHandler', ->
'logger-sharelatex': @logger = {log:sinon.stub(), error: sinon.stub(), err:->}
'./ProjectUpdateHandler': @projectUpdater
"./ProjectGetter": @ProjectGetter
"settings-sharelatex":@settings

describe 'mkdirp', ->

@@ -26,6 +26,7 @@ describe "LoginRateLimiter", ->

@LoginRateLimiter = SandboxedModule.require modulePath, requires:
'redis-sharelatex' : createClient: () => @rclient
"settings-sharelatex":{redis:{}}

describe "processLoginRequest", ->
@@ -54,11 +54,11 @@ describe "SubscriptionGroupController", ->
describe "addUserToGroup", ->

it "should use the admin id for the logged in user and take the email address from the body", (done)->
newEmail = "31231"
newEmail = " boB@gmaiL.com "
@req.body = email: newEmail
res =
json : (data)=>
@GroupHandler.addUserToGroup.calledWith(@adminUserId, newEmail).should.equal true
@GroupHandler.addUserToGroup.calledWith(@adminUserId, "bob@gmail.com").should.equal true
data.user.should.deep.equal @user
done()
@Controller.addUserToGroup @req, res

@@ -5,7 +5,7 @@ modulePath = "../../../../app/js/Features/Subscription/UserFeaturesUpdater"
assert = require("chai").assert

describe "user Features updater", ->
describe "UserFeaturesUpdater", ->

beforeEach ->

@@ -14,6 +14,8 @@ describe 'TpdsController', ->
'logger-sharelatex':
log:->
err:->
"../../infrastructure/Metrics": inc:->

@user_id = "dsad29jlkjas"

describe 'getting an update', ->

@@ -38,6 +38,8 @@ describe 'TpdsUpdateSender', ->
"logger-sharelatex":{log:->}
'../../models/Project': Project:@Project
'request':@request
"../../infrastructure/Metrics":
inc:->

describe "_enqueue", ->

@@ -20,9 +20,13 @@ describe 'UpdateMerger :', ->
'../Project/ProjectEntityHandler': @projectEntityHandler
'fs': @fs
'../Uploads/FileTypeManager':@FileTypeManager
'settings-sharelatex':{path:{dumpPath:"dump_here"}}
'logger-sharelatex':
log: ->
err: ->
"../../infrastructure/Metrics":
Timer:->
done:->
@project_id = "project_id_here"
@user_id = "mock-user-id"
@source = "dropbox"
@@ -1,4 +1,5 @@
sinon = require('sinon')
expect = require("chai").expect
chai = require('chai')
should = chai.should()
modulePath = "../../../../app/js/Features/Uploads/ArchiveManager.js"

@@ -9,12 +10,16 @@ describe "ArchiveManager", ->
beforeEach ->
@logger =
error: sinon.stub()
err:->
log: sinon.stub()
@process = new events.EventEmitter
@process.stdout = new events.EventEmitter
@process.stderr = new events.EventEmitter

@child =
spawn: sinon.stub().returns(@process)

@metrics =
Timer: class Timer
done: sinon.stub()

@@ -22,12 +27,14 @@ describe "ArchiveManager", ->
"child_process": @child
"logger-sharelatex": @logger
"../../infrastructure/Metrics": @metrics
"fs": @fs = {}

describe "extractZipArchive", ->
beforeEach ->
@source = "/path/to/zip/source.zip"
@destination = "/path/to/zip/destination"
@callback = sinon.stub()
@ArchiveManager._isZipTooLarge = sinon.stub().callsArgWith(1, null, false)

describe "successfully", ->
beforeEach (done) ->

@@ -57,6 +64,19 @@ describe "ArchiveManager", ->
it "should log out the error", ->
@logger.error.called.should.equal true

describe "with a zip that is too large", ->
beforeEach (done) ->
@ArchiveManager._isZipTooLarge = sinon.stub().callsArgWith(1, null, true)
@ArchiveManager.extractZipArchive @source, @destination, (error) =>
@callback(error)
done()

it "should return the callback with an error", ->
@callback.calledWithExactly(new Error("zip_too_large")).should.equal true

it "should not call spawn", ->
@child.spawn.called.should.equal false

describe "with an error on the process", ->
beforeEach (done) ->
@ArchiveManager.extractZipArchive @source, @destination, (error) =>
@@ -69,4 +89,96 @@ describe "ArchiveManager", ->

it "should log out the error", ->
@logger.error.called.should.equal true

describe "_isZipTooLarge", ->
beforeEach ->
@output = (totalSize)->" Length Date Time Name \n-------- ---- ---- ---- \n241 03-12-16 12:20 main.tex \n108801 03-12-16 12:20 ddd/x1J5kHh.jpg \n-------- ------- \n#{totalSize} 2 files\n"

it "should return false with small output", (done)->
@ArchiveManager._isZipTooLarge @source, (error, isTooLarge) =>
isTooLarge.should.equal false
done()
@process.stdout.emit "data", @output("109042")
@process.emit "exit"

it "should return true with large bytes", (done)->
@ArchiveManager._isZipTooLarge @source, (error, isTooLarge) =>
isTooLarge.should.equal true
done()
@process.stdout.emit "data", @output("1090000000000000042")
@process.emit "exit"

it "should return error on no data", (done)->
@ArchiveManager._isZipTooLarge @source, (error, isTooLarge) =>
expect(error).to.exist
done()
@process.stdout.emit "data", ""
@process.emit "exit"

it "should return error if it didn't get a number", (done)->
@ArchiveManager._isZipTooLarge @source, (error, isTooLarge) =>
expect(error).to.exist
done()
@process.stdout.emit "data", @output("total_size_string")
@process.emit "exit"

it "should return error if the is only a bit of data", (done)->
@ArchiveManager._isZipTooLarge @source, (error, isTooLarge) =>
expect(error).to.exist
done()
@process.stdout.emit "data", " Length Date Time Name \n--------"
@process.emit "exit"

describe "findTopLevelDirectory", ->
beforeEach ->
@fs.readdir = sinon.stub()
@fs.stat = sinon.stub()
@directory = "test/directory"

describe "with multiple files", ->
beforeEach ->
@fs.readdir.callsArgWith(1, null, ["multiple", "files"])
@ArchiveManager.findTopLevelDirectory(@directory, @callback)

it "should find the files in the directory", ->
@fs.readdir
.calledWith(@directory)
.should.equal true

it "should return the original directory", ->
@callback
.calledWith(null, @directory)
.should.equal true

describe "with a single file (not folder)", ->
beforeEach ->
@fs.readdir.callsArgWith(1, null, ["foo.tex"])
@fs.stat.callsArgWith(1, null, { isDirectory: () -> false })
@ArchiveManager.findTopLevelDirectory(@directory, @callback)

it "should check if the file is a directory", ->
@fs.stat
.calledWith(@directory + "/foo.tex")
.should.equal true

it "should return the original directory", ->
@callback
.calledWith(null, @directory)
.should.equal true

describe "with a single top-level folder", ->
beforeEach ->
@fs.readdir.callsArgWith(1, null, ["folder"])
@fs.stat.callsArgWith(1, null, { isDirectory: () -> true })
@ArchiveManager.findTopLevelDirectory(@directory, @callback)

it "should check if the file is a directory", ->
@fs.stat
.calledWith(@directory + "/folder")
.should.equal true

it "should return the child directory", ->
@callback
.calledWith(null, @directory + "/folder")
.should.equal true
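The _isZipTooLarge tests above pin down the expected behaviour without showing the implementation: it shells out for a zip listing, reads the byte total from the summary line, and errors when that total cannot be parsed. A rough sketch consistent with those tests (the unzip -l invocation, the hard-coded 300 MB limit and the error message are assumptions, not the code in this commit):

    child_process = require "child_process"

    ONE_MEG = 1024 * 1024
    MAX_ZIP_SIZE = 300 * ONE_MEG   # placeholder limit; the real value would come from settings

    _isZipTooLarge = (source, callback) ->
      output = ""
      unzip = child_process.spawn "unzip", ["-l", source]   # assumed listing command
      unzip.stdout.on "data", (chunk) -> output += chunk.toString()
      unzip.on "exit", ->
        lines = output.split("\n")
        # the summary line looks like "109042                     2 files"
        totalSizeString = lines[lines.length - 2]?.trim().split(/\s+/)[0]
        totalSize = parseInt(totalSizeString, 10)
        return callback(new Error("cannot parse zip size")) if isNaN(totalSize)
        callback null, totalSize > MAX_ZIP_SIZE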
@@ -11,48 +11,128 @@ describe "FileSystemImportManager", ->
@name = "test-file.tex"
@path_on_disk = "/path/to/file/#{@name}"
@replace = "replace-boolean-flag-mock"
@user_id = "mock-user-123"
@callback = sinon.stub()
@FileSystemImportManager = SandboxedModule.require modulePath, requires:
"fs" : @fs = {}
"../Editor/EditorController": @EditorController = {}
"./FileTypeManager": @FileTypeManager = {}
"../Project/ProjectLocator": @ProjectLocator = {}

"logger-sharelatex":
log:->
err:->

describe "addDoc", ->
beforeEach ->
@docContent = "one\ntwo\nthree"
@docLines = @docContent.split("\n")
@fs.readFile = sinon.stub().callsArgWith(2, null, @docContent)
@EditorController.addDocWithoutLock = sinon.stub().callsArg(5)
@FileSystemImportManager.addDoc @project_id, @folder_id, @name, @path_on_disk, false, @callback
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, true)

it "should read the file from disk", ->
@fs.readFile.calledWith(@path_on_disk, "utf8").should.equal true

it "should insert the doc", ->
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @name, @docLines, "upload")
.should.equal true
describe "when path is symlink", ->
beforeEach ->
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, false)
@EditorController.addDocWithoutLock = sinon.stub()
@FileSystemImportManager.addDoc @user_id, @project_id, @folder_id, @name, @path_on_disk, false, @callback

describe "addDoc with windows line ending", ->
beforeEach ->
@docContent = "one\r\ntwo\r\nthree"
@docLines = ["one", "two", "three"]
@fs.readFile = sinon.stub().callsArgWith(2, null, @docContent)
@EditorController.addDocWithoutLock = sinon.stub().callsArg(5)
@FileSystemImportManager.addDoc @project_id, @folder_id, @name, @path_on_disk, false, @callback
it "should not read the file from disk", ->
@fs.readFile.called.should.equal false

it "should strip the \\r characters before adding", ->
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @name, @docLines, "upload")
.should.equal true
it "should not insert the doc", ->
@EditorController.addDocWithoutLock.called.should.equal false

describe "with replace set to false", ->
beforeEach ->
@EditorController.addDocWithoutLock = sinon.stub().callsArg(5)
@FileSystemImportManager.addDoc @user_id, @project_id, @folder_id, @name, @path_on_disk, false, @callback

it "should read the file from disk", ->
@fs.readFile.calledWith(@path_on_disk, "utf8").should.equal true

it "should insert the doc", ->
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @name, @docLines, "upload")
.should.equal true

describe "with windows line ending", ->
beforeEach ->
@docContent = "one\r\ntwo\r\nthree"
@docLines = ["one", "two", "three"]
@fs.readFile = sinon.stub().callsArgWith(2, null, @docContent)
@EditorController.addDocWithoutLock = sinon.stub().callsArg(5)
@FileSystemImportManager.addDoc @user_id, @project_id, @folder_id, @name, @path_on_disk, false, @callback

it "should strip the \\r characters before adding", ->
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @name, @docLines, "upload")
.should.equal true
describe "with replace set to true", ->
|
||||
describe "when the doc doesn't exist", ->
|
||||
beforeEach ->
|
||||
@folder = {
|
||||
docs: [{
|
||||
_id: "doc-id-2"
|
||||
name: "not-the-right-file.tex"
|
||||
}]
|
||||
}
|
||||
@ProjectLocator.findElement = sinon.stub().callsArgWith(1, null, @folder)
|
||||
@EditorController.addDocWithoutLock = sinon.stub().callsArg(5)
|
||||
@FileSystemImportManager.addDoc @user_id, @project_id, @folder_id, @name, @path_on_disk, true, @callback
|
||||
|
||||
it "should look up the folder", ->
|
||||
@ProjectLocator.findElement
|
||||
.calledWith(project_id: @project_id, element_id: @folder_id, type: "folder")
|
||||
.should.equal true
|
||||
|
||||
it "should insert the doc", ->
|
||||
@EditorController.addDocWithoutLock.calledWith(@project_id, @folder_id, @name, @docLines, "upload")
|
||||
.should.equal true
|
||||
|
||||
describe "when the doc does exist", ->
|
||||
beforeEach ->
|
||||
@folder = {
|
||||
docs: [{
|
||||
_id: @doc_id = "doc-id-1"
|
||||
name: @name
|
||||
}, {
|
||||
_id: "doc-id-2"
|
||||
name: "not-the-right-file.tex"
|
||||
}]
|
||||
}
|
||||
@ProjectLocator.findElement = sinon.stub().callsArgWith(1, null, @folder)
|
||||
@EditorController.setDoc = sinon.stub().callsArg(5)
|
||||
@FileSystemImportManager.addDoc @user_id, @project_id, @folder_id, @name, @path_on_disk, true, @callback
|
||||
|
||||
it "should look up the folder", ->
|
||||
@ProjectLocator.findElement
|
||||
.calledWith(project_id: @project_id, element_id: @folder_id, type: "folder")
|
||||
.should.equal true
|
||||
|
||||
it "should set the doc with the new doc lines", ->
|
||||
@EditorController.setDoc.calledWith(@project_id, @doc_id, @user_id, @docLines, "upload")
|
||||
.should.equal true
|
||||
|
||||
describe "addFile with replace set to false", ->
|
||||
beforeEach ->
|
||||
@EditorController.addFileWithoutLock = sinon.stub().callsArg(5)
|
||||
@FileSystemImportManager.addFile @project_id, @folder_id, @name, @path_on_disk, false, @callback
|
||||
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, true)
|
||||
@FileSystemImportManager.addFile @user_id, @project_id, @folder_id, @name, @path_on_disk, false, @callback
|
||||
|
||||
it "should add the file", ->
|
||||
@EditorController.addFileWithoutLock.calledWith(@project_id, @folder_id, @name, @path_on_disk, "upload")
|
||||
.should.equal true
|
||||
|
||||
describe "addFile with symlink", ->
|
||||
beforeEach ->
|
||||
@EditorController.addFileWithoutLock = sinon.stub()
|
||||
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, false)
|
||||
@EditorController.replaceFile = sinon.stub()
|
||||
@FileSystemImportManager.addFile @user_id, @project_id, @folder_id, @name, @path_on_disk, false, @callback
|
||||
|
||||
it "should node add the file", ->
|
||||
@EditorController.addFileWithoutLock.called.should.equal false
|
||||
@EditorController.replaceFile.called.should.equal false
|
||||
|
||||
describe "addFile with replace set to true", ->
|
||||
describe "when the file doesn't exist", ->
|
||||
beforeEach ->
|
||||
|
@ -62,9 +142,10 @@ describe "FileSystemImportManager", ->
|
|||
name: "not-the-right-file.tex"
|
||||
}]
|
||||
}
|
||||
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, true)
|
||||
@ProjectLocator.findElement = sinon.stub().callsArgWith(1, null, @folder)
|
||||
@EditorController.addFileWithoutLock = sinon.stub().callsArg(5)
|
||||
@FileSystemImportManager.addFile @project_id, @folder_id, @name, @path_on_disk, true, @callback
|
||||
@FileSystemImportManager.addFile @user_id, @project_id, @folder_id, @name, @path_on_disk, true, @callback
|
||||
|
||||
it "should look up the folder", ->
|
||||
@ProjectLocator.findElement
|
||||
|
@ -86,9 +167,10 @@ describe "FileSystemImportManager", ->
|
|||
name: "not-the-right-file.tex"
|
||||
}]
|
||||
}
|
||||
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, true)
|
||||
@ProjectLocator.findElement = sinon.stub().callsArgWith(1, null, @folder)
|
||||
@EditorController.replaceFile = sinon.stub().callsArg(4)
|
||||
@FileSystemImportManager.addFile @project_id, @folder_id, @name, @path_on_disk, true, @callback
|
||||
@FileSystemImportManager.addFile @user_id, @project_id, @folder_id, @name, @path_on_disk, true, @callback
|
||||
|
||||
it "should look up the folder", ->
|
||||
@ProjectLocator.findElement
|
||||
|
@@ -100,38 +182,53 @@ describe "FileSystemImportManager", ->
.should.equal true

describe "addFolder", ->

beforeEach ->
@new_folder_id = "new-folder-id"
@EditorController.addFolderWithoutLock = sinon.stub().callsArgWith(4, null, _id: @new_folder_id)
@FileSystemImportManager.addFolderContents = sinon.stub().callsArg(4)
@FileSystemImportManager.addFolder @project_id, @folder_id, @name, @path_on_disk, @replace, @callback
@FileSystemImportManager.addFolderContents = sinon.stub().callsArg(5)

it "should add a folder to the project", ->
@EditorController.addFolderWithoutLock.calledWith(@project_id, @folder_id, @name, "upload")
.should.equal true
describe "successfully", ->
beforeEach ->
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, true)
@FileSystemImportManager.addFolder @user_id, @project_id, @folder_id, @name, @path_on_disk, @replace, @callback

it "should add the folders contents", ->
@FileSystemImportManager.addFolderContents.calledWith(@project_id, @new_folder_id, @path_on_disk, @replace)
.should.equal true
it "should add a folder to the project", ->
@EditorController.addFolderWithoutLock.calledWith(@project_id, @folder_id, @name, "upload")
.should.equal true

it "should add the folders contents", ->
@FileSystemImportManager.addFolderContents.calledWith(@user_id, @project_id, @new_folder_id, @path_on_disk, @replace)
.should.equal true

describe "with symlink", ->
beforeEach ->
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, false)
@FileSystemImportManager.addFolder @user_id, @project_id, @folder_id, @name, @path_on_disk, @replace, @callback

it "should not add a folder to the project", ->
@EditorController.addFolderWithoutLock.called.should.equal false
@FileSystemImportManager.addFolderContents.called.should.equal false

describe "addFolderContents", ->
beforeEach ->
@folderEntries = ["path1", "path2", "path3"]
@ignoredEntries = [".DS_Store"]
@fs.readdir = sinon.stub().callsArgWith(1, null, @folderEntries.concat @ignoredEntries)
@FileSystemImportManager.addEntity = sinon.stub().callsArg(5)
@FileSystemImportManager.addEntity = sinon.stub().callsArg(6)
@FileTypeManager.shouldIgnore = (path, callback) =>
callback null, @ignoredEntries.indexOf(require("path").basename(path)) != -1
@FileSystemImportManager.addFolderContents @project_id, @folder_id, @path_on_disk, @replace, @callback
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, true)
@FileSystemImportManager.addFolderContents @user_id, @project_id, @folder_id, @path_on_disk, @replace, @callback

it "should call addEntity for each file in the folder which is not ignored", ->
for name in @folderEntries
@FileSystemImportManager.addEntity.calledWith(@project_id, @folder_id, name, "#{@path_on_disk}/#{name}", @replace)
@FileSystemImportManager.addEntity.calledWith(@user_id, @project_id, @folder_id, name, "#{@path_on_disk}/#{name}", @replace)
.should.equal true

it "should not call addEntity for the ignored files", ->
for name in @ignoredEntries
@FileSystemImportManager.addEntity.calledWith(@project_id, @folder_id, name, "#{@path_on_disk}/#{name}", @replace)
@FileSystemImportManager.addEntity.calledWith(@user_id, @project_id, @folder_id, name, "#{@path_on_disk}/#{name}", @replace)
.should.equal false

it "should look in the correct directory", ->
@@ -141,33 +238,36 @@ describe "FileSystemImportManager", ->
describe "with directory", ->
beforeEach ->
@FileTypeManager.isDirectory = sinon.stub().callsArgWith(1, null, true)
@FileSystemImportManager.addFolder = sinon.stub().callsArg(5)
@FileSystemImportManager.addEntity @project_id, @folder_id, @name, @path_on_disk, @replace, @callback
@FileSystemImportManager.addFolder = sinon.stub().callsArg(6)
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, true)
@FileSystemImportManager.addEntity @user_id, @project_id, @folder_id, @name, @path_on_disk, @replace, @callback

it "should call addFolder", ->
@FileSystemImportManager.addFolder.calledWith(@project_id, @folder_id, @name, @path_on_disk, @replace, @callback)
@FileSystemImportManager.addFolder.calledWith(@user_id, @project_id, @folder_id, @name, @path_on_disk, @replace)
.should.equal true

describe "with binary file", ->
beforeEach ->
@FileTypeManager.isDirectory = sinon.stub().callsArgWith(1, null, false)
@FileTypeManager.isBinary = sinon.stub().callsArgWith(2, null, true)
@FileSystemImportManager.addFile = sinon.stub().callsArg(5)
@FileSystemImportManager.addEntity @project_id, @folder_id, @name, @path_on_disk, @replace, @callback
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, true)
@FileSystemImportManager.addFile = sinon.stub().callsArg(6)
@FileSystemImportManager.addEntity @user_id, @project_id, @folder_id, @name, @path_on_disk, @replace, @callback

it "should call addFile", ->
@FileSystemImportManager.addFile.calledWith(@project_id, @folder_id, @name, @path_on_disk, @replace, @callback)
@FileSystemImportManager.addFile.calledWith(@user_id, @project_id, @folder_id, @name, @path_on_disk, @replace, @callback)
.should.equal true

describe "with text file", ->
beforeEach ->
@FileTypeManager.isDirectory = sinon.stub().callsArgWith(1, null, false)
@FileTypeManager.isBinary = sinon.stub().callsArgWith(2, null, false)
@FileSystemImportManager.addDoc = sinon.stub().callsArg(5)
@FileSystemImportManager.addEntity @project_id, @folder_id, @name, @path_on_disk, @replace, @callback
@FileSystemImportManager.addDoc = sinon.stub().callsArg(6)
@FileSystemImportManager._isSafeOnFileSystem = sinon.stub().callsArgWith(1, null, true)
@FileSystemImportManager.addEntity @user_id, @project_id, @folder_id, @name, @path_on_disk, @replace, @callback

it "should call addFile", ->
@FileSystemImportManager.addDoc.calledWith(@project_id, @folder_id, @name, @path_on_disk, @replace, @callback)
@FileSystemImportManager.addDoc.calledWith(@user_id, @project_id, @folder_id, @name, @path_on_disk, @replace, @callback)
.should.equal true
@@ -103,6 +103,9 @@ describe "ProjectUploadController", ->
qqfile:
path: @path
originalname: @name
@req.session =
user:
_id: @user_id
@req.params =
Project_id: @project_id
@req.query =

@@ -115,27 +118,12 @@ describe "ProjectUploadController", ->
beforeEach ->
@entity =
_id : "1234"
@FileSystemImportManager.addEntity = sinon.stub().callsArgWith(5, null, @entity)
@FileSystemImportManager.addEntity = sinon.stub().callsArgWith(6, null, @entity)
@ProjectUploadController.uploadFile @req, @res

it "should insert the file into the correct project", ->
it "should insert the file", ->
@FileSystemImportManager.addEntity
.calledWith(@project_id)
.should.equal true

it "should insert the file into the provided folder", ->
@FileSystemImportManager.addEntity
.calledWith(sinon.match.any, @folder_id)
.should.equal true

it "should insert the file with the correct name", ->
@FileSystemImportManager.addEntity
.calledWith(sinon.match.any, sinon.match.any, @name)
.should.equal true

it "should insert the file from the uploaded path", ->
@FileSystemImportManager.addEntity
.calledWith(sinon.match.any, sinon.match.any, sinon.match.any, @path)
.calledWith(@user_id, @project_id, @folder_id, @name, @path)
.should.equal true

it "should return a successful response to the FileUploader client", ->

@@ -143,7 +131,6 @@ describe "ProjectUploadController", ->
success: true
entity_id: @entity._id

it "should output a log line", ->
@logger.log
.calledWith(sinon.match.any, "uploaded file")

@@ -158,7 +145,7 @@ describe "ProjectUploadController", ->
describe "when FileSystemImportManager.addEntity returns an error", ->
beforeEach ->
@FileSystemImportManager.addEntity = sinon.stub()
.callsArgWith(5, new Error("Sorry something went wrong"))
.callsArgWith(6, new Error("Sorry something went wrong"))
@ProjectUploadController.uploadFile @req, @res

it "should return an unsuccessful response to the FileUploader client", ->
@@ -8,6 +8,7 @@ describe "ProjectUploadManager", ->
beforeEach ->
@project_id = "project-id-123"
@folder_id = "folder-id-123"
@owner_id = "onwer-id-123"
@callback = sinon.stub()
@ProjectUploadManager = SandboxedModule.require modulePath, requires:
"./FileSystemImportManager" : @FileSystemImportManager = {}

@@ -26,7 +27,7 @@ describe "ProjectUploadManager", ->
_id: @project_id
rootFolder: [ _id: @root_folder_id ]
@ProjectCreationHandler.createBlankProject = sinon.stub().callsArgWith(2, null, @project)
@ProjectUploadManager.insertZipArchiveIntoFolder = sinon.stub().callsArg(3)
@ProjectUploadManager.insertZipArchiveIntoFolder = sinon.stub().callsArg(4)
@ProjectRootDocManager.setRootDocAutomatically = sinon.stub().callsArg(1)
@ProjectUploadManager.createProjectFromZipArchive @owner_id, @name, @source, @callback

@@ -45,7 +46,7 @@ describe "ProjectUploadManager", ->
it "should insert the zip file contents into the root folder", ->
@ProjectUploadManager
.insertZipArchiveIntoFolder
.calledWith(@project_id, @root_folder_id, @source)
.calledWith(@owner_id, @project_id, @root_folder_id, @source)
.should.equal true

it "should automatically set the root doc", ->

@@ -63,9 +64,10 @@ describe "ProjectUploadManager", ->
@destination = "/path/to/zile/file-extracted"
@ProjectUploadManager._getDestinationDirectory = sinon.stub().returns @destination
@ArchiveManager.extractZipArchive = sinon.stub().callsArg(2)
@FileSystemImportManager.addFolderContents = sinon.stub().callsArg(4)
@ArchiveManager.findTopLevelDirectory = sinon.stub().callsArgWith(1, null, @topLevelDestination = "/path/to/zip/file-extracted/nested")
@FileSystemImportManager.addFolderContents = sinon.stub().callsArg(5)

@ProjectUploadManager.insertZipArchiveIntoFolder @project_id, @folder_id, @source, @callback
@ProjectUploadManager.insertZipArchiveIntoFolder @owner_id, @project_id, @folder_id, @source, @callback

it "should set up the directory to extract the archive to", ->
@ProjectUploadManager._getDestinationDirectory.calledWith(@source).should.equal true

@@ -73,8 +75,11 @@ describe "ProjectUploadManager", ->
it "should extract the archive", ->
@ArchiveManager.extractZipArchive.calledWith(@source, @destination).should.equal true

it "should find the top level directory", ->
@ArchiveManager.findTopLevelDirectory.calledWith(@destination).should.equal true

it "should insert the extracted archive into the folder", ->
@FileSystemImportManager.addFolderContents.calledWith(@project_id, @folder_id, @destination, false)
@FileSystemImportManager.addFolderContents.calledWith(@owner_id, @project_id, @folder_id, @topLevelDestination, false)
.should.equal true

it "should return the callback", ->
@@ -42,6 +42,8 @@ describe "UserController", ->
changeEmailAddress:sinon.stub()
@settings =
siteUrl: "sharelatex.example.com"
@UserHandler =
populateGroupLicenceInvite:sinon.stub().callsArgWith(1)
@UserController = SandboxedModule.require modulePath, requires:
"./UserLocator": @UserLocator
"./UserDeleter": @UserDeleter

@@ -53,9 +55,10 @@ describe "UserController", ->
"../Authentication/AuthenticationManager": @AuthenticationManager
"../Referal/ReferalAllocator":@ReferalAllocator
"../Subscription/SubscriptionDomainHandler":@SubscriptionDomainHandler
"./UserHandler":@UserHandler
"settings-sharelatex": @settings
"logger-sharelatex": {log:->}
"../../infrastructure/Metrics": inc:->

@req =
session:

@@ -151,7 +154,14 @@ describe "UserController", ->
done()
@UserController.updateUserSettings @req, @res

it "should call populateGroupLicenceInvite", (done)->
@req.body.email = @newEmail.toUpperCase()
@UserUpdater.changeEmailAddress.callsArgWith(2)
@res.sendStatus = (code)=>
code.should.equal 200
@UserHandler.populateGroupLicenceInvite.calledWith(@user).should.equal true
done()
@UserController.updateUserSettings @req, @res

describe "logout", ->

@@ -178,7 +188,6 @@ describe "UserController", ->
.should.equal true

it "should return the user and activation url", ->
console.log @res.json.args
@res.json
.calledWith({
email: @email,
@@ -29,12 +29,12 @@ describe "UserHandler", ->
"../Subscription/SubscriptionDomainHandler":@SubscriptionDomainHandler
"../Subscription/SubscriptionGroupHandler":@SubscriptionGroupHandler

describe "_populateGroupLicenceInvite", ->
describe "populateGroupLicenceInvite", ->

describe "no licence", ->
beforeEach (done)->
@SubscriptionDomainHandler.getLicenceUserCanJoin.returns()
@UserHandler._populateGroupLicenceInvite @user, done
@UserHandler.populateGroupLicenceInvite @user, done

it "should not call NotificationsBuilder", (done)->
@NotificationsBuilder.groupPlan.called.should.equal false

@@ -49,7 +49,7 @@ describe "UserHandler", ->
beforeEach (done)->
@SubscriptionDomainHandler.getLicenceUserCanJoin.returns(@licence)
@SubscriptionGroupHandler.isUserPartOfGroup.callsArgWith(2, null, false)
@UserHandler._populateGroupLicenceInvite @user, done
@UserHandler.populateGroupLicenceInvite @user, done

it "should create notifcation", (done)->
@NotificationsBuilder.groupPlan.calledWith(@user, @licence).should.equal true

@@ -62,7 +62,7 @@ describe "UserHandler", ->
beforeEach (done)->
@SubscriptionDomainHandler.getLicenceUserCanJoin.returns(@licence)
@SubscriptionGroupHandler.isUserPartOfGroup.callsArgWith(2, null, true)
@UserHandler._populateGroupLicenceInvite @user, done
@UserHandler.populateGroupLicenceInvite @user, done

it "should create notifcation", (done)->
@NotificationsBuilder.groupPlan.called.should.equal false
@@ -12,6 +12,9 @@ describe 'LockManager - getting the lock', ->
"redis-sharelatex":
createClient : () =>
auth:->
"settings-sharelatex":{redis:{}}
"./Metrics": inc:->

@callback = sinon.stub()
@doc_id = "doc-id-123"

@@ -13,6 +13,8 @@ describe 'LockManager - trying the lock', ->
createClient : () =>
auth:->
set: @set = sinon.stub()
"settings-sharelatex":{redis:{}}
"./Metrics": inc:->
@callback = sinon.stub()
@doc_id = "doc-id-123"