diff --git a/server-ce/CONTRIBUTING.md b/server-ce/CONTRIBUTING.md index 5d9f82d9fc..814c1c6775 100644 --- a/server-ce/CONTRIBUTING.md +++ b/server-ce/CONTRIBUTING.md @@ -4,7 +4,7 @@ Contributing to ShareLaTeX Thank you for reading this! If you'd like to report a bug or join in the development of ShareLaTeX, then here are some notes on how to do that. -*Note that ShareLaTeX is actually made up of many seperate repositories (a list is available +*Note that ShareLaTeX is actually made up of many separate repositories (a list is available [here](https://github.com/sharelatex/sharelatex/blob/master/README.md#other-repositories)).* Reporting bugs and opening issues diff --git a/server-ce/Dockerfile b/server-ce/Dockerfile index e9a9ec6fa2..2f394fed64 100644 --- a/server-ce/Dockerfile +++ b/server-ce/Dockerfile @@ -58,7 +58,7 @@ RUN cd /var/www/sharelatex \ RUN cd /var/www/sharelatex \ && bash ./bin/compile-services -# Links CLSI sycntex to its default location +# Links CLSI synctex to its default location # ------------------------------------------ RUN ln -s /var/www/sharelatex/clsi/bin/synctex /opt/synctex diff --git a/server-ce/Gruntfile.coffee b/server-ce/Gruntfile.coffee index 7fb8881930..3af76ef763 100644 --- a/server-ce/Gruntfile.coffee +++ b/server-ce/Gruntfile.coffee @@ -193,7 +193,7 @@ module.exports = (grunt) -> grunt.log.errorlns """ !!!!!!!!!!!!!! MONGO ERROR !!!!!!!!!!!!!! - ShareLaTeX can not talk to the mongdb instance + ShareLaTeX can not talk to the mongodb instance Check the mongodb instance is running and accessible on env var SHARELATEX_MONGO_URL diff --git a/server-ce/README.md b/server-ce/README.md index 0edb0a6b8d..03c1f4d47f 100644 --- a/server-ce/README.md +++ b/server-ce/README.md @@ -39,7 +39,7 @@ If you are upgrading from a previous version of Overleaf, please see the [Releas ## Other repositories -This repository does not contain any code. It acts a wrapper and toolkit for managing the many different Overleaf services. 
These each run as their own Node.js process and have their own Github repository. These are all downloaded and set up when you run `grunt install` +This repository does not contain any code. It acts as a wrapper and toolkit for managing the many different Overleaf services. These each run as their own Node.js process and have their own GitHub repository. These are all downloaded and set up when you run `grunt install` | Service | Description | | ------- | ----------- | diff --git a/server-ce/docker-compose.yml b/server-ce/docker-compose.yml index f5ad685be1..0d7c80f70a 100644 --- a/server-ce/docker-compose.yml +++ b/server-ce/docker-compose.yml @@ -19,7 +19,7 @@ services: volumes: - ~/sharelatex_data:/var/lib/sharelatex ######################################################################## - #### Server Pro: Un-comment the following line to mount the docker #### + #### Server Pro: Uncomment the following line to mount the docker #### #### socket, required for Sibling Containers to work #### ######################################################################## # - /var/run/docker.sock:/var/run/docker.sock diff --git a/server-ce/migrations/1_move_doc_lines_to_doc_collection.coffee b/server-ce/migrations/1_move_doc_lines_to_doc_collection.coffee index e4433de7bd..ce8c208657 100644 --- a/server-ce/migrations/1_move_doc_lines_to_doc_collection.coffee +++ b/server-ce/migrations/1_move_doc_lines_to_doc_collection.coffee @@ -19,7 +19,7 @@ printProgress = -> exec "wc #{finished_projects_path}", (error, results) -> setTimeout printProgress, 1000 * 30 -checkIfFileHasBeenProccessed = (project_id, callback)-> +checkIfFileHasBeenProcessed = (project_id, callback)-> exec "grep #{project_id} #{finished_projects_path}", (error, results) -> hasBeenProcessed = _.include(results, project_id) callback(error, hasBeenProcessed) @@ -125,9 +125,9 @@ saveDocsIntoMongo = (project_id, docs, callback)-> processNext = (project_id, callback)-> - checkIfFileHasBeenProccessed project_id, (err,
hasBeenProcessed)-> + checkIfFileHasBeenProcessed project_id, (err, hasBeenProcessed)-> if hasBeenProcessed - console.log "#{project_id} already procssed, skipping" + console.log "#{project_id} already processed, skipping" return callback() console.log "#{project_id} processing" getAllDocs project_id, (err, docs)-> diff --git a/server-ce/migrations/2_doc_lines_delete_from_project.coffee b/server-ce/migrations/2_doc_lines_delete_from_project.coffee index 2d5222f63d..0be28446d8 100644 --- a/server-ce/migrations/2_doc_lines_delete_from_project.coffee +++ b/server-ce/migrations/2_doc_lines_delete_from_project.coffee @@ -16,7 +16,7 @@ printProgress = -> exec "wc #{finished_projects_path}", (error, results) -> setTimeout printProgress, 1000 * 30 -checkIfFileHasBeenProccessed = (project_id, callback)-> +checkIfFileHasBeenProcessed = (project_id, callback)-> exec "grep #{project_id} #{finished_projects_path}", (error, results) -> hasBeenProcessed = _.include(results, project_id) callback(error, hasBeenProcessed) @@ -125,7 +125,7 @@ getWhichDocsCanBeDeleted = (docs, callback = (err, docsToBeDeleted, unmigratedDo async.series jobs, (err)-> callback err, docsToBeDeleted, unmigratedDocs -whipeDocLines = (project_id, mongoPath, callback)-> +wipeDocLines = (project_id, mongoPath, callback)-> update = $unset: {} update.$unset["#{mongoPath}.lines"] = "" @@ -137,15 +137,15 @@ removeDocLinesFromProject = (docs, project, callback)-> jobs = _.map docs, (doc)-> (cb)-> findDocInProject project, doc._id, (err, doc, mongoPath)-> - whipeDocLines project._id, mongoPath, cb + wipeDocLines project._id, mongoPath, cb async.parallelLimit jobs, 5, callback processNext = (project_id, callback)-> if !project_id? 
or project_id.length == 0 return callback() - checkIfFileHasBeenProccessed project_id, (err, hasBeenProcessed)-> + checkIfFileHasBeenProcessed project_id, (err, hasBeenProcessed)-> if hasBeenProcessed - console.log "#{project_id} already procssed, skipping" + console.log "#{project_id} already processed, skipping" return callback() console.log "#{project_id} processing" getAllDocs project_id, (err, docs, project)-> diff --git a/server-ce/migrations/3_pack_docHistory_collection.coffee b/server-ce/migrations/3_pack_docHistory_collection.coffee index c652dd7816..affe1fcfd1 100644 --- a/server-ce/migrations/3_pack_docHistory_collection.coffee +++ b/server-ce/migrations/3_pack_docHistory_collection.coffee @@ -65,7 +65,7 @@ markDocAsUnmigrated = (doc_id, callback)-> markDocAsProcessed doc_id, (err)-> fs.appendFile unmigrated_docs_path, "#{doc_id}\n", callback -checkIfDocHasBeenProccessed = (doc_id, callback)-> +checkIfDocHasBeenProcessed = (doc_id, callback)-> callback(null, finished_docs[doc_id]) processNext = (doc_id, callback)-> @@ -73,7 +73,7 @@ processNext = (doc_id, callback)-> return callback() if needToExit return callback(new Error("graceful shutdown")) - checkIfDocHasBeenProccessed doc_id, (err, hasBeenProcessed)-> + checkIfDocHasBeenProcessed doc_id, (err, hasBeenProcessed)-> if hasBeenProcessed console.log "#{doc_id} already processed, skipping" return callback() diff --git a/server-ce/migrations/about_migrations.md b/server-ce/migrations/about_migrations.md index 97f91442b1..8a9d7596ba 100644 --- a/server-ce/migrations/about_migrations.md +++ b/server-ce/migrations/about_migrations.md @@ -1,4 +1,4 @@ -If migration is stopped mid way it will start at the beginging next time +If migration is stopped mid way it will start at the beginning next time To see the run migrations do db.getCollection('_migrations').find() you can't do db._migrations.find() diff --git a/server-ce/settings.coffee b/server-ce/settings.coffee index f971584b55..43261774ab 100644 --- 
a/server-ce/settings.coffee +++ b/server-ce/settings.coffee @@ -37,7 +37,7 @@ settings = # Databases # --------- - # ShareLaTeX's main persistant data store is MongoDB (http://www.mongodb.org/) + # ShareLaTeX's main persistent data store is MongoDB (http://www.mongodb.org/) # Documentation about the URL connection string format can be found at: # # http://docs.mongodb.org/manual/reference/connection-string/ @@ -75,7 +75,7 @@ settings = # track-changes:lock historyLock: ({doc_id}) -> "HistoryLock:#{doc_id}" historyIndexLock: ({project_id}) -> "HistoryIndexLock:#{project_id}" - # track-chanegs:history + # track-changes:history uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}" docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}" # realtime @@ -93,8 +93,8 @@ settings = project_history: redisConfig # The compile server (the clsi) uses a SQL database to cache files and - # meta-data. sqllite is the default, and the load is low enough that this will - # be fine in production (we use sqllite at sharelatex.com). + # meta-data. sqlite is the default, and the load is low enough that this will + # be fine in production (we use sqlite at sharelatex.com). # # If you want to configure a different database, see the Sequelize documentation # for available options: @@ -223,7 +223,7 @@ settings = templates: true references: true -## OPTIONAL CONFIGERABLE SETTINGS +## OPTIONAL CONFIGURABLE SETTINGS if process.env["SHARELATEX_LEFT_FOOTER"]? 
try @@ -457,7 +457,7 @@ if process.env["SHARELATEX_SAML_ENTRYPOINT"] undefined ) requestIdExpirationPeriodMs: ( - if _saml_exiration = process.env["SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"] + if _saml_expiration = process.env["SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"] try parseIntOrFail(_saml_expiration) catch e diff --git a/server-ce/tasks/CreateAndDestoryUsers.coffee b/server-ce/tasks/CreateAndDestroyUsers.coffee similarity index 100% rename from server-ce/tasks/CreateAndDestoryUsers.coffee rename to server-ce/tasks/CreateAndDestroyUsers.coffee