Fix spelling errors (#823)

Josh Soref 2021-03-02 04:35:40 -05:00 committed by GitHub
parent e03b320961
commit 231456f5d7
11 changed files with 22 additions and 22 deletions

View file

@@ -4,7 +4,7 @@ Contributing to ShareLaTeX
Thank you for reading this! If you'd like to report a bug or join in the development
of ShareLaTeX, then here are some notes on how to do that.
-*Note that ShareLaTeX is actually made up of many seperate repositories (a list is available
+*Note that ShareLaTeX is actually made up of many separate repositories (a list is available
[here](https://github.com/sharelatex/sharelatex/blob/master/README.md#other-repositories)).*
Reporting bugs and opening issues

View file

@@ -58,7 +58,7 @@ RUN cd /var/www/sharelatex \
RUN cd /var/www/sharelatex \
&& bash ./bin/compile-services
-# Links CLSI sycntex to its default location
+# Links CLSI synctex to its default location
# ------------------------------------------
RUN ln -s /var/www/sharelatex/clsi/bin/synctex /opt/synctex

View file

@@ -193,7 +193,7 @@ module.exports = (grunt) ->
grunt.log.errorlns """
!!!!!!!!!!!!!! MONGO ERROR !!!!!!!!!!!!!!
-ShareLaTeX can not talk to the mongdb instance
+ShareLaTeX can not talk to the mongodb instance
Check the mongodb instance is running and accessible on env var SHARELATEX_MONGO_URL
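
As an aside to this hunk: if that error fires, a quick way to verify connectivity is a small standalone script. The sketch below is not part of this commit or of the Gruntfile; it assumes the `mongodb` Node driver (3.x callback style) and that `SHARELATEX_MONGO_URL` holds the connection string, with the fallback URL purely illustrative.

```coffee
# Hypothetical connectivity check (sketch, not from this repository).
mongodb = require "mongodb"

url = process.env["SHARELATEX_MONGO_URL"] or "mongodb://127.0.0.1/sharelatex"
mongodb.MongoClient.connect url, (err, client) ->
  if err?
    console.error "cannot talk to mongodb at #{url}: #{err.message}"
    process.exit 1
  console.log "mongodb is reachable at #{url}"
  client.close()
```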

View file

@@ -39,7 +39,7 @@ If you are upgrading from a previous version of Overleaf, please see the [Releas
## Other repositories
-This repository does not contain any code. It acts a wrapper and toolkit for managing the many different Overleaf services. These each run as their own Node.js process and have their own Github repository. These are all downloaded and set up when you run `grunt install`
+This repository does not contain any code. It acts a wrapper and toolkit for managing the many different Overleaf services. These each run as their own Node.js process and have their own GitHub repository. These are all downloaded and set up when you run `grunt install`
| Service | Description |
| ------- | ----------- |

View file

@@ -19,7 +19,7 @@ services:
volumes:
- ~/sharelatex_data:/var/lib/sharelatex
########################################################################
-#### Server Pro: Un-comment the following line to mount the docker ####
+#### Server Pro: Uncomment the following line to mount the docker ####
#### socket, required for Sibling Containers to work ####
########################################################################
# - /var/run/docker.sock:/var/run/docker.sock

View file

@@ -19,7 +19,7 @@ printProgress = ->
exec "wc #{finished_projects_path}", (error, results) ->
setTimeout printProgress, 1000 * 30
-checkIfFileHasBeenProccessed = (project_id, callback)->
+checkIfFileHasBeenProcessed = (project_id, callback)->
exec "grep #{project_id} #{finished_projects_path}", (error, results) ->
hasBeenProcessed = _.include(results, project_id)
callback(error, hasBeenProcessed)
@@ -125,9 +125,9 @@ saveDocsIntoMongo = (project_id, docs, callback)->
processNext = (project_id, callback)->
-checkIfFileHasBeenProccessed project_id, (err, hasBeenProcessed)->
+checkIfFileHasBeenProcessed project_id, (err, hasBeenProcessed)->
if hasBeenProcessed
console.log "#{project_id} already procssed, skipping"
console.log "#{project_id} already processed, skipping"
return callback()
console.log "#{project_id} processing"
getAllDocs project_id, (err, docs)->

View file

@@ -16,7 +16,7 @@ printProgress = ->
exec "wc #{finished_projects_path}", (error, results) ->
setTimeout printProgress, 1000 * 30
-checkIfFileHasBeenProccessed = (project_id, callback)->
+checkIfFileHasBeenProcessed = (project_id, callback)->
exec "grep #{project_id} #{finished_projects_path}", (error, results) ->
hasBeenProcessed = _.include(results, project_id)
callback(error, hasBeenProcessed)
@@ -125,7 +125,7 @@ getWhichDocsCanBeDeleted = (docs, callback = (err, docsToBeDeleted, unmigratedDo
async.series jobs, (err)->
callback err, docsToBeDeleted, unmigratedDocs
-whipeDocLines = (project_id, mongoPath, callback)->
+wipeDocLines = (project_id, mongoPath, callback)->
update =
$unset: {}
update.$unset["#{mongoPath}.lines"] = ""
@@ -137,15 +137,15 @@ removeDocLinesFromProject = (docs, project, callback)->
jobs = _.map docs, (doc)->
(cb)->
findDocInProject project, doc._id, (err, doc, mongoPath)->
-whipeDocLines project._id, mongoPath, cb
+wipeDocLines project._id, mongoPath, cb
async.parallelLimit jobs, 5, callback
processNext = (project_id, callback)->
if !project_id? or project_id.length == 0
return callback()
-checkIfFileHasBeenProccessed project_id, (err, hasBeenProcessed)->
+checkIfFileHasBeenProcessed project_id, (err, hasBeenProcessed)->
if hasBeenProcessed
console.log "#{project_id} already procssed, skipping"
console.log "#{project_id} already processed, skipping"
return callback()
console.log "#{project_id} processing"
getAllDocs project_id, (err, docs, project)->

View file

@@ -65,7 +65,7 @@ markDocAsUnmigrated = (doc_id, callback)->
markDocAsProcessed doc_id, (err)->
fs.appendFile unmigrated_docs_path, "#{doc_id}\n", callback
-checkIfDocHasBeenProccessed = (doc_id, callback)->
+checkIfDocHasBeenProcessed = (doc_id, callback)->
callback(null, finished_docs[doc_id])
processNext = (doc_id, callback)->
@@ -73,7 +73,7 @@ processNext = (doc_id, callback)->
return callback()
if needToExit
return callback(new Error("graceful shutdown"))
-checkIfDocHasBeenProccessed doc_id, (err, hasBeenProcessed)->
+checkIfDocHasBeenProcessed doc_id, (err, hasBeenProcessed)->
if hasBeenProcessed
console.log "#{doc_id} already processed, skipping"
return callback()

View file

@@ -1,4 +1,4 @@
-If migration is stopped mid way it will start at the beginging next time
+If migration is stopped mid way it will start at the beginning next time
To see the run migrations do db.getCollection('_migrations').find() you can't do db._migrations.find()

View file

@@ -37,7 +37,7 @@ settings =
# Databases
# ---------
-# ShareLaTeX's main persistant data store is MongoDB (http://www.mongodb.org/)
+# ShareLaTeX's main persistent data store is MongoDB (http://www.mongodb.org/)
# Documentation about the URL connection string format can be found at:
#
# http://docs.mongodb.org/manual/reference/connection-string/
@@ -75,7 +75,7 @@ settings =
# track-changes:lock
historyLock: ({doc_id}) -> "HistoryLock:#{doc_id}"
historyIndexLock: ({project_id}) -> "HistoryIndexLock:#{project_id}"
-# track-chanegs:history
+# track-changes:history
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
# realtime
@@ -93,8 +93,8 @@ settings =
project_history: redisConfig
# The compile server (the clsi) uses a SQL database to cache files and
-# meta-data. sqllite is the default, and the load is low enough that this will
-# be fine in production (we use sqllite at sharelatex.com).
+# meta-data. sqlite is the default, and the load is low enough that this will
+# be fine in production (we use sqlite at sharelatex.com).
#
# If you want to configure a different database, see the Sequelize documentation
# for available options:
@@ -223,7 +223,7 @@ settings =
templates: true
references: true
-## OPTIONAL CONFIGERABLE SETTINGS
+## OPTIONAL CONFIGURABLE SETTINGS
if process.env["SHARELATEX_LEFT_FOOTER"]?
try
@@ -457,7 +457,7 @@ if process.env["SHARELATEX_SAML_ENTRYPOINT"]
undefined
)
requestIdExpirationPeriodMs: (
-if _saml_exiration = process.env["SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"]
+if _saml_expiration = process.env["SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"]
try
parseIntOrFail(_saml_expiration)
catch e
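
For context on this last hunk: `parseIntOrFail` is the helper the settings file calls to reject non-numeric environment values. Its definition is not part of this diff; the block below is only a sketch of what such a helper is assumed to look like (parse base 10, throw instead of returning NaN).

```coffee
# Assumed shape of a parseIntOrFail-style helper (illustrative sketch).
parseIntOrFail = (value) ->
  parsedInt = parseInt(value, 10)
  if isNaN(parsedInt)
    throw new Error("'#{value}' is an invalid integer")
  parsedInt
```

With a helper like that, a bad SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS value trips the `catch e` branch above instead of silently becoming NaN.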