Compare commits

...

53 Commits

Author SHA1 Message Date
Valentin Stauber e7c3068e44
Merge 4ae5448dc6 into 9729befe59 2024-05-05 19:37:44 -06:00
Alf Eaton 9729befe59 Merge pull request #18170 from overleaf/ae-token-access-page
Convert token access page to React

GitOrigin-RevId: d7434f0de395c47a95d00767727fbe9d43f9abca
2024-05-03 08:05:01 +00:00
Alf Eaton ab5495023a [visual] Add support for `description` list environments (#13460)
GitOrigin-RevId: d1ddfeed4ba453afa348e57d75fdb3e12d29f5fc
2024-05-03 08:04:53 +00:00
CloudBuild 49a74544b8 auto update translation
GitOrigin-RevId: 51358bc2a16648dee1351a35b7338de9321f5a87
2024-05-03 08:04:49 +00:00
Eric Mc Sween 4114901617 Merge pull request #18142 from overleaf/em-docupdater-resync-ranges
Adjust ranges for tracked deletes when resyncing projects

GitOrigin-RevId: 5f8e6279cdc31e76a2f93cf2129eaca8cac3cb78
2024-05-03 08:04:41 +00:00
Eric Mc Sween 65f20a4d56 Merge pull request #18186 from overleaf/em-migration-dependencies
Add a migration helper checking dependent migrations

GitOrigin-RevId: 96aa6238b20115206554faaa4c2aefc537bbe7e8
2024-05-03 08:04:36 +00:00
Jakob Ackermann 4c49841637 Merge pull request #18153 from overleaf/jpa-validate-session-in-store
[web] check for redis connection being out of sync in session store

GitOrigin-RevId: c271e88d4e1fbcb0f7a57f4775e8ef88b70b16a8
2024-05-03 08:04:25 +00:00
Jakob Ackermann 0576e02127 Merge pull request #18152 from overleaf/jpa-stricter-session-validation
[web] stricter session validation

GitOrigin-RevId: 3ef916318fde7f31e3e3fd0f7082dde7a2975a27
2024-05-03 08:04:20 +00:00
Tim Down a452e1e8cd Merge pull request #18195 from overleaf/revert-18120-td-bs5-load-js
Revert "Load correct JS for the active Bootstrap version"

GitOrigin-RevId: 7f6e846b5461cfbacec874ed55bba577e414f3a6
2024-05-03 08:04:16 +00:00
Tim Down 56150d9dbc Load correct JS for the active Bootstrap version (#18120)
* Load correct JS for the active Bootstrap version

* Tidy up bootstrapVersion declaration

* Add Bootstrap JS to website redesign layout

* Fix error on interstitial subscriptions page

* Remove unnecessary import of jQuery and Bootstrap

* Use global entrypointScripts in bootstrap-js mixin

GitOrigin-RevId: 6b1977354a72dc69008fc0d2e3baec2f28d97f6b
2024-05-03 08:04:07 +00:00
CloudBuild fb05c0bb82 auto update translation
GitOrigin-RevId: 1850bdf3c1c7cd7c3e4b60ed895278602f4be0f9
2024-05-02 08:04:04 +00:00
Jessica Lawshe a827e925c3 Merge pull request #18158 from overleaf/jel-managed-enrollment-label
[web] Fix text wrapping of label on managed users enrollment

GitOrigin-RevId: f87d51d1f32d64b9fdebd865f340f39bad844870
2024-05-02 08:04:00 +00:00
Jessica Lawshe ae0abd6445 Merge pull request #18159 from overleaf/jel-group-invite-header
[web] Break word on group invite header

GitOrigin-RevId: 790c24e8291f1dbdfa9231e4c9e3d4e531bf2b8f
2024-05-02 08:03:52 +00:00
Andrew Rumble 92f62f91c1 Merge pull request #18148 from overleaf/ar-add-output-zip-endpoint-to-clsi
[clsi] Add endpoints to get zip of output files

GitOrigin-RevId: a1a935e8170ab5a8d40baa6d96f8e42fe22c2e8c
2024-05-02 08:03:44 +00:00
CloudBuild d02f175afa auto update translation
GitOrigin-RevId: 55307f35eccdc6ea38d1b58a45bd06f2b8a2adaa
2024-05-01 08:05:09 +00:00
Jimmy Domagala-Tang 0ca7a385d5 Merge pull request #18131 from overleaf/jdt-promo-hooks
feat: split logic for promos out to hooks
GitOrigin-RevId: 8f713cdf309f84dddb20e8da76009512bd990a8f
2024-05-01 08:05:04 +00:00
Antoine Clausse a26c655220 Delete 3 migration scripts for compile-timeouts (#18163)
Co-authored-by: Jakob Ackermann <jakob.ackermann@overleaf.com>
GitOrigin-RevId: 2d66052994159b6d902b807f02488095d65562e1
2024-05-01 08:05:00 +00:00
Antoine Clausse 6a6f155029 [web] Use React hooks to get split-test variants instead of `getSplitTestVariant` (`getMeta`) (#18133)
* Fix split-tests loading in React component: use `useSplitTestContext` instead of `getSplitTestVariant`

* Replace use of `isSplitTestEnabled` by `useSplitTestContext`

* Add SplitTestProvider to roots, and fix tests

* Create `useFeatureFlag` hook

* Use `useFeatureFlag` where applicable

GitOrigin-RevId: 9ff7bb3975d50bc4d07d74d93c482d56dc96f615
2024-05-01 08:04:55 +00:00
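A hook like this is typically a thin wrapper over the split-test context. A minimal sketch, with illustrative names rather than Overleaf's exact API:

// Hypothetical shape of the hook; the real context and variant names may differ
import { createContext, useContext } from 'react'

const SplitTestContext = createContext({ splitTestVariants: {} })

function useSplitTestContext() {
  return useContext(SplitTestContext)
}

// A feature flag is modelled as a split test whose 'enabled' variant is active
function useFeatureFlag(name) {
  const { splitTestVariants } = useSplitTestContext()
  return splitTestVariants[name] === 'enabled'
}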
Domagoj Kriskovic ebb34b40c1 [web] "back to editor" button when history is opened (#18137)
* [web] "back to editor" button when history is opened

* rename to shouldReopenChat

* move to separate component

* show online users widget

* using MaterialIcon

* import useState directly

* fix formatting

GitOrigin-RevId: c37432f16518ef83510c48d90722e74b228b5ab1
2024-05-01 08:04:51 +00:00
Rebeka Dekany 62c2937dac Merge pull request #18164 from overleaf/rd-remove-endpoints
[web] Remove publicly accessible endpoints

GitOrigin-RevId: c8e57faf6418274cac36b6e721c97a4ca70a1193
2024-05-01 08:04:46 +00:00
Alf Eaton 417de9ee87 Fix formatting
GitOrigin-RevId: 94ebd836a8cc3fbbb3ea1b7284b1c1863263d96f
2024-05-01 08:04:41 +00:00
Copybot faf9bc39c4 Merge pull request #1108 from chschenk:feature_maxEntitiesPerProject
GitOrigin-RevId: ceeb1c13e5bbc3eb498e0ee1040ab8bbfeb574a9
2024-05-01 08:04:36 +00:00
Alf Eaton 08c784f58a Translate "You" on project dashboard (#18122)
GitOrigin-RevId: 5157df9079460c5aa8122fc29b14babf12a32bc4
2024-05-01 08:04:31 +00:00
Alf Eaton 8921b8484e Merge pull request #18140 from overleaf/ae-log-rules
Add new regular expressions for matching compiler error messages

GitOrigin-RevId: ab6e17951c29c2a68b385b7e0cb77abf2d22281d
2024-05-01 08:04:27 +00:00
Andrew Rumble 13bb42885e Merge pull request #18025 from overleaf/ar-add-dropbox-unlinks-to-audit-log
Add dropbox unlinks to audit log

GitOrigin-RevId: 9038293b42446843763ea83caa3f9414123961a1
2024-05-01 08:04:18 +00:00
Rebeka Dekany 285a0cae03 Merge pull request #17309 from overleaf/rd-bootstrap-5-stylelint
[web] Introducing Stylelint as the CSS linter

GitOrigin-RevId: 89ee8860cdb3a94949749577b63cde2c3dc213fb
2024-05-01 08:04:13 +00:00
Rebeka Dekany 46485e0347 Merge pull request #18030 from overleaf/rd-bootstrap-settings-css
Migrate account-settings.less file to Bootstrap 5 and Sass

GitOrigin-RevId: 898cd811d6a0576cb0faacdd729461198324d2d5
2024-05-01 08:04:08 +00:00
Jessica Lawshe e9586079d4 Merge pull request #18047 from overleaf/jel-latexqc-webpack-dev-middleware
[latexqc] Upgrade `webpack-dev-middleware`

GitOrigin-RevId: b7036f623c4fb27174c2b4f22b49ff1b257af829
2024-04-30 08:04:52 +00:00
Eric Mc Sween 501be34862 Merge pull request #18156 from overleaf/em-fix-queue-size-metric
Fix queue size by error metric in project history dashboard

GitOrigin-RevId: e837c6fc00acd23671f017c70cd9b2c643c02482
2024-04-30 08:04:47 +00:00
Andrew Rumble 9c3d9ef590 Merge pull request #17935 from overleaf/ar-refactor-compile-async
[web] make CompileManager async

GitOrigin-RevId: 617bde1f429fa9aafc7d4bf4ec628b2a22386b19
2024-04-30 08:04:43 +00:00
Miguel Serrano cee678591f Merge pull request #18145 from overleaf/msm-ce-history-scripts
[CE] Add history utility scripts (flush/resync)

GitOrigin-RevId: 3f46609c279bef70f1ee6e63f74648f1c2b99a97
2024-04-30 08:04:38 +00:00
Antoine Clausse cdd79e8ec0 Fix: unset recent users' `featuresUpdatedAt` after wrong update (#18149)
* Copy previous script

* Remove `featuresUpdatedAt` that was wrongly set on recent users

* Fix! `signupDate` -> `signUpDate`

* Add test on `migration_compile_timeout_60s_to_20s_fixup_new_users.js`

* style: `$unset: { featuresUpdatedAt: 1 }` -> `$unset: { featuresUpdatedAt: '' }`

Co-authored-by: Jakob Ackermann <jakob.ackermann@overleaf.com>

* Add comment on test (https://github.com/overleaf/internal/pull/18149#discussion_r1582999534)

Co-authored-by: Jakob Ackermann <jakob.ackermann@overleaf.com>

---------

Co-authored-by: Jakob Ackermann <jakob.ackermann@overleaf.com>
GitOrigin-RevId: 408f5c7d48e60722aba736167b8e8858e9570d99
2024-04-30 08:04:33 +00:00
Antoine Clausse 711d50a2f1 [web] Create script to update forgotten `featuresUpdatedAt` after the migration to 20s compile timeout (#18113)
* Copy `migration_compile_timeout_60s_to_20s.js` script

* Update `featuresUpdatedAt`

* Add a comment about `featuresUpdatedAt` in migration_compile_timeout_60s_to_20s.js

* Fix test on migration_compile_timeout_60s_to_20s.js

* Fix: Include users having `featuresUpdatedAt` undefined in the update

* Add test on `migration_compile_timeout_60s_to_20s_fixup_features_updated_at`

GitOrigin-RevId: 4b2baf955a6a9f39bf9ce00b7839af551064c6cb
2024-04-30 08:04:28 +00:00
CloudBuild 70c05dd5f7 auto update translation
GitOrigin-RevId: 1c8fdfb7e8e0e3cb88e6f2f7e51d6a3b2da27826
2024-04-29 08:04:58 +00:00
Jakob Ackermann afca054a22 Merge pull request #18136 from overleaf/jpa-fix-i18n-scanner-glob
[web] instruct i18next-scanner to look at frontend code only

GitOrigin-RevId: 094cc571810f142b535d0813c2002944a0e1ab9d
2024-04-29 08:04:45 +00:00
CloudBuild 568044ee48 auto update translation
GitOrigin-RevId: a8aa6d0ff44aa96d16f7e4978014b36ab8b2fde7
2024-04-26 08:05:05 +00:00
Brian Gough 29105911c5 Merge pull request #17732 from overleaf/bg-session-mitigation-initial-protoype
anonymous cookie-based sessions module

GitOrigin-RevId: 75fe2d48fa384ba8d07c0b478a9a5a907a2b3b67
2024-04-26 08:04:54 +00:00
Jakob Ackermann a540754f6e Merge pull request #18116 from overleaf/jpa-bulk-replace-localhost
[misc] bulk replace localhost with 127.0.0.1

GitOrigin-RevId: d238f3635302e8ff5500d611108c4d1bef216726
2024-04-26 08:04:39 +00:00
Jakob Ackermann 90a02ebc2f Merge pull request #17949 from overleaf/jpa-set-nx-xx
[web] stricter writes to redis when creating and updating sessions

GitOrigin-RevId: 79723e0d38884bf723c7a2ba32993e4daa2612a0
2024-04-26 08:04:35 +00:00
Alf Eaton cb97bb5170 Add "None" option to "Main document" menu when no root doc is set (#18102)
GitOrigin-RevId: 5e1cd6bcbf070d7b6a92a1fcd69370361a3e6d3e
2024-04-26 08:04:31 +00:00
Antoine Clausse ede03daafd Rename test to `local-ccy-format-v2` to restart with clean data (#18115)
GitOrigin-RevId: 610495f41d69f8cac1a427ef7b8d64886bc5cdba
2024-04-26 08:04:26 +00:00
Antoine Clausse e32b4f0db1 [web] Handle error caused by `currencyDisplay: 'narrowSymbol'` in old browsers (#18060)
* Handle error caused by `currencyDisplay: 'narrowSymbol'` in old browsers

RangeError
Value narrowSymbol out of range for Number.prototype.toLocaleString options property currencyDisplay

* Make `formatCurrencyLocalized` bulletproof

GitOrigin-RevId: 26e8abc6f9fb7c06c2d14b9d86af2d84fb9f32e3
2024-04-26 08:04:22 +00:00
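The usual defence against this RangeError is to catch it and retry with a universally supported currencyDisplay. A sketch of that pattern, assuming nothing about how `formatCurrencyLocalized` is actually written:

// Illustrative fallback only; not Overleaf's implementation
function formatCurrency(amount, currency, locale) {
  try {
    return amount.toLocaleString(locale, {
      style: 'currency',
      currency,
      currencyDisplay: 'narrowSymbol', // RangeError in some older browsers
    })
  } catch (err) {
    if (err instanceof RangeError) {
      // 'symbol' is supported everywhere, at the cost of wider symbols like 'US$'
      return amount.toLocaleString(locale, {
        style: 'currency',
        currency,
        currencyDisplay: 'symbol',
      })
    }
    throw err
  }
}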
Eric Mc Sween 65313e6692 Merge pull request #18044 from overleaf/em-fix-migration-order
Fix doc versions migration order

GitOrigin-RevId: bad0297cafcc29c0890dbc0d6f5dcc159ded838b
2024-04-26 08:04:18 +00:00
roo hutton 359f5068ad Merge pull request #18055 from overleaf/rh-follow-redirects-1156
[web] Upgrade follow-redirects to 1.15.6

GitOrigin-RevId: 72e6efe5fbf48536590ac0cab04c1e18a8023dbe
2024-04-26 08:04:13 +00:00
roo hutton cbb7a6412c Merge pull request #18053 from overleaf/rh-upgrade-tar-621
[web] Upgrade tar to 6.2.1

GitOrigin-RevId: ca5373b0765aba0fd22dea200e695e9263d08c01
2024-04-26 08:04:09 +00:00
David c2448ff3d2 Merge pull request #17947 from overleaf/dp-secondary-email-confirmation-code
Add endpoints for secondary email confirmation by code

GitOrigin-RevId: c2829672fd9aeca457f76958d4922b9c95086f26
2024-04-26 08:04:00 +00:00
Valentin Stauber 4ae5448dc6 revert Dockerfile-base to original 2024-03-20 14:07:40 +01:00
Valentin Stauber 52ab6c12e9 Revert "split chained RUN commands in Dockerfile"
This reverts commit 3b3013e499.
2024-03-20 14:04:55 +01:00
Valentin Stauber b82bab82c4 improve default values in email settings 2024-03-20 13:59:21 +01:00
Valentin Stauber 593358a035 change ctan mirror and make wget verbose 2024-03-19 19:04:25 +01:00
Valentin Stauber 3b3013e499 split chained RUN commands in Dockerfile 2024-03-19 18:48:27 +01:00
Valentin Stauber f06759dcd6 also extract tls field from emailSettings 2024-03-18 17:23:33 +01:00
Christopher Schenk 0c265db259 Make the number of max entities per project configurable 2023-03-29 11:31:54 +02:00
231 changed files with 6152 additions and 1642 deletions


@ -18,7 +18,7 @@ describe('fetch-utils', function () {
before(async function () {
this.server = new TestServer()
await this.server.start(PORT)
this.url = path => `http://localhost:${PORT}${path}`
this.url = path => `http://127.0.0.1:${PORT}${path}`
})
after(async function () {


@ -4,7 +4,7 @@
const redis = require('../../')
const logger = require('@overleaf/logger')
const rclient = redis.createClient({ host: 'localhost', port: '6379' })
const rclient = redis.createClient({ host: '127.0.0.1', port: '6379' })
setInterval(() => {
rclient.healthCheck(err => {

package-lock.json (generated, 1319 lines changed)

File diff suppressed because it is too large


@ -80,6 +80,13 @@ COPY server-ce/config/custom-environment-variables.json /overleaf/services/histo
ADD server-ce/bin/grunt /usr/local/bin/grunt
RUN chmod +x /usr/local/bin/grunt
# Copy history helper scripts
# ---------------------------
ADD server-ce/bin/flush-history-queues /overleaf/bin/flush-history-queues
RUN chmod +x /overleaf/bin/flush-history-queues
ADD server-ce/bin/force-history-resyncs /overleaf/bin/force-history-resyncs
RUN chmod +x /overleaf/bin/force-history-resyncs
# File that controls open|closed status of the site
# -------------------------------------------------
ENV SITE_MAINTENANCE_FILE "/etc/overleaf/site_status"


@ -0,0 +1,8 @@
#!/bin/bash
set -euo pipefail
source /etc/container_environment.sh
source /etc/overleaf/env.sh
cd /overleaf/services/project-history
node scripts/flush_all.js 100000


@ -0,0 +1,8 @@
#!/bin/bash
set -euo pipefail
source /etc/container_environment.sh
source /etc/overleaf/env.sh
cd /overleaf/services/project-history
node scripts/force_resync.js 1000 force


@ -345,10 +345,15 @@ if (process.env.OVERLEAF_EMAIL_FROM_ADDRESS != null) {
// SMTP Creds
host: process.env.OVERLEAF_EMAIL_SMTP_HOST,
port: process.env.OVERLEAF_EMAIL_SMTP_PORT,
secure: parse(process.env.OVERLEAF_EMAIL_SMTP_SECURE),
ignoreTLS: parse(process.env.OVERLEAF_EMAIL_SMTP_IGNORE_TLS),
secure: parse(process.env.OVERLEAF_EMAIL_SMTP_SECURE || 'true'),
ignoreTLS: parse(process.env.OVERLEAF_EMAIL_SMTP_IGNORE_TLS || 'false'),
tls: {
rejectUnauthorized: parse(
process.env.OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH || 'true'
)
},
name: process.env.OVERLEAF_EMAIL_SMTP_NAME,
logger: process.env.OVERLEAF_EMAIL_SMTP_LOGGER === 'true',
logger: parse(process.env.OVERLEAF_EMAIL_SMTP_LOGGER || 'false'),
},
textEncoding: process.env.OVERLEAF_EMAIL_TEXT_ENCODING,
@ -370,14 +375,6 @@ if (process.env.OVERLEAF_EMAIL_FROM_ADDRESS != null) {
pass: process.env.OVERLEAF_EMAIL_SMTP_PASS,
}
}
if (process.env.OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH != null) {
settings.email.parameters.tls = {
rejectUnauthorized: parse(
process.env.OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH
),
}
}
}
// i18n
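The net effect of this diff is that the SMTP booleans now get explicit defaults instead of being left undefined, and the separate `OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH` block becomes redundant. A small sketch of the resulting behaviour, assuming `parse` (defined elsewhere in this file, not shown here) maps the strings 'true'/'false' to booleans:

// Assumed behaviour of the settings `parse` helper, which this diff does not show
const parse = value => (value === 'true' ? true : value === 'false' ? false : value)

function smtpBooleanDefaults(env) {
  return {
    secure: parse(env.OVERLEAF_EMAIL_SMTP_SECURE || 'true'),
    ignoreTLS: parse(env.OVERLEAF_EMAIL_SMTP_IGNORE_TLS || 'false'),
    tls: {
      rejectUnauthorized: parse(env.OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH || 'true'),
    },
    logger: parse(env.OVERLEAF_EMAIL_SMTP_LOGGER || 'false'),
  }
}

// With no SMTP variables set:
// smtpBooleanDefaults({})
//   => { secure: true, ignoreTLS: false, tls: { rejectUnauthorized: true }, logger: false }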


@ -1,14 +1,14 @@
module.exports = {
internal: {
chat: {
host: process.env.LISTEN_ADDRESS || 'localhost',
host: process.env.LISTEN_ADDRESS || '127.0.0.1',
port: 3010,
},
},
apis: {
web: {
url: `http://${process.env.WEB_HOST || 'localhost'}:${
url: `http://${process.env.WEB_HOST || '127.0.0.1'}:${
process.env.WEB_PORT || 3000
}`,
user: process.env.WEB_API_USER || 'overleaf',
@ -19,7 +19,7 @@ module.exports = {
mongo: {
url:
process.env.MONGO_CONNECTION_STRING ||
`mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`,
`mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`,
options: {
monitorCommands: true,
},


@ -9,7 +9,7 @@ export async function ensureRunning() {
if (!serverPromise) {
const { app } = await createServer()
const startServer = promisify(app.listen.bind(app))
serverPromise = startServer(3010, 'localhost')
serverPromise = startServer(3010, '127.0.0.1')
}
return serverPromise
}


@ -1,7 +1,7 @@
import Request from 'request'
const request = Request.defaults({
baseUrl: 'http://localhost:3010',
baseUrl: 'http://127.0.0.1:3010',
})
async function asyncRequest(options) {


@ -14,6 +14,7 @@ const Metrics = require('@overleaf/metrics')
const smokeTest = require('./test/smoke/js/SmokeTests')
const ContentTypeMapper = require('./app/js/ContentTypeMapper')
const Errors = require('./app/js/Errors')
const { createOutputZip } = require('./app/js/OutputController')
const Path = require('path')
@ -170,6 +171,20 @@ const staticOutputServer = ForbidSymlinks(
}
)
// This needs to be before GET /project/:project_id/build/:build_id/output/*
app.get(
'/project/:project_id/build/:build_id/output/output.zip',
bodyParser.json(),
createOutputZip
)
// This needs to be before GET /project/:project_id/user/:user_id/build/:build_id/output/*
app.get(
'/project/:project_id/user/:user_id/build/:build_id/output/output.zip',
bodyParser.json(),
createOutputZip
)
app.get(
'/project/:project_id/user/:user_id/build/:build_id/output/*',
function (req, res, next) {

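A client would call the new route with an optional `files` query parameter, which the controller below reads from `req.query.files`. A hypothetical request (project and build ids are invented; the port comes from the CLSI settings later in this diff, and Express's default query parser turns repeated `files[]` keys into the array the controller expects):

// Node 18+ global fetch; project/build ids are illustrative only
const url =
  'http://127.0.0.1:3013/project/proj-1/build/build-1/output/output.zip' +
  '?files[]=output.pdf&files[]=output.log'
const response = await fetch(url)
// the body streams back a zip served as an attachment named output.zip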

@ -0,0 +1,37 @@
const logger = require('@overleaf/logger')
const OutputFileArchiveManager = require('./OutputFileArchiveManager')
const { expressify } = require('@overleaf/promise-utils')
function cleanFiles(files) {
if (!Array.isArray(files)) {
return []
}
return files.filter(file => /^output\./g.test(file))
}
async function createOutputZip(req, res) {
const {
project_id: projectId,
user_id: userId,
build_id: buildId,
} = req.params
const files = cleanFiles(req.query.files)
logger.debug({ projectId, userId, buildId, files }, 'Will create zip file')
const archive = await OutputFileArchiveManager.archiveFilesForBuild(
projectId,
userId,
buildId,
files
)
archive.on('error', err => {
logger.warn({ err }, 'error emitted when creating output files archive')
})
res.attachment('output.zip')
res.setHeader('X-Content-Type-Options', 'nosniff')
archive.pipe(res)
}
module.exports = { createOutputZip: expressify(createOutputZip) }
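The handler above is async, so it is adapted for Express with `expressify`. A rough sketch of what such a wrapper does — the real `@overleaf/promise-utils` implementation may differ — is to forward any rejection to `next`, which is exactly the behaviour the unit test further below asserts:

// Illustrative only: adapts an async (req, res) handler to Express,
// routing rejections to the error-handling middleware via next()
function expressify(handler) {
  return (req, res, next) => {
    handler(req, res, next).catch(next)
  }
}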


@ -0,0 +1,90 @@
let OutputFileArchiveManager
const archiver = require('archiver')
const OutputCacheManager = require('./OutputCacheManager')
const OutputFileFinder = require('./OutputFileFinder')
const Settings = require('@overleaf/settings')
const { open } = require('node:fs/promises')
const path = require('node:path')
const { NotFoundError } = require('./Errors')
function getContentDir(projectId, userId) {
let subDir
if (userId != null) {
subDir = `${projectId}-${userId}`
} else {
subDir = projectId
}
return `${Settings.path.outputDir}/${subDir}/`
}
module.exports = OutputFileArchiveManager = {
async archiveFilesForBuild(projectId, userId, build, files = []) {
const contentDir = getContentDir(projectId, userId)
const validFiles = await (files.length > 0
? this._getRequestedOutputFiles(projectId, userId, build, files)
: this._getAllOutputFiles(projectId, userId, build))
const archive = archiver('zip')
const missingFiles = files.filter(
file => !validFiles.includes(path.basename(file))
)
for (const file of validFiles) {
try {
const fileHandle = await open(
`${contentDir}${OutputCacheManager.path(build, file)}`
)
const fileStream = fileHandle.createReadStream()
archive.append(fileStream, { name: file })
} catch (error) {
missingFiles.push(file)
}
}
if (missingFiles.length > 0) {
archive.append(missingFiles.join('\n'), {
name: 'missing_files.txt',
})
}
await archive.finalize()
return archive
},
async _getAllOutputFiles(projectId, userId, build) {
const contentDir = getContentDir(projectId, userId)
try {
const { outputFiles } = await OutputFileFinder.promises.findOutputFiles(
[],
`${contentDir}${OutputCacheManager.path(build, '.')}`
)
return outputFiles.map(({ path }) => path)
} catch (error) {
if (
error.code === 'ENOENT' ||
error.code === 'ENOTDIR' ||
error.code === 'EACCES'
) {
throw new NotFoundError('Output files not found')
}
throw error
}
},
async _getRequestedOutputFiles(projectId, userId, build, files) {
const outputFiles = new Set(
await OutputFileArchiveManager._getAllOutputFiles(
projectId,
userId,
build
)
)
return files.filter(file => outputFiles.has(file))
},
}
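For a sense of how this manager is consumed: `archiveFilesForBuild` resolves to an archiver stream that the caller pipes somewhere, as the controller above does with the HTTP response. A hypothetical standalone use (ids and the output path are invented):

const fs = require('node:fs')

async function saveBuildZip() {
  const archive = await OutputFileArchiveManager.archiveFilesForBuild(
    'proj-1',                      // projectId (illustrative)
    'user-1',                      // userId; null for project-level output
    'build-1',                     // buildId
    ['output.pdf', 'output.log']   // optional allow-list of output files
  )
  archive.on('error', err => console.error('archive error', err))
  archive.pipe(fs.createWriteStream('/tmp/output.zip'))
}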


@ -23,7 +23,7 @@ module.exports = {
internal: {
clsi: {
port: 3013,
host: process.env.LISTEN_ADDRESS || 'localhost',
host: process.env.LISTEN_ADDRESS || '127.0.0.1',
},
load_balancer_agent: {
@ -35,12 +35,12 @@ module.exports = {
apis: {
clsi: {
// Internal requests (used by tests only at the time of writing).
url: `http://${process.env.CLSI_HOST || 'localhost'}:3013`,
url: `http://${process.env.CLSI_HOST || '127.0.0.1'}:3013`,
// External url prefix for output files, e.g. for requests via load-balancers.
outputUrlPrefix: `${process.env.ZONE ? `/zone/${process.env.ZONE}` : ''}`,
},
clsiPerf: {
host: `${process.env.CLSI_PERF_HOST || 'localhost'}:${
host: `${process.env.CLSI_PERF_HOST || '127.0.0.1'}:${
process.env.CLSI_PERF_PORT || '3043'
}`,
},


@ -23,6 +23,7 @@
"@overleaf/o-error": "*",
"@overleaf/promise-utils": "*",
"@overleaf/settings": "*",
"archiver": "5.3.2",
"async": "3.2.2",
"body-parser": "^1.19.0",
"bunyan": "^1.8.15",


@ -72,7 +72,7 @@ module.exports = Client = {
done(new Error('error starting server: ' + error.message))
} else {
const addr = server.address()
Settings.filestoreDomainOveride = `http://localhost:${addr.port}`
Settings.filestoreDomainOveride = `http://127.0.0.1:${addr.port}`
done()
}
})


@ -0,0 +1,110 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const MODULE_PATH = require('path').join(
__dirname,
'../../../app/js/OutputController'
)
describe('OutputController', function () {
describe('createOutputZip', function () {
beforeEach(function () {
this.archive = {
on: sinon.stub(),
pipe: sinon.stub(),
}
this.archiveFilesForBuild = sinon.stub().resolves(this.archive)
this.OutputController = SandboxedModule.require(MODULE_PATH, {
requires: {
'./OutputFileArchiveManager': {
archiveFilesForBuild: this.archiveFilesForBuild,
},
},
})
})
describe('when OutputFileArchiveManager creates an archive', function () {
beforeEach(function (done) {
this.res = {
attachment: sinon.stub(),
setHeader: sinon.stub(),
}
this.req = {
params: {
project_id: 'project-id-123',
user_id: 'user-id-123',
build_id: 'build-id-123',
},
query: {
files: ['output.tex', 'not-output.tex'],
},
}
this.archive.pipe.callsFake(() => done())
this.OutputController.createOutputZip(this.req, this.res)
})
it('does not pass files that do not start with "output" to OutputFileArchiveManager', function () {
sinon.assert.calledWith(
this.archiveFilesForBuild,
'project-id-123',
'user-id-123',
'build-id-123',
['output.tex']
)
})
it('pipes the archive to the response', function () {
sinon.assert.calledWith(this.archive.pipe, this.res)
})
it('calls the express convenience method to set attachment headers', function () {
sinon.assert.calledWith(this.res.attachment, 'output.zip')
})
it('sets the X-Content-Type-Options header to nosniff', function () {
sinon.assert.calledWith(
this.res.setHeader,
'X-Content-Type-Options',
'nosniff'
)
})
})
describe('when OutputFileArchiveManager throws an error', function () {
let error
beforeEach(function (done) {
error = new Error('error message')
this.archiveFilesForBuild.rejects(error)
this.res = {
status: sinon.stub().returnsThis(),
send: sinon.stub(),
}
this.req = {
params: {
project_id: 'project-id-123',
user_id: 'user-id-123',
build_id: 'build-id-123',
},
query: {
files: ['output.tex'],
},
}
this.OutputController.createOutputZip(
this.req,
this.res,
(this.next = sinon.stub().callsFake(() => {
done()
}))
)
})
it('calls next with the error', function () {
sinon.assert.calledWith(this.next, error)
})
})
})
})


@ -0,0 +1,234 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { assert, expect } = require('chai')
const MODULE_PATH = require('path').join(
__dirname,
'../../../app/js/OutputFileArchiveManager'
)
describe('OutputFileArchiveManager', function () {
const userId = 'user-id-123'
const projectId = 'project-id-123'
const buildId = 'build-id-123'
afterEach(function () {
sinon.restore()
})
beforeEach(function () {
this.OutputFileFinder = {
promises: {
findOutputFiles: sinon.stub().resolves({ outputFiles: [] }),
},
}
this.OutputCacheManger = {
path: sinon.stub().callsFake((build, path) => {
return `${build}/${path}`
}),
}
this.archive = {
append: sinon.stub(),
finalize: sinon.stub(),
}
this.archiver = sinon.stub().returns(this.archive)
this.outputDir = '/output/dir'
this.fs = {
open: sinon.stub().callsFake(file => ({
createReadStream: sinon.stub().returns(`handle: ${file}`),
})),
}
this.OutputFileArchiveManager = SandboxedModule.require(MODULE_PATH, {
requires: {
'./OutputFileFinder': this.OutputFileFinder,
'./OutputCacheManager': this.OutputCacheManger,
archiver: this.archiver,
'node:fs/promises': this.fs,
'node:path': {
basename: sinon.stub().callsFake(path => path.split('/').pop()),
},
'@overleaf/settings': {
path: {
outputDir: this.outputDir,
},
},
},
})
})
describe('when called with no files', function () {
beforeEach(async function () {
this.OutputFileFinder.promises.findOutputFiles.resolves({
outputFiles: [
{ path: 'file_1' },
{ path: 'file_2' },
{ path: 'file_3' },
{ path: 'file_4' },
],
})
await this.OutputFileArchiveManager.archiveFilesForBuild(
projectId,
userId,
buildId
)
})
it('creates a zip archive', function () {
sinon.assert.calledWith(this.archiver, 'zip')
})
it('adds all the output files to the archive', function () {
expect(this.archive.append.callCount).to.equal(4)
sinon.assert.calledWith(
this.archive.append,
`handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_1`,
sinon.match({ name: 'file_1' })
)
sinon.assert.calledWith(
this.archive.append,
`handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_2`,
sinon.match({ name: 'file_2' })
)
sinon.assert.calledWith(
this.archive.append,
`handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_3`,
sinon.match({ name: 'file_3' })
)
sinon.assert.calledWith(
this.archive.append,
`handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_4`,
sinon.match({ name: 'file_4' })
)
})
it('finalizes the archive after all files are appended', function () {
sinon.assert.called(this.archive.finalize)
expect(this.archive.finalize.calledBefore(this.archive.append)).to.be
.false
})
})
describe('when called with a list of files that all are in the output directory', function () {
beforeEach(async function () {
this.OutputFileFinder.promises.findOutputFiles.resolves({
outputFiles: [
{ path: 'file_1' },
{ path: 'file_2' },
{ path: 'file_3' },
{ path: 'file_4' },
],
})
await this.OutputFileArchiveManager.archiveFilesForBuild(
projectId,
userId,
buildId,
['file_1', 'file_4']
)
})
it('creates a zip archive', function () {
sinon.assert.calledWith(this.archiver, 'zip')
})
it('adds only output files from the list of files to the archive', function () {
expect(this.archive.append.callCount).to.equal(2)
sinon.assert.calledWith(
this.archive.append,
`handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_1`,
sinon.match({
name: 'file_1',
})
)
sinon.assert.calledWith(
this.archive.append,
`handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_4`,
sinon.match({
name: 'file_4',
})
)
})
it('finalizes the archive after all files are appended', function () {
sinon.assert.called(this.archive.finalize)
expect(this.archive.finalize.calledBefore(this.archive.append)).to.be
.false
})
})
describe('when called with a list of files and one of the files is missing from the output directory', function () {
beforeEach(async function () {
this.OutputFileFinder.promises.findOutputFiles.resolves({
outputFiles: [
{ path: 'file_1' },
{ path: 'file_2' },
{ path: 'file_3' },
],
})
await this.OutputFileArchiveManager.archiveFilesForBuild(
projectId,
userId,
buildId,
['file_1', 'file_4']
)
})
it('creates a zip archive', function () {
sinon.assert.calledWith(this.archiver, 'zip')
})
it('adds the files that were found to the archive', function () {
sinon.assert.calledWith(
this.archive.append,
`handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_1`,
sinon.match({ name: 'file_1' })
)
})
it('adds a file listing any missing files', function () {
sinon.assert.calledWith(
this.archive.append,
'file_4',
sinon.match({
name: 'missing_files.txt',
})
)
})
it('finalizes the archive after all files are appended', function () {
sinon.assert.called(this.archive.finalize)
expect(this.archive.finalize.calledBefore(this.archive.append)).to.be
.false
})
})
describe('when the output directory cannot be accessed', function () {
beforeEach(async function () {
this.OutputFileFinder.promises.findOutputFiles.rejects({
code: 'ENOENT',
})
})
it('rejects with a NotFoundError', async function () {
try {
await this.OutputFileArchiveManager.archiveFilesForBuild(
projectId,
userId,
buildId
)
assert.fail('should have thrown a NotFoundError')
} catch (err) {
expect(err).to.haveOwnProperty('name', 'NotFoundError')
}
})
it('does not create an archive', function () {
expect(this.archiver.called).to.be.false
})
})
})


@ -5,14 +5,14 @@ module.exports = {
internal: {
contacts: {
port: 3036,
host: process.env.LISTEN_ADDRESS || 'localhost',
host: process.env.LISTEN_ADDRESS || '127.0.0.1',
},
},
mongo: {
url:
process.env.MONGO_CONNECTION_STRING ||
`mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`,
`mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`,
options: {
monitorCommands: true,
},


@ -3,11 +3,11 @@ import request from 'request'
import async from 'async'
import { app } from '../../../app/js/server.js'
const HOST = 'http://localhost:3036'
const HOST = 'http://127.0.0.1:3036'
describe('Getting Contacts', function () {
before(function (done) {
this.server = app.listen(3036, 'localhost', error => {
this.server = app.listen(3036, '127.0.0.1', error => {
if (error != null) {
throw error
}


@ -19,7 +19,7 @@ module.exports = {
check(callback) {
const docId = new ObjectId()
const projectId = new ObjectId(settings.docstore.healthCheck.project_id)
const url = `http://localhost:${port}/project/${projectId}/doc/${docId}`
const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}`
const lines = [
'smoke test - delete me',
`${crypto.randomBytes(32).toString('hex')}`,


@ -5,7 +5,7 @@ const Settings = {
internal: {
docstore: {
port: 3016,
host: process.env.LISTEN_ADDRESS || 'localhost',
host: process.env.LISTEN_ADDRESS || '127.0.0.1',
},
},


@ -14,7 +14,7 @@ module.exports = {
}
this.initing = true
this.callbacks.push(callback)
app.listen(settings.internal.docstore.port, 'localhost', error => {
app.listen(settings.internal.docstore.port, '127.0.0.1', error => {
if (error != null) {
throw error
}


@ -33,7 +33,7 @@ module.exports = DocstoreClient = {
getDoc(projectId, docId, qs, callback) {
request.get(
{
url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
json: true,
qs,
},
@ -44,7 +44,7 @@ module.exports = DocstoreClient = {
peekDoc(projectId, docId, qs, callback) {
request.get(
{
url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/peek`,
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/peek`,
json: true,
qs,
},
@ -55,7 +55,7 @@ module.exports = DocstoreClient = {
isDocDeleted(projectId, docId, callback) {
request.get(
{
url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/deleted`,
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/deleted`,
json: true,
},
callback
@ -65,7 +65,7 @@ module.exports = DocstoreClient = {
getAllDocs(projectId, callback) {
request.get(
{
url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc`,
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc`,
json: true,
},
(req, res, body) => {
@ -77,7 +77,7 @@ module.exports = DocstoreClient = {
getAllDeletedDocs(projectId, callback) {
request.get(
{
url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc-deleted`,
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc-deleted`,
json: true,
},
(error, res, body) => {
@ -93,7 +93,7 @@ module.exports = DocstoreClient = {
getAllRanges(projectId, callback) {
request.get(
{
url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/ranges`,
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/ranges`,
json: true,
},
callback
@ -103,7 +103,7 @@ module.exports = DocstoreClient = {
updateDoc(projectId, docId, lines, version, ranges, callback) {
return request.post(
{
url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
json: {
lines,
version,
@ -147,7 +147,7 @@ module.exports = DocstoreClient = {
deleteDocWithDateAndName(projectId, docId, deletedAt, name, callback) {
request.patch(
{
url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
json: { name, deleted: true, deletedAt },
},
callback
@ -157,7 +157,7 @@ module.exports = DocstoreClient = {
archiveAllDoc(projectId, callback) {
request.post(
{
url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/archive`,
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/archive`,
},
callback
)
@ -166,7 +166,7 @@ module.exports = DocstoreClient = {
archiveDoc(projectId, docId, callback) {
request.post(
{
url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/archive`,
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/archive`,
},
callback
)
@ -175,7 +175,7 @@ module.exports = DocstoreClient = {
destroyAllDoc(projectId, callback) {
request.post(
{
url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/destroy`,
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/destroy`,
},
callback
)


@ -245,7 +245,7 @@ const port =
Settings.api.documentupdater &&
Settings.api.documentupdater.port) ||
3003
const host = Settings.internal.documentupdater.host || 'localhost'
const host = Settings.internal.documentupdater.host || '127.0.0.1'
if (!module.parent) {
// Called directly


@ -0,0 +1,183 @@
// @ts-check
const _ = require('lodash')
const { isDelete } = require('./Utils')
/**
* @typedef {import('./types').Comment} Comment
* @typedef {import('./types').HistoryComment} HistoryComment
* @typedef {import('./types').HistoryRanges} HistoryRanges
* @typedef {import('./types').HistoryTrackedChange} HistoryTrackedChange
* @typedef {import('./types').Ranges} Ranges
* @typedef {import('./types').TrackedChange} TrackedChange
*/
/**
* Convert editor ranges to history ranges
*
* @param {Ranges} ranges
* @return {HistoryRanges}
*/
function toHistoryRanges(ranges) {
const changes = ranges.changes ?? []
const comments = (ranges.comments ?? []).slice()
// Changes are assumed to be sorted, but not comments
comments.sort((a, b) => a.op.p - b.op.p)
/**
* This will allow us to go through comments at a different pace as we loop
* through tracked changes
*/
const commentsIterator = new CommentsIterator(comments)
/**
* Current offset between editor pos and history pos
*/
let offset = 0
/**
* History comments that might overlap with the tracked change considered
*
* @type {HistoryComment[]}
*/
let pendingComments = []
/**
* The final history comments generated
*
* @type {HistoryComment[]}
*/
const historyComments = []
/**
* The final history tracked changes generated
*
* @type {HistoryTrackedChange[]}
*/
const historyChanges = []
for (const change of changes) {
historyChanges.push(toHistoryChange(change, offset))
// After this point, we're only interested in tracked deletes
if (!isDelete(change.op)) {
continue
}
// Fill pendingComments with new comments that start before this tracked
// delete and might overlap
for (const comment of commentsIterator.nextComments(change.op.p)) {
pendingComments.push(toHistoryComment(comment, offset))
}
// Save comments that are fully before this tracked delete
const newPendingComments = []
for (const historyComment of pendingComments) {
const commentEnd = historyComment.op.p + historyComment.op.c.length
if (commentEnd <= change.op.p) {
historyComments.push(historyComment)
} else {
newPendingComments.push(historyComment)
}
}
pendingComments = newPendingComments
// The rest of pending comments overlap with this tracked change. Adjust
// their history length.
for (const historyComment of pendingComments) {
historyComment.op.hlen =
(historyComment.op.hlen ?? historyComment.op.c.length) +
change.op.d.length
}
// Adjust the offset
offset += change.op.d.length
}
// Save the last pending comments
for (const historyComment of pendingComments) {
historyComments.push(historyComment)
}
// Save any comments that came after the last tracked change
for (const comment of commentsIterator.nextComments()) {
historyComments.push(toHistoryComment(comment, offset))
}
const historyRanges = {}
if (historyComments.length > 0) {
historyRanges.comments = historyComments
}
if (historyChanges.length > 0) {
historyRanges.changes = historyChanges
}
return historyRanges
}
class CommentsIterator {
/**
* Build a CommentsIterator
*
* @param {Comment[]} comments
*/
constructor(comments) {
this.comments = comments
this.currentIndex = 0
}
/**
* Generator that returns the next comments to consider
*
* @param {number} beforePos - only return comments that start before this position
* @return {Iterable<Comment>}
*/
*nextComments(beforePos = Infinity) {
while (this.currentIndex < this.comments.length) {
const comment = this.comments[this.currentIndex]
if (comment.op.p < beforePos) {
yield comment
this.currentIndex += 1
} else {
return
}
}
}
}
/**
* Convert an editor tracked change into a history tracked change
*
* @param {TrackedChange} change
* @param {number} offset - how much the history change is ahead of the
* editor change
* @return {HistoryTrackedChange}
*/
function toHistoryChange(change, offset) {
/** @type {HistoryTrackedChange} */
const historyChange = _.cloneDeep(change)
if (offset > 0) {
historyChange.op.hpos = change.op.p + offset
}
return historyChange
}
/**
* Convert an editor comment into a history comment
*
* @param {Comment} comment
* @param {number} offset - how much the history comment is ahead of the
* editor comment
* @return {HistoryComment}
*/
function toHistoryComment(comment, offset) {
/** @type {HistoryComment} */
const historyComment = _.cloneDeep(comment)
if (offset > 0) {
historyComment.op.hpos = comment.op.p + offset
}
return historyComment
}
module.exports = {
toHistoryRanges,
}
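A tiny worked example of `toHistoryRanges`, using the data shapes from the types file below: a single tracked delete of three characters shifts a comment that starts after it by three history positions.

// Inputs follow the editor Ranges shape (metadata trimmed for brevity)
const ranges = {
  changes: [
    { id: 'td1', op: { d: 'abc', p: 2 }, metadata: { user_id: 'u1', ts: 't' } },
  ],
  comments: [
    { id: 'c1', op: { c: 'hello', p: 10, t: 'c1' }, metadata: { user_id: 'u1', ts: 't' } },
  ],
}

const historyRanges = toHistoryRanges(ranges)
// The tracked change is emitted unchanged (offset was still 0), while the
// comment gains hpos = 10 + 3 = 13 because the tracked delete precedes it:
// historyRanges.comments[0].op => { c: 'hello', p: 10, t: 'c1', hpos: 13 }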


@ -10,6 +10,7 @@ const logger = require('@overleaf/logger')
const metrics = require('./Metrics')
const { docIsTooLarge } = require('./Limits')
const { addTrackedDeletesToContent } = require('./Utils')
const HistoryConversions = require('./HistoryConversions')
const OError = require('@overleaf/o-error')
/**
@ -170,7 +171,8 @@ const ProjectHistoryRedisManager = {
}
if (historyRangesSupport) {
projectUpdate.resyncDocContent.ranges = ranges
projectUpdate.resyncDocContent.ranges =
HistoryConversions.toHistoryRanges(ranges)
}
const jsonUpdate = JSON.stringify(projectUpdate)


@ -1,5 +1,8 @@
import { TrackingPropsRawData } from 'overleaf-editor-core/types/lib/types'
/**
* An update coming from the editor
*/
export type Update = {
doc: string
op: Op[]
@ -37,6 +40,9 @@ export type CommentOp = {
u?: boolean
}
/**
* Ranges record on a document
*/
export type Ranges = {
comments?: Comment[]
changes?: TrackedChange[]
@ -53,14 +59,35 @@ export type Comment = {
export type TrackedChange = {
id: string
op: Op
op: InsertOp | DeleteOp
metadata: {
user_id: string
ts: string
}
}
export type HistoryOp = HistoryInsertOp | HistoryDeleteOp | HistoryCommentOp | HistoryRetainOp
/**
* Updates sent to project-history
*/
export type HistoryUpdate = {
op: HistoryOp[]
doc: string
v?: number
meta?: {
pathname?: string
doc_length?: number
history_doc_length?: number
tc?: boolean
user_id?: string
}
projectHistoryId?: string
}
export type HistoryOp =
| HistoryInsertOp
| HistoryDeleteOp
| HistoryCommentOp
| HistoryRetainOp
export type HistoryInsertOp = InsertOp & {
commentIds?: string[]
@ -89,16 +116,13 @@ export type HistoryCommentOp = CommentOp & {
hlen?: number
}
export type HistoryUpdate = {
op: HistoryOp[]
doc: string
v?: number
meta?: {
pathname?: string
doc_length?: number
history_doc_length?: number
tc?: boolean
user_id?: string
}
projectHistoryId?: string
export type HistoryRanges = {
comments?: HistoryComment[]
changes?: HistoryTrackedChange[]
}
export type HistoryComment = Comment & { op: HistoryCommentOp }
export type HistoryTrackedChange = TrackedChange & {
op: HistoryInsertOp | HistoryDeleteOp
}


@ -1,7 +1,7 @@
module.exports = {
internal: {
documentupdater: {
host: process.env.LISTEN_ADDRESS || 'localhost',
host: process.env.LISTEN_ADDRESS || '127.0.0.1',
port: 3003,
},
},
@ -9,20 +9,20 @@ module.exports = {
apis: {
web: {
url: `http://${
process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
process.env.WEB_API_HOST || process.env.WEB_HOST || '127.0.0.1'
}:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
user: process.env.WEB_API_USER || 'overleaf',
pass: process.env.WEB_API_PASSWORD || 'password',
},
project_history: {
url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054`,
url: `http://${process.env.PROJECT_HISTORY_HOST || '127.0.0.1'}:3054`,
},
},
redis: {
pubsub: {
host:
process.env.PUBSUB_REDIS_HOST || process.env.REDIS_HOST || 'localhost',
process.env.PUBSUB_REDIS_HOST || process.env.REDIS_HOST || '127.0.0.1',
port: process.env.PUBSUB_REDIS_PORT || process.env.REDIS_PORT || '6379',
password:
process.env.PUBSUB_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
@ -34,7 +34,7 @@ module.exports = {
history: {
port: process.env.HISTORY_REDIS_PORT || process.env.REDIS_PORT || '6379',
host:
process.env.HISTORY_REDIS_HOST || process.env.REDIS_HOST || 'localhost',
process.env.HISTORY_REDIS_HOST || process.env.REDIS_HOST || '127.0.0.1',
password:
process.env.HISTORY_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
maxRetriesPerRequest: parseInt(
@ -45,7 +45,7 @@ module.exports = {
project_history: {
port: process.env.HISTORY_REDIS_PORT || process.env.REDIS_PORT || '6379',
host:
process.env.HISTORY_REDIS_HOST || process.env.REDIS_HOST || 'localhost',
process.env.HISTORY_REDIS_HOST || process.env.REDIS_HOST || '127.0.0.1',
password:
process.env.HISTORY_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
maxRetriesPerRequest: parseInt(
@ -64,7 +64,7 @@ module.exports = {
lock: {
port: process.env.LOCK_REDIS_PORT || process.env.REDIS_PORT || '6379',
host:
process.env.LOCK_REDIS_HOST || process.env.REDIS_HOST || 'localhost',
process.env.LOCK_REDIS_HOST || process.env.REDIS_HOST || '127.0.0.1',
password:
process.env.LOCK_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
maxRetriesPerRequest: parseInt(
@ -83,7 +83,7 @@ module.exports = {
host:
process.env.DOC_UPDATER_REDIS_HOST ||
process.env.REDIS_HOST ||
'localhost',
'127.0.0.1',
password:
process.env.DOC_UPDATER_REDIS_PASSWORD ||
process.env.REDIS_PASSWORD ||


@ -26,7 +26,7 @@ module.exports = {
}
this.initing = true
this.callbacks.push(callback)
app.listen(3003, 'localhost', error => {
app.listen(3003, '127.0.0.1', error => {
if (error != null) {
throw error
}


@ -97,7 +97,7 @@ module.exports = DocUpdaterClient = {
getDoc(projectId, docId, callback) {
request.get(
`http://localhost:3003/project/${projectId}/doc/${docId}`,
`http://127.0.0.1:3003/project/${projectId}/doc/${docId}`,
(error, res, body) => {
if (body != null && res.statusCode >= 200 && res.statusCode < 300) {
body = JSON.parse(body)
@ -109,7 +109,7 @@ module.exports = DocUpdaterClient = {
getDocAndRecentOps(projectId, docId, fromVersion, callback) {
request.get(
`http://localhost:3003/project/${projectId}/doc/${docId}?fromVersion=${fromVersion}`,
`http://127.0.0.1:3003/project/${projectId}/doc/${docId}?fromVersion=${fromVersion}`,
(error, res, body) => {
if (body != null && res.statusCode >= 200 && res.statusCode < 300) {
body = JSON.parse(body)
@ -129,7 +129,7 @@ module.exports = DocUpdaterClient = {
peekDoc(projectId, docId, callback) {
request.get(
`http://localhost:3003/project/${projectId}/doc/${docId}/peek`,
`http://127.0.0.1:3003/project/${projectId}/doc/${docId}/peek`,
(error, res, body) => {
if (body != null && res.statusCode >= 200 && res.statusCode < 300) {
body = JSON.parse(body)
@ -141,7 +141,7 @@ module.exports = DocUpdaterClient = {
flushDoc(projectId, docId, callback) {
request.post(
`http://localhost:3003/project/${projectId}/doc/${docId}/flush`,
`http://127.0.0.1:3003/project/${projectId}/doc/${docId}/flush`,
(error, res, body) => callback(error, res, body)
)
},
@ -149,7 +149,7 @@ module.exports = DocUpdaterClient = {
setDocLines(projectId, docId, lines, source, userId, undoing, callback) {
request.post(
{
url: `http://localhost:3003/project/${projectId}/doc/${docId}`,
url: `http://127.0.0.1:3003/project/${projectId}/doc/${docId}`,
json: {
lines,
source,
@ -163,36 +163,36 @@ module.exports = DocUpdaterClient = {
deleteDoc(projectId, docId, callback) {
request.del(
`http://localhost:3003/project/${projectId}/doc/${docId}`,
`http://127.0.0.1:3003/project/${projectId}/doc/${docId}`,
(error, res, body) => callback(error, res, body)
)
},
flushProject(projectId, callback) {
request.post(`http://localhost:3003/project/${projectId}/flush`, callback)
request.post(`http://127.0.0.1:3003/project/${projectId}/flush`, callback)
},
deleteProject(projectId, callback) {
request.del(`http://localhost:3003/project/${projectId}`, callback)
request.del(`http://127.0.0.1:3003/project/${projectId}`, callback)
},
deleteProjectOnShutdown(projectId, callback) {
request.del(
`http://localhost:3003/project/${projectId}?background=true&shutdown=true`,
`http://127.0.0.1:3003/project/${projectId}?background=true&shutdown=true`,
callback
)
},
flushOldProjects(callback) {
request.get(
'http://localhost:3003/flush_queued_projects?min_delete_age=1',
'http://127.0.0.1:3003/flush_queued_projects?min_delete_age=1',
callback
)
},
acceptChange(projectId, docId, changeId, callback) {
request.post(
`http://localhost:3003/project/${projectId}/doc/${docId}/change/${changeId}/accept`,
`http://127.0.0.1:3003/project/${projectId}/doc/${docId}/change/${changeId}/accept`,
callback
)
},
@ -200,7 +200,7 @@ module.exports = DocUpdaterClient = {
acceptChanges(projectId, docId, changeIds, callback) {
request.post(
{
url: `http://localhost:3003/project/${projectId}/doc/${docId}/change/accept`,
url: `http://127.0.0.1:3003/project/${projectId}/doc/${docId}/change/accept`,
json: { change_ids: changeIds },
},
callback
@ -209,14 +209,14 @@ module.exports = DocUpdaterClient = {
removeComment(projectId, docId, comment, callback) {
request.del(
`http://localhost:3003/project/${projectId}/doc/${docId}/comment/${comment}`,
`http://127.0.0.1:3003/project/${projectId}/doc/${docId}/comment/${comment}`,
callback
)
},
getProjectDocs(projectId, projectStateHash, callback) {
request.get(
`http://localhost:3003/project/${projectId}/doc?state=${projectStateHash}`,
`http://127.0.0.1:3003/project/${projectId}/doc?state=${projectStateHash}`,
(error, res, body) => {
if (body != null && res.statusCode >= 200 && res.statusCode < 300) {
body = JSON.parse(body)
@ -229,7 +229,7 @@ module.exports = DocUpdaterClient = {
sendProjectUpdate(projectId, userId, updates, version, callback) {
request.post(
{
url: `http://localhost:3003/project/${projectId}`,
url: `http://127.0.0.1:3003/project/${projectId}`,
json: { userId, updates, version },
},
(error, res, body) => callback(error, res, body)


@ -4,7 +4,7 @@ const rclient1 = redis.createClient({
cluster: [
{
port: '7000',
host: 'localhost',
host: '127.0.0.1',
},
],
})
@ -13,7 +13,7 @@ const rclient2 = redis.createClient({
cluster: [
{
port: '7000',
host: 'localhost',
host: '127.0.0.1',
},
],
})


@ -4,7 +4,7 @@ const rclient1 = redis.createClient({
cluster: [
{
port: '7000',
host: 'localhost',
host: '127.0.0.1',
},
],
})
@ -13,7 +13,7 @@ const rclient2 = redis.createClient({
cluster: [
{
port: '7000',
host: 'localhost',
host: '127.0.0.1',
},
],
})


@ -0,0 +1,117 @@
const _ = require('lodash')
const { expect } = require('chai')
const HistoryConversions = require('../../../app/js/HistoryConversions')
describe('HistoryConversions', function () {
describe('toHistoryRanges', function () {
it('handles empty ranges', function () {
expect(HistoryConversions.toHistoryRanges({})).to.deep.equal({})
})
it("doesn't modify comments when there are no tracked changes", function () {
const ranges = {
comments: [makeComment('comment1', 5, 12)],
}
const historyRanges = HistoryConversions.toHistoryRanges(ranges)
expect(historyRanges).to.deep.equal(ranges)
})
it('adjusts comments and tracked changes to account for tracked deletes', function () {
const comments = [
makeComment('comment0', 0, 1),
makeComment('comment1', 10, 12),
makeComment('comment2', 20, 10),
makeComment('comment3', 15, 3),
]
const changes = [
makeTrackedDelete('change0', 2, 5),
makeTrackedInsert('change1', 4, 5),
makeTrackedDelete('change2', 10, 10),
makeTrackedDelete('change3', 21, 6),
makeTrackedDelete('change4', 50, 7),
]
const ranges = { comments, changes }
const historyRanges = HistoryConversions.toHistoryRanges(ranges)
expect(historyRanges.comments).to.have.deep.members([
comments[0],
// shifted by change0 and change2, extended by change3
enrichOp(comments[1], {
hpos: 25, // 10 + 5 + 10
hlen: 18, // 12 + 6
}),
// shifted by change0 and change2, extended by change3
enrichOp(comments[2], {
hpos: 35, // 20 + 5 + 10
hlen: 16, // 10 + 6
}),
// shifted by change0 and change2
enrichOp(comments[3], {
hpos: 30, // 15 + 5 + 10
}),
])
expect(historyRanges.changes).to.deep.equal([
changes[0],
enrichOp(changes[1], {
hpos: 9, // 4 + 5
}),
enrichOp(changes[2], {
hpos: 15, // 10 + 5
}),
enrichOp(changes[3], {
hpos: 36, // 21 + 5 + 10
}),
enrichOp(changes[4], {
hpos: 71, // 50 + 5 + 10 + 6
}),
])
})
})
})
function makeComment(id, pos, length) {
return {
id,
op: {
c: 'c'.repeat(length),
p: pos,
t: id,
},
metadata: makeMetadata(),
}
}
function makeTrackedInsert(id, pos, length) {
return {
id,
op: {
i: 'i'.repeat(length),
p: pos,
},
metadata: makeMetadata(),
}
}
function makeTrackedDelete(id, pos, length) {
return {
id,
op: {
d: 'd'.repeat(length),
p: pos,
},
metadata: makeMetadata(),
}
}
function makeMetadata() {
return {
user_id: 'user-id',
ts: new Date().toISOString(),
}
}
function enrichOp(commentOrChange, extraFields) {
const result = _.cloneDeep(commentOrChange)
Object.assign(result.op, extraFields)
return result
}


@ -37,7 +37,7 @@ const settings = {
internal: {
filestore: {
port: 3009,
host: process.env.LISTEN_ADDRESS || 'localhost',
host: process.env.LISTEN_ADDRESS || '127.0.0.1',
},
},


@ -1,5 +1,5 @@
FROM fsouza/fake-gcs-server:1.20
RUN apk add --update --no-cache curl
COPY healthcheck.sh /healthcheck.sh
HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090
HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://127.0.0.1:9090
CMD ["--port=9090", "--scheme=http"]


@ -1,4 +1,4 @@
FROM adobe/s3mock:2.4.14
RUN apk add --update --no-cache curl
COPY healthcheck.sh /healthcheck.sh
HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090
HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://127.0.0.1:9090


@ -32,7 +32,7 @@ class FilestoreApp {
await new Promise((resolve, reject) => {
this.server = this.app.listen(
Settings.internal.filestore.port,
'localhost',
'127.0.0.1',
err => {
if (err) {
return reject(err)


@ -36,7 +36,7 @@ const BackendSettings = require('./TestConfig')
describe('Filestore', function () {
this.timeout(1000 * 10)
const filestoreUrl = `http://localhost:${Settings.internal.filestore.port}`
const filestoreUrl = `http://127.0.0.1:${Settings.internal.filestore.port}`
const seenSockets = []
async function expectNoSockets() {


@ -16,7 +16,7 @@ const Swagger = require('swagger-client')
const app = require('../../../../../app')
function testUrl(pathname, opts = {}) {
const url = new URL('http://localhost')
const url = new URL('http://127.0.0.1')
url.port = exports.server.address().port
url.pathname = pathname
if (opts.qs) {


@ -49,7 +49,7 @@ app.get('/health_check', (req, res) =>
app.get('*', (req, res) => res.sendStatus(404))
const host = Settings.internal?.notifications?.host || 'localhost'
const host = Settings.internal?.notifications?.host || '127.0.0.1'
const port = Settings.internal?.notifications?.port || 3042
mongoClient


@ -24,7 +24,7 @@ module.exports = {
let notificationKey = `smoke-test-notification-${new ObjectId()}`
const getOpts = endPath => ({
url: `http://localhost:${port}/user/${userId}${endPath}`,
url: `http://127.0.0.1:${port}/user/${userId}${endPath}`,
timeout: 5000,
})
logger.debug(


@ -2,14 +2,14 @@ module.exports = {
internal: {
notifications: {
port: 3042,
host: process.env.LISTEN_ADDRESS || 'localhost',
host: process.env.LISTEN_ADDRESS || '127.0.0.1',
},
},
mongo: {
url:
process.env.MONGO_CONNECTION_STRING ||
`mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`,
`mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`,
options: {
monitorCommands: true,
},


@ -18,12 +18,12 @@ const { port } = settings.internal.history
export function check(callback) {
const projectId = new ObjectId(settings.history.healthCheck.project_id)
const url = `http://localhost:${port}/project/${projectId}`
const url = `http://127.0.0.1:${port}/project/${projectId}`
logger.debug({ projectId }, 'running health check')
const jobs = [
cb =>
request.get(
{ url: `http://localhost:${port}/check_lock`, timeout: 3000 },
{ url: `http://127.0.0.1:${port}/check_lock`, timeout: 3000 },
function (err, res, body) {
if (err != null) {
OError.tag(err, 'error checking lock for health check', {


@ -170,10 +170,11 @@ _mocks._countAndProcessUpdates = (
_processUpdatesBatch(projectId, updates, extendLock, cb)
},
error => {
if (error) {
return callback(error)
}
callback(null, queueSize)
// Unconventional callback signature. The caller needs the queue size
// even when an error is thrown in order to record the queue size in
// the projectHistoryFailures collection. We'll have to find another
// way to achieve this when we promisify.
callback(error, queueSize)
}
)
} else {

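Given that (error, queueSize) contract, a caller can record the queue size even when the batch fails. A hypothetical consumer, with `recordQueueSizeOnFailure` as an invented stand-in for the projectHistoryFailures bookkeeping described in the comment:

// Illustrative call shape only; the real function takes more parameters
countAndProcessUpdates(projectId, (error, queueSize) => {
  if (error) {
    // queueSize is still populated on failure, so it can be stored
    // alongside the failure record before the error propagates
    recordQueueSizeOnFailure(projectId, queueSize) // hypothetical helper
    return callback(error)
  }
  callback(null, queueSize)
})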

@ -2,7 +2,7 @@ module.exports = {
mongo: {
url:
process.env.MONGO_CONNECTION_STRING ||
`mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`,
`mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`,
options: {
monitorCommands: true,
},
@ -10,25 +10,25 @@ module.exports = {
internal: {
history: {
port: 3054,
host: process.env.LISTEN_ADDRESS || 'localhost',
host: process.env.LISTEN_ADDRESS || '127.0.0.1',
},
},
apis: {
documentupdater: {
url: `http://${process.env.DOCUPDATER_HOST || 'localhost'}:3003`,
url: `http://${process.env.DOCUPDATER_HOST || '127.0.0.1'}:3003`,
},
docstore: {
url: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`,
url: `http://${process.env.DOCSTORE_HOST || '127.0.0.1'}:3016`,
},
filestore: {
url: `http://${process.env.FILESTORE_HOST || 'localhost'}:3009`,
url: `http://${process.env.FILESTORE_HOST || '127.0.0.1'}:3009`,
},
history_v1: {
requestTimeout: parseInt(process.env.V1_REQUEST_TIMEOUT || '300000', 10),
},
web: {
url: `http://${
process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
process.env.WEB_API_HOST || process.env.WEB_HOST || '127.0.0.1'
}:${process.env.WEB_PORT || 3000}`,
user: process.env.WEB_API_USER || 'overleaf',
pass: process.env.WEB_API_PASSWORD || 'password',
@ -40,7 +40,7 @@ module.exports = {
},
redis: {
lock: {
host: process.env.REDIS_HOST || 'localhost',
host: process.env.REDIS_HOST || '127.0.0.1',
password: process.env.REDIS_PASSWORD,
port: process.env.REDIS_PORT || 6379,
key_schema: {
@ -51,7 +51,7 @@ module.exports = {
},
project_history: {
host:
process.env.HISTORY_REDIS_HOST || process.env.REDIS_HOST || 'localhost',
process.env.HISTORY_REDIS_HOST || process.env.REDIS_HOST || '127.0.0.1',
port: process.env.HISTORY_REDIS_PORT || process.env.REDIS_PORT || 6379,
password:
process.env.HISTORY_REDIS_PASSWORD || process.env.REDIS_PASSWORD,
@ -82,7 +82,7 @@ module.exports = {
`http://${
process.env.V1_HISTORY_HOST ||
process.env.HISTORY_V1_HOST ||
'localhost'
'127.0.0.1'
}:3100/api`,
user: process.env.V1_HISTORY_USER || 'staging',
pass: process.env.V1_HISTORY_PASSWORD || 'password',

View File

@ -5,8 +5,8 @@ import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://localhost:3100')
const MockWeb = () => nock('http://localhost:3000')
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockWeb = () => nock('http://127.0.0.1:3000')
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)
describe('Deleting project', function () {

View File

@ -7,8 +7,8 @@ import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://localhost:3100')
const MockWeb = () => nock('http://localhost:3000')
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockWeb = () => nock('http://127.0.0.1:3000')
function createMockBlob(historyId, content) {
const sha = crypto.createHash('sha1').update(content).digest('hex')
@ -316,7 +316,7 @@ describe('Diffs', function () {
request.get(
{
url: `http://localhost:3054/project/${this.projectId}/diff`,
url: `http://127.0.0.1:3054/project/${this.projectId}/diff`,
qs: {
pathname: 'not_here.tex',
from: 3,

View File

@ -21,8 +21,8 @@ import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://localhost:3100')
const MockWeb = () => nock('http://localhost:3000')
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockWeb = () => nock('http://127.0.0.1:3000')
describe('DiscardingUpdates', function () {
beforeEach(function (done) {

View File

@ -24,9 +24,9 @@ import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
import * as HistoryId from './helpers/HistoryId.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://localhost:3100')
const MockFileStore = () => nock('http://localhost:3009')
const MockWeb = () => nock('http://localhost:3000')
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockFileStore = () => nock('http://127.0.0.1:3009')
const MockWeb = () => nock('http://127.0.0.1:3000')
const sha = data => crypto.createHash('sha1').update(data).digest('hex')

View File

@ -8,8 +8,8 @@ import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://localhost:3100')
const MockWeb = () => nock('http://localhost:3000')
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockWeb = () => nock('http://127.0.0.1:3000')
describe('Flushing old queues', function () {
const historyId = new ObjectId().toString()
@ -90,7 +90,7 @@ describe('Flushing old queues', function () {
it('flushes the project history queue', function (done) {
request.post(
{
url: 'http://localhost:3054/flush/old?maxAge=10800',
url: 'http://127.0.0.1:3054/flush/old?maxAge=10800',
},
(error, res, body) => {
if (error) {
@ -109,7 +109,7 @@ describe('Flushing old queues', function () {
it('flushes the project history queue in the background when requested', function (done) {
request.post(
{
url: 'http://localhost:3054/flush/old?maxAge=10800&background=1',
url: 'http://127.0.0.1:3054/flush/old?maxAge=10800&background=1',
},
(error, res, body) => {
if (error) {
@ -166,7 +166,7 @@ describe('Flushing old queues', function () {
it('does not flush the project history queue', function (done) {
request.post(
{
url: `http://localhost:3054/flush/old?maxAge=${3 * 3600}`,
url: `http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`,
},
(error, res, body) => {
if (error) {
@ -213,7 +213,7 @@ describe('Flushing old queues', function () {
it('flushes the project history queue anyway', function (done) {
request.post(
{
url: `http://localhost:3054/flush/old?maxAge=${3 * 3600}`,
url: `http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`,
},
(error, res, body) => {
if (error) {

View File

@ -18,8 +18,8 @@ import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://localhost:3100')
const MockWeb = () => nock('http://localhost:3000')
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockWeb = () => nock('http://127.0.0.1:3000')
describe('Health Check', function () {
beforeEach(function (done) {
@ -62,7 +62,7 @@ describe('Health Check', function () {
return it('should respond to the health check', function (done) {
return request.get(
{
url: 'http://localhost:3054/health_check',
url: 'http://127.0.0.1:3054/health_check',
},
(error, res, body) => {
if (error != null) {

View File

@ -19,9 +19,9 @@ import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://localhost:3100')
const MockFileStore = () => nock('http://localhost:3009')
const MockWeb = () => nock('http://localhost:3000')
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockFileStore = () => nock('http://127.0.0.1:3009')
const MockWeb = () => nock('http://127.0.0.1:3000')
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)

View File

@ -5,8 +5,8 @@ import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://localhost:3100')
const MockWeb = () => nock('http://localhost:3000')
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockWeb = () => nock('http://127.0.0.1:3000')
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)

View File

@ -8,10 +8,10 @@ import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://localhost:3100')
const MockWeb = () => nock('http://localhost:3000')
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockWeb = () => nock('http://127.0.0.1:3000')
const MockCallback = () => nock('http://localhost')
const MockCallback = () => nock('http://127.0.0.1')
describe('Retrying failed projects', function () {
const historyId = new ObjectId().toString()
@ -95,7 +95,7 @@ describe('Retrying failed projects', function () {
it('flushes the project history queue', function (done) {
request.post(
{
url: 'http://localhost:3054/retry/failures?failureType=soft&limit=1&timeout=10000',
url: 'http://127.0.0.1:3054/retry/failures?failureType=soft&limit=1&timeout=10000',
},
(error, res, body) => {
if (error) {
@ -118,7 +118,7 @@ describe('Retrying failed projects', function () {
.reply(200)
request.post(
{
url: 'http://localhost:3054/retry/failures?failureType=soft&limit=1&timeout=10000&callbackUrl=http%3A%2F%2Flocalhost%2Fping',
url: 'http://127.0.0.1:3054/retry/failures?failureType=soft&limit=1&timeout=10000&callbackUrl=http%3A%2F%2F127.0.0.1%2Fping',
headers: {
'X-CALLBACK-Authorization': '123',
},
@ -177,7 +177,7 @@ describe('Retrying failed projects', function () {
request.post(
{
url: 'http://localhost:3054/retry/failures?failureType=hard&limit=1&timeout=10000',
url: 'http://127.0.0.1:3054/retry/failures?failureType=hard&limit=1&timeout=10000',
},
(error, res, body) => {
if (error) {

View File

@ -9,9 +9,9 @@ import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://localhost:3100')
const MockFileStore = () => nock('http://localhost:3009')
const MockWeb = () => nock('http://localhost:3000')
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockFileStore = () => nock('http://127.0.0.1:3009')
const MockWeb = () => nock('http://127.0.0.1:3000')
// Some helper methods to make the tests more compact
function slTextUpdate(historyId, doc, userId, v, ts, op) {
@ -57,7 +57,7 @@ function slAddFileUpdate(historyId, file, userId, ts, projectId) {
return {
projectHistoryId: historyId,
pathname: file.pathname,
url: `http://localhost:3009/project/${projectId}/file/${file.id}`,
url: `http://127.0.0.1:3009/project/${projectId}/file/${file.id}`,
file: file.id,
meta: { user_id: userId, ts: ts.getTime() },
}

View File

@ -21,9 +21,9 @@ import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://localhost:3100')
const MockFileStore = () => nock('http://localhost:3009')
const MockWeb = () => nock('http://localhost:3000')
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockFileStore = () => nock('http://127.0.0.1:3009')
const MockWeb = () => nock('http://127.0.0.1:3000')
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)

View File

@ -10,9 +10,9 @@ const { ObjectId } = mongodb
const EMPTY_FILE_HASH = 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'
const MockHistoryStore = () => nock('http://localhost:3100')
const MockFileStore = () => nock('http://localhost:3009')
const MockWeb = () => nock('http://localhost:3000')
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockFileStore = () => nock('http://127.0.0.1:3009')
const MockWeb = () => nock('http://127.0.0.1:3000')
describe('Syncing with web and doc-updater', function () {
const historyId = new ObjectId().toString()
@ -68,7 +68,7 @@ describe('Syncing with web and doc-updater', function () {
it('404s if project-history is not enabled', function (done) {
request.post(
{
url: `http://localhost:3054/project/${this.project_id}/resync`,
url: `http://127.0.0.1:3054/project/${this.project_id}/resync`,
},
(error, res, body) => {
if (error) {
@ -244,7 +244,7 @@ describe('Syncing with web and doc-updater', function () {
{
file: this.file_id,
path: '/test.png',
url: `http://localhost:3009/project/${this.project_id}/file/${this.file_id}`,
url: `http://127.0.0.1:3009/project/${this.project_id}/file/${this.file_id}`,
},
{ path: '/persistedFile' },
],

View File

@ -27,7 +27,7 @@ export function ensureRunning(callback) {
}
initing = true
callbacks.push(callback)
app.listen(3054, 'localhost', error => {
app.listen(3054, '127.0.0.1', error => {
if (error != null) {
throw error
}

View File

@ -14,7 +14,7 @@ export function resetDatabase(callback) {
export function initializeProject(historyId, callback) {
request.post(
{
url: 'http://localhost:3054/project',
url: 'http://127.0.0.1:3054/project',
json: { historyId },
},
(error, res, body) => {
@ -37,7 +37,7 @@ export function flushProject(projectId, options, callback) {
}
request.post(
{
url: `http://localhost:3054/project/${projectId}/flush`,
url: `http://127.0.0.1:3054/project/${projectId}/flush`,
},
(error, res, body) => {
if (error) {
@ -54,7 +54,7 @@ export function flushProject(projectId, options, callback) {
export function getSummarizedUpdates(projectId, query, callback) {
request.get(
{
url: `http://localhost:3054/project/${projectId}/updates`,
url: `http://127.0.0.1:3054/project/${projectId}/updates`,
qs: query,
json: true,
},
@ -71,7 +71,7 @@ export function getSummarizedUpdates(projectId, query, callback) {
export function getDiff(projectId, pathname, from, to, callback) {
request.get(
{
url: `http://localhost:3054/project/${projectId}/diff`,
url: `http://127.0.0.1:3054/project/${projectId}/diff`,
qs: {
pathname,
from,
@ -92,7 +92,7 @@ export function getDiff(projectId, pathname, from, to, callback) {
export function getFileTreeDiff(projectId, from, to, callback) {
request.get(
{
url: `http://localhost:3054/project/${projectId}/filetree/diff`,
url: `http://127.0.0.1:3054/project/${projectId}/filetree/diff`,
qs: {
from,
to,
@ -118,7 +118,7 @@ export function getSnapshot(projectId, pathname, version, options, callback) {
}
request.get(
{
url: `http://localhost:3054/project/${projectId}/version/${version}/${encodeURIComponent(
url: `http://127.0.0.1:3054/project/${projectId}/version/${version}/${encodeURIComponent(
pathname
)}`,
},
@ -171,7 +171,7 @@ export function getQueueLength(projectId, callback) {
export function getQueueCounts(callback) {
return request.get(
{
url: 'http://localhost:3054/status/queue',
url: 'http://127.0.0.1:3054/status/queue',
json: true,
},
callback
@ -181,7 +181,7 @@ export function getQueueCounts(callback) {
export function resyncHistory(projectId, callback) {
request.post(
{
url: `http://localhost:3054/project/${projectId}/resync`,
url: `http://127.0.0.1:3054/project/${projectId}/resync`,
json: true,
body: { origin: { kind: 'test-origin' } },
},
@ -205,7 +205,7 @@ export function createLabel(
) {
request.post(
{
url: `http://localhost:3054/project/${projectId}/user/${userId}/labels`,
url: `http://127.0.0.1:3054/project/${projectId}/user/${userId}/labels`,
json: { comment, version, created_at: createdAt },
},
(error, res, body) => {
@ -221,7 +221,7 @@ export function createLabel(
export function getLabels(projectId, callback) {
request.get(
{
url: `http://localhost:3054/project/${projectId}/labels`,
url: `http://127.0.0.1:3054/project/${projectId}/labels`,
json: true,
},
(error, res, body) => {
@ -237,7 +237,7 @@ export function getLabels(projectId, callback) {
export function deleteLabelForUser(projectId, userId, labelId, callback) {
request.delete(
{
url: `http://localhost:3054/project/${projectId}/user/${userId}/labels/${labelId}`,
url: `http://127.0.0.1:3054/project/${projectId}/user/${userId}/labels/${labelId}`,
},
(error, res, body) => {
if (error) {
@ -252,7 +252,7 @@ export function deleteLabelForUser(projectId, userId, labelId, callback) {
export function deleteLabel(projectId, labelId, callback) {
request.delete(
{
url: `http://localhost:3054/project/${projectId}/labels/${labelId}`,
url: `http://127.0.0.1:3054/project/${projectId}/labels/${labelId}`,
},
(error, res, body) => {
if (error) {
@ -279,7 +279,7 @@ export function setFailure(failureEntry, callback) {
export function transferLabelOwnership(fromUser, toUser, callback) {
request.post(
{
url: `http://localhost:3054/user/${fromUser}/labels/transfer/${toUser}`,
url: `http://127.0.0.1:3054/user/${fromUser}/labels/transfer/${toUser}`,
},
(error, res, body) => {
if (error) {
@ -293,7 +293,7 @@ export function transferLabelOwnership(fromUser, toUser, callback) {
export function getDump(projectId, callback) {
request.get(
`http://localhost:3054/project/${projectId}/dump`,
`http://127.0.0.1:3054/project/${projectId}/dump`,
(err, res, body) => {
if (err) {
return callback(err)
@ -305,7 +305,7 @@ export function getDump(projectId, callback) {
}
export function deleteProject(projectId, callback) {
request.delete(`http://localhost:3054/project/${projectId}`, (err, res) => {
request.delete(`http://127.0.0.1:3054/project/${projectId}`, (err, res) => {
if (err) {
return callback(err)
}

View File

@ -4,7 +4,7 @@ const settings = {
redis: {
pubsub: {
host:
process.env.PUBSUB_REDIS_HOST || process.env.REDIS_HOST || 'localhost',
process.env.PUBSUB_REDIS_HOST || process.env.REDIS_HOST || '127.0.0.1',
port: process.env.PUBSUB_REDIS_PORT || process.env.REDIS_PORT || '6379',
password:
process.env.PUBSUB_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
@ -19,7 +19,7 @@ const settings = {
host:
process.env.REAL_TIME_REDIS_HOST ||
process.env.REDIS_HOST ||
'localhost',
'127.0.0.1',
port:
process.env.REAL_TIME_REDIS_PORT || process.env.REDIS_PORT || '6379',
password:
@ -45,7 +45,7 @@ const settings = {
host:
process.env.DOC_UPDATER_REDIS_HOST ||
process.env.REDIS_HOST ||
'localhost',
'127.0.0.1',
port:
process.env.DOC_UPDATER_REDIS_PORT || process.env.REDIS_PORT || '6379',
password:
@ -68,7 +68,7 @@ const settings = {
host:
process.env.SESSIONS_REDIS_HOST ||
process.env.REDIS_HOST ||
'localhost',
'127.0.0.1',
port: process.env.SESSIONS_REDIS_PORT || process.env.REDIS_PORT || '6379',
password:
process.env.SESSIONS_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
@ -83,14 +83,14 @@ const settings = {
internal: {
realTime: {
port: 3026,
host: process.env.LISTEN_ADDRESS || 'localhost',
host: process.env.LISTEN_ADDRESS || '127.0.0.1',
},
},
apis: {
web: {
url: `http://${
process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
process.env.WEB_API_HOST || process.env.WEB_HOST || '127.0.0.1'
}:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
user: process.env.WEB_API_USER || 'overleaf',
pass: process.env.WEB_API_PASSWORD || 'password',
@ -99,7 +99,7 @@ const settings = {
url: `http://${
process.env.DOCUMENT_UPDATER_HOST ||
process.env.DOCUPDATER_HOST ||
'localhost'
'127.0.0.1'
}:3003`,
},
},

View File

@ -16,7 +16,7 @@ const request = require('request')
const drain = function (rate, callback) {
request.post(
{
url: `http://localhost:3026/drain?rate=${rate}`,
url: `http://127.0.0.1:3026/drain?rate=${rate}`,
},
(error, response, data) => callback(error, data)
)

View File

@ -8,7 +8,7 @@
const async = require('async')
const { expect } = require('chai')
const request = require('request').defaults({
baseUrl: 'http://localhost:3026',
baseUrl: 'http://127.0.0.1:3026',
})
const RealTimeClient = require('./helpers/RealTimeClient')

View File

@ -74,7 +74,7 @@ module.exports = Client = {
},
connect(projectId, callback) {
const client = io.connect('http://localhost:3026', {
const client = io.connect('http://127.0.0.1:3026', {
'force new connection': true,
query: new URLSearchParams({ projectId }).toString(),
})
@ -105,7 +105,7 @@ module.exports = Client = {
}
return request.get(
{
url: 'http://localhost:3026/clients',
url: 'http://127.0.0.1:3026/clients',
json: true,
},
(error, response, data) => callback(error, data)
@ -118,7 +118,7 @@ module.exports = Client = {
}
return request.get(
{
url: `http://localhost:3026/clients/${clientId}`,
url: `http://127.0.0.1:3026/clients/${clientId}`,
json: true,
},
(error, response, data) => {
@ -134,7 +134,7 @@ module.exports = Client = {
disconnectClient(clientId, callback) {
request.post(
{
url: `http://localhost:3026/client/${clientId}/disconnect`,
url: `http://127.0.0.1:3026/client/${clientId}/disconnect`,
},
(error, response, data) => callback(error, data)
)

View File

@ -33,7 +33,7 @@ module.exports = {
Settings.internal != null ? Settings.internal.realtime : undefined,
x => x.port
),
'localhost',
'127.0.0.1',
error => {
if (error != null) {
throw error

View File

@ -273,7 +273,7 @@ exports.XMLHttpRequest = function () {
case undefined:
case '':
host = 'localhost'
host = '127.0.0.1'
break
default:
@ -309,8 +309,8 @@ exports.XMLHttpRequest = function () {
return
}
// Default to port 80. If accessing localhost on another port be sure
// to use http://localhost:port/path
// Default to port 80. If accessing 127.0.0.1 on another port be sure
// to use http://127.0.0.1:port/path
const port = url.port || (ssl ? 443 : 80)
// Add query string if one is used
const uri = url.pathname + (url.search ? url.search : '')

View File

@ -7,7 +7,7 @@ import { app } from './app/js/server.js'
import * as ASpell from './app/js/ASpell.js'
import Metrics from '@overleaf/metrics'
const { host = 'localhost', port = 3005 } = Settings.internal?.spelling ?? {}
const { host = '127.0.0.1', port = 3005 } = Settings.internal?.spelling ?? {}
ASpell.startCacheDump()

View File

@ -5,7 +5,7 @@ import OError from '@overleaf/o-error'
export function healthCheck(req, res) {
const opts = {
url: `http://localhost:3005/user/${settings.healthCheckUserId}/check`,
url: `http://127.0.0.1:3005/user/${settings.healthCheckUserId}/check`,
json: {
words: ['helllo'],
language: 'en',

View File

@ -4,7 +4,7 @@ module.exports = {
internal: {
spelling: {
port: 3005,
host: process.env.LISTEN_ADDRESS || 'localhost',
host: process.env.LISTEN_ADDRESS || '127.0.0.1',
},
},

View File

@ -2,5 +2,5 @@ import { app } from '../../../app/js/server.js'
import { PORT } from './helpers/request.js'
before(function (done) {
return app.listen(PORT, 'localhost', done)
return app.listen(PORT, '127.0.0.1', done)
})

View File

@ -3,7 +3,7 @@ import Request from 'request'
export const PORT = 3005
const BASE_URL = `http://${process.env.HTTP_TEST_HOST || 'localhost'}:${PORT}`
const BASE_URL = `http://${process.env.HTTP_TEST_HOST || '127.0.0.1'}:${PORT}`
const request = Request.defaults({
baseUrl: BASE_URL,

View File

@ -87,7 +87,7 @@ const makeRequest = function (correctWords, incorrectWords, callback) {
}
}
return request.post(
'http://localhost:3005/user/1/check',
'http://127.0.0.1:3005/user/1/check',
{ json: true, body: { words: full } },
function (err, req, body) {
let m

View File

@ -0,0 +1,3 @@
{
"extends": ["stylelint-config-standard-scss"]
}

View File

@ -48,7 +48,7 @@ if (Settings.catchErrors) {
FileWriter.ensureDumpFolderExists()
const port = Settings.port || Settings.internal.web.port || 3000
const host = Settings.internal.web.host || 'localhost'
const host = Settings.internal.web.host || '127.0.0.1'
if (!module.parent) {
// Called directly

View File

@ -11,7 +11,6 @@ const basicAuth = require('basic-auth')
const tsscmp = require('tsscmp')
const UserHandler = require('../User/UserHandler')
const UserSessionsManager = require('../User/UserSessionsManager')
const SessionStoreManager = require('../../infrastructure/SessionStoreManager')
const Analytics = require('../Analytics/AnalyticsManager')
const passport = require('passport')
const NotificationsBuilder = require('../Notifications/NotificationsBuilder')
@ -409,30 +408,6 @@ const AuthenticationController = {
return expressify(middleware)
},
validateUserSession: function () {
// Middleware to check that the user's session is still good on key actions,
// such as opening a project. Could be used to check that the session has not
// exceeded a maximum lifetime (req.session.session_created), or for session
// hijacking checks (e.g. change of ip address, req.session.ip_address). For
// now, just check that the session has been loaded from the session store
// correctly.
return function (req, res, next) {
// check that the session store is returning valid results
if (req.session && !SessionStoreManager.hasValidationToken(req)) {
// force user to update session
req.session.regenerate(() => {
// need to destroy the existing session and generate a new one
// otherwise they will already be logged in when they are redirected
// to the login page
if (acceptsJson(req)) return send401WithChallenge(res)
AuthenticationController._redirectToLoginOrRegisterPage(req, res)
})
} else {
next()
}
}
},
_globalLoginWhitelist: [],
addEndpointToLoginWhitelist(endpoint) {
return AuthenticationController._globalLoginWhitelist.push(endpoint)

View File

@ -9,254 +9,219 @@ const ClsiManager = require('./ClsiManager')
const Metrics = require('@overleaf/metrics')
const { RateLimiter } = require('../../infrastructure/RateLimiter')
const UserAnalyticsIdCache = require('../Analytics/UserAnalyticsIdCache')
const {
callbackify,
callbackifyMultiResult,
} = require('@overleaf/promise-utils')
function instrumentWithTimer(fn, key) {
return async (...args) => {
const timer = new Metrics.Timer(key)
try {
return await fn(...args)
} finally {
timer.done()
}
}
}
async function compile(projectId, userId, options = {}) {
const recentlyCompiled = await CompileManager._checkIfRecentlyCompiled(
projectId,
userId
)
if (recentlyCompiled) {
return { status: 'too-recently-compiled', outputFiles: [] }
}
try {
const canCompile = await CompileManager._checkIfAutoCompileLimitHasBeenHit(
options.isAutoCompile,
'everyone'
)
if (!canCompile) {
return { status: 'autocompile-backoff', outputFiles: [] }
}
} catch (error) {
return { status: 'autocompile-backoff', outputFiles: [] }
}
await ProjectRootDocManager.promises.ensureRootDocumentIsSet(projectId)
const limits =
await CompileManager.promises.getProjectCompileLimits(projectId)
for (const key in limits) {
const value = limits[key]
options[key] = value
}
try {
const canCompile = await CompileManager._checkCompileGroupAutoCompileLimit(
options.isAutoCompile,
limits.compileGroup
)
if (!canCompile) {
return { status: 'autocompile-backoff', outputFiles: [] }
}
} catch (error) {
return { message: 'autocompile-backoff', outputFiles: [] }
}
// only pass userId down to clsi if this is a per-user compile
const compileAsUser = Settings.disablePerUserCompiles ? undefined : userId
const {
status,
outputFiles,
clsiServerId,
validationProblems,
stats,
timings,
outputUrlPrefix,
} = await ClsiManager.promises.sendRequest(projectId, compileAsUser, options)
return {
status,
outputFiles,
clsiServerId,
limits,
validationProblems,
stats,
timings,
outputUrlPrefix,
}
}
const instrumentedCompile = instrumentWithTimer(compile, 'editor.compile')
async function getProjectCompileLimits(projectId) {
const project = await ProjectGetter.promises.getProject(projectId, {
owner_ref: 1,
})
const owner = await UserGetter.promises.getUser(project.owner_ref, {
_id: 1,
alphaProgram: 1,
analyticsId: 1,
betaProgram: 1,
features: 1,
})
const ownerFeatures = (owner && owner.features) || {}
// put alpha users into their own compile group
if (owner && owner.alphaProgram) {
ownerFeatures.compileGroup = 'alpha'
}
const analyticsId = await UserAnalyticsIdCache.get(owner._id)
const compileGroup =
ownerFeatures.compileGroup || Settings.defaultFeatures.compileGroup
const limits = {
timeout:
ownerFeatures.compileTimeout || Settings.defaultFeatures.compileTimeout,
compileGroup,
compileBackendClass: compileGroup === 'standard' ? 'n2d' : 'c2d',
ownerAnalyticsId: analyticsId,
}
return limits
}
async function wordCount(projectId, userId, file, clsiserverid) {
const limits =
await CompileManager.promises.getProjectCompileLimits(projectId)
return await ClsiManager.promises.wordCount(
projectId,
userId,
file,
limits,
clsiserverid
)
}
async function stopCompile(projectId, userId) {
const limits =
await CompileManager.promises.getProjectCompileLimits(projectId)
return await ClsiManager.promises.stopCompile(projectId, userId, limits)
}
async function deleteAuxFiles(projectId, userId, clsiserverid) {
const limits =
await CompileManager.promises.getProjectCompileLimits(projectId)
return await ClsiManager.promises.deleteAuxFiles(
projectId,
userId,
limits,
clsiserverid
)
}
module.exports = CompileManager = {
compile(projectId, userId, options = {}, _callback) {
const timer = new Metrics.Timer('editor.compile')
const callback = function (...args) {
timer.done()
_callback(...args)
}
CompileManager._checkIfRecentlyCompiled(
projectId,
userId,
function (error, recentlyCompiled) {
if (error) {
return callback(error)
}
if (recentlyCompiled) {
return callback(null, 'too-recently-compiled', [])
}
CompileManager._checkIfAutoCompileLimitHasBeenHit(
options.isAutoCompile,
'everyone',
function (err, canCompile) {
if (err || !canCompile) {
return callback(null, 'autocompile-backoff', [])
}
ProjectRootDocManager.ensureRootDocumentIsSet(
projectId,
function (error) {
if (error) {
return callback(error)
}
CompileManager.getProjectCompileLimits(
projectId,
function (error, limits) {
if (error) {
return callback(error)
}
for (const key in limits) {
const value = limits[key]
options[key] = value
}
// Put a lower limit on autocompiles for free users, based on compileGroup
CompileManager._checkCompileGroupAutoCompileLimit(
options.isAutoCompile,
limits.compileGroup,
function (err, canCompile) {
if (err || !canCompile) {
return callback(null, 'autocompile-backoff', [])
}
// only pass userId down to clsi if this is a per-user compile
const compileAsUser = Settings.disablePerUserCompiles
? undefined
: userId
ClsiManager.sendRequest(
projectId,
compileAsUser,
options,
function (
error,
status,
outputFiles,
clsiServerId,
validationProblems,
stats,
timings,
outputUrlPrefix
) {
if (error) {
return callback(error)
}
callback(
null,
status,
outputFiles,
clsiServerId,
limits,
validationProblems,
stats,
timings,
outputUrlPrefix
)
}
)
}
)
}
)
}
)
}
)
}
)
promises: {
compile: instrumentedCompile,
deleteAuxFiles,
getProjectCompileLimits,
stopCompile,
wordCount,
},
compile: callbackifyMultiResult(instrumentedCompile, [
'status',
'outputFiles',
'clsiServerId',
'limits',
'validationProblems',
'stats',
'timings',
'outputUrlPrefix',
]),
stopCompile(projectId, userId, callback) {
CompileManager.getProjectCompileLimits(projectId, function (error, limits) {
if (error) {
return callback(error)
}
ClsiManager.stopCompile(projectId, userId, limits, callback)
})
},
stopCompile: callbackify(stopCompile),
deleteAuxFiles(projectId, userId, clsiserverid, callback) {
CompileManager.getProjectCompileLimits(projectId, function (error, limits) {
if (error) {
return callback(error)
}
ClsiManager.deleteAuxFiles(
projectId,
userId,
limits,
clsiserverid,
callback
)
})
},
deleteAuxFiles: callbackify(deleteAuxFiles),
getProjectCompileLimits(projectId, callback) {
ProjectGetter.getProject(
projectId,
{ owner_ref: 1 },
function (error, project) {
if (error) {
return callback(error)
}
UserGetter.getUser(
project.owner_ref,
{
_id: 1,
alphaProgram: 1,
analyticsId: 1,
betaProgram: 1,
features: 1,
},
function (err, owner) {
if (err) {
return callback(err)
}
const ownerFeatures = (owner && owner.features) || {}
// put alpha users into their own compile group
if (owner && owner.alphaProgram) {
ownerFeatures.compileGroup = 'alpha'
}
UserAnalyticsIdCache.callbacks.get(
owner._id,
function (err, analyticsId) {
if (err) {
return callback(err)
}
const compileGroup =
ownerFeatures.compileGroup ||
Settings.defaultFeatures.compileGroup
const limits = {
timeout:
ownerFeatures.compileTimeout ||
Settings.defaultFeatures.compileTimeout,
compileGroup,
compileBackendClass:
compileGroup === 'standard' ? 'n2d' : 'c2d',
ownerAnalyticsId: analyticsId,
}
callback(null, limits)
}
)
}
)
}
)
},
getProjectCompileLimits: callbackify(getProjectCompileLimits),
COMPILE_DELAY: 1, // seconds
_checkIfRecentlyCompiled(projectId, userId, callback) {
async _checkIfRecentlyCompiled(projectId, userId) {
const key = `compile:${projectId}:${userId}`
rclient.set(
key,
true,
'EX',
this.COMPILE_DELAY,
'NX',
function (error, ok) {
if (error) {
return callback(error)
}
if (ok === 'OK') {
callback(null, false)
} else {
callback(null, true)
}
}
)
const ok = await rclient.set(key, true, 'EX', this.COMPILE_DELAY, 'NX')
return ok !== 'OK'
},
_checkCompileGroupAutoCompileLimit(isAutoCompile, compileGroup, callback) {
async _checkCompileGroupAutoCompileLimit(isAutoCompile, compileGroup) {
if (!isAutoCompile) {
return callback(null, true)
return true
}
if (compileGroup === 'standard') {
// apply extra limits to the standard compile group
CompileManager._checkIfAutoCompileLimitHasBeenHit(
return await CompileManager._checkIfAutoCompileLimitHasBeenHit(
isAutoCompile,
compileGroup,
callback
compileGroup
)
} else {
Metrics.inc(`auto-compile-${compileGroup}`)
callback(null, true)
return true
}
}, // always allow priority group users to compile
_checkIfAutoCompileLimitHasBeenHit(isAutoCompile, compileGroup, callback) {
async _checkIfAutoCompileLimitHasBeenHit(isAutoCompile, compileGroup) {
if (!isAutoCompile) {
return callback(null, true)
return true
}
Metrics.inc(`auto-compile-${compileGroup}`)
const rateLimiter = getAutoCompileRateLimiter(compileGroup)
rateLimiter
.consume('global', 1, { method: 'global' })
.then(() => {
callback(null, true)
})
.catch(() => {
// Don't differentiate between errors and rate limits. Silently trigger
// the rate limit if there's an error consuming the points.
Metrics.inc(`auto-compile-${compileGroup}-limited`)
callback(null, false)
})
try {
await rateLimiter.consume('global', 1, { method: 'global' })
return true
} catch (e) {
// Don't differentiate between errors and rate limits. Silently trigger
// the rate limit if there's an error consuming the points.
Metrics.inc(`auto-compile-${compileGroup}-limited`)
return false
}
},
wordCount(projectId, userId, file, clsiserverid, callback) {
CompileManager.getProjectCompileLimits(projectId, function (error, limits) {
if (error) {
return callback(error)
}
ClsiManager.wordCount(
projectId,
userId,
file,
limits,
clsiserverid,
callback
)
})
},
wordCount: callbackify(wordCount),
}
const autoCompileRateLimiters = new Map()

View File
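For readers unfamiliar with `callbackifyMultiResult` from `@overleaf/promise-utils`: it bridges the new async `compile`, which resolves to a single object, back to the legacy callback signature that spreads those fields across positional arguments. Roughly (a simplified sketch, not the library source):

function callbackifyMultiResult(fn, resultNames) {
  return function (...args) {
    const callback = args.pop()
    fn(...args).then(
      // spread the resolved object across the callback's positional args,
      // in the order given by resultNames
      result => callback(null, ...resultNames.map(name => result[name])),
      error => callback(error)
    )
  }
}

// So CompileManager.compile(projectId, userId, options, callback) still
// invokes callback(null, status, outputFiles, clsiServerId, limits, ...)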

@ -257,10 +257,12 @@ templates.confirmCode = NoCTAEmailTemplate({
return 'Confirm your email address'
},
message(opts, isPlainText) {
const msg = [
`Welcome to Overleaf! We're so glad you joined us.`,
'Use this 6-digit confirmation code to finish your setup.',
]
const msg = opts.isSecondary
? ['Use this 6-digit code to confirm your email address.']
: [
`Welcome to Overleaf! We're so glad you joined us.`,
'Use this 6-digit confirmation code to finish your setup.',
]
if (isPlainText && opts.confirmCode) {
msg.push(opts.confirmCode)

View File

@ -48,6 +48,7 @@ function getClient() {
'secure',
'auth',
'ignoreTLS',
'tls',
'logger',
'name'
)

View File

@ -88,7 +88,7 @@ async function plansPage(req, res) {
const localCcyAssignment = await SplitTestHandler.promises.getAssignment(
req,
res,
'local-ccy-format'
'local-ccy-format-v2'
)
const formatCurrency =
localCcyAssignment.variant === 'enabled'
@ -143,7 +143,7 @@ async function userSubscriptionPage(req, res) {
const localCcyAssignment = await SplitTestHandler.promises.getAssignment(
req,
res,
'local-ccy-format'
'local-ccy-format-v2'
)
const results =
@ -261,7 +261,7 @@ async function interstitialPaymentPage(req, res) {
const localCcyAssignment = await SplitTestHandler.promises.getAssignment(
req,
res,
'local-ccy-format'
'local-ccy-format-v2'
)
res.render('subscriptions/interstitial-payment', {
@ -296,7 +296,7 @@ async function successfulSubscription(req, res) {
const localCcyAssignment = await SplitTestHandler.promises.getAssignment(
req,
res,
'local-ccy-format'
'local-ccy-format-v2'
)
const { personalSubscription } =
await SubscriptionViewModelBuilder.promises.buildUsersSubscriptionViewModel(

View File

@ -12,6 +12,7 @@ const {
handleAdminDomainRedirect,
} = require('../Authorization/AuthorizationMiddleware')
const ProjectAuditLogHandler = require('../Project/ProjectAuditLogHandler')
const SplitTestHandler = require('../SplitTests/SplitTestHandler')
const orderedPrivilegeLevels = [
PrivilegeLevels.NONE,
@ -97,7 +98,18 @@ async function tokenAccessPage(req, res, next) {
}
}
res.render('project/token/access', {
const { variant } = await SplitTestHandler.promises.getAssignment(
req,
res,
'token-access-page'
)
const view =
variant === 'react'
? 'project/token/access-react'
: 'project/token/access'
res.render(view, {
postUrl: makePostUrl(token),
})
} catch (err) {

View File

@ -43,7 +43,7 @@ function sendConfirmationEmail(userId, email, emailTemplate, callback) {
)
}
async function sendConfirmationCode(email) {
async function sendConfirmationCode(email, isSecondary) {
if (!EmailHelper.parseEmail(email)) {
throw new Error('invalid email')
}
@ -55,6 +55,7 @@ async function sendConfirmationCode(email) {
await EmailHandler.promises.sendEmail('confirmCode', {
to: email,
confirmCode,
isSecondary,
category: ['ConfirmEmail'],
})

View File

@ -1,3 +1,4 @@
const AuthenticationController = require('../Authentication/AuthenticationController')
const Settings = require('@overleaf/settings')
const logger = require('@overleaf/logger')
const SessionManager = require('../Authentication/SessionManager')
@ -15,9 +16,34 @@ const AsyncFormHelper = require('../Helpers/AsyncFormHelper')
const AnalyticsManager = require('../Analytics/AnalyticsManager')
const UserPrimaryEmailCheckHandler = require('../User/UserPrimaryEmailCheckHandler')
const UserAuditLogHandler = require('./UserAuditLogHandler')
const { RateLimiter } = require('../../infrastructure/RateLimiter')
const tsscmp = require('tsscmp')
const AUDIT_LOG_TOKEN_PREFIX_LENGTH = 10
const sendSecondaryConfirmCodeRateLimiter = new RateLimiter(
'send-secondary-confirmation-code',
{
points: 1,
duration: 60,
}
)
const checkSecondaryConfirmCodeRateLimiter = new RateLimiter(
'check-secondary-confirmation-code-per-email',
{
points: 10,
duration: 60,
}
)
const resendSecondaryConfirmCodeRateLimiter = new RateLimiter(
'resend-secondary-confirmation-code',
{
points: 1,
duration: 60,
}
)
async function _sendSecurityAlertEmail(user, email) {
const emailOptions = {
to: user.email,
@ -30,6 +56,10 @@ async function _sendSecurityAlertEmail(user, email) {
await EmailHandler.promises.sendEmail('securityAlert', emailOptions)
}
/**
* This method is for adding a secondary email to be confirmed via an emailed link.
* For code confirmation, see the `addWithConfirmationCode` method in this file.
*/
async function add(req, res, next) {
const userId = SessionManager.getLoggedInUserId(req.session)
const email = EmailHelper.parseEmail(req.body.email)
@ -127,6 +157,263 @@ function sendReconfirmation(req, res, next) {
})
}
/**
* This method is for adding a secondary email to be confirmed via a code.
* For email link confirmation see the `add` method in this file.
*/
async function addWithConfirmationCode(req, res) {
delete req.session.pendingSecondaryEmail
const userId = SessionManager.getLoggedInUserId(req.session)
const email = EmailHelper.parseEmail(req.body.email)
if (!email) {
return res.sendStatus(422)
}
const user = await UserGetter.promises.getUser(userId, {
email: 1,
'emails.email': 1,
})
if (user.emails.length >= Settings.emailAddressLimit) {
return res.status(422).json({ message: 'secondary email limit exceeded' })
}
try {
await UserGetter.promises.ensureUniqueEmailAddress(email)
await sendSecondaryConfirmCodeRateLimiter.consume(email, 1, {
method: 'email',
})
await UserAuditLogHandler.promises.addEntry(
userId,
'request-add-email-code',
userId,
req.ip,
{
newSecondaryEmail: email,
}
)
const { confirmCode, confirmCodeExpiresTimestamp } =
await UserEmailsConfirmationHandler.promises.sendConfirmationCode(
email,
true
)
req.session.pendingSecondaryEmail = {
email,
confirmCode,
confirmCodeExpiresTimestamp,
}
return res.json({
redir: '/user/emails/confirm-secondary',
})
} catch (err) {
if (err.name === 'EmailExistsError') {
return res.status(409).json({
message: {
type: 'error',
text: req.i18n.translate('email_already_registered'),
},
})
}
if (err?.remainingPoints === 0) {
return res.status(429).json({})
}
logger.err({ err }, 'failed to send confirmation code')
delete req.session.pendingSecondaryEmail
return res.status(500).json({
message: {
key: 'error_performing_request',
},
})
}
}
async function checkSecondaryEmailConfirmationCode(req, res) {
const userId = SessionManager.getLoggedInUserId(req.session)
const code = req.body.code
const user = await UserGetter.promises.getUser(userId, {
email: 1,
'emails.email': 1,
})
if (!req.session.pendingSecondaryEmail) {
logger.err(
{},
'error checking confirmation code. missing pendingSecondaryEmail'
)
return res.status(500).json({
message: {
key: 'error_performing_request',
},
})
}
try {
await checkSecondaryConfirmCodeRateLimiter.consume(
req.session.pendingSecondaryEmail.email,
1,
{ method: 'email' }
)
} catch (err) {
if (err?.remainingPoints === 0) {
return res.sendStatus(429)
} else {
return res.status(500).json({
message: {
key: 'error_performing_request',
},
})
}
}
if (
req.session.pendingSecondaryEmail.confirmCodeExpiresTimestamp < Date.now()
) {
return res.status(403).json({
message: { key: 'expired_confirmation_code' },
})
}
if (!tsscmp(req.session.pendingSecondaryEmail.confirmCode, code)) {
return res.status(403).json({
message: { key: 'invalid_confirmation_code' },
})
}
try {
await UserAuditLogHandler.promises.addEntry(
userId,
'add-email-via-code',
userId,
req.ip,
{
newSecondaryEmail: req.session.pendingSecondaryEmail.email,
}
)
await UserUpdater.promises.addEmailAddress(
userId,
req.session.pendingSecondaryEmail.email,
{},
{
initiatorId: user._id,
ipAddress: req.ip,
}
)
await UserUpdater.promises.confirmEmail(
userId,
req.session.pendingSecondaryEmail.email,
{}
)
delete req.session.pendingSecondaryEmail
AnalyticsManager.recordEventForUser(user._id, 'email-verified', {
provider: 'email',
verification_type: 'token',
isPrimary: false,
})
const redirectUrl =
AuthenticationController.getRedirectFromSession(req) || '/project'
return res.json({
redir: redirectUrl,
})
} catch (error) {
if (error.name === 'EmailExistsError') {
return res.status(409).json({
message: {
type: 'error',
text: req.i18n.translate('email_already_registered'),
},
})
}
logger.err({ error }, 'failed to check confirmation code')
return res.status(500).json({
message: {
key: 'error_performing_request',
},
})
}
}
async function resendSecondaryEmailConfirmationCode(req, res) {
if (!req.session.pendingSecondaryEmail) {
logger.err(
{},
'error resending confirmation code. missing pendingSecondaryEmail'
)
return res.status(500).json({
message: {
key: 'error_performing_request',
},
})
}
const email = req.session.pendingSecondaryEmail.email
try {
await resendSecondaryConfirmCodeRateLimiter.consume(email, 1, {
method: 'email',
})
} catch (err) {
if (err?.remainingPoints === 0) {
return res.status(429).json({})
} else {
throw err
}
}
try {
const userId = SessionManager.getLoggedInUserId(req.session)
await UserAuditLogHandler.promises.addEntry(
userId,
'resend-add-email-code',
userId,
req.ip,
{
newSecondaryEmail: email,
}
)
const { confirmCode, confirmCodeExpiresTimestamp } =
await UserEmailsConfirmationHandler.promises.sendConfirmationCode(
email,
true
)
req.session.pendingSecondaryEmail.confirmCode = confirmCode
req.session.pendingSecondaryEmail.confirmCodeExpiresTimestamp =
confirmCodeExpiresTimestamp
return res.status(200).json({
message: { key: 'we_sent_new_code' },
})
} catch (err) {
logger.err({ err, email }, 'failed to send confirmation code')
return res.status(500).json({
key: 'error_performing_request',
})
}
}
async function primaryEmailCheckPage(req, res) {
const userId = SessionManager.getLoggedInUserId(req.session)
const user = await UserGetter.promises.getUser(userId, {
@ -175,6 +462,13 @@ const UserEmailsController = {
},
add: expressify(add),
addWithConfirmationCode: expressify(addWithConfirmationCode),
checkSecondaryEmailConfirmationCode: expressify(
checkSecondaryEmailConfirmationCode
),
resendSecondaryEmailConfirmationCode: expressify(
resendSecondaryEmailConfirmationCode
),
remove(req, res, next) {
const userId = SessionManager.getLoggedInUserId(req.session)

View File
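The confirmation-code check above compares codes with `tsscmp` rather than `===`. That is a timing-safe comparison: it takes roughly the same time whether the first or the last character mismatches, so an attacker cannot recover the code byte by byte from response timings. For illustration:

const tsscmp = require('tsscmp')

tsscmp('123456', '123456') // true
tsscmp('123456', '123465') // false, without an early exit that leaks timing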

@ -2,14 +2,31 @@ const session = require('express-session')
const RedisStore = require('connect-redis')(session)
const metrics = require('@overleaf/metrics')
const logger = require('@overleaf/logger')
const Settings = require('@overleaf/settings')
const SessionManager = require('../Features/Authentication/SessionManager')
const Metrics = require('@overleaf/metrics')
const MAX_SESSION_SIZE_THRESHOLD = 4096
// Define a custom session store to record session metrics and log large
// anonymous sessions for debugging purposes.
// Also make the SET calls more robust and consistent by adding flags:
// - XX: ensure update in place, expect that the old session value is still in redis at that key
// - NX: ensure initial set, expect that there is no other session at that key already
class CustomSessionStore extends RedisStore {
static largestSessionSize = 3 * 1024 // ignore sessions smaller than 3KB
#initialSetStore
#updateInPlaceStore
constructor({ client }) {
super({ client })
this.#initialSetStore = new RedisStore({
client: new CustomSetRedisClient(client, 'NX'),
})
this.#updateInPlaceStore = new RedisStore({
client: new CustomSetRedisClient(client, 'XX'),
})
}
static metric(method, sess) {
let type // type of session: 'logged-in', 'anonymous', or 'na' (not available)
@ -39,19 +56,34 @@ class CustomSessionStore extends RedisStore {
}
}
// Override the get, set, touch, and destroy methods to record metrics
get(sid, cb) {
super.get(sid, (err, ...args) => {
if (args[0]) {
CustomSessionStore.metric('get', args[0])
}
cb(err, ...args)
super.get(sid, (err, sess) => {
if (err || !sess || !checkValidationToken(sid, sess)) return cb(err, null)
CustomSessionStore.metric('get', sess)
cb(null, sess)
})
}
set(sid, sess, cb) {
// Refresh the validation token just before writing to Redis
// This ensures that the token always matches the sessionID under which we write the session value.
// Potential reasons for missing/mismatching token:
// - brand-new session
// - cycling of the sessionID as part of the login flow
// - upgrade from a client side session to a redis session
// - accidental writes in the app code
sess.validationToken = computeValidationToken(sid)
CustomSessionStore.metric('set', sess)
super.set(sid, sess, cb)
const originalId = sess.req.signedCookies[Settings.cookieName]
if (sid === originalId || sid === sess.req.newSessionId) {
this.#updateInPlaceStore.set(sid, sess, cb)
} else {
Metrics.inc('security.session', 1, { status: 'new' })
// Multiple writes can get issued with the new sid. Keep track of it.
Object.defineProperty(sess.req, 'newSessionId', { value: sid })
this.#initialSetStore.set(sid, sess, cb)
}
}
touch(sid, sess, cb) {
@ -66,6 +98,35 @@ class CustomSessionStore extends RedisStore {
}
}
function computeValidationToken(sid) {
// This should be a deterministic function of the client-side sessionID,
// prepended with a version number in case we want to change it later.
return 'v1:' + sid.slice(-4)
}
function checkValidationToken(sid, sess) {
const sessionToken = sess.validationToken
if (sessionToken) {
const clientToken = computeValidationToken(sid)
// Reject sessions where the validation token is out of sync with the sessionID.
// If you change the method for computing the token (above) then you need to either check or ignore previous versions of the token.
if (sessionToken === clientToken) {
Metrics.inc('security.session', 1, { status: 'ok' })
return true
} else {
logger.warn(
{ sid, sessionToken, clientToken },
'session token validation failed'
)
Metrics.inc('security.session', 1, { status: 'error' })
return false
}
} else {
Metrics.inc('security.session', 1, { status: 'missing' })
return false
}
}
// Helper function to return a redacted version of session object
// so we can identify the largest keys without exposing sensitive
// data
@ -81,4 +142,24 @@ function redactSession(sess) {
)
}
class CustomSetRedisClient {
#client
#flag
constructor(client, flag) {
this.#client = client
this.#flag = flag
}
set(args, cb) {
args.push(this.#flag)
this.#client.set(args, (err, ok) => {
metrics.inc('session.store.set', 1, {
path: this.#flag,
status: err ? 'error' : ok ? 'success' : 'failure',
})
cb(err, ok)
})
}
}
module.exports = CustomSessionStore

View File
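The NX/XX split in `CustomSetRedisClient` leans on Redis SET's conditional flags. As a quick illustration (assuming an ioredis-style client; SET resolves to 'OK' on success and null when the guard fails, which the `session.store.set` metric reports as status 'failure'):

// NX: write only if the key is absent -- the brand-new-session path
await client.set(`sess:${sid}`, json, 'EX', ttl, 'NX')
// XX: write only if the key already exists -- the update-in-place path
await client.set(`sess:${sid}`, json, 'EX', ttl, 'XX')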

@ -88,10 +88,15 @@ function loadViewIncludes(app) {
}
}
function registerAppMiddleware(app) {
function registerMiddleware(appOrRouter, middlewareName, options) {
if (!middlewareName) {
throw new Error(
'middleware name must be provided to register module middleware'
)
}
for (const module of modules()) {
if (module.appMiddleware) {
module.appMiddleware(app)
if (module[middlewareName]) {
module[middlewareName](appOrRouter, options)
}
}
}
@ -164,7 +169,7 @@ module.exports = {
loadViewIncludes,
moduleIncludes,
moduleIncludesAvailable,
registerAppMiddleware,
registerMiddleware,
hooks: {
attach: attachHook,
fire: fireHook,

View File

@ -14,7 +14,6 @@ const HttpPermissionsPolicyMiddleware = require('./HttpPermissionsPolicy')
const sessionsRedisClient = UserSessionsRedis.client()
const SessionAutostartMiddleware = require('./SessionAutostartMiddleware')
const SessionStoreManager = require('./SessionStoreManager')
const AnalyticsManager = require('../Features/Analytics/AnalyticsManager')
const session = require('express-session')
const CustomSessionStore = require('./CustomSessionStore')
@ -133,7 +132,7 @@ Modules.loadViewIncludes(app)
app.use(metrics.http.monitor(logger))
Modules.registerAppMiddleware(app)
Modules.registerMiddleware(app, 'appMiddleware')
app.use(bodyParser.urlencoded({ extended: true, limit: '2mb' }))
app.use(bodyParser.json({ limit: Settings.max_json_request_size }))
app.use(methodOverride())
@ -157,6 +156,9 @@ RedirectManager.apply(webRouter)
webRouter.use(cookieParser(Settings.security.sessionSecret))
SessionAutostartMiddleware.applyInitialMiddleware(webRouter)
Modules.registerMiddleware(webRouter, 'sessionMiddleware', {
store: sessionStore,
})
webRouter.use(
session({
resave: false,
@ -178,11 +180,6 @@ if (Features.hasFeature('saas')) {
webRouter.use(AnalyticsManager.analyticsIdMiddleware)
}
// patch the session store to generate a validation token for every new session
SessionStoreManager.enableValidationToken(sessionStore)
// use middleware to reject all requests with invalid tokens
webRouter.use(SessionStoreManager.validationMiddleware)
// passport
webRouter.use(passport.initialize())
webRouter.use(passport.session())

View File

@ -1,74 +0,0 @@
const Metrics = require('@overleaf/metrics')
const logger = require('@overleaf/logger')
function computeValidationToken(req) {
// this should be a deterministic function of the client-side sessionID,
// prepended with a version number in case we want to change it later
return 'v1:' + req.sessionID.slice(-4)
}
function checkValidationToken(req) {
if (req.session) {
const sessionToken = req.session.validationToken
if (sessionToken) {
const clientToken = computeValidationToken(req)
// Reject invalid sessions. If you change the method for computing the
// token (above) then you need to either check or ignore previous
// versions of the token.
if (sessionToken === clientToken) {
Metrics.inc('security.session', 1, { status: 'ok' })
return true
} else {
logger.error(
{
sessionToken,
clientToken,
},
'session token validation failed'
)
Metrics.inc('security.session', 1, { status: 'error' })
return false
}
} else {
Metrics.inc('security.session', 1, { status: 'missing' })
}
}
return true // fallback to allowing session
}
module.exports = {
enableValidationToken(sessionStore) {
// generate an identifier from the sessionID for every new session
const originalGenerate = sessionStore.generate
sessionStore.generate = function (req) {
originalGenerate(req)
// add the validation token as a property that cannot be overwritten
Object.defineProperty(req.session, 'validationToken', {
value: computeValidationToken(req),
enumerable: true,
writable: false,
})
Metrics.inc('security.session', 1, { status: 'new' })
}
},
validationMiddleware(req, res, next) {
if (!req.session.noSessionCallback) {
if (!checkValidationToken(req)) {
// the session must exist for it to fail validation
return req.session.destroy(() => {
return next(new Error('invalid session'))
})
}
}
next()
},
hasValidationToken(req) {
if (req && req.session && req.session.validationToken) {
return true
} else {
return false
}
},
}

View File

@ -366,6 +366,30 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
RateLimiterMiddleware.rateLimit(rateLimiters.endorseEmail),
UserEmailsController.endorse
)
webRouter.post(
'/user/emails/secondary',
AuthenticationController.requireLogin(),
PermissionsController.requirePermission('add-secondary-email'),
RateLimiterMiddleware.rateLimit(rateLimiters.addEmail),
UserEmailsController.addWithConfirmationCode
)
webRouter.post(
'/user/emails/confirm-secondary',
AuthenticationController.requireLogin(),
PermissionsController.requirePermission('add-secondary-email'),
RateLimiterMiddleware.rateLimit(rateLimiters.checkEmailConfirmationCode),
UserEmailsController.checkSecondaryEmailConfirmationCode
)
webRouter.post(
'/user/emails/resend-secondary-confirmation',
AuthenticationController.requireLogin(),
PermissionsController.requirePermission('add-secondary-email'),
RateLimiterMiddleware.rateLimit(rateLimiters.resendConfirmationCode),
UserEmailsController.resendSecondaryEmailConfirmationCode
)
}
webRouter.get(
@ -489,7 +513,6 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
RateLimiterMiddleware.rateLimit(openProjectRateLimiter, {
params: ['Project_id'],
}),
AuthenticationController.validateUserSession(),
AuthorizationMiddleware.ensureUserCanReadProject,
ProjectController.loadEditor
)
@ -1300,28 +1323,6 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
}
)
webRouter.get('/no-cache', function (req, res, next) {
res.header('Cache-Control', 'max-age=0')
res.sendStatus(404)
})
webRouter.get('/oops-express', (req, res, next) =>
next(new Error('Test error'))
)
webRouter.get('/oops-internal', function (req, res, next) {
throw new Error('Test error')
})
webRouter.get('/oops-mongo', (req, res, next) =>
require('./models/Project').Project.findOne({}, function () {
throw new Error('Test error')
})
)
privateApiRouter.get('/opps-small', function (req, res, next) {
logger.err('test error occurred')
res.sendStatus(200)
})
webRouter.post('/error/client', function (req, res, next) {
logger.warn(
{ err: req.body.error, meta: req.body.meta },

View File

@ -14,19 +14,23 @@
* @returns {string}
*/
function formatCurrencyLocalized(amount, currency, locale, stripIfInteger) {
const options = { style: 'currency', currency }
if (stripIfInteger && Number.isInteger(amount)) {
options.minimumFractionDigits = 0
}
try {
return amount.toLocaleString(locale, {
style: 'currency',
currency,
minimumFractionDigits: 0,
...options,
currencyDisplay: 'narrowSymbol',
})
}
return amount.toLocaleString(locale, {
style: 'currency',
currency,
currencyDisplay: 'narrowSymbol',
})
} catch {}
try {
return amount.toLocaleString(locale, options)
} catch {}
return `${currency} ${amount}`
}
module.exports = {

View File
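The effect of `stripIfInteger` and the `narrowSymbol` fallback chain can be seen with plain `toLocaleString` calls (outputs depend on the runtime's ICU data, and `currencyDisplay: 'narrowSymbol'` throws a RangeError on older runtimes, which is why the try/catch fallbacks exist):

;(10.5).toLocaleString('en-US', {
  style: 'currency',
  currency: 'USD',
  currencyDisplay: 'narrowSymbol',
})
// -> '$10.50'
;(10).toLocaleString('en-US', {
  style: 'currency',
  currency: 'USD',
  minimumFractionDigits: 0,
  currencyDisplay: 'narrowSymbol',
})
// -> '$10'  (what stripIfInteger requests for whole amounts)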

@ -0,0 +1,16 @@
extends ../../layout-marketing
block entrypointVar
- entrypoint = 'pages/token-access'
block vars
- var suppressFooter = true
- var suppressCookieBanner = true
- var suppressSkipToContent = true
block append meta
meta(name="ol-postUrl" data-type="string" content=postUrl)
meta(name="ol-user" data-type="json" content=user)
block content
div#token-access-page

View File

@ -18,7 +18,7 @@ block content
.col-md-8.col-md-offset-2.text-center(ng-cloak)
.card(ng-controller="TeamInviteController")
.page-header
h1.text-centered(ng-non-bindable) #{translate("invited_to_group", {inviterName: inviterName, appName: appName})}
h1.text-centered(ng-non-bindable) !{translate("invited_to_group", {inviterName: inviterName, appName: appName}, [{name: 'span', attrs: {class: 'team-invite-name'}}])}
div(ng-show="view =='restrictedByManagedGroup'")
.alert.alert-info

View File

@ -7,7 +7,7 @@ block content
.col-md-8.col-md-offset-2.text-center
.card
.page-header
h1.text-centered #{translate("invited_to_group", {inviterName: inviterName, appName: appName})}
h1.text-centered !{translate("invited_to_group", {inviterName: inviterName, appName: appName }, [{name: 'span', attrs: {class: 'team-invite-name'}}])}
if (accountExists)
div

View File

@ -174,7 +174,7 @@ module.exports = {
redis: {
web: {
host: process.env.REDIS_HOST || 'localhost',
host: process.env.REDIS_HOST || '127.0.0.1',
port: process.env.REDIS_PORT || '6379',
password: process.env.REDIS_PASSWORD || '',
db: process.env.REDIS_DB,
@ -185,36 +185,36 @@ module.exports = {
// websessions:
// cluster: [
// {host: 'localhost', port: 7000}
// {host: 'localhost', port: 7001}
// {host: 'localhost', port: 7002}
// {host: 'localhost', port: 7003}
// {host: 'localhost', port: 7004}
// {host: 'localhost', port: 7005}
// {host: '127.0.0.1', port: 7000}
// {host: '127.0.0.1', port: 7001}
// {host: '127.0.0.1', port: 7002}
// {host: '127.0.0.1', port: 7003}
// {host: '127.0.0.1', port: 7004}
// {host: '127.0.0.1', port: 7005}
// ]
// ratelimiter:
// cluster: [
// {host: 'localhost', port: 7000}
// {host: 'localhost', port: 7001}
// {host: 'localhost', port: 7002}
// {host: 'localhost', port: 7003}
// {host: 'localhost', port: 7004}
// {host: 'localhost', port: 7005}
// {host: '127.0.0.1', port: 7000}
// {host: '127.0.0.1', port: 7001}
// {host: '127.0.0.1', port: 7002}
// {host: '127.0.0.1', port: 7003}
// {host: '127.0.0.1', port: 7004}
// {host: '127.0.0.1', port: 7005}
// ]
// cooldown:
// cluster: [
// {host: 'localhost', port: 7000}
// {host: 'localhost', port: 7001}
// {host: 'localhost', port: 7002}
// {host: 'localhost', port: 7003}
// {host: 'localhost', port: 7004}
// {host: 'localhost', port: 7005}
// {host: '127.0.0.1', port: 7000}
// {host: '127.0.0.1', port: 7001}
// {host: '127.0.0.1', port: 7002}
// {host: '127.0.0.1', port: 7003}
// {host: '127.0.0.1', port: 7004}
// {host: '127.0.0.1', port: 7005}
// ]
api: {
host: process.env.REDIS_HOST || 'localhost',
host: process.env.REDIS_HOST || '127.0.0.1',
port: process.env.REDIS_PORT || '6379',
password: process.env.REDIS_PASSWORD || '',
maxRetriesPerRequest: parseInt(
@ -232,7 +232,7 @@ module.exports = {
internal: {
web: {
port: process.env.WEB_PORT || 3000,
host: process.env.LISTEN_ADDRESS || 'localhost',
host: process.env.LISTEN_ADDRESS || '127.0.0.1',
},
},
@@ -242,7 +242,7 @@ module.exports = {
apis: {
web: {
url: `http://${
process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
process.env.WEB_API_HOST || process.env.WEB_HOST || '127.0.0.1'
}:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
user: httpAuthUser,
pass: httpAuthPass,
@@ -251,25 +251,25 @@ module.exports = {
url: `http://${
process.env.DOCUPDATER_HOST ||
process.env.DOCUMENT_UPDATER_HOST ||
'localhost'
'127.0.0.1'
}:3003`,
},
spelling: {
url: `http://${process.env.SPELLING_HOST || 'localhost'}:3005`,
url: `http://${process.env.SPELLING_HOST || '127.0.0.1'}:3005`,
host: process.env.SPELLING_HOST,
},
docstore: {
url: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`,
pubUrl: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`,
url: `http://${process.env.DOCSTORE_HOST || '127.0.0.1'}:3016`,
pubUrl: `http://${process.env.DOCSTORE_HOST || '127.0.0.1'}:3016`,
},
chat: {
internal_url: `http://${process.env.CHAT_HOST || 'localhost'}:3010`,
internal_url: `http://${process.env.CHAT_HOST || '127.0.0.1'}:3010`,
},
filestore: {
url: `http://${process.env.FILESTORE_HOST || 'localhost'}:3009`,
url: `http://${process.env.FILESTORE_HOST || '127.0.0.1'}:3009`,
},
clsi: {
url: `http://${process.env.CLSI_HOST || 'localhost'}:3013`,
url: `http://${process.env.CLSI_HOST || '127.0.0.1'}:3013`,
// url: "http://#{process.env['CLSI_LB_HOST']}:3014"
backendGroupName: undefined,
submissionBackendClass:
@@ -277,19 +277,19 @@ module.exports = {
},
project_history: {
sendProjectStructureOps: true,
url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054`,
url: `http://${process.env.PROJECT_HISTORY_HOST || '127.0.0.1'}:3054`,
},
realTime: {
url: `http://${process.env.REALTIME_HOST || 'localhost'}:3026`,
url: `http://${process.env.REALTIME_HOST || '127.0.0.1'}:3026`,
},
contacts: {
url: `http://${process.env.CONTACTS_HOST || 'localhost'}:3036`,
url: `http://${process.env.CONTACTS_HOST || '127.0.0.1'}:3036`,
},
notifications: {
url: `http://${process.env.NOTIFICATIONS_HOST || 'localhost'}:3042`,
url: `http://${process.env.NOTIFICATIONS_HOST || '127.0.0.1'}:3042`,
},
webpack: {
url: `http://${process.env.WEBPACK_HOST || 'localhost'}:3808`,
url: `http://${process.env.WEBPACK_HOST || '127.0.0.1'}:3808`,
},
wiki: {
url: process.env.WIKI_URL || 'https://learn.sharelatex.com',
@@ -328,7 +328,7 @@ module.exports = {
// Where your instance of Overleaf Community Edition/Server Pro can be found publicly. Used in emails
// that are sent out, generated links, etc.
siteUrl: (siteUrl = process.env.PUBLIC_URL || 'http://localhost:3000'),
siteUrl: (siteUrl = process.env.PUBLIC_URL || 'http://127.0.0.1:3000'),
lockManager: {
lockTestInterval: intFromEnv('LOCK_MANAGER_LOCK_TEST_INTERVAL', 50),
@@ -364,7 +364,10 @@ module.exports = {
robotsNoindex: process.env.ROBOTS_NOINDEX === 'true' || false,
maxEntitiesPerProject: 2000,
maxEntitiesPerProject: parseInt(
process.env.MAX_ENTITIES_PER_PROJECT || '2000',
10
),
projectUploadTimeout: parseInt(
process.env.PROJECT_UPLOAD_TIMEOUT || '120000',
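Reviewer note: the sweep from 'localhost' to '127.0.0.1' pins every default to IPv4. This is likely a defence against Node 17+, where the resolver no longer reorders DNS results and 'localhost' can resolve to ::1 first, so clients miss services listening only on 127.0.0.1. The same hunk also makes maxEntitiesPerProject overridable via MAX_ENTITIES_PER_PROJECT instead of hard-coding 2000. A quick, illustrative way to check what a given machine resolves:

import dns from 'node:dns'

// On Node >= 17 this can list ::1 before 127.0.0.1, in which case clients
// connecting to 'localhost' try IPv6 first and may miss IPv4-only listeners.
dns.lookup('localhost', { all: true }, (err, addresses) => {
  if (err) throw err
  console.log(addresses) // e.g. [{ address: '::1', family: 6 }, { address: '127.0.0.1', family: 4 }]
})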

View File

@@ -6,7 +6,7 @@ QUEUES_REDIS_HOST=redis
ANALYTICS_QUEUES_REDIS_HOST=redis
MONGO_URL=mongodb://mongo/test-overleaf
OVERLEAF_ALLOW_PUBLIC_ACCESS=true
LINKED_URL_PROXY=http://localhost:6543
LINKED_URL_PROXY=http://127.0.0.1:6543
ENABLED_LINKED_FILE_TYPES=url,project_file,project_output_file,mendeley,zotero
NODE_ENV=test
NODE_OPTIONS=--unhandled-rejections=strict
@@ -17,7 +17,7 @@ PUBLIC_URL=http://www.overleaf.test:23000
HTTP_TEST_HOST=www.overleaf.test
OT_JWT_AUTH_KEY="very secret key"
EXTERNAL_AUTH=none
RECAPTCHA_ENDPOINT=http://localhost:2222/recaptcha/api/siteverify
RECAPTCHA_ENDPOINT=http://127.0.0.1:2222/recaptcha/api/siteverify
# Server-Pro LDAP
OVERLEAF_LDAP_URL=ldap://ldap:389
OVERLEAF_LDAP_SEARCH_BASE=ou=people,dc=planetexpress,dc=com

View File

@@ -22,6 +22,7 @@
"accept_or_reject_each_changes_individually": "",
"accept_terms_and_conditions": "",
"accepted_invite": "",
"accepting_invite_as": "",
"access_denied": "",
"account_has_been_link_to_institution_account": "",
"account_has_past_due_invoice_change_plan_warning": "",
@@ -97,6 +98,7 @@
"autocomplete_references": "",
"back": "",
"back_to_configuration": "",
"back_to_editor": "",
"back_to_subscription": "",
"back_to_your_projects": "",
"beta_program_already_participating": "",
@@ -531,6 +533,7 @@
"history_view_all": "",
"history_view_labels": "",
"hit_enter_to_reply": "",
"home": "",
"hotkey_add_a_comment": "",
"hotkey_autocomplete_menu": "",
"hotkey_beginning_of_document": "",
@@ -618,6 +621,7 @@
"invite_not_accepted": "",
"invited_to_group": "",
"invited_to_group_have_individual_subcription": "",
"invited_to_join": "",
"ip_address": "",
"is_email_affiliated": "",
"issued_on": "",
@@ -1334,6 +1338,7 @@
"to_use_text_wrapping_in_your_table_make_sure_you_include_the_array_package": "",
"toggle_compile_options_menu": "",
"token": "",
"token_access_failure": "",
"token_limit_reached": "",
"token_read_only": "",
"token_read_write": "",

View File

@@ -24,6 +24,14 @@ export default function SettingsDocument() {
label: doc.path,
}))
if (!rootDocId) {
mappedDocs.unshift({
value: '',
label: 'None',
disabled: true,
})
}
return mappedDocs
}, [docs, rootDocId])
@@ -34,7 +42,7 @@
return (
<SettingsMenuSelect
onChange={setRootDocId}
value={rootDocId}
value={rootDocId ?? ''}
options={validDocsOptions}
label={t('main_document')}
name="rootDocId"
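Reviewer note: the new guard keeps the main-document select valid when no root document is set: value falls back to '' and a disabled "None" placeholder is prepended so the empty value has a visible, non-selectable option. Reconstructed shape of the memo (context lines are elided in the hunk; the value field of the doc-to-option mapping is assumed):

const validDocsOptions = useMemo(() => {
  const mappedDocs = docs.map(doc => ({
    value: doc.doc.id, // exact value field assumed; not shown in the hunk
    label: doc.path,
  }))
  if (!rootDocId) {
    // Placeholder matching value={rootDocId ?? ''} below
    mappedDocs.unshift({ value: '', label: 'None', disabled: true })
  }
  return mappedDocs
}, [docs, rootDocId])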

View File

@@ -0,0 +1,21 @@
import { useTranslation } from 'react-i18next'
import { Button } from 'react-bootstrap'
import MaterialIcon from '@/shared/components/material-icon'
function BackToEditorButton({ onClick }: { onClick: () => void }) {
const { t } = useTranslation()
return (
<Button
bsSize="sm"
bsStyle={null}
onClick={onClick}
className="back-to-editor-btn btn-secondary"
>
<MaterialIcon type="arrow_back" className="toolbar-btn-secondary-icon" />
<p className="toolbar-label">{t('back_to_editor')}</p>
</Button>
)
}
export default BackToEditorButton
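Reviewer note: passing bsStyle={null} appears to opt out of react-bootstrap's default button variant so the btn-secondary class from className takes effect, and the label uses the back_to_editor key added to the locale stubs above. Minimal usage, mirroring the toolbar change further down in this diff:

// onClick is whatever handler flips the view back to the editor.
<BackToEditorButton onClick={toggleHistoryOpen} />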

View File

@@ -1,4 +1,4 @@
import React, { useCallback } from 'react'
import React, { useState, useCallback } from 'react'
import ToolbarHeader from './toolbar-header'
import { useEditorContext } from '../../../shared/context/editor-context'
import { useChatContext } from '../../chat/context/chat-context'
@@ -66,11 +66,21 @@
[reviewPanelOpen, setReviewPanelOpen]
)
const [shouldReopenChat, setShouldReopenChat] = useState(chatIsOpen)
const toggleHistoryOpen = useCallback(() => {
const action = view === 'history' ? 'close' : 'open'
eventTracking.sendMB('navigation-clicked-history', { action })
if (chatIsOpen && action === 'open') {
setShouldReopenChat(true)
toggleChatOpen()
}
if (shouldReopenChat && action === 'close') {
setShouldReopenChat(false)
toggleChatOpen()
}
setView(view === 'history' ? 'editor' : 'history')
}, [view, setView])
}, [view, chatIsOpen, shouldReopenChat, setView, toggleChatOpen])
const openShareModal = useCallback(() => {
eventTracking.sendMB('navigation-clicked-share')
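Reviewer note: the reworked handler remembers whether chat was open when history was entered, closes it for the history view, and restores it on the way back. A state walk-through of the logic above (sketch):

// editor, chat open   --toggle-->  history: chat closed, shouldReopenChat = true
// history             --toggle-->  editor: chat reopened, shouldReopenChat = false
// editor, chat closed --toggle-->  history: nothing stashed, chat stays closed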

View File

@@ -1,18 +1,13 @@
import PropTypes from 'prop-types'
import classNames from 'classnames'
import { useTranslation } from 'react-i18next'
import Icon from '../../../shared/components/icon'
function HistoryToggleButton({ historyIsOpen, onClick }) {
function HistoryToggleButton({ onClick }) {
const { t } = useTranslation()
const classes = classNames('btn', 'btn-full-height', {
active: historyIsOpen,
})
return (
<div className="toolbar-item">
<button className={classes} onClick={onClick}>
<button className="btn btn-full-height" onClick={onClick}>
<Icon type="history" fw />
<p className="toolbar-label">{t('history')}</p>
</button>
@@ -21,7 +16,6 @@ function HistoryToggleButton({ historyIsOpen, onClick }) {
}
HistoryToggleButton.propTypes = {
historyIsOpen: PropTypes.bool,
onClick: PropTypes.func.isRequired,
}

View File

@@ -13,6 +13,7 @@ import TrackChangesToggleButton from './track-changes-toggle-button'
import HistoryToggleButton from './history-toggle-button'
import ShareProjectButton from './share-project-button'
import importOverleafModules from '../../../../macros/import-overleaf-module.macro'
import BackToEditorButton from './back-to-editor-button'
const [publishModalModules] = importOverleafModules('publishModal')
const PublishButton = publishModalModules?.import.default
@@ -64,34 +65,37 @@ const ToolbarHeader = React.memo(function ToolbarHeader({
<div className="toolbar-right">
<OnlineUsersWidget onlineUsers={onlineUsers} goToUser={goToUser} />
{trackChangesVisible && (
<TrackChangesToggleButton
onMouseDown={toggleReviewPanelOpen}
disabled={historyIsOpen}
trackChangesIsOpen={reviewPanelOpen}
/>
)}
{historyIsOpen ? (
<BackToEditorButton onClick={toggleHistoryOpen} />
) : (
<>
{trackChangesVisible && (
<TrackChangesToggleButton
onMouseDown={toggleReviewPanelOpen}
disabled={historyIsOpen}
trackChangesIsOpen={reviewPanelOpen}
/>
)}
<ShareProjectButton onClick={openShareModal} />
{shouldDisplayPublishButton && (
<PublishButton cobranding={cobranding} />
)}
<ShareProjectButton onClick={openShareModal} />
{shouldDisplayPublishButton && (
<PublishButton cobranding={cobranding} />
)}
{!isRestrictedTokenMember && (
<HistoryToggleButton
historyIsOpen={historyIsOpen}
onClick={toggleHistoryOpen}
/>
)}
{!isRestrictedTokenMember && (
<HistoryToggleButton onClick={toggleHistoryOpen} />
)}
<LayoutDropdownButton />
<LayoutDropdownButton />
{!isRestrictedTokenMember && (
<ChatToggleButton
chatIsOpen={chatIsOpen}
onClick={toggleChatOpen}
unreadMessageCount={unreadMessageCount}
/>
{!isRestrictedTokenMember && (
<ChatToggleButton
chatIsOpen={chatIsOpen}
onClick={toggleChatOpen}
unreadMessageCount={unreadMessageCount}
/>
)}
</>
)}
</div>
</header>
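Reviewer note: after this restructure, while history is open only the OnlineUsersWidget and BackToEditorButton remain in toolbar-right; the track-changes, share, publish, history, layout and chat controls are all inside the editor-only fragment. Condensed shape of the branch (sketch):

{historyIsOpen ? (
  <BackToEditorButton onClick={toggleHistoryOpen} />
) : (
  <>
    {/* track changes, share, publish, history, layout and chat buttons */}
  </>
)}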

View File

@@ -4,7 +4,7 @@ import { useCallback, useEffect, useState } from 'react'
import * as eventTracking from '../../../../infrastructure/event-tracking'
import StartFreeTrialButton from '../../../../shared/components/start-free-trial-button'
import { paywallPrompt } from '../../../../main/account-upgrade'
import { useSplitTestContext } from '@/shared/context/split-test-context'
import { useFeatureFlag } from '@/shared/context/split-test-context'
function FeatureItem({ text }: { text: string }) {
return (
@@ -18,8 +18,7 @@ export function OwnerPaywallPrompt() {
const { t } = useTranslation()
const [clickedFreeTrialButton, setClickedFreeTrialButton] = useState(false)
const { splitTestVariants } = useSplitTestContext()
const hasNewPaywallCta = splitTestVariants['paywall-cta'] === 'enabled'
const hasNewPaywallCta = useFeatureFlag('paywall-cta')
useEffect(() => {
eventTracking.send('subscription-funnel', 'editor-click-feature', 'history')
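Reviewer note: swapping the inline variant check for useFeatureFlag centralises the === 'enabled' comparison. A plausible definition in terms of the code being replaced, assuming this is roughly how the hook is implemented (the real module may differ):

import { useSplitTestContext } from '@/shared/context/split-test-context'

export function useFeatureFlag(name: string): boolean {
  const { splitTestVariants } = useSplitTestContext()
  return splitTestVariants[name] === 'enabled'
}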

Some files were not shown because too many files have changed in this diff.