mirror of
https://github.com/overleaf/overleaf.git
synced 2024-11-14 20:40:17 -05:00
merge multiple repositories into an existing monorepo
- merged using: 'monorepo_add.sh services-web:services/web' - see https://github.com/shopsys/monorepo-tools
This commit is contained in:
commit
84ada570ab
1785 changed files with 464822 additions and 0 deletions
17
services/web/.dockerignore
Normal file
17
services/web/.dockerignore
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
.git
|
||||||
|
|
||||||
|
.npmrc
|
||||||
|
|
||||||
|
modules/*/Makefile
|
||||||
|
**/node_modules
|
||||||
|
copybara
|
||||||
|
data
|
||||||
|
public/js
|
||||||
|
public/minjs
|
||||||
|
public/stylesheets
|
||||||
|
public/manifest.json
|
||||||
|
|
||||||
|
build.tar
|
||||||
|
|
||||||
|
.sentryclirc
|
||||||
|
.sentryclirc.enc
|
4
services/web/.eastrc
Normal file
4
services/web/.eastrc
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
{
|
||||||
|
"adapter": "./migrations/lib/adapter",
|
||||||
|
"migrationNumberFormat": "dateTime"
|
||||||
|
}
|
7
services/web/.eslintignore
Normal file
7
services/web/.eslintignore
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
# NOTE: changing paths may require updating them in the Makefile too.
|
||||||
|
node_modules
|
||||||
|
modules/**/scripts
|
||||||
|
frontend/js/vendor
|
||||||
|
modules/**/frontend/js/vendor
|
||||||
|
public/js
|
||||||
|
public/minjs
|
166
services/web/.eslintrc
Normal file
166
services/web/.eslintrc
Normal file
|
@ -0,0 +1,166 @@
|
||||||
|
{
|
||||||
|
"root": true,
|
||||||
|
"extends": [
|
||||||
|
"eslint:recommended",
|
||||||
|
"plugin:react/recommended",
|
||||||
|
"plugin:react-hooks/recommended",
|
||||||
|
"plugin:jsx-a11y/recommended",
|
||||||
|
"standard",
|
||||||
|
"standard-jsx",
|
||||||
|
"standard-react",
|
||||||
|
"prettier"
|
||||||
|
],
|
||||||
|
"plugins": [
|
||||||
|
"jsx-a11y",
|
||||||
|
"mocha",
|
||||||
|
"chai-expect",
|
||||||
|
"chai-friendly"
|
||||||
|
],
|
||||||
|
"env": {
|
||||||
|
"browser": true,
|
||||||
|
"mocha": true,
|
||||||
|
"node": true,
|
||||||
|
"es2020": true
|
||||||
|
},
|
||||||
|
"parserOptions": {
|
||||||
|
"sourceType": "module",
|
||||||
|
"ecmaFeatures": {
|
||||||
|
"jsx": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"settings": {
|
||||||
|
// Tell eslint-plugin-react to detect which version of React we are using
|
||||||
|
"react": {
|
||||||
|
"version": "detect"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"rules": {
|
||||||
|
// Swap the no-unused-expressions rule with a more chai-friendly one
|
||||||
|
"no-unused-expressions": "off",
|
||||||
|
"chai-friendly/no-unused-expressions": "error",
|
||||||
|
|
||||||
|
// Disable some rules after upgrading ESLint
|
||||||
|
// TODO: re-enable and fix
|
||||||
|
"no-var": "off",
|
||||||
|
|
||||||
|
// do not allow importing of implicit dependencies.
|
||||||
|
"import/no-extraneous-dependencies": "error",
|
||||||
|
|
||||||
|
"node/no-callback-literal": "off",
|
||||||
|
"node/no-deprecated-api": "off",
|
||||||
|
"node/handle-callback-err": "off",
|
||||||
|
"node/no-path-concat": "off"
|
||||||
|
},
|
||||||
|
"overrides": [
|
||||||
|
// NOTE: changing paths may require updating them in the Makefile too.
|
||||||
|
{
|
||||||
|
// Test specific rules
|
||||||
|
"files": ["**/test/*/src/**/*.js", "**/test/**/*.test.js"],
|
||||||
|
"globals": {
|
||||||
|
"expect": true
|
||||||
|
},
|
||||||
|
"rules": {
|
||||||
|
// mocha-specific rules
|
||||||
|
"mocha/handle-done-callback": "error",
|
||||||
|
"mocha/no-exclusive-tests": "error",
|
||||||
|
"mocha/no-global-tests": "error",
|
||||||
|
"mocha/no-identical-title": "error",
|
||||||
|
"mocha/no-nested-tests": "error",
|
||||||
|
"mocha/no-pending-tests": "error",
|
||||||
|
"mocha/no-skipped-tests": "error",
|
||||||
|
"mocha/no-mocha-arrows": "error",
|
||||||
|
|
||||||
|
// chai-specific rules
|
||||||
|
"chai-expect/missing-assertion": "error",
|
||||||
|
"chai-expect/terminating-properties": "error",
|
||||||
|
|
||||||
|
// prefer-arrow-callback applies to all callbacks, not just ones in mocha tests.
|
||||||
|
// we don't enforce this at the top-level - just in tests to manage `this` scope
|
||||||
|
// based on mocha's context mechanism
|
||||||
|
"mocha/prefer-arrow-callback": "error"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
// Frontend test specific rules
|
||||||
|
"files": ["**/test/karma/**/*.js"],
|
||||||
|
"globals": {
|
||||||
|
"expect": true,
|
||||||
|
"$": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
// Backend specific rules
|
||||||
|
"files": ["**/app/src/**/*.js"],
|
||||||
|
"rules": {
|
||||||
|
// don't allow console.log in backend code
|
||||||
|
"no-console": "error",
|
||||||
|
|
||||||
|
// do not allow importing of implicit dependencies.
|
||||||
|
"import/no-extraneous-dependencies": ["error", {
|
||||||
|
// do not allow importing of devDependencies.
|
||||||
|
"devDependencies": false
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
// Frontend specific rules
|
||||||
|
"files": ["**/frontend/js/**/*.js", "**/frontend/stories/**/*.js", "**/*.stories.js", "**/test/frontend/**/*.js"],
|
||||||
|
"globals": {
|
||||||
|
"__webpack_public_path__": true,
|
||||||
|
"$": true,
|
||||||
|
"angular": true,
|
||||||
|
"ace": true,
|
||||||
|
"ga": true,
|
||||||
|
"sl_console": true,
|
||||||
|
"sl_debugging": true,
|
||||||
|
// Injected in layout.pug
|
||||||
|
"user_id": true,
|
||||||
|
"ExposedSettings": true
|
||||||
|
},
|
||||||
|
"rules": {
|
||||||
|
// Prevent usage of legacy string refs
|
||||||
|
"react/no-string-refs": "error",
|
||||||
|
|
||||||
|
// Prevent curly braces around strings (as they're unnecessary)
|
||||||
|
"react/jsx-curly-brace-presence": ["error", {
|
||||||
|
"props": "never",
|
||||||
|
"children": "never"
|
||||||
|
}],
|
||||||
|
|
||||||
|
// Allow target="_blank" in JSX
|
||||||
|
"react/jsx-no-target-blank": "off",
|
||||||
|
|
||||||
|
// Don't import React for JSX; the JSX runtime is added by a Babel plugin
|
||||||
|
"react/react-in-jsx-scope": "off",
|
||||||
|
"react/jsx-uses-react": "off",
|
||||||
|
|
||||||
|
// Fix conflict between prettier & standard by overriding to prefer
|
||||||
|
// double quotes
|
||||||
|
"jsx-quotes": ["error", "prefer-double"],
|
||||||
|
|
||||||
|
// Override weird behaviour of jsx-a11y label-has-for (says labels must be
|
||||||
|
// nested *and* have for/id attributes)
|
||||||
|
"jsx-a11y/label-has-for": [
|
||||||
|
"error",
|
||||||
|
{
|
||||||
|
"required": {
|
||||||
|
"some": [
|
||||||
|
"nesting",
|
||||||
|
"id"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"files": ["scripts/ukamf/*.js"],
|
||||||
|
"rules": {
|
||||||
|
// Do not allow importing of any dependencies unless specified in either
|
||||||
|
// - web/package.json
|
||||||
|
// - web/scripts/ukamf/package.json
|
||||||
|
"import/no-extraneous-dependencies": ["error", {"packageDir": [".", "scripts/ukamf"]}]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
38
services/web/.github/ISSUE_TEMPLATE.md
vendored
Normal file
38
services/web/.github/ISSUE_TEMPLATE.md
vendored
Normal file
|
@ -0,0 +1,38 @@
|
||||||
|
<!-- BUG REPORT TEMPLATE -->
|
||||||
|
|
||||||
|
## Steps to Reproduce
|
||||||
|
<!-- Describe the steps leading up to when / where you found the bug. -->
|
||||||
|
<!-- Screenshots may be helpful here. -->
|
||||||
|
|
||||||
|
1.
|
||||||
|
2.
|
||||||
|
3.
|
||||||
|
|
||||||
|
## Expected Behaviour
|
||||||
|
<!-- What should have happened when you completed the steps above? -->
|
||||||
|
|
||||||
|
## Observed Behaviour
|
||||||
|
<!-- What actually happened when you completed the steps above? -->
|
||||||
|
<!-- Screenshots may be helpful here. -->
|
||||||
|
|
||||||
|
## Context
|
||||||
|
<!-- How has this issue affected you? What were you trying to accomplish? -->
|
||||||
|
|
||||||
|
## Technical Info
|
||||||
|
<!-- Provide any technical details that may be applicable (or N/A if not applicable). -->
|
||||||
|
|
||||||
|
* URL:
|
||||||
|
* Browser Name and version:
|
||||||
|
* Operating System and version (desktop or mobile):
|
||||||
|
* Signed in as:
|
||||||
|
* Project and/or file:
|
||||||
|
|
||||||
|
## Analysis
|
||||||
|
<!--- Optionally, document investigation of / suggest a fix for the bug, e.g. 'comes from this line / commit' -->
|
||||||
|
|
||||||
|
## Who Needs to Know?
|
||||||
|
<!-- If you want to bring this to the attention of particular people, @-mention them below. -->
|
||||||
|
<!-- If a user reported this bug and should be notified when it is fixed, provide the Front conversation link. -->
|
||||||
|
|
||||||
|
-
|
||||||
|
-
|
43
services/web/.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
43
services/web/.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
### Description
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#### Screenshots
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#### Related Issues / PRs
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### Review
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#### Potential Impact
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#### Manual Testing Performed
|
||||||
|
|
||||||
|
- [ ]
|
||||||
|
- [ ]
|
||||||
|
|
||||||
|
#### Accessibility
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### Deployment
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#### Deployment Checklist
|
||||||
|
|
||||||
|
- [ ] Update documentation not included in the PR (if any)
|
||||||
|
- [ ]
|
||||||
|
|
||||||
|
#### Metrics and Monitoring
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#### Who Needs to Know?
|
79
services/web/.gitignore
vendored
Normal file
79
services/web/.gitignore
vendored
Normal file
|
@ -0,0 +1,79 @@
|
||||||
|
# Compiled source #
|
||||||
|
###################
|
||||||
|
*.com
|
||||||
|
*.class
|
||||||
|
*.dll
|
||||||
|
*.exe
|
||||||
|
*.o
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Packages #
|
||||||
|
############
|
||||||
|
# it's better to unpack these files and commit the raw source
|
||||||
|
# git has its own built in compression methods
|
||||||
|
*.7z
|
||||||
|
*.dmg
|
||||||
|
*.gz
|
||||||
|
*.iso
|
||||||
|
*.jar
|
||||||
|
*.rar
|
||||||
|
*.tar
|
||||||
|
*.zip
|
||||||
|
|
||||||
|
# Logs and databases #
|
||||||
|
######################
|
||||||
|
*.log
|
||||||
|
*.sql
|
||||||
|
*.sqlite
|
||||||
|
|
||||||
|
# OS generated files #
|
||||||
|
######################
|
||||||
|
.DS_Store?
|
||||||
|
ehthumbs.db
|
||||||
|
Icon?
|
||||||
|
Thumbs.db
|
||||||
|
|
||||||
|
node_modules/*
|
||||||
|
data/*
|
||||||
|
coverage
|
||||||
|
|
||||||
|
cookies.txt
|
||||||
|
requestQueueWorker.js
|
||||||
|
TpdsWorker.js
|
||||||
|
BackgroundJobsWorker.js
|
||||||
|
UserAndProjectPopulator.coffee
|
||||||
|
|
||||||
|
public/manifest.json
|
||||||
|
|
||||||
|
public/js
|
||||||
|
public/minjs
|
||||||
|
public/stylesheets
|
||||||
|
public/fonts
|
||||||
|
|
||||||
|
Gemfile.lock
|
||||||
|
|
||||||
|
*.swp
|
||||||
|
.DS_Store
|
||||||
|
|
||||||
|
docker-shared.yml
|
||||||
|
|
||||||
|
config/*.coffee
|
||||||
|
!config/settings.defaults.coffee
|
||||||
|
!config/settings.webpack.coffee
|
||||||
|
config/*.js
|
||||||
|
!config/settings.defaults.js
|
||||||
|
!config/settings.webpack.js
|
||||||
|
!config/settings.overrides.saas.js
|
||||||
|
!config/settings.overrides.server-pro.js
|
||||||
|
|
||||||
|
modules/**/Makefile
|
||||||
|
|
||||||
|
# Sentry secrets file (injected by CI)
|
||||||
|
.sentryclirc
|
||||||
|
|
||||||
|
# via dev-environment
|
||||||
|
.npmrc
|
||||||
|
|
||||||
|
# Intellij
|
||||||
|
.idea
|
||||||
|
.run
|
1
services/web/.nvmrc
Normal file
1
services/web/.nvmrc
Normal file
|
@ -0,0 +1 @@
|
||||||
|
12.22.3
|
8
services/web/.prettierignore
Normal file
8
services/web/.prettierignore
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
# NOTE: changing paths may require updating them in the Makefile too.
|
||||||
|
node_modules
|
||||||
|
modules/**/scripts
|
||||||
|
frontend/js/vendor
|
||||||
|
modules/**/frontend/js/vendor
|
||||||
|
public/js
|
||||||
|
public/minjs
|
||||||
|
frontend/stylesheets/components/nvd3.less
|
9
services/web/.prettierrc
Normal file
9
services/web/.prettierrc
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
{
|
||||||
|
"arrowParens": "avoid",
|
||||||
|
"jsxSingleQuote": false,
|
||||||
|
"semi": false,
|
||||||
|
"singleQuote": true,
|
||||||
|
"trailingComma": "es5",
|
||||||
|
"tabWidth": 2,
|
||||||
|
"useTabs": false
|
||||||
|
}
|
3
services/web/.storybook/global.css
Normal file
3
services/web/.storybook/global.css
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
.sidebar-container a[title='Overleaf'] {
|
||||||
|
max-width: 100px;
|
||||||
|
}
|
50
services/web/.storybook/main.js
Normal file
50
services/web/.storybook/main.js
Normal file
|
@ -0,0 +1,50 @@
|
||||||
|
const path = require('path')
|
||||||
|
|
||||||
|
// NOTE: must be set before webpack config is imported
|
||||||
|
process.env.SHARELATEX_CONFIG = path.resolve(
|
||||||
|
__dirname,
|
||||||
|
'../config/settings.webpack.js'
|
||||||
|
)
|
||||||
|
|
||||||
|
const customConfig = require('../webpack.config.dev')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
stories: [
|
||||||
|
'../frontend/stories/**/*.stories.js',
|
||||||
|
'../modules/**/stories/**/*.stories.js',
|
||||||
|
],
|
||||||
|
addons: ['@storybook/addon-essentials', '@storybook/addon-a11y'],
|
||||||
|
webpackFinal: storybookConfig => {
|
||||||
|
// Combine Storybook's webpack loaders with our webpack loaders
|
||||||
|
const rules = [
|
||||||
|
// Filter out the Storybook font file loader, which overrides our font
|
||||||
|
// file loader causing the font to fail to load
|
||||||
|
...storybookConfig.module.rules.filter(
|
||||||
|
rule => !rule.test.toString().includes('woff')
|
||||||
|
),
|
||||||
|
// Replace the less rule, adding to-string-loader
|
||||||
|
// Filter out the MiniCSS extraction, which conflicts with the built-in CSS loader
|
||||||
|
...customConfig.module.rules.filter(
|
||||||
|
rule =>
|
||||||
|
!rule.test.toString().includes('less') &&
|
||||||
|
!rule.test.toString().includes('css')
|
||||||
|
),
|
||||||
|
{
|
||||||
|
test: /\.less$/,
|
||||||
|
use: ['to-string-loader', 'css-loader', 'less-loader'],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
// Combine Storybook's webpack plugins with our webpack plugins
|
||||||
|
const plugins = [...storybookConfig.plugins, ...customConfig.plugins]
|
||||||
|
|
||||||
|
return {
|
||||||
|
...storybookConfig,
|
||||||
|
module: {
|
||||||
|
...storybookConfig.module,
|
||||||
|
rules,
|
||||||
|
},
|
||||||
|
plugins,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
15
services/web/.storybook/manager.js
Normal file
15
services/web/.storybook/manager.js
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
import { addons } from '@storybook/addons'
|
||||||
|
import { create } from '@storybook/theming'
|
||||||
|
|
||||||
|
import './global.css'
|
||||||
|
|
||||||
|
import brandImage from '../public/img/ol-brand/overleaf.svg'
|
||||||
|
|
||||||
|
const theme = create({
|
||||||
|
base: 'light',
|
||||||
|
brandTitle: 'Overleaf',
|
||||||
|
brandUrl: 'https://www.overleaf.com',
|
||||||
|
brandImage,
|
||||||
|
})
|
||||||
|
|
||||||
|
addons.setConfig({ theme })
|
11
services/web/.storybook/preview.css
Normal file
11
services/web/.storybook/preview.css
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
.sb-show-main.modal-open {
|
||||||
|
overflow-y: auto !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sb-show-main .modal-backdrop {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sb-show-main .modal {
|
||||||
|
position: relative;
|
||||||
|
}
|
126
services/web/.storybook/preview.js
Normal file
126
services/web/.storybook/preview.js
Normal file
|
@ -0,0 +1,126 @@
|
||||||
|
import './preview.css'
|
||||||
|
|
||||||
|
// Storybook does not (currently) support async loading of "stories". Therefore
|
||||||
|
// the strategy in frontend/js/i18n.js does not work (because we cannot wait on
|
||||||
|
// the promise to resolve).
|
||||||
|
// Therefore we have to use the synchronous method for configuring
|
||||||
|
// react-i18next. Because this, we can only hard-code a single language.
|
||||||
|
import i18n from 'i18next'
|
||||||
|
import { initReactI18next } from 'react-i18next'
|
||||||
|
import en from '../locales/en.json'
|
||||||
|
i18n.use(initReactI18next).init({
|
||||||
|
lng: 'en',
|
||||||
|
|
||||||
|
resources: {
|
||||||
|
en: { translation: en },
|
||||||
|
},
|
||||||
|
|
||||||
|
react: {
|
||||||
|
useSuspense: false,
|
||||||
|
},
|
||||||
|
|
||||||
|
interpolation: {
|
||||||
|
prefix: '__',
|
||||||
|
suffix: '__',
|
||||||
|
unescapeSuffix: 'HTML',
|
||||||
|
skipOnVariables: true,
|
||||||
|
defaultVariables: {
|
||||||
|
appName: 'Overleaf',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
export const parameters = {
|
||||||
|
// Automatically mark prop-types like onClick, onToggle, etc as Storybook
|
||||||
|
// "actions", so that they are logged in the Actions pane at the bottom of the
|
||||||
|
// viewer
|
||||||
|
actions: { argTypesRegex: '^on.*' },
|
||||||
|
docs: {
|
||||||
|
// render stories in iframes, to isolate modals
|
||||||
|
inlineStories: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
export const globalTypes = {
|
||||||
|
theme: {
|
||||||
|
name: 'Theme',
|
||||||
|
description: 'Editor theme',
|
||||||
|
defaultValue: 'default-',
|
||||||
|
toolbar: {
|
||||||
|
icon: 'circlehollow',
|
||||||
|
items: [
|
||||||
|
{ value: 'default-', title: 'Default' },
|
||||||
|
{ value: 'light-', title: 'Light' },
|
||||||
|
{ value: 'ieee-', title: 'IEEE' },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
export const loaders = [
|
||||||
|
async ({ globals }) => {
|
||||||
|
const { theme } = globals
|
||||||
|
|
||||||
|
return {
|
||||||
|
// NOTE: this uses `${theme}style.less` rather than `${theme}.less`
|
||||||
|
// so that webpack only bundles files ending with "style.less"
|
||||||
|
activeStyle: await import(
|
||||||
|
`../frontend/stylesheets/${theme === 'default-' ? '' : theme}style.less`
|
||||||
|
),
|
||||||
|
}
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
const withTheme = (Story, context) => {
|
||||||
|
const { activeStyle } = context.loaded
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
{activeStyle && <style>{activeStyle.default}</style>}
|
||||||
|
<Story {...context} />
|
||||||
|
</>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const decorators = [withTheme]
|
||||||
|
|
||||||
|
window.ExposedSettings = {
|
||||||
|
maxEntitiesPerProject: 10,
|
||||||
|
maxUploadSize: 5 * 1024 * 1024,
|
||||||
|
enableSubscriptions: true,
|
||||||
|
textExtensions: [
|
||||||
|
'tex',
|
||||||
|
'latex',
|
||||||
|
'sty',
|
||||||
|
'cls',
|
||||||
|
'bst',
|
||||||
|
'bib',
|
||||||
|
'bibtex',
|
||||||
|
'txt',
|
||||||
|
'tikz',
|
||||||
|
'mtx',
|
||||||
|
'rtex',
|
||||||
|
'md',
|
||||||
|
'asy',
|
||||||
|
'latexmkrc',
|
||||||
|
'lbx',
|
||||||
|
'bbx',
|
||||||
|
'cbx',
|
||||||
|
'm',
|
||||||
|
'lco',
|
||||||
|
'dtx',
|
||||||
|
'ins',
|
||||||
|
'ist',
|
||||||
|
'def',
|
||||||
|
'clo',
|
||||||
|
'ldf',
|
||||||
|
'rmd',
|
||||||
|
'lua',
|
||||||
|
'gv',
|
||||||
|
'mf',
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
window.user = {
|
||||||
|
id: 'storybook',
|
||||||
|
}
|
6
services/web/.vscode/settings.json
vendored
Normal file
6
services/web/.vscode/settings.json
vendored
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
{
|
||||||
|
"files.exclude": {
|
||||||
|
"node_modules": true,
|
||||||
|
"data": true
|
||||||
|
}
|
||||||
|
}
|
56
services/web/Dockerfile
Normal file
56
services/web/Dockerfile
Normal file
|
@ -0,0 +1,56 @@
|
||||||
|
# the base image is suitable for running web with /app bind mounted
|
||||||
|
FROM node:12.22.3 as base
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# install_deps changes app files and installs npm packages
|
||||||
|
# as such it has to run at a later stage
|
||||||
|
|
||||||
|
RUN apt-get update \
|
||||||
|
&& apt-get install -y parallel \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
RUN mkdir /app/node_modules && chown node:node /app/node_modules
|
||||||
|
|
||||||
|
# the deps image is used for caching npm ci
|
||||||
|
FROM base as deps
|
||||||
|
|
||||||
|
COPY package.json package-lock.json /app/
|
||||||
|
|
||||||
|
RUN npm ci --quiet
|
||||||
|
|
||||||
|
|
||||||
|
# the dev is suitable for running tests
|
||||||
|
FROM deps as dev
|
||||||
|
|
||||||
|
COPY . /app
|
||||||
|
|
||||||
|
RUN mkdir -p /app/data/dumpFolder && \
|
||||||
|
mkdir -p /app/data/logs && \
|
||||||
|
mkdir -p /app/data/pdf && \
|
||||||
|
mkdir -p /app/data/uploads && \
|
||||||
|
mkdir -p /app/data/zippedProjects && \
|
||||||
|
chmod -R 0755 /app/data/ && \
|
||||||
|
chown -R node:node /app/data/
|
||||||
|
|
||||||
|
ARG SENTRY_RELEASE
|
||||||
|
ENV SENTRY_RELEASE=$SENTRY_RELEASE
|
||||||
|
|
||||||
|
USER node
|
||||||
|
|
||||||
|
|
||||||
|
# the webpack image has deps+src+webpack artifacts
|
||||||
|
FROM dev as webpack
|
||||||
|
|
||||||
|
USER root
|
||||||
|
RUN chmod 0755 ./install_deps.sh && ./install_deps.sh
|
||||||
|
|
||||||
|
|
||||||
|
# the final production image without webpack source maps
|
||||||
|
FROM webpack as app
|
||||||
|
|
||||||
|
RUN find /app/public -name '*.js.map' -delete
|
||||||
|
RUN rm /app/modules/server-ce-scripts -rf
|
||||||
|
USER node
|
||||||
|
|
||||||
|
CMD ["node", "--expose-gc", "app.js"]
|
6
services/web/Dockerfile.frontend
Normal file
6
services/web/Dockerfile.frontend
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
FROM node:12.22.3
|
||||||
|
|
||||||
|
# Install Google Chrome
|
||||||
|
RUN wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add -
|
||||||
|
RUN sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list'
|
||||||
|
RUN apt-get update && apt-get install -y google-chrome-stable
|
11
services/web/Dockerfile.frontend.ci
Normal file
11
services/web/Dockerfile.frontend.ci
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
ARG PROJECT_NAME
|
||||||
|
ARG BRANCH_NAME
|
||||||
|
ARG BUILD_NUMBER
|
||||||
|
|
||||||
|
FROM ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
|
||||||
|
|
||||||
|
USER root
|
||||||
|
|
||||||
|
RUN wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \
|
||||||
|
echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list && \
|
||||||
|
apt-get update && apt-get install -y google-chrome-stable
|
661
services/web/LICENSE
Normal file
661
services/web/LICENSE
Normal file
|
@ -0,0 +1,661 @@
|
||||||
|
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||||
|
Version 3, 19 November 2007
|
||||||
|
|
||||||
|
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies
|
||||||
|
of this license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Preamble
|
||||||
|
|
||||||
|
The GNU Affero General Public License is a free, copyleft license for
|
||||||
|
software and other kinds of works, specifically designed to ensure
|
||||||
|
cooperation with the community in the case of network server software.
|
||||||
|
|
||||||
|
The licenses for most software and other practical works are designed
|
||||||
|
to take away your freedom to share and change the works. By contrast,
|
||||||
|
our General Public Licenses are intended to guarantee your freedom to
|
||||||
|
share and change all versions of a program--to make sure it remains free
|
||||||
|
software for all its users.
|
||||||
|
|
||||||
|
When we speak of free software, we are referring to freedom, not
|
||||||
|
price. Our General Public Licenses are designed to make sure that you
|
||||||
|
have the freedom to distribute copies of free software (and charge for
|
||||||
|
them if you wish), that you receive source code or can get it if you
|
||||||
|
want it, that you can change the software or use pieces of it in new
|
||||||
|
free programs, and that you know you can do these things.
|
||||||
|
|
||||||
|
Developers that use our General Public Licenses protect your rights
|
||||||
|
with two steps: (1) assert copyright on the software, and (2) offer
|
||||||
|
you this License which gives you legal permission to copy, distribute
|
||||||
|
and/or modify the software.
|
||||||
|
|
||||||
|
A secondary benefit of defending all users' freedom is that
|
||||||
|
improvements made in alternate versions of the program, if they
|
||||||
|
receive widespread use, become available for other developers to
|
||||||
|
incorporate. Many developers of free software are heartened and
|
||||||
|
encouraged by the resulting cooperation. However, in the case of
|
||||||
|
software used on network servers, this result may fail to come about.
|
||||||
|
The GNU General Public License permits making a modified version and
|
||||||
|
letting the public access it on a server without ever releasing its
|
||||||
|
source code to the public.
|
||||||
|
|
||||||
|
The GNU Affero General Public License is designed specifically to
|
||||||
|
ensure that, in such cases, the modified source code becomes available
|
||||||
|
to the community. It requires the operator of a network server to
|
||||||
|
provide the source code of the modified version running there to the
|
||||||
|
users of that server. Therefore, public use of a modified version, on
|
||||||
|
a publicly accessible server, gives the public access to the source
|
||||||
|
code of the modified version.
|
||||||
|
|
||||||
|
An older license, called the Affero General Public License and
|
||||||
|
published by Affero, was designed to accomplish similar goals. This is
|
||||||
|
a different license, not a version of the Affero GPL, but Affero has
|
||||||
|
released a new version of the Affero GPL which permits relicensing under
|
||||||
|
this license.
|
||||||
|
|
||||||
|
The precise terms and conditions for copying, distribution and
|
||||||
|
modification follow.
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
0. Definitions.
|
||||||
|
|
||||||
|
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||||
|
|
||||||
|
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||||
|
works, such as semiconductor masks.
|
||||||
|
|
||||||
|
"The Program" refers to any copyrightable work licensed under this
|
||||||
|
License. Each licensee is addressed as "you". "Licensees" and
|
||||||
|
"recipients" may be individuals or organizations.
|
||||||
|
|
||||||
|
To "modify" a work means to copy from or adapt all or part of the work
|
||||||
|
in a fashion requiring copyright permission, other than the making of an
|
||||||
|
exact copy. The resulting work is called a "modified version" of the
|
||||||
|
earlier work or a work "based on" the earlier work.
|
||||||
|
|
||||||
|
A "covered work" means either the unmodified Program or a work based
|
||||||
|
on the Program.
|
||||||
|
|
||||||
|
To "propagate" a work means to do anything with it that, without
|
||||||
|
permission, would make you directly or secondarily liable for
|
||||||
|
infringement under applicable copyright law, except executing it on a
|
||||||
|
computer or modifying a private copy. Propagation includes copying,
|
||||||
|
distribution (with or without modification), making available to the
|
||||||
|
public, and in some countries other activities as well.
|
||||||
|
|
||||||
|
To "convey" a work means any kind of propagation that enables other
|
||||||
|
parties to make or receive copies. Mere interaction with a user through
|
||||||
|
a computer network, with no transfer of a copy, is not conveying.
|
||||||
|
|
||||||
|
An interactive user interface displays "Appropriate Legal Notices"
|
||||||
|
to the extent that it includes a convenient and prominently visible
|
||||||
|
feature that (1) displays an appropriate copyright notice, and (2)
|
||||||
|
tells the user that there is no warranty for the work (except to the
|
||||||
|
extent that warranties are provided), that licensees may convey the
|
||||||
|
work under this License, and how to view a copy of this License. If
|
||||||
|
the interface presents a list of user commands or options, such as a
|
||||||
|
menu, a prominent item in the list meets this criterion.
|
||||||
|
|
||||||
|
1. Source Code.
|
||||||
|
|
||||||
|
The "source code" for a work means the preferred form of the work
|
||||||
|
for making modifications to it. "Object code" means any non-source
|
||||||
|
form of a work.
|
||||||
|
|
||||||
|
A "Standard Interface" means an interface that either is an official
|
||||||
|
standard defined by a recognized standards body, or, in the case of
|
||||||
|
interfaces specified for a particular programming language, one that
|
||||||
|
is widely used among developers working in that language.
|
||||||
|
|
||||||
|
The "System Libraries" of an executable work include anything, other
|
||||||
|
than the work as a whole, that (a) is included in the normal form of
|
||||||
|
packaging a Major Component, but which is not part of that Major
|
||||||
|
Component, and (b) serves only to enable use of the work with that
|
||||||
|
Major Component, or to implement a Standard Interface for which an
|
||||||
|
implementation is available to the public in source code form. A
|
||||||
|
"Major Component", in this context, means a major essential component
|
||||||
|
(kernel, window system, and so on) of the specific operating system
|
||||||
|
(if any) on which the executable work runs, or a compiler used to
|
||||||
|
produce the work, or an object code interpreter used to run it.
|
||||||
|
|
||||||
|
The "Corresponding Source" for a work in object code form means all
|
||||||
|
the source code needed to generate, install, and (for an executable
|
||||||
|
work) run the object code and to modify the work, including scripts to
|
||||||
|
control those activities. However, it does not include the work's
|
||||||
|
System Libraries, or general-purpose tools or generally available free
|
||||||
|
programs which are used unmodified in performing those activities but
|
||||||
|
which are not part of the work. For example, Corresponding Source
|
||||||
|
includes interface definition files associated with source files for
|
||||||
|
the work, and the source code for shared libraries and dynamically
|
||||||
|
linked subprograms that the work is specifically designed to require,
|
||||||
|
such as by intimate data communication or control flow between those
|
||||||
|
subprograms and other parts of the work.
|
||||||
|
|
||||||
|
The Corresponding Source need not include anything that users
|
||||||
|
can regenerate automatically from other parts of the Corresponding
|
||||||
|
Source.
|
||||||
|
|
||||||
|
The Corresponding Source for a work in source code form is that
|
||||||
|
same work.
|
||||||
|
|
||||||
|
2. Basic Permissions.
|
||||||
|
|
||||||
|
All rights granted under this License are granted for the term of
|
||||||
|
copyright on the Program, and are irrevocable provided the stated
|
||||||
|
conditions are met. This License explicitly affirms your unlimited
|
||||||
|
permission to run the unmodified Program. The output from running a
|
||||||
|
covered work is covered by this License only if the output, given its
|
||||||
|
content, constitutes a covered work. This License acknowledges your
|
||||||
|
rights of fair use or other equivalent, as provided by copyright law.
|
||||||
|
|
||||||
|
You may make, run and propagate covered works that you do not
|
||||||
|
convey, without conditions so long as your license otherwise remains
|
||||||
|
in force. You may convey covered works to others for the sole purpose
|
||||||
|
of having them make modifications exclusively for you, or provide you
|
||||||
|
with facilities for running those works, provided that you comply with
|
||||||
|
the terms of this License in conveying all material for which you do
|
||||||
|
not control copyright. Those thus making or running the covered works
|
||||||
|
for you must do so exclusively on your behalf, under your direction
|
||||||
|
and control, on terms that prohibit them from making any copies of
|
||||||
|
your copyrighted material outside their relationship with you.
|
||||||
|
|
||||||
|
Conveying under any other circumstances is permitted solely under
|
||||||
|
the conditions stated below. Sublicensing is not allowed; section 10
|
||||||
|
makes it unnecessary.
|
||||||
|
|
||||||
|
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||||
|
|
||||||
|
No covered work shall be deemed part of an effective technological
|
||||||
|
measure under any applicable law fulfilling obligations under article
|
||||||
|
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||||
|
similar laws prohibiting or restricting circumvention of such
|
||||||
|
measures.
|
||||||
|
|
||||||
|
When you convey a covered work, you waive any legal power to forbid
|
||||||
|
circumvention of technological measures to the extent such circumvention
|
||||||
|
is effected by exercising rights under this License with respect to
|
||||||
|
the covered work, and you disclaim any intention to limit operation or
|
||||||
|
modification of the work as a means of enforcing, against the work's
|
||||||
|
users, your or third parties' legal rights to forbid circumvention of
|
||||||
|
technological measures.
|
||||||
|
|
||||||
|
4. Conveying Verbatim Copies.
|
||||||
|
|
||||||
|
You may convey verbatim copies of the Program's source code as you
|
||||||
|
receive it, in any medium, provided that you conspicuously and
|
||||||
|
appropriately publish on each copy an appropriate copyright notice;
|
||||||
|
keep intact all notices stating that this License and any
|
||||||
|
non-permissive terms added in accord with section 7 apply to the code;
|
||||||
|
keep intact all notices of the absence of any warranty; and give all
|
||||||
|
recipients a copy of this License along with the Program.
|
||||||
|
|
||||||
|
You may charge any price or no price for each copy that you convey,
|
||||||
|
and you may offer support or warranty protection for a fee.
|
||||||
|
|
||||||
|
5. Conveying Modified Source Versions.
|
||||||
|
|
||||||
|
You may convey a work based on the Program, or the modifications to
|
||||||
|
produce it from the Program, in the form of source code under the
|
||||||
|
terms of section 4, provided that you also meet all of these conditions:
|
||||||
|
|
||||||
|
a) The work must carry prominent notices stating that you modified
|
||||||
|
it, and giving a relevant date.
|
||||||
|
|
||||||
|
b) The work must carry prominent notices stating that it is
|
||||||
|
released under this License and any conditions added under section
|
||||||
|
7. This requirement modifies the requirement in section 4 to
|
||||||
|
"keep intact all notices".
|
||||||
|
|
||||||
|
c) You must license the entire work, as a whole, under this
|
||||||
|
License to anyone who comes into possession of a copy. This
|
||||||
|
License will therefore apply, along with any applicable section 7
|
||||||
|
additional terms, to the whole of the work, and all its parts,
|
||||||
|
regardless of how they are packaged. This License gives no
|
||||||
|
permission to license the work in any other way, but it does not
|
||||||
|
invalidate such permission if you have separately received it.
|
||||||
|
|
||||||
|
d) If the work has interactive user interfaces, each must display
|
||||||
|
Appropriate Legal Notices; however, if the Program has interactive
|
||||||
|
interfaces that do not display Appropriate Legal Notices, your
|
||||||
|
work need not make them do so.
|
||||||
|
|
||||||
|
A compilation of a covered work with other separate and independent
|
||||||
|
works, which are not by their nature extensions of the covered work,
|
||||||
|
and which are not combined with it such as to form a larger program,
|
||||||
|
in or on a volume of a storage or distribution medium, is called an
|
||||||
|
"aggregate" if the compilation and its resulting copyright are not
|
||||||
|
used to limit the access or legal rights of the compilation's users
|
||||||
|
beyond what the individual works permit. Inclusion of a covered work
|
||||||
|
in an aggregate does not cause this License to apply to the other
|
||||||
|
parts of the aggregate.
|
||||||
|
|
||||||
|
6. Conveying Non-Source Forms.
|
||||||
|
|
||||||
|
You may convey a covered work in object code form under the terms
|
||||||
|
of sections 4 and 5, provided that you also convey the
|
||||||
|
machine-readable Corresponding Source under the terms of this License,
|
||||||
|
in one of these ways:
|
||||||
|
|
||||||
|
a) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by the
|
||||||
|
Corresponding Source fixed on a durable physical medium
|
||||||
|
customarily used for software interchange.
|
||||||
|
|
||||||
|
b) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by a
|
||||||
|
written offer, valid for at least three years and valid for as
|
||||||
|
long as you offer spare parts or customer support for that product
|
||||||
|
model, to give anyone who possesses the object code either (1) a
|
||||||
|
copy of the Corresponding Source for all the software in the
|
||||||
|
product that is covered by this License, on a durable physical
|
||||||
|
medium customarily used for software interchange, for a price no
|
||||||
|
more than your reasonable cost of physically performing this
|
||||||
|
conveying of source, or (2) access to copy the
|
||||||
|
Corresponding Source from a network server at no charge.
|
||||||
|
|
||||||
|
c) Convey individual copies of the object code with a copy of the
|
||||||
|
written offer to provide the Corresponding Source. This
|
||||||
|
alternative is allowed only occasionally and noncommercially, and
|
||||||
|
only if you received the object code with such an offer, in accord
|
||||||
|
with subsection 6b.
|
||||||
|
|
||||||
|
d) Convey the object code by offering access from a designated
|
||||||
|
place (gratis or for a charge), and offer equivalent access to the
|
||||||
|
Corresponding Source in the same way through the same place at no
|
||||||
|
further charge. You need not require recipients to copy the
|
||||||
|
Corresponding Source along with the object code. If the place to
|
||||||
|
copy the object code is a network server, the Corresponding Source
|
||||||
|
may be on a different server (operated by you or a third party)
|
||||||
|
that supports equivalent copying facilities, provided you maintain
|
||||||
|
clear directions next to the object code saying where to find the
|
||||||
|
Corresponding Source. Regardless of what server hosts the
|
||||||
|
Corresponding Source, you remain obligated to ensure that it is
|
||||||
|
available for as long as needed to satisfy these requirements.
|
||||||
|
|
||||||
|
e) Convey the object code using peer-to-peer transmission, provided
|
||||||
|
you inform other peers where the object code and Corresponding
|
||||||
|
Source of the work are being offered to the general public at no
|
||||||
|
charge under subsection 6d.
|
||||||
|
|
||||||
|
A separable portion of the object code, whose source code is excluded
|
||||||
|
from the Corresponding Source as a System Library, need not be
|
||||||
|
included in conveying the object code work.
|
||||||
|
|
||||||
|
A "User Product" is either (1) a "consumer product", which means any
|
||||||
|
tangible personal property which is normally used for personal, family,
|
||||||
|
or household purposes, or (2) anything designed or sold for incorporation
|
||||||
|
into a dwelling. In determining whether a product is a consumer product,
|
||||||
|
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||||
|
product received by a particular user, "normally used" refers to a
|
||||||
|
typical or common use of that class of product, regardless of the status
|
||||||
|
of the particular user or of the way in which the particular user
|
||||||
|
actually uses, or expects or is expected to use, the product. A product
|
||||||
|
is a consumer product regardless of whether the product has substantial
|
||||||
|
commercial, industrial or non-consumer uses, unless such uses represent
|
||||||
|
the only significant mode of use of the product.
|
||||||
|
|
||||||
|
"Installation Information" for a User Product means any methods,
|
||||||
|
procedures, authorization keys, or other information required to install
|
||||||
|
and execute modified versions of a covered work in that User Product from
|
||||||
|
a modified version of its Corresponding Source. The information must
|
||||||
|
suffice to ensure that the continued functioning of the modified object
|
||||||
|
code is in no case prevented or interfered with solely because
|
||||||
|
modification has been made.
|
||||||
|
|
||||||
|
If you convey an object code work under this section in, or with, or
|
||||||
|
specifically for use in, a User Product, and the conveying occurs as
|
||||||
|
part of a transaction in which the right of possession and use of the
|
||||||
|
User Product is transferred to the recipient in perpetuity or for a
|
||||||
|
fixed term (regardless of how the transaction is characterized), the
|
||||||
|
Corresponding Source conveyed under this section must be accompanied
|
||||||
|
by the Installation Information. But this requirement does not apply
|
||||||
|
if neither you nor any third party retains the ability to install
|
||||||
|
modified object code on the User Product (for example, the work has
|
||||||
|
been installed in ROM).
|
||||||
|
|
||||||
|
The requirement to provide Installation Information does not include a
|
||||||
|
requirement to continue to provide support service, warranty, or updates
|
||||||
|
for a work that has been modified or installed by the recipient, or for
|
||||||
|
the User Product in which it has been modified or installed. Access to a
|
||||||
|
network may be denied when the modification itself materially and
|
||||||
|
adversely affects the operation of the network or violates the rules and
|
||||||
|
protocols for communication across the network.
|
||||||
|
|
||||||
|
Corresponding Source conveyed, and Installation Information provided,
|
||||||
|
in accord with this section must be in a format that is publicly
|
||||||
|
documented (and with an implementation available to the public in
|
||||||
|
source code form), and must require no special password or key for
|
||||||
|
unpacking, reading or copying.
|
||||||
|
|
||||||
|
7. Additional Terms.
|
||||||
|
|
||||||
|
"Additional permissions" are terms that supplement the terms of this
|
||||||
|
License by making exceptions from one or more of its conditions.
|
||||||
|
Additional permissions that are applicable to the entire Program shall
|
||||||
|
be treated as though they were included in this License, to the extent
|
||||||
|
that they are valid under applicable law. If additional permissions
|
||||||
|
apply only to part of the Program, that part may be used separately
|
||||||
|
under those permissions, but the entire Program remains governed by
|
||||||
|
this License without regard to the additional permissions.
|
||||||
|
|
||||||
|
When you convey a copy of a covered work, you may at your option
|
||||||
|
remove any additional permissions from that copy, or from any part of
|
||||||
|
it. (Additional permissions may be written to require their own
|
||||||
|
removal in certain cases when you modify the work.) You may place
|
||||||
|
additional permissions on material, added by you to a covered work,
|
||||||
|
for which you have or can give appropriate copyright permission.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, for material you
|
||||||
|
add to a covered work, you may (if authorized by the copyright holders of
|
||||||
|
that material) supplement the terms of this License with terms:
|
||||||
|
|
||||||
|
a) Disclaiming warranty or limiting liability differently from the
|
||||||
|
terms of sections 15 and 16 of this License; or
|
||||||
|
|
||||||
|
b) Requiring preservation of specified reasonable legal notices or
|
||||||
|
author attributions in that material or in the Appropriate Legal
|
||||||
|
Notices displayed by works containing it; or
|
||||||
|
|
||||||
|
c) Prohibiting misrepresentation of the origin of that material, or
|
||||||
|
requiring that modified versions of such material be marked in
|
||||||
|
reasonable ways as different from the original version; or
|
||||||
|
|
||||||
|
d) Limiting the use for publicity purposes of names of licensors or
|
||||||
|
authors of the material; or
|
||||||
|
|
||||||
|
e) Declining to grant rights under trademark law for use of some
|
||||||
|
trade names, trademarks, or service marks; or
|
||||||
|
|
||||||
|
f) Requiring indemnification of licensors and authors of that
|
||||||
|
material by anyone who conveys the material (or modified versions of
|
||||||
|
it) with contractual assumptions of liability to the recipient, for
|
||||||
|
any liability that these contractual assumptions directly impose on
|
||||||
|
those licensors and authors.
|
||||||
|
|
||||||
|
All other non-permissive additional terms are considered "further
|
||||||
|
restrictions" within the meaning of section 10. If the Program as you
|
||||||
|
received it, or any part of it, contains a notice stating that it is
|
||||||
|
governed by this License along with a term that is a further
|
||||||
|
restriction, you may remove that term. If a license document contains
|
||||||
|
a further restriction but permits relicensing or conveying under this
|
||||||
|
License, you may add to a covered work material governed by the terms
|
||||||
|
of that license document, provided that the further restriction does
|
||||||
|
not survive such relicensing or conveying.
|
||||||
|
|
||||||
|
If you add terms to a covered work in accord with this section, you
|
||||||
|
must place, in the relevant source files, a statement of the
|
||||||
|
additional terms that apply to those files, or a notice indicating
|
||||||
|
where to find the applicable terms.
|
||||||
|
|
||||||
|
Additional terms, permissive or non-permissive, may be stated in the
|
||||||
|
form of a separately written license, or stated as exceptions;
|
||||||
|
the above requirements apply either way.
|
||||||
|
|
||||||
|
8. Termination.
|
||||||
|
|
||||||
|
You may not propagate or modify a covered work except as expressly
|
||||||
|
provided under this License. Any attempt otherwise to propagate or
|
||||||
|
modify it is void, and will automatically terminate your rights under
|
||||||
|
this License (including any patent licenses granted under the third
|
||||||
|
paragraph of section 11).
|
||||||
|
|
||||||
|
However, if you cease all violation of this License, then your
|
||||||
|
license from a particular copyright holder is reinstated (a)
|
||||||
|
provisionally, unless and until the copyright holder explicitly and
|
||||||
|
finally terminates your license, and (b) permanently, if the copyright
|
||||||
|
holder fails to notify you of the violation by some reasonable means
|
||||||
|
prior to 60 days after the cessation.
|
||||||
|
|
||||||
|
Moreover, your license from a particular copyright holder is
|
||||||
|
reinstated permanently if the copyright holder notifies you of the
|
||||||
|
violation by some reasonable means, this is the first time you have
|
||||||
|
received notice of violation of this License (for any work) from that
|
||||||
|
copyright holder, and you cure the violation prior to 30 days after
|
||||||
|
your receipt of the notice.
|
||||||
|
|
||||||
|
Termination of your rights under this section does not terminate the
|
||||||
|
licenses of parties who have received copies or rights from you under
|
||||||
|
this License. If your rights have been terminated and not permanently
|
||||||
|
reinstated, you do not qualify to receive new licenses for the same
|
||||||
|
material under section 10.
|
||||||
|
|
||||||
|
9. Acceptance Not Required for Having Copies.
|
||||||
|
|
||||||
|
You are not required to accept this License in order to receive or
|
||||||
|
run a copy of the Program. Ancillary propagation of a covered work
|
||||||
|
occurring solely as a consequence of using peer-to-peer transmission
|
||||||
|
to receive a copy likewise does not require acceptance. However,
|
||||||
|
nothing other than this License grants you permission to propagate or
|
||||||
|
modify any covered work. These actions infringe copyright if you do
|
||||||
|
not accept this License. Therefore, by modifying or propagating a
|
||||||
|
covered work, you indicate your acceptance of this License to do so.
|
||||||
|
|
||||||
|
10. Automatic Licensing of Downstream Recipients.
|
||||||
|
|
||||||
|
Each time you convey a covered work, the recipient automatically
|
||||||
|
receives a license from the original licensors, to run, modify and
|
||||||
|
propagate that work, subject to this License. You are not responsible
|
||||||
|
for enforcing compliance by third parties with this License.
|
||||||
|
|
||||||
|
An "entity transaction" is a transaction transferring control of an
|
||||||
|
organization, or substantially all assets of one, or subdividing an
|
||||||
|
organization, or merging organizations. If propagation of a covered
|
||||||
|
work results from an entity transaction, each party to that
|
||||||
|
transaction who receives a copy of the work also receives whatever
|
||||||
|
licenses to the work the party's predecessor in interest had or could
|
||||||
|
give under the previous paragraph, plus a right to possession of the
|
||||||
|
Corresponding Source of the work from the predecessor in interest, if
|
||||||
|
the predecessor has it or can get it with reasonable efforts.
|
||||||
|
|
||||||
|
You may not impose any further restrictions on the exercise of the
|
||||||
|
rights granted or affirmed under this License. For example, you may
|
||||||
|
not impose a license fee, royalty, or other charge for exercise of
|
||||||
|
rights granted under this License, and you may not initiate litigation
|
||||||
|
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||||
|
any patent claim is infringed by making, using, selling, offering for
|
||||||
|
sale, or importing the Program or any portion of it.
|
||||||
|
|
||||||
|
11. Patents.
|
||||||
|
|
||||||
|
A "contributor" is a copyright holder who authorizes use under this
|
||||||
|
License of the Program or a work on which the Program is based. The
|
||||||
|
work thus licensed is called the contributor's "contributor version".
|
||||||
|
|
||||||
|
A contributor's "essential patent claims" are all patent claims
|
||||||
|
owned or controlled by the contributor, whether already acquired or
|
||||||
|
hereafter acquired, that would be infringed by some manner, permitted
|
||||||
|
by this License, of making, using, or selling its contributor version,
|
||||||
|
but do not include claims that would be infringed only as a
|
||||||
|
consequence of further modification of the contributor version. For
|
||||||
|
purposes of this definition, "control" includes the right to grant
|
||||||
|
patent sublicenses in a manner consistent with the requirements of
|
||||||
|
this License.
|
||||||
|
|
||||||
|
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||||
|
patent license under the contributor's essential patent claims, to
|
||||||
|
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||||
|
propagate the contents of its contributor version.
|
||||||
|
|
||||||
|
In the following three paragraphs, a "patent license" is any express
|
||||||
|
agreement or commitment, however denominated, not to enforce a patent
|
||||||
|
(such as an express permission to practice a patent or covenant not to
|
||||||
|
sue for patent infringement). To "grant" such a patent license to a
|
||||||
|
party means to make such an agreement or commitment not to enforce a
|
||||||
|
patent against the party.
|
||||||
|
|
||||||
|
If you convey a covered work, knowingly relying on a patent license,
|
||||||
|
and the Corresponding Source of the work is not available for anyone
|
||||||
|
to copy, free of charge and under the terms of this License, through a
|
||||||
|
publicly available network server or other readily accessible means,
|
||||||
|
then you must either (1) cause the Corresponding Source to be so
|
||||||
|
available, or (2) arrange to deprive yourself of the benefit of the
|
||||||
|
patent license for this particular work, or (3) arrange, in a manner
|
||||||
|
consistent with the requirements of this License, to extend the patent
|
||||||
|
license to downstream recipients. "Knowingly relying" means you have
|
||||||
|
actual knowledge that, but for the patent license, your conveying the
|
||||||
|
covered work in a country, or your recipient's use of the covered work
|
||||||
|
in a country, would infringe one or more identifiable patents in that
|
||||||
|
country that you have reason to believe are valid.
|
||||||
|
|
||||||
|
If, pursuant to or in connection with a single transaction or
|
||||||
|
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||||
|
covered work, and grant a patent license to some of the parties
|
||||||
|
receiving the covered work authorizing them to use, propagate, modify
|
||||||
|
or convey a specific copy of the covered work, then the patent license
|
||||||
|
you grant is automatically extended to all recipients of the covered
|
||||||
|
work and works based on it.
|
||||||
|
|
||||||
|
A patent license is "discriminatory" if it does not include within
|
||||||
|
the scope of its coverage, prohibits the exercise of, or is
|
||||||
|
conditioned on the non-exercise of one or more of the rights that are
|
||||||
|
specifically granted under this License. You may not convey a covered
|
||||||
|
work if you are a party to an arrangement with a third party that is
|
||||||
|
in the business of distributing software, under which you make payment
|
||||||
|
to the third party based on the extent of your activity of conveying
|
||||||
|
the work, and under which the third party grants, to any of the
|
||||||
|
parties who would receive the covered work from you, a discriminatory
|
||||||
|
patent license (a) in connection with copies of the covered work
|
||||||
|
conveyed by you (or copies made from those copies), or (b) primarily
|
||||||
|
for and in connection with specific products or compilations that
|
||||||
|
contain the covered work, unless you entered into that arrangement,
|
||||||
|
or that patent license was granted, prior to 28 March 2007.
|
||||||
|
|
||||||
|
Nothing in this License shall be construed as excluding or limiting
|
||||||
|
any implied license or other defenses to infringement that may
|
||||||
|
otherwise be available to you under applicable patent law.
|
||||||
|
|
||||||
|
12. No Surrender of Others' Freedom.
|
||||||
|
|
||||||
|
If conditions are imposed on you (whether by court order, agreement or
|
||||||
|
otherwise) that contradict the conditions of this License, they do not
|
||||||
|
excuse you from the conditions of this License. If you cannot convey a
|
||||||
|
covered work so as to satisfy simultaneously your obligations under this
|
||||||
|
License and any other pertinent obligations, then as a consequence you may
|
||||||
|
not convey it at all. For example, if you agree to terms that obligate you
|
||||||
|
to collect a royalty for further conveying from those to whom you convey
|
||||||
|
the Program, the only way you could satisfy both those terms and this
|
||||||
|
License would be to refrain entirely from conveying the Program.
|
||||||
|
|
||||||
|
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, if you modify the
|
||||||
|
Program, your modified version must prominently offer all users
|
||||||
|
interacting with it remotely through a computer network (if your version
|
||||||
|
supports such interaction) an opportunity to receive the Corresponding
|
||||||
|
Source of your version by providing access to the Corresponding Source
|
||||||
|
from a network server at no charge, through some standard or customary
|
||||||
|
means of facilitating copying of software. This Corresponding Source
|
||||||
|
shall include the Corresponding Source for any work covered by version 3
|
||||||
|
of the GNU General Public License that is incorporated pursuant to the
|
||||||
|
following paragraph.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, you have
|
||||||
|
permission to link or combine any covered work with a work licensed
|
||||||
|
under version 3 of the GNU General Public License into a single
|
||||||
|
combined work, and to convey the resulting work. The terms of this
|
||||||
|
License will continue to apply to the part which is the covered work,
|
||||||
|
but the work with which it is combined will remain governed by version
|
||||||
|
3 of the GNU General Public License.
|
||||||
|
|
||||||
|
14. Revised Versions of this License.
|
||||||
|
|
||||||
|
The Free Software Foundation may publish revised and/or new versions of
|
||||||
|
the GNU Affero General Public License from time to time. Such new versions
|
||||||
|
will be similar in spirit to the present version, but may differ in detail to
|
||||||
|
address new problems or concerns.
|
||||||
|
|
||||||
|
Each version is given a distinguishing version number. If the
|
||||||
|
Program specifies that a certain numbered version of the GNU Affero General
|
||||||
|
Public License "or any later version" applies to it, you have the
|
||||||
|
option of following the terms and conditions either of that numbered
|
||||||
|
version or of any later version published by the Free Software
|
||||||
|
Foundation. If the Program does not specify a version number of the
|
||||||
|
GNU Affero General Public License, you may choose any version ever published
|
||||||
|
by the Free Software Foundation.
|
||||||
|
|
||||||
|
If the Program specifies that a proxy can decide which future
|
||||||
|
versions of the GNU Affero General Public License can be used, that proxy's
|
||||||
|
public statement of acceptance of a version permanently authorizes you
|
||||||
|
to choose that version for the Program.
|
||||||
|
|
||||||
|
Later license versions may give you additional or different
|
||||||
|
permissions. However, no additional obligations are imposed on any
|
||||||
|
author or copyright holder as a result of your choosing to follow a
|
||||||
|
later version.
|
||||||
|
|
||||||
|
15. Disclaimer of Warranty.
|
||||||
|
|
||||||
|
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||||
|
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||||
|
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||||
|
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||||
|
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||||
|
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||||
|
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||||
|
|
||||||
|
16. Limitation of Liability.
|
||||||
|
|
||||||
|
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||||
|
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||||
|
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||||
|
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||||
|
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||||
|
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||||
|
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||||
|
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||||
|
SUCH DAMAGES.
|
||||||
|
|
||||||
|
17. Interpretation of Sections 15 and 16.
|
||||||
|
|
||||||
|
If the disclaimer of warranty and limitation of liability provided
|
||||||
|
above cannot be given local legal effect according to their terms,
|
||||||
|
reviewing courts shall apply local law that most closely approximates
|
||||||
|
an absolute waiver of all civil liability in connection with the
|
||||||
|
Program, unless a warranty or assumption of liability accompanies a
|
||||||
|
copy of the Program in return for a fee.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
How to Apply These Terms to Your New Programs
|
||||||
|
|
||||||
|
If you develop a new program, and you want it to be of the greatest
|
||||||
|
possible use to the public, the best way to achieve this is to make it
|
||||||
|
free software which everyone can redistribute and change under these terms.
|
||||||
|
|
||||||
|
To do so, attach the following notices to the program. It is safest
|
||||||
|
to attach them to the start of each source file to most effectively
|
||||||
|
state the exclusion of warranty; and each file should have at least
|
||||||
|
the "copyright" line and a pointer to where the full notice is found.
|
||||||
|
|
||||||
|
<one line to give the program's name and a brief idea of what it does.>
|
||||||
|
Copyright (C) <year> <name of author>
|
||||||
|
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU Affero General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU Affero General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Affero General Public License
|
||||||
|
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
Also add information on how to contact you by electronic and paper mail.
|
||||||
|
|
||||||
|
If your software can interact with users remotely through a computer
|
||||||
|
network, you should also make sure that it provides a way for users to
|
||||||
|
get its source. For example, if your program is a web application, its
|
||||||
|
interface could display a "Source" link that leads users to an archive
|
||||||
|
of the code. There are many ways you could offer source, and different
|
||||||
|
solutions will be better for different programs; see section 13 for the
|
||||||
|
specific requirements.
|
||||||
|
|
||||||
|
You should also get your employer (if you work as a programmer) or school,
|
||||||
|
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||||
|
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||||
|
<http://www.gnu.org/licenses/>.
|
510
services/web/Makefile
Normal file
510
services/web/Makefile
Normal file
|
@ -0,0 +1,510 @@
|
||||||
|
DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml --log-level ERROR
|
||||||
|
|
||||||
|
BUILD_NUMBER ?= local
|
||||||
|
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
|
||||||
|
PROJECT_NAME = web
|
||||||
|
BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]')
|
||||||
|
|
||||||
|
export SHARELATEX_CONFIG ?= /app/test/acceptance/config/settings.test.saas.js
|
||||||
|
export BASE_CONFIG ?= ${SHARELATEX_CONFIG}
|
||||||
|
|
||||||
|
CFG_SAAS=/app/test/acceptance/config/settings.test.saas.js
|
||||||
|
CFG_SERVER_CE=/app/test/acceptance/config/settings.test.server-ce.js
|
||||||
|
CFG_SERVER_PRO=/app/test/acceptance/config/settings.test.server-pro.js
|
||||||
|
|
||||||
|
DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
|
||||||
|
BRANCH_NAME=$(BRANCH_NAME) \
|
||||||
|
PROJECT_NAME=$(PROJECT_NAME) \
|
||||||
|
MOCHA_GREP=${MOCHA_GREP} \
|
||||||
|
docker-compose ${DOCKER_COMPOSE_FLAGS}
|
||||||
|
|
||||||
|
MODULE_DIRS := $(shell find modules -mindepth 1 -maxdepth 1 -type d -not -name '.git' )
|
||||||
|
MODULE_MAKEFILES := $(MODULE_DIRS:=/Makefile)
|
||||||
|
MODULE_NAME=$(shell basename $(MODULE))
|
||||||
|
|
||||||
|
$(MODULE_MAKEFILES): Makefile.module
|
||||||
|
cp Makefile.module $@ || diff Makefile.module $@
|
||||||
|
|
||||||
|
#
|
||||||
|
# Clean
|
||||||
|
#
|
||||||
|
|
||||||
|
clean_ci:
|
||||||
|
$(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
docker container list | grep 'days ago' | cut -d ' ' -f 1 - | xargs -r docker container stop
|
||||||
|
docker image prune -af --filter "until=48h"
|
||||||
|
docker network prune -f
|
||||||
|
|
||||||
|
#
|
||||||
|
# Tests
|
||||||
|
#
|
||||||
|
|
||||||
|
test: test_unit test_karma test_acceptance test_frontend
|
||||||
|
|
||||||
|
test_module: test_unit_module test_acceptance_module
|
||||||
|
|
||||||
|
#
|
||||||
|
# Unit tests
|
||||||
|
#
|
||||||
|
|
||||||
|
test_unit: test_unit_all
|
||||||
|
test_unit_all:
|
||||||
|
COMPOSE_PROJECT_NAME=unit_test_all_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:all
|
||||||
|
COMPOSE_PROJECT_NAME=unit_test_all_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
|
||||||
|
test_unit_all_silent:
|
||||||
|
COMPOSE_PROJECT_NAME=unit_test_all_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:all:silent
|
||||||
|
COMPOSE_PROJECT_NAME=unit_test_all_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
|
||||||
|
test_unit_app:
|
||||||
|
COMPOSE_PROJECT_NAME=unit_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
COMPOSE_PROJECT_NAME=unit_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) run --name unit_test_$(BUILD_DIR_NAME) --rm test_unit
|
||||||
|
COMPOSE_PROJECT_NAME=unit_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
|
||||||
|
TEST_SUITES = $(sort $(filter-out \
|
||||||
|
$(wildcard test/unit/src/helpers/*), \
|
||||||
|
$(wildcard test/unit/src/*/*)))
|
||||||
|
|
||||||
|
MOCHA_CMD_LINE = \
|
||||||
|
mocha \
|
||||||
|
--exit \
|
||||||
|
--file test/unit/bootstrap.js \
|
||||||
|
--grep=${MOCHA_GREP} \
|
||||||
|
--reporter spec \
|
||||||
|
--timeout 25000 \
|
||||||
|
|
||||||
|
.PHONY: $(TEST_SUITES)
|
||||||
|
$(TEST_SUITES):
|
||||||
|
$(MOCHA_CMD_LINE) $@
|
||||||
|
|
||||||
|
J ?= 1
|
||||||
|
test_unit_app_parallel_gnu_make: $(TEST_SUITES)
|
||||||
|
test_unit_app_parallel_gnu_make_docker: export COMPOSE_PROJECT_NAME = \
|
||||||
|
unit_test_parallel_make_$(BUILD_DIR_NAME)
|
||||||
|
test_unit_app_parallel_gnu_make_docker:
|
||||||
|
$(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
$(DOCKER_COMPOSE) run --rm test_unit \
|
||||||
|
make test_unit_app_parallel_gnu_make --output-sync -j $(J)
|
||||||
|
$(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
|
||||||
|
test_unit_app_parallel: test_unit_app_parallel_gnu_parallel
|
||||||
|
test_unit_app_parallel_gnu_parallel: export COMPOSE_PROJECT_NAME = \
|
||||||
|
unit_test_parallel_$(BUILD_DIR_NAME)
|
||||||
|
test_unit_app_parallel_gnu_parallel:
|
||||||
|
$(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
$(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:app:parallel
|
||||||
|
$(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
|
||||||
|
TEST_UNIT_MODULES = $(MODULE_DIRS:=/test_unit)
|
||||||
|
$(TEST_UNIT_MODULES): %/test_unit: %/Makefile
|
||||||
|
test_unit_modules: $(TEST_UNIT_MODULES)
|
||||||
|
|
||||||
|
test_unit_module:
|
||||||
|
$(MAKE) modules/$(MODULE_NAME)/test_unit
|
||||||
|
|
||||||
|
#
|
||||||
|
# Karma frontend tests
|
||||||
|
#
|
||||||
|
|
||||||
|
test_karma: build_test_karma test_karma_run
|
||||||
|
|
||||||
|
test_karma_run:
|
||||||
|
COMPOSE_PROJECT_NAME=karma_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
COMPOSE_PROJECT_NAME=karma_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) run --rm test_karma
|
||||||
|
COMPOSE_PROJECT_NAME=karma_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
|
||||||
|
test_karma_build_run: build_test_karma test_karma_run
|
||||||
|
|
||||||
|
#
|
||||||
|
# Frontend tests
|
||||||
|
#
|
||||||
|
|
||||||
|
test_frontend:
|
||||||
|
COMPOSE_PROJECT_NAME=frontend_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
COMPOSE_PROJECT_NAME=frontend_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) run --rm test_frontend
|
||||||
|
COMPOSE_PROJECT_NAME=frontend_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
|
||||||
|
#
|
||||||
|
# Acceptance tests
|
||||||
|
#
|
||||||
|
|
||||||
|
test_acceptance: test_acceptance_app test_acceptance_modules
|
||||||
|
test_acceptance_saas: test_acceptance_app_saas test_acceptance_modules_merged_saas
|
||||||
|
test_acceptance_server_ce: test_acceptance_app_server_ce test_acceptance_modules_merged_server_ce
|
||||||
|
test_acceptance_server_pro: test_acceptance_app_server_pro test_acceptance_modules_merged_server_pro
|
||||||
|
|
||||||
|
TEST_ACCEPTANCE_APP := \
|
||||||
|
test_acceptance_app_saas \
|
||||||
|
test_acceptance_app_server_ce \
|
||||||
|
test_acceptance_app_server_pro \
|
||||||
|
|
||||||
|
test_acceptance_app: $(TEST_ACCEPTANCE_APP)
|
||||||
|
test_acceptance_app_saas: export COMPOSE_PROJECT_NAME=acceptance_test_saas_$(BUILD_DIR_NAME)
|
||||||
|
test_acceptance_app_saas: export SHARELATEX_CONFIG=$(CFG_SAAS)
|
||||||
|
test_acceptance_app_server_ce: export COMPOSE_PROJECT_NAME=acceptance_test_server_ce_$(BUILD_DIR_NAME)
|
||||||
|
test_acceptance_app_server_ce: export SHARELATEX_CONFIG=$(CFG_SERVER_CE)
|
||||||
|
test_acceptance_app_server_pro: export COMPOSE_PROJECT_NAME=acceptance_test_server_pro_$(BUILD_DIR_NAME)
|
||||||
|
test_acceptance_app_server_pro: export SHARELATEX_CONFIG=$(CFG_SERVER_PRO)
|
||||||
|
|
||||||
|
$(TEST_ACCEPTANCE_APP):
|
||||||
|
$(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
$(DOCKER_COMPOSE) run --rm test_acceptance
|
||||||
|
$(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
|
||||||
|
# We are using _make magic_ for turning these file-targets into calls to
|
||||||
|
# sub-Makefiles in the individual modules.
|
||||||
|
# These sub-Makefiles need to be kept in sync with the template, hence we
|
||||||
|
# add a dependency on each modules Makefile and cross-link that to the
|
||||||
|
# template at the very top of this file.
|
||||||
|
# Example: `web$ make modules/server-ce-scripts/test_acceptance_server_ce`
|
||||||
|
# Description: Run the acceptance tests of the server-ce-scripts module in a
|
||||||
|
# Server CE Environment.
|
||||||
|
# Break down:
|
||||||
|
# Target: modules/server-ce-scripts/test_acceptance_server_ce
|
||||||
|
# -> depends on modules/server-ce-scripts/Makefile
|
||||||
|
# -> add environment variable BASE_CONFIG=$(CFG_SERVER_CE)
|
||||||
|
# -> BASE_CONFIG=/app/test/acceptance/config/settings.test.server-ce.js
|
||||||
|
# -> automatic target: `make -C server-ce-scripts test_acceptance_server_ce`
|
||||||
|
# -> automatic target: run `make test_acceptance_server_ce` in module
|
||||||
|
# Target: modules/server-ce-scripts/Makefile
|
||||||
|
# -> depends on Makefile.module
|
||||||
|
# -> automatic target: copies the file when changed
|
||||||
|
TEST_ACCEPTANCE_MODULES = $(MODULE_DIRS:=/test_acceptance)
|
||||||
|
$(TEST_ACCEPTANCE_MODULES): %/test_acceptance: %/Makefile
|
||||||
|
$(TEST_ACCEPTANCE_MODULES): modules/%/test_acceptance:
|
||||||
|
$(MAKE) test_acceptance_module MODULE_NAME=$*
|
||||||
|
|
||||||
|
TEST_ACCEPTANCE_MODULES_SAAS = $(MODULE_DIRS:=/test_acceptance_saas)
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_SAAS): %/test_acceptance_saas: %/Makefile
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_SAAS): export BASE_CONFIG = $(CFG_SAAS)
|
||||||
|
|
||||||
|
# This line adds `/test_acceptance_saas` suffix to all items in $(MODULE_DIRS).
|
||||||
|
TEST_ACCEPTANCE_MODULES_SERVER_CE = $(MODULE_DIRS:=/test_acceptance_server_ce)
|
||||||
|
# This line adds a dependency on the modules Makefile.
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_SERVER_CE): %/test_acceptance_server_ce: %/Makefile
|
||||||
|
# This line adds the environment variable BASE_CONFIG=$(CFG_SERVER_CE) to all
|
||||||
|
# invocations of `web$ make modules/foo/test_acceptance_server_ce`.
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_SERVER_CE): export BASE_CONFIG = $(CFG_SERVER_CE)
|
||||||
|
|
||||||
|
TEST_ACCEPTANCE_MODULES_SERVER_PRO = $(MODULE_DIRS:=/test_acceptance_server_pro)
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_SERVER_PRO): %/test_acceptance_server_pro: %/Makefile
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_SERVER_PRO): export BASE_CONFIG = $(CFG_SERVER_PRO)
|
||||||
|
|
||||||
|
CLEAN_TEST_ACCEPTANCE_MODULES = $(MODULE_DIRS:=/clean_test_acceptance)
|
||||||
|
$(CLEAN_TEST_ACCEPTANCE_MODULES): %/clean_test_acceptance: %/Makefile
|
||||||
|
clean_test_acceptance_modules: $(CLEAN_TEST_ACCEPTANCE_MODULES)
|
||||||
|
clean_ci: clean_test_acceptance_modules
|
||||||
|
|
||||||
|
test_acceptance_module_noop:
|
||||||
|
@echo
|
||||||
|
@echo Module '$(MODULE_NAME)' does not run in ${LABEL}.
|
||||||
|
@echo
|
||||||
|
|
||||||
|
TEST_ACCEPTANCE_MODULE_MAYBE_IN := \
|
||||||
|
test_acceptance_module_maybe_in_saas \
|
||||||
|
test_acceptance_module_maybe_in_server_ce \
|
||||||
|
test_acceptance_module_maybe_in_server_pro \
|
||||||
|
|
||||||
|
test_acceptance_module: $(TEST_ACCEPTANCE_MODULE_MAYBE_IN)
|
||||||
|
test_acceptance_module_maybe_in_saas: export BASE_CONFIG=$(CFG_SAAS)
|
||||||
|
test_acceptance_module_maybe_in_server_ce: export BASE_CONFIG=$(CFG_SERVER_CE)
|
||||||
|
test_acceptance_module_maybe_in_server_pro: export BASE_CONFIG=$(CFG_SERVER_PRO)
|
||||||
|
|
||||||
|
# We need to figure out whether the module is loaded in a given environment.
|
||||||
|
# This information is stored in the (base-)settings.
|
||||||
|
# We get the full list of modules and check for a matching module entry.
|
||||||
|
# Either the grep will find and emit the module, or exits with code 1, which
|
||||||
|
# we handle with a fallback to a noop make target.
|
||||||
|
# Run the node command in a docker-compose container which provides the needed
|
||||||
|
# npm dependencies (from disk in dev-env or from the CI image in CI).
|
||||||
|
# Pick the test_unit service which is very light-weight -- the test_acceptance
|
||||||
|
# service would start mongo/redis.
|
||||||
|
$(TEST_ACCEPTANCE_MODULE_MAYBE_IN): test_acceptance_module_maybe_in_%:
|
||||||
|
$(MAKE) $(shell \
|
||||||
|
SHARELATEX_CONFIG=$(BASE_CONFIG) \
|
||||||
|
$(DOCKER_COMPOSE) run --rm test_unit \
|
||||||
|
node test/acceptance/getModuleTargets test_acceptance_$* \
|
||||||
|
| grep -e /$(MODULE_NAME)/ || echo test_acceptance_module_noop LABEL=$* \
|
||||||
|
)
|
||||||
|
|
||||||
|
# See docs for test_acceptance_server_ce how this works.
|
||||||
|
test_acceptance_module_saas: export BASE_CONFIG = $(CFG_SAAS)
|
||||||
|
test_acceptance_module_saas:
|
||||||
|
$(MAKE) modules/$(MODULE_NAME)/test_acceptance_saas
|
||||||
|
|
||||||
|
test_acceptance_module_server_ce: export BASE_CONFIG = $(CFG_SERVER_CE)
|
||||||
|
test_acceptance_module_server_ce:
|
||||||
|
$(MAKE) modules/$(MODULE_NAME)/test_acceptance_server_ce
|
||||||
|
|
||||||
|
test_acceptance_module_server_pro: export BASE_CONFIG = $(CFG_SERVER_PRO)
|
||||||
|
test_acceptance_module_server_pro:
|
||||||
|
$(MAKE) modules/$(MODULE_NAME)/test_acceptance_server_pro
|
||||||
|
|
||||||
|
# See docs for test_acceptance_server_ce how this works.
|
||||||
|
TEST_ACCEPTANCE_MODULES_MERGED_INNER = $(MODULE_DIRS:=/test_acceptance_merged_inner)
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_MERGED_INNER): %/test_acceptance_merged_inner: %/Makefile
|
||||||
|
test_acceptance_modules_merged_inner:
|
||||||
|
$(MAKE) $(shell \
|
||||||
|
SHARELATEX_CONFIG=$(BASE_CONFIG) \
|
||||||
|
node test/acceptance/getModuleTargets test_acceptance_merged_inner \
|
||||||
|
)
|
||||||
|
|
||||||
|
# inner loop for running saas tests in parallel
|
||||||
|
no_more_targets:
|
||||||
|
|
||||||
|
# If we ever have more than 40 modules, we need to add _5 targets to all the places and have it START at 41.
|
||||||
|
test_acceptance_modules_merged_inner_1: export START=1
|
||||||
|
test_acceptance_modules_merged_inner_2: export START=11
|
||||||
|
test_acceptance_modules_merged_inner_3: export START=21
|
||||||
|
test_acceptance_modules_merged_inner_4: export START=31
|
||||||
|
TEST_ACCEPTANCE_MODULES_MERGED_INNER_SPLIT = \
|
||||||
|
test_acceptance_modules_merged_inner_1 \
|
||||||
|
test_acceptance_modules_merged_inner_2 \
|
||||||
|
test_acceptance_modules_merged_inner_3 \
|
||||||
|
test_acceptance_modules_merged_inner_4 \
|
||||||
|
|
||||||
|
# The node script prints one module per line.
|
||||||
|
# Using tail and head we skip over the first n=START entries and print the last 10.
|
||||||
|
# Finally we check with grep for any targets in a batch and print a fallback if none were found.
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_MERGED_INNER_SPLIT):
|
||||||
|
$(MAKE) $(shell \
|
||||||
|
SHARELATEX_CONFIG=$(BASE_CONFIG) \
|
||||||
|
node test/acceptance/getModuleTargets test_acceptance_merged_inner \
|
||||||
|
| tail -n+$(START) | head -n 10 \
|
||||||
|
| grep -e . || echo no_more_targets \
|
||||||
|
)
|
||||||
|
|
||||||
|
# See docs for test_acceptance_server_ce how this works.
|
||||||
|
test_acceptance_modules_merged_saas: export COMPOSE_PROJECT_NAME = \
|
||||||
|
acceptance_test_modules_merged_saas_$(BUILD_DIR_NAME)
|
||||||
|
test_acceptance_modules_merged_saas: export BASE_CONFIG = $(CFG_SAAS)
|
||||||
|
|
||||||
|
test_acceptance_modules_merged_server_ce: export COMPOSE_PROJECT_NAME = \
|
||||||
|
acceptance_test_modules_merged_server_ce_$(BUILD_DIR_NAME)
|
||||||
|
test_acceptance_modules_merged_server_ce: export BASE_CONFIG = $(CFG_SERVER_CE)
|
||||||
|
|
||||||
|
test_acceptance_modules_merged_server_pro: export COMPOSE_PROJECT_NAME = \
|
||||||
|
acceptance_test_modules_merged_server_pro_$(BUILD_DIR_NAME)
|
||||||
|
test_acceptance_modules_merged_server_pro: export BASE_CONFIG = $(CFG_SERVER_PRO)
|
||||||
|
|
||||||
|
# All these variants run the same command.
|
||||||
|
# Each target has a different set of environment defined above.
|
||||||
|
TEST_ACCEPTANCE_MODULES_MERGED_VARIANTS = \
|
||||||
|
test_acceptance_modules_merged_saas \
|
||||||
|
test_acceptance_modules_merged_server_ce \
|
||||||
|
test_acceptance_modules_merged_server_pro \
|
||||||
|
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_MERGED_VARIANTS):
|
||||||
|
$(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
$(DOCKER_COMPOSE) run --rm test_acceptance make test_acceptance_modules_merged_inner
|
||||||
|
$(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
|
||||||
|
# outer loop for running saas tests in parallel
|
||||||
|
TEST_ACCEPTANCE_MODULES_MERGED_SPLIT_SAAS = \
|
||||||
|
test_acceptance_modules_merged_saas_1 \
|
||||||
|
test_acceptance_modules_merged_saas_2 \
|
||||||
|
test_acceptance_modules_merged_saas_3 \
|
||||||
|
test_acceptance_modules_merged_saas_4 \
|
||||||
|
|
||||||
|
test_acceptance_modules_merged_saas_1: export COMPOSE_PROJECT_NAME = \
|
||||||
|
acceptance_test_modules_merged_saas_1_$(BUILD_DIR_NAME)
|
||||||
|
test_acceptance_modules_merged_saas_2: export COMPOSE_PROJECT_NAME = \
|
||||||
|
acceptance_test_modules_merged_saas_2_$(BUILD_DIR_NAME)
|
||||||
|
test_acceptance_modules_merged_saas_3: export COMPOSE_PROJECT_NAME = \
|
||||||
|
acceptance_test_modules_merged_saas_3_$(BUILD_DIR_NAME)
|
||||||
|
test_acceptance_modules_merged_saas_4: export COMPOSE_PROJECT_NAME = \
|
||||||
|
acceptance_test_modules_merged_saas_4_$(BUILD_DIR_NAME)
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_MERGED_SPLIT_SAAS): export BASE_CONFIG = $(CFG_SAAS)
|
||||||
|
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_MERGED_SPLIT_SAAS): test_acceptance_modules_merged_saas_%:
|
||||||
|
$(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
$(DOCKER_COMPOSE) run --rm test_acceptance make test_acceptance_modules_merged_inner_$*
|
||||||
|
$(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
|
||||||
|
test_acceptance_modules: $(TEST_ACCEPTANCE_MODULES_MERGED_VARIANTS)
|
||||||
|
|
||||||
|
#
|
||||||
|
# CI tests
|
||||||
|
#
|
||||||
|
|
||||||
|
ci:
|
||||||
|
MOCHA_ARGS="--reporter tap" \
|
||||||
|
$(MAKE) test
|
||||||
|
|
||||||
|
#
|
||||||
|
# Lint & format
|
||||||
|
#
|
||||||
|
ORG_PATH = /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
|
||||||
|
RUN_LINT_FORMAT ?= \
|
||||||
|
docker run --rm ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
|
||||||
|
|
||||||
|
NODE_MODULES_PATH := ${PATH}:${PWD}/node_modules/.bin:/app/node_modules/.bin
|
||||||
|
WITH_NODE_MODULES_PATH = \
|
||||||
|
format_backend \
|
||||||
|
format_frontend \
|
||||||
|
format_misc \
|
||||||
|
format_styles \
|
||||||
|
format_test_app_unit \
|
||||||
|
format_test_app_rest \
|
||||||
|
format_test_modules \
|
||||||
|
$(TEST_SUITES) \
|
||||||
|
|
||||||
|
$(WITH_NODE_MODULES_PATH): export PATH=$(NODE_MODULES_PATH)
|
||||||
|
|
||||||
|
lint: lint_backend
|
||||||
|
lint_backend:
|
||||||
|
npx eslint \
|
||||||
|
app.js \
|
||||||
|
'app/**/*.js' \
|
||||||
|
'modules/*/index.js' \
|
||||||
|
'modules/*/app/**/*.js' \
|
||||||
|
--max-warnings=0
|
||||||
|
|
||||||
|
lint: lint_frontend
|
||||||
|
lint_frontend:
|
||||||
|
npx eslint \
|
||||||
|
'frontend/**/*.js' \
|
||||||
|
'modules/*/frontend/**/*.js' \
|
||||||
|
--max-warnings=0
|
||||||
|
|
||||||
|
lint: lint_test
|
||||||
|
lint_test: lint_test_app
|
||||||
|
lint_test_app: lint_test_app_unit
|
||||||
|
lint_test_app_unit:
|
||||||
|
npx eslint \
|
||||||
|
'test/unit/**/*.js' \
|
||||||
|
--max-warnings=0
|
||||||
|
|
||||||
|
lint_test_app: lint_test_app_rest
|
||||||
|
lint_test_app_rest:
|
||||||
|
npx eslint \
|
||||||
|
'test/**/*.js' \
|
||||||
|
--ignore-pattern 'test/unit/**/*.js' \
|
||||||
|
--max-warnings=0
|
||||||
|
|
||||||
|
lint_test: lint_test_modules
|
||||||
|
lint_test_modules:
|
||||||
|
npx eslint \
|
||||||
|
'modules/*/test/**/*.js' \
|
||||||
|
--max-warnings=0
|
||||||
|
|
||||||
|
lint: lint_misc
|
||||||
|
# migrations, scripts, webpack config, karma config
|
||||||
|
lint_misc:
|
||||||
|
npx eslint . \
|
||||||
|
--ignore-pattern app.js \
|
||||||
|
--ignore-pattern 'app/**/*.js' \
|
||||||
|
--ignore-pattern 'modules/*/app/**/*.js' \
|
||||||
|
--ignore-pattern 'modules/*/index.js' \
|
||||||
|
--ignore-pattern 'frontend/**/*.js' \
|
||||||
|
--ignore-pattern 'modules/*/frontend/**/*.js' \
|
||||||
|
--ignore-pattern 'test/**/*.js' \
|
||||||
|
--ignore-pattern 'modules/*/test/**/*.js' \
|
||||||
|
--max-warnings=0
|
||||||
|
|
||||||
|
lint: lint_pug
|
||||||
|
lint_pug:
|
||||||
|
bin/lint_pug_templates
|
||||||
|
|
||||||
|
lint_in_docker:
|
||||||
|
$(RUN_LINT_FORMAT) make lint -j --output-sync
|
||||||
|
|
||||||
|
format: format_js
|
||||||
|
format_js:
|
||||||
|
npm run --silent format
|
||||||
|
|
||||||
|
format: format_styles
|
||||||
|
format_styles:
|
||||||
|
npm run --silent format:styles
|
||||||
|
|
||||||
|
format_fix:
|
||||||
|
npm run --silent format:fix
|
||||||
|
|
||||||
|
format_styles_fix:
|
||||||
|
npm run --silent format:styles:fix
|
||||||
|
|
||||||
|
format_in_docker:
|
||||||
|
$(RUN_LINT_FORMAT) make format -j --output-sync
|
||||||
|
|
||||||
|
#
|
||||||
|
# Build & publish
|
||||||
|
#
|
||||||
|
|
||||||
|
IMAGE_CI ?= ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
|
||||||
|
IMAGE_REPO ?= gcr.io/overleaf-ops/$(PROJECT_NAME)
|
||||||
|
IMAGE_REPO_BRANCH ?= $(IMAGE_REPO):$(BRANCH_NAME)
|
||||||
|
IMAGE_REPO_MAIN ?= $(IMAGE_REPO):main
|
||||||
|
IMAGE_REPO_MASTER ?= $(IMAGE_REPO):master
|
||||||
|
IMAGE_REPO_FINAL ?= $(IMAGE_REPO_BRANCH)-$(BUILD_NUMBER)
|
||||||
|
|
||||||
|
export SENTRY_RELEASE ?= ${COMMIT_SHA}
|
||||||
|
|
||||||
|
build_deps:
|
||||||
|
docker build --pull \
|
||||||
|
--cache-from $(IMAGE_REPO_BRANCH)-deps \
|
||||||
|
--cache-from $(IMAGE_REPO_MAIN)-deps \
|
||||||
|
--cache-from $(IMAGE_REPO_MASTER)-deps \
|
||||||
|
--tag $(IMAGE_REPO_BRANCH)-deps \
|
||||||
|
--target deps \
|
||||||
|
.
|
||||||
|
|
||||||
|
build_dev:
|
||||||
|
docker build \
|
||||||
|
--build-arg SENTRY_RELEASE \
|
||||||
|
--cache-from $(IMAGE_REPO_BRANCH)-deps \
|
||||||
|
--cache-from $(IMAGE_CI)-dev \
|
||||||
|
--tag $(IMAGE_CI) \
|
||||||
|
--tag $(IMAGE_CI)-dev \
|
||||||
|
--target dev \
|
||||||
|
.
|
||||||
|
|
||||||
|
build_webpack:
|
||||||
|
$(MAKE) build_webpack_once \
|
||||||
|
|| $(MAKE) build_webpack_once
|
||||||
|
|
||||||
|
build_webpack_once:
|
||||||
|
docker build \
|
||||||
|
--build-arg SENTRY_RELEASE \
|
||||||
|
--cache-from $(IMAGE_CI)-dev \
|
||||||
|
--cache-from $(IMAGE_CI)-webpack \
|
||||||
|
--tag $(IMAGE_CI)-webpack \
|
||||||
|
--target webpack \
|
||||||
|
.
|
||||||
|
|
||||||
|
build:
|
||||||
|
docker build \
|
||||||
|
--build-arg SENTRY_RELEASE \
|
||||||
|
--cache-from $(IMAGE_CI)-webpack \
|
||||||
|
--cache-from $(IMAGE_REPO_FINAL) \
|
||||||
|
--tag $(IMAGE_REPO_FINAL) \
|
||||||
|
.
|
||||||
|
|
||||||
|
build_test_karma:
|
||||||
|
COMPOSE_PROJECT_NAME=karma_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) build test_karma
|
||||||
|
|
||||||
|
publish:
|
||||||
|
docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
|
||||||
|
|
||||||
|
tar:
|
||||||
|
COMPOSE_PROJECT_NAME=tar_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) run --rm tar
|
||||||
|
COMPOSE_PROJECT_NAME=tar_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0
|
||||||
|
|
||||||
|
MODULE_TARGETS = \
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_SAAS) \
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_SERVER_CE) \
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_SERVER_PRO) \
|
||||||
|
$(TEST_ACCEPTANCE_MODULES_MERGED_INNER) \
|
||||||
|
$(CLEAN_TEST_ACCEPTANCE_MODULES) \
|
||||||
|
$(TEST_UNIT_MODULES) \
|
||||||
|
|
||||||
|
$(MODULE_TARGETS):
|
||||||
|
$(MAKE) -C $(dir $@) $(notdir $@) BUILD_DIR_NAME=$(BUILD_DIR_NAME)
|
||||||
|
|
||||||
|
.PHONY:
|
||||||
|
$(MODULE_TARGETS) \
|
||||||
|
compile_modules compile_modules_full clean_ci \
|
||||||
|
test test_module test_unit test_unit_app \
|
||||||
|
test_unit_modules test_unit_module test_karma test_karma_run \
|
||||||
|
test_karma_build_run test_frontend test_acceptance test_acceptance_app \
|
||||||
|
test_acceptance_modules test_acceptance_module ci format format_fix lint \
|
||||||
|
build build_test_karma publish tar
|
71
services/web/Makefile.module
Normal file
71
services/web/Makefile.module
Normal file
|
@ -0,0 +1,71 @@
|
||||||
|
BUILD_DIR_NAME ?= web
|
||||||
|
MODULE_NAME := $(notdir $(shell pwd))
|
||||||
|
MODULE_DIR := modules/$(MODULE_NAME)
|
||||||
|
PROJECT_NAME = web
|
||||||
|
|
||||||
|
export SHARELATEX_CONFIG = /app/$(MODULE_DIR)/test/acceptance/config/settings.test.js
|
||||||
|
export BASE_CONFIG ?= /app/test/acceptance/config/settings.test.saas.js
|
||||||
|
|
||||||
|
CFG_SAAS=/app/test/acceptance/config/settings.test.saas.js
|
||||||
|
CFG_SERVER_CE=/app/test/acceptance/config/settings.test.server-ce.js
|
||||||
|
CFG_SERVER_PRO=/app/test/acceptance/config/settings.test.server-pro.js
|
||||||
|
|
||||||
|
DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml --log-level ERROR
|
||||||
|
DOCKER_COMPOSE := cd ../../ && \
|
||||||
|
MODULE_DIR=$(MODULE_DIR) \
|
||||||
|
BUILD_NUMBER=$(BUILD_NUMBER) \
|
||||||
|
BRANCH_NAME=$(BRANCH_NAME) \
|
||||||
|
PROJECT_NAME=$(PROJECT_NAME) \
|
||||||
|
MOCHA_GREP=${MOCHA_GREP} \
|
||||||
|
docker-compose ${DOCKER_COMPOSE_FLAGS}
|
||||||
|
|
||||||
|
DOCKER_COMPOSE_TEST_ACCEPTANCE := \
|
||||||
|
export COMPOSE_PROJECT_NAME=acceptance_test_$(BUILD_DIR_NAME)_$(MODULE_NAME) \
|
||||||
|
&& $(DOCKER_COMPOSE)
|
||||||
|
|
||||||
|
DOCKER_COMPOSE_TEST_UNIT := \
|
||||||
|
export COMPOSE_PROJECT_NAME=unit_test_$(BUILD_DIR_NAME)_$(MODULE_NAME) \
|
||||||
|
&& $(DOCKER_COMPOSE)
|
||||||
|
|
||||||
|
ifeq (,$(wildcard test/unit))
|
||||||
|
test_unit:
|
||||||
|
|
||||||
|
else
|
||||||
|
test_unit:
|
||||||
|
${DOCKER_COMPOSE_TEST_UNIT} run --rm test_unit npm -q run test:unit:run_dir -- ${MOCHA_ARGS} $(MODULE_DIR)/test/unit/src
|
||||||
|
${DOCKER_COMPOSE_TEST_UNIT} down
|
||||||
|
|
||||||
|
endif
|
||||||
|
|
||||||
|
ALL_TEST_ACCEPTANCE_VARIANTS := \
|
||||||
|
test_acceptance \
|
||||||
|
test_acceptance_saas \
|
||||||
|
test_acceptance_server_ce \
|
||||||
|
test_acceptance_server_pro \
|
||||||
|
|
||||||
|
ifeq (,$(wildcard test/acceptance))
|
||||||
|
$(ALL_TEST_ACCEPTANCE_VARIANTS) test_acceptance_merged_inner:
|
||||||
|
@echo
|
||||||
|
@echo Module $(MODULE_NAME) does not have acceptance tests.
|
||||||
|
@echo
|
||||||
|
|
||||||
|
clean_test_acceptance:
|
||||||
|
|
||||||
|
else
|
||||||
|
test_acceptance_saas: export BASE_CONFIG = $(CFG_SAAS)
|
||||||
|
test_acceptance_server_ce: export BASE_CONFIG = $(CFG_SERVER_CE)
|
||||||
|
test_acceptance_server_pro: export BASE_CONFIG = $(CFG_SERVER_PRO)
|
||||||
|
|
||||||
|
$(ALL_TEST_ACCEPTANCE_VARIANTS):
|
||||||
|
$(MAKE) --no-print-directory clean_test_acceptance
|
||||||
|
${DOCKER_COMPOSE_TEST_ACCEPTANCE} run --rm test_acceptance npm -q run test:acceptance:run_dir -- ${MOCHA_ARGS} $(MODULE_DIR)/test/acceptance/src
|
||||||
|
$(MAKE) --no-print-directory clean_test_acceptance
|
||||||
|
|
||||||
|
test_acceptance_merged_inner:
|
||||||
|
cd ../../ && \
|
||||||
|
npm -q run test:acceptance:run_dir -- ${MOCHA_ARGS} $(MODULE_DIR)/test/acceptance/src
|
||||||
|
|
||||||
|
clean_test_acceptance:
|
||||||
|
${DOCKER_COMPOSE_TEST_ACCEPTANCE} down -v -t 0
|
||||||
|
|
||||||
|
endif
|
145
services/web/README.md
Normal file
145
services/web/README.md
Normal file
|
@ -0,0 +1,145 @@
|
||||||
|
overleaf/web
|
||||||
|
==============
|
||||||
|
|
||||||
|
overleaf/web is the front-end web service of the open-source web-based collaborative LaTeX editor,
|
||||||
|
[Overleaf](https://www.overleaf.com).
|
||||||
|
It serves all the HTML pages, CSS and javascript to the client. overleaf/web also contains
|
||||||
|
a lot of logic around creating and editing projects, and account management.
|
||||||
|
|
||||||
|
|
||||||
|
The rest of the Overleaf stack, along with information about contributing can be found in the
|
||||||
|
[overleaf/overleaf](https://github.com/overleaf/overleaf) repository.
|
||||||
|
|
||||||
|
Build process
|
||||||
|
----------------
|
||||||
|
|
||||||
|
overleaf/web uses [Grunt](http://gruntjs.com/) to build its front-end related assets.
|
||||||
|
|
||||||
|
Image processing tasks are commented out in the gruntfile and the needed packages aren't presently in the project's `package.json`. If the images need to be processed again (minified and sprited), start by fetching the packages (`npm install grunt-contrib-imagemin grunt-sprity`), then *decomment* the tasks in `Gruntfile.coffee`. After this, the tasks can be called (explicitly, via `grunt imagemin` and `grunt sprity`).
|
||||||
|
|
||||||
|
New Docker-based build process
|
||||||
|
------------------------------
|
||||||
|
|
||||||
|
Note that the Grunt workflow from above should still work, but we are transitioning to a
|
||||||
|
Docker based testing workflow, which is documented below:
|
||||||
|
|
||||||
|
### Running the app
|
||||||
|
|
||||||
|
The app runs natively using npm and Node on the local system:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install
|
||||||
|
$ npm run start
|
||||||
|
```
|
||||||
|
|
||||||
|
*Ideally the app would run in Docker like the tests below, but with host networking not supported in OS X, we need to run it natively until all services are Dockerised.*
|
||||||
|
|
||||||
|
### Running Tests
|
||||||
|
|
||||||
|
To run all tests run:
|
||||||
|
```
|
||||||
|
make test
|
||||||
|
```
|
||||||
|
|
||||||
|
To run both unit and acceptance tests for a module run:
|
||||||
|
```
|
||||||
|
make test_module MODULE=overleaf-integration
|
||||||
|
```
|
||||||
|
|
||||||
|
### Unit Tests
|
||||||
|
|
||||||
|
The test suites run in Docker.
|
||||||
|
|
||||||
|
Unit tests can be run in the `test_unit` container defined in `docker-compose.tests.yml`.
|
||||||
|
|
||||||
|
The makefile contains a short cut to run these:
|
||||||
|
|
||||||
|
```
|
||||||
|
make test_unit
|
||||||
|
```
|
||||||
|
|
||||||
|
During development it is often useful to only run a subset of tests, which can be configured with arguments to the mocha CLI:
|
||||||
|
|
||||||
|
```
|
||||||
|
make test_unit MOCHA_GREP='AuthorizationManager'
|
||||||
|
```
|
||||||
|
|
||||||
|
To run only the unit tests for a single module do:
|
||||||
|
```
|
||||||
|
make test_unit_module MODULE=overleaf-integration
|
||||||
|
```
|
||||||
|
|
||||||
|
Module tests can also use a MOCHA_GREP argument:
|
||||||
|
```
|
||||||
|
make test_unit_module MODULE=overleaf-integration MOCHA_GREP=SSO
|
||||||
|
```
|
||||||
|
|
||||||
|
### Acceptance Tests
|
||||||
|
|
||||||
|
Acceptance tests are run against a live service, which runs in the `acceptance_test` container defined in `docker-compose.tests.yml`.
|
||||||
|
|
||||||
|
To run the tests out-of-the-box, the makefile defines:
|
||||||
|
|
||||||
|
```
|
||||||
|
make test_acceptance
|
||||||
|
```
|
||||||
|
|
||||||
|
However, during development it is often useful to leave the service running for rapid iteration on the acceptance tests. This can be done with:
|
||||||
|
|
||||||
|
```
|
||||||
|
make test_acceptance_app_start_service
|
||||||
|
make test_acceptance_app_run # Run as many times as needed during development
|
||||||
|
make test_acceptance_app_stop_service
|
||||||
|
```
|
||||||
|
|
||||||
|
`make test_acceptance` just runs these three commands in sequence and then runs `make test_acceptance_modules` which performs the tests for each module in the `modules` directory. (Note that there is not currently an equivalent to the `-start` / `-run` x _n_ / `-stop` series for modules.)
|
||||||
|
|
||||||
|
During development it is often useful to only run a subset of tests, which can be configured with arguments to the mocha CLI:
|
||||||
|
|
||||||
|
```
|
||||||
|
make test_acceptance_run MOCHA_GREP='AuthorizationManager'
|
||||||
|
```
|
||||||
|
|
||||||
|
To run only the acceptance tests for a single module do:
|
||||||
|
```
|
||||||
|
make test_acceptance_module MODULE=overleaf-integration
|
||||||
|
```
|
||||||
|
|
||||||
|
Module tests can also use a MOCHA_GREP argument:
|
||||||
|
```
|
||||||
|
make test_acceptance_module MODULE=overleaf-integration MOCHA_GREP=SSO
|
||||||
|
```
|
||||||
|
|
||||||
|
Routes
|
||||||
|
------
|
||||||
|
|
||||||
|
Run `bin/routes` to print out all routes in the project.
|
||||||
|
|
||||||
|
|
||||||
|
License and Credits
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
This project is licensed under the [AGPLv3 license](http://www.gnu.org/licenses/agpl-3.0.html)
|
||||||
|
|
||||||
|
### Stylesheets
|
||||||
|
|
||||||
|
Overleaf is based on [Bootstrap](http://getbootstrap.com/), which is licensed under the
|
||||||
|
[MIT license](http://opensource.org/licenses/MIT).
|
||||||
|
All modifications (`*.less` files in `public/stylesheets`) are also licensed
|
||||||
|
under the MIT license.
|
||||||
|
|
||||||
|
### Artwork
|
||||||
|
|
||||||
|
#### Silk icon set 1.3
|
||||||
|
|
||||||
|
We gratefully acknowledge [Mark James](http://www.famfamfam.com/lab/icons/silk/) for
|
||||||
|
releasing his Silk icon set under the Creative Commons Attribution 2.5 license. Some
|
||||||
|
of these icons are used within Overleaf inside the `public/img/silk` and
|
||||||
|
`public/brand/icons` directories.
|
||||||
|
|
||||||
|
#### IconShock icons
|
||||||
|
|
||||||
|
We gratefully acknowledge [IconShock](http://www.iconshock.com) for use of the icons
|
||||||
|
in the `public/img/iconshock` directory found via
|
||||||
|
[findicons.com](http://findicons.com/icon/498089/height?id=526085#)
|
||||||
|
|
76
services/web/app.js
Normal file
76
services/web/app.js
Normal file
|
@ -0,0 +1,76 @@
|
||||||
|
/* eslint-disable
    max-len,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// Entry point for the web service: initialises metrics and logging first
// (so later requires are instrumented), then connects to Mongo and starts
// the HTTP server when run directly.
const metrics = require('@overleaf/metrics')
metrics.initialize(process.env.METRICS_APP_NAME || 'web')
const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const PlansLocator = require('./app/src/Features/Subscription/PlansLocator')
logger.initialize(process.env.METRICS_APP_NAME || 'web')

// Install custom serializers so log lines render these objects compactly.
const serializers = require('./app/src/infrastructure/LoggerSerializers')
logger.logger.serializers.user = serializers.user
logger.logger.serializers.docs = serializers.docs
logger.logger.serializers.files = serializers.files
logger.logger.serializers.project = serializers.project

if (Settings.sentry && Settings.sentry.dsn != null) {
  logger.initializeErrorReporting(Settings.sentry.dsn)
}

const http = require('http')
const https = require('https')
http.globalAgent.maxSockets = Settings.limits.httpGlobalAgentMaxSockets
https.globalAgent.maxSockets = Settings.limits.httpsGlobalAgentMaxSockets

metrics.memory.monitor(logger)

const Server = require('./app/src/infrastructure/Server')
const mongodb = require('./app/src/infrastructure/mongodb')
const mongoose = require('./app/src/infrastructure/Mongoose')

if (Settings.catchErrors) {
  // Log uncaught exceptions instead of crashing the process.
  process.removeAllListeners('uncaughtException')
  process.on('uncaughtException', error =>
    logger.error({ err: error }, 'uncaughtException')
  )
}

const port = Settings.port || Settings.internal.web.port || 3000
const host = Settings.internal.web.host || 'localhost'
if (!module.parent) {
  // Called directly

  // We want to make sure that we provided a password through the environment.
  if (!process.env.WEB_API_USER || !process.env.WEB_API_PASSWORD) {
    throw new Error('No API user and password provided')
  }

  PlansLocator.ensurePlansAreSetupCorrectly()

  // Only start listening once both database connections are up.
  Promise.all([mongodb.waitForDb(), mongoose.connectionPromise])
    .then(() => {
      Server.server.listen(port, host, function () {
        logger.info(`web starting up, listening on ${host}:${port}`)
        logger.info(`${http.globalAgent.maxSockets} sockets enabled`)
        // wait until the process is ready before monitoring the event loop
        metrics.event_loop.monitor(logger)
      })
    })
    .catch(err => {
      logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
      process.exit(1)
    })
}

// handle SIGTERM for graceful shutdown in kubernetes
process.on('SIGTERM', function (signal) {
  logger.warn({ signal: signal }, 'received signal, shutting down')
  Settings.shuttingDown = true
})

module.exports = Server.server
|
|
@ -0,0 +1,38 @@
|
||||||
|
const metrics = require('@overleaf/metrics')
|
||||||
|
const AnalyticsManager = require('./AnalyticsManager')
|
||||||
|
const SessionManager = require('../Authentication/SessionManager')
|
||||||
|
const GeoIpLookup = require('../../infrastructure/GeoIpLookup')
|
||||||
|
const Features = require('../../infrastructure/Features')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
updateEditingSession(req, res, next) {
|
||||||
|
if (!Features.hasFeature('analytics')) {
|
||||||
|
return res.sendStatus(202)
|
||||||
|
}
|
||||||
|
const userId = SessionManager.getLoggedInUserId(req.session)
|
||||||
|
const { projectId } = req.params
|
||||||
|
let countryCode = null
|
||||||
|
|
||||||
|
if (userId) {
|
||||||
|
GeoIpLookup.getDetails(req.ip, function (err, geoDetails) {
|
||||||
|
if (err) {
|
||||||
|
metrics.inc('analytics_geo_ip_lookup_errors')
|
||||||
|
} else if (geoDetails && geoDetails.country_code) {
|
||||||
|
countryCode = geoDetails.country_code
|
||||||
|
}
|
||||||
|
AnalyticsManager.updateEditingSession(userId, projectId, countryCode)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
res.sendStatus(202)
|
||||||
|
},
|
||||||
|
|
||||||
|
recordEvent(req, res, next) {
|
||||||
|
if (!Features.hasFeature('analytics')) {
|
||||||
|
return res.sendStatus(202)
|
||||||
|
}
|
||||||
|
const userId =
|
||||||
|
SessionManager.getLoggedInUserId(req.session) || req.sessionID
|
||||||
|
AnalyticsManager.recordEvent(userId, req.params.event, req.body)
|
||||||
|
res.sendStatus(202)
|
||||||
|
},
|
||||||
|
}
|
120
services/web/app/src/Features/Analytics/AnalyticsManager.js
Normal file
120
services/web/app/src/Features/Analytics/AnalyticsManager.js
Normal file
|
@ -0,0 +1,120 @@
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
const Metrics = require('../../infrastructure/Metrics')
|
||||||
|
const Queues = require('../../infrastructure/Queues')
|
||||||
|
|
||||||
|
const analyticsEventsQueue = Queues.getAnalyticsEventsQueue()
|
||||||
|
const analyticsEditingSessionsQueue = Queues.getAnalyticsEditingSessionsQueue()
|
||||||
|
const analyticsUserPropertiesQueue = Queues.getAnalyticsUserPropertiesQueue()
|
||||||
|
|
||||||
|
function identifyUser(userId, oldUserId) {
|
||||||
|
if (!userId || !oldUserId) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if (isAnalyticsDisabled() || isSmokeTestUser(userId)) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
Metrics.analyticsQueue.inc({ status: 'adding', event_type: 'identify' })
|
||||||
|
analyticsEventsQueue
|
||||||
|
.add('identify', { userId, oldUserId })
|
||||||
|
.then(() => {
|
||||||
|
Metrics.analyticsQueue.inc({ status: 'added', event_type: 'identify' })
|
||||||
|
})
|
||||||
|
.catch(() => {
|
||||||
|
Metrics.analyticsQueue.inc({ status: 'error', event_type: 'identify' })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function recordEvent(userId, event, segmentation) {
|
||||||
|
if (!userId) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if (isAnalyticsDisabled() || isSmokeTestUser(userId)) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
Metrics.analyticsQueue.inc({ status: 'adding', event_type: 'event' })
|
||||||
|
analyticsEventsQueue
|
||||||
|
.add('event', { userId, event, segmentation })
|
||||||
|
.then(() => {
|
||||||
|
Metrics.analyticsQueue.inc({ status: 'added', event_type: 'event' })
|
||||||
|
})
|
||||||
|
.catch(() => {
|
||||||
|
Metrics.analyticsQueue.inc({ status: 'error', event_type: 'event' })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function updateEditingSession(userId, projectId, countryCode) {
|
||||||
|
if (!userId) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if (isAnalyticsDisabled() || isSmokeTestUser(userId)) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
Metrics.analyticsQueue.inc({
|
||||||
|
status: 'adding',
|
||||||
|
event_type: 'editing-session',
|
||||||
|
})
|
||||||
|
analyticsEditingSessionsQueue
|
||||||
|
.add({ userId, projectId, countryCode })
|
||||||
|
.then(() => {
|
||||||
|
Metrics.analyticsQueue.inc({
|
||||||
|
status: 'added',
|
||||||
|
event_type: 'editing-session',
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.catch(() => {
|
||||||
|
Metrics.analyticsQueue.inc({
|
||||||
|
status: 'error',
|
||||||
|
event_type: 'editing-session',
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function setUserProperty(userId, propertyName, propertyValue) {
|
||||||
|
if (!userId) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if (isAnalyticsDisabled() || isSmokeTestUser(userId)) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (propertyValue === undefined) {
|
||||||
|
throw new Error(
|
||||||
|
'propertyValue cannot be undefined, use null to unset a property'
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
Metrics.analyticsQueue.inc({
|
||||||
|
status: 'adding',
|
||||||
|
event_type: 'user-property',
|
||||||
|
})
|
||||||
|
analyticsUserPropertiesQueue
|
||||||
|
.add({ userId, propertyName, propertyValue })
|
||||||
|
.then(() => {
|
||||||
|
Metrics.analyticsQueue.inc({
|
||||||
|
status: 'added',
|
||||||
|
event_type: 'user-property',
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.catch(() => {
|
||||||
|
Metrics.analyticsQueue.inc({
|
||||||
|
status: 'error',
|
||||||
|
event_type: 'user-property',
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function isSmokeTestUser(userId) {
|
||||||
|
const smokeTestUserId = Settings.smokeTest && Settings.smokeTest.userId
|
||||||
|
return smokeTestUserId != null && userId.toString() === smokeTestUserId
|
||||||
|
}
|
||||||
|
|
||||||
|
function isAnalyticsDisabled() {
|
||||||
|
return !(Settings.analytics && Settings.analytics.enabled)
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
identifyUser,
|
||||||
|
recordEvent,
|
||||||
|
updateEditingSession,
|
||||||
|
setUserProperty,
|
||||||
|
}
|
28
services/web/app/src/Features/Analytics/AnalyticsProxy.js
Normal file
28
services/web/app/src/Features/Analytics/AnalyticsProxy.js
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
const settings = require('@overleaf/settings')
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
const httpProxy = require('express-http-proxy')
|
||||||
|
const URL = require('url')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
call(basePath) {
|
||||||
|
if (!settings.apis.analytics) {
|
||||||
|
return (req, res, next) =>
|
||||||
|
next(
|
||||||
|
new Errors.ServiceNotConfiguredError(
|
||||||
|
'Analytics service not configured'
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return httpProxy(settings.apis.analytics.url, {
|
||||||
|
proxyReqPathResolver(req) {
|
||||||
|
const requestPath = URL.parse(req.url).path
|
||||||
|
return `${basePath}${requestPath}`
|
||||||
|
},
|
||||||
|
proxyReqOptDecorator(proxyReqOpts, srcReq) {
|
||||||
|
proxyReqOpts.headers = {} // unset all headers
|
||||||
|
return proxyReqOpts
|
||||||
|
},
|
||||||
|
})
|
||||||
|
},
|
||||||
|
}
|
|
@ -0,0 +1,125 @@
|
||||||
|
var RefererParser = require('referer-parser')
|
||||||
|
const { URL } = require('url')
|
||||||
|
const AnalyticsManager = require('./AnalyticsManager')
|
||||||
|
|
||||||
|
function clearSource(session) {
|
||||||
|
if (session) {
|
||||||
|
delete session.required_login_for
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const UTM_KEYS = [
|
||||||
|
'utm_campaign',
|
||||||
|
'utm_source',
|
||||||
|
'utm_term',
|
||||||
|
'utm_medium',
|
||||||
|
'utm_count',
|
||||||
|
]
|
||||||
|
|
||||||
|
function parseUtm(query) {
|
||||||
|
var utmValues = {}
|
||||||
|
for (const utmKey of UTM_KEYS) {
|
||||||
|
if (query[utmKey]) {
|
||||||
|
utmValues[utmKey] = query[utmKey]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return Object.keys(utmValues).length > 0 ? utmValues : null
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseReferrer(referrer, url) {
|
||||||
|
if (!referrer) {
|
||||||
|
return {
|
||||||
|
medium: 'direct',
|
||||||
|
detail: 'none',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsedReferrer = new RefererParser(referrer, url)
|
||||||
|
|
||||||
|
const referrerValues = {
|
||||||
|
medium: parsedReferrer.medium,
|
||||||
|
detail: parsedReferrer.referer,
|
||||||
|
}
|
||||||
|
|
||||||
|
if (referrerValues.medium === 'unknown') {
|
||||||
|
try {
|
||||||
|
const referrerHostname = new URL(referrer).hostname
|
||||||
|
if (referrerHostname) {
|
||||||
|
referrerValues.medium = 'link'
|
||||||
|
referrerValues.detail = referrerHostname
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
// ignore referrer parsing errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return referrerValues
|
||||||
|
}
|
||||||
|
|
||||||
|
function setInbound(session, url, query, referrer) {
|
||||||
|
const inboundSession = {
|
||||||
|
referrer: parseReferrer(referrer, url),
|
||||||
|
utm: parseUtm(query),
|
||||||
|
}
|
||||||
|
|
||||||
|
if (inboundSession.referrer || inboundSession.utm) {
|
||||||
|
session.inbound = inboundSession
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function clearInbound(session) {
|
||||||
|
if (session) {
|
||||||
|
delete session.inbound
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function addUserProperties(userId, session) {
|
||||||
|
if (!session) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (session.referal_id) {
|
||||||
|
AnalyticsManager.setUserProperty(
|
||||||
|
userId,
|
||||||
|
`registered-from-bonus-scheme`,
|
||||||
|
true
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (session.required_login_for) {
|
||||||
|
AnalyticsManager.setUserProperty(
|
||||||
|
userId,
|
||||||
|
`registered-from-${session.required_login_for}`,
|
||||||
|
true
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (session.inbound) {
|
||||||
|
if (session.inbound.referrer) {
|
||||||
|
AnalyticsManager.setUserProperty(
|
||||||
|
userId,
|
||||||
|
`registered-from-referrer-${session.inbound.referrer.medium}`,
|
||||||
|
session.inbound.referrer.detail || 'other'
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (session.inbound.utm) {
|
||||||
|
for (const utmKey of UTM_KEYS) {
|
||||||
|
if (session.inbound.utm[utmKey]) {
|
||||||
|
AnalyticsManager.setUserProperty(
|
||||||
|
userId,
|
||||||
|
`registered-from-${utmKey.replace('_', '-')}`,
|
||||||
|
session.inbound.utm[utmKey]
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
clearSource,
|
||||||
|
setInbound,
|
||||||
|
clearInbound,
|
||||||
|
addUserProperties,
|
||||||
|
}
|
|
@ -0,0 +1,55 @@
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const AnalyticsRegistrationSourceHelper = require('./AnalyticsRegistrationSourceHelper')
|
||||||
|
const SessionManager = require('../../Features/Authentication/SessionManager')
|
||||||
|
|
||||||
|
function setSource(source) {
|
||||||
|
return function (req, res, next) {
|
||||||
|
if (req.session) {
|
||||||
|
req.session.required_login_for = source
|
||||||
|
}
|
||||||
|
next()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function clearSource() {
|
||||||
|
return function (req, res, next) {
|
||||||
|
AnalyticsRegistrationSourceHelper.clearSource(req.session)
|
||||||
|
next()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function setInbound() {
|
||||||
|
return function setInbound(req, res, next) {
|
||||||
|
if (req.session.inbound) {
|
||||||
|
return next() // don't overwrite referrer
|
||||||
|
}
|
||||||
|
|
||||||
|
if (SessionManager.isUserLoggedIn(req.session)) {
|
||||||
|
return next() // don't store referrer if user is alread logged in
|
||||||
|
}
|
||||||
|
|
||||||
|
const referrer = req.header('referrer')
|
||||||
|
try {
|
||||||
|
AnalyticsRegistrationSourceHelper.setInbound(
|
||||||
|
req.session,
|
||||||
|
req.url,
|
||||||
|
req.query,
|
||||||
|
referrer
|
||||||
|
)
|
||||||
|
} catch (error) {
|
||||||
|
// log errors and fail silently
|
||||||
|
OError.tag(error, 'failed to parse inbound referrer', {
|
||||||
|
referrer,
|
||||||
|
})
|
||||||
|
logger.warn({ error }, error.message)
|
||||||
|
}
|
||||||
|
next()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
setSource,
|
||||||
|
clearSource,
|
||||||
|
setInbound,
|
||||||
|
}
|
40
services/web/app/src/Features/Analytics/AnalyticsRouter.js
Normal file
40
services/web/app/src/Features/Analytics/AnalyticsRouter.js
Normal file
|
@ -0,0 +1,40 @@
|
||||||
|
const AuthenticationController = require('./../Authentication/AuthenticationController')
|
||||||
|
const AnalyticsController = require('./AnalyticsController')
|
||||||
|
const AnalyticsProxy = require('./AnalyticsProxy')
|
||||||
|
const RateLimiterMiddleware = require('./../Security/RateLimiterMiddleware')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
apply(webRouter, privateApiRouter, publicApiRouter) {
|
||||||
|
webRouter.post(
|
||||||
|
'/event/:event([a-z0-9-_]+)',
|
||||||
|
RateLimiterMiddleware.rateLimit({
|
||||||
|
endpointName: 'analytics-record-event',
|
||||||
|
maxRequests: 200,
|
||||||
|
timeInterval: 60,
|
||||||
|
}),
|
||||||
|
AnalyticsController.recordEvent
|
||||||
|
)
|
||||||
|
|
||||||
|
webRouter.put(
|
||||||
|
'/editingSession/:projectId',
|
||||||
|
RateLimiterMiddleware.rateLimit({
|
||||||
|
endpointName: 'analytics-update-editing-session',
|
||||||
|
params: ['projectId'],
|
||||||
|
maxRequests: 20,
|
||||||
|
timeInterval: 60,
|
||||||
|
}),
|
||||||
|
AnalyticsController.updateEditingSession
|
||||||
|
)
|
||||||
|
|
||||||
|
publicApiRouter.use(
|
||||||
|
'/analytics/uniExternalCollaboration',
|
||||||
|
AuthenticationController.requirePrivateApiAuth(),
|
||||||
|
RateLimiterMiddleware.rateLimit({
|
||||||
|
endpointName: 'analytics-uni-external-collab-proxy',
|
||||||
|
maxRequests: 20,
|
||||||
|
timeInterval: 60,
|
||||||
|
}),
|
||||||
|
AnalyticsProxy.call('/uniExternalCollaboration')
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
|
@ -0,0 +1,509 @@
|
||||||
|
const AuthenticationManager = require('./AuthenticationManager')
|
||||||
|
const SessionManager = require('./SessionManager')
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const LoginRateLimiter = require('../Security/LoginRateLimiter')
|
||||||
|
const UserUpdater = require('../User/UserUpdater')
|
||||||
|
const Metrics = require('@overleaf/metrics')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const querystring = require('querystring')
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
const basicAuth = require('basic-auth-connect')
|
||||||
|
const crypto = require('crypto')
|
||||||
|
const UserHandler = require('../User/UserHandler')
|
||||||
|
const UserSessionsManager = require('../User/UserSessionsManager')
|
||||||
|
const SessionStoreManager = require('../../infrastructure/SessionStoreManager')
|
||||||
|
const Analytics = require('../Analytics/AnalyticsManager')
|
||||||
|
const passport = require('passport')
|
||||||
|
const NotificationsBuilder = require('../Notifications/NotificationsBuilder')
|
||||||
|
const UrlHelper = require('../Helpers/UrlHelper')
|
||||||
|
const AsyncFormHelper = require('../Helpers/AsyncFormHelper')
|
||||||
|
const _ = require('lodash')
|
||||||
|
const UserAuditLogHandler = require('../User/UserAuditLogHandler')
|
||||||
|
const AnalyticsRegistrationSourceHelper = require('../Analytics/AnalyticsRegistrationSourceHelper')
|
||||||
|
const {
|
||||||
|
acceptsJson,
|
||||||
|
} = require('../../infrastructure/RequestContentTypeDetection')
|
||||||
|
|
||||||
|
// Reply 401 with a WWW-Authenticate challenge so API clients (e.g.
// git-bridge) know which authentication scheme to use.
function send401WithChallenge(res) {
  res.setHeader('WWW-Authenticate', 'OverleafLogin')
  res.sendStatus(401)
}
|
||||||
|
|
||||||
|
const AuthenticationController = {
  /**
   * Reduce a full user document to the minimal object stored in the session.
   * Fails when the object lacks _id or email (not a user document).
   */
  serializeUser(user, callback) {
    if (!user._id || !user.email) {
      const err = new Error('serializeUser called with non-user object')
      logger.warn({ user }, err.message)
      return callback(err)
    }
    const lightUser = {
      _id: user._id,
      first_name: user.first_name,
      last_name: user.last_name,
      isAdmin: user.isAdmin,
      staffAccess: user.staffAccess,
      email: user.email,
      referal_id: user.referal_id,
      session_created: new Date().toISOString(),
      ip_address: user._login_req_ip,
      must_reconfirm: user.must_reconfirm,
      v1_id: user.overleaf != null ? user.overleaf.id : undefined,
    }
    callback(null, lightUser)
  },

  // The session already holds the light user object; return it unchanged.
  deserializeUser(user, cb) {
    cb(null, user)
  },

  passportLogin(req, res, next) {
    // This function is middleware which wraps the passport.authenticate middleware,
    // so we can send back our custom `{message: {text: "", type: ""}}` responses on failure,
    // and send a `{redir: ""}` response on success
    passport.authenticate('local', function (err, user, info) {
      if (err) {
        return next(err)
      }
      if (user) {
        // `user` is either a user object or false
        return AuthenticationController.finishLogin(user, req, res, next)
      } else {
        if (info.redir != null) {
          return res.json({ redir: info.redir })
        } else {
          return res.json({ message: info })
        }
      }
    })(req, res, next)
  },

  /**
   * Complete a successful authentication: run module hooks, honour
   * reconfirmation, write the audit log, set up the session and redirect.
   */
  finishLogin(user, req, res, next) {
    if (user === false) {
      return res.redirect('/login')
    } // OAuth2 'state' mismatch

    const Modules = require('../../infrastructure/Modules')
    Modules.hooks.fire(
      'preFinishLogin',
      req,
      res,
      user,
      function (error, results) {
        if (error) {
          return next(error)
        }
        // A hook may take over the response entirely.
        if (results.some(result => result && result.doNotFinish)) {
          return
        }

        if (user.must_reconfirm) {
          return AuthenticationController._redirectToReconfirmPage(
            req,
            res,
            user
          )
        }

        const redir =
          AuthenticationController._getRedirectFromSession(req) || '/project'
        _loginAsyncHandlers(req, user)
        const userId = user._id
        UserAuditLogHandler.addEntry(userId, 'login', userId, req.ip, err => {
          if (err) {
            return next(err)
          }
          _afterLoginSessionSetup(req, user, function (err) {
            if (err) {
              return next(err)
            }
            AuthenticationController._clearRedirectFromSession(req)
            AnalyticsRegistrationSourceHelper.clearSource(req.session)
            AnalyticsRegistrationSourceHelper.clearInbound(req.session)
            AsyncFormHelper.redirect(req, res, redir)
          })
        })
      }
    )
  },

  /**
   * Passport "local" strategy verify callback: rate limit, then check
   * credentials. `done(null, false, info)` signals a login failure with a
   * user-facing message.
   */
  doPassportLogin(req, username, password, done) {
    const email = username.toLowerCase()
    const Modules = require('../../infrastructure/Modules')
    Modules.hooks.fire(
      'preDoPassportLogin',
      req,
      email,
      function (err, infoList) {
        if (err) {
          return done(err)
        }
        // The first hook that returns info short-circuits the login.
        const info = infoList.find(i => i != null)
        if (info != null) {
          return done(null, false, info)
        }
        LoginRateLimiter.processLoginRequest(email, function (err, isAllowed) {
          if (err) {
            return done(err)
          }
          if (!isAllowed) {
            logger.log({ email }, 'too many login requests')
            return done(null, null, {
              text: req.i18n.translate('to_many_login_requests_2_mins'),
              type: 'error',
            })
          }
          AuthenticationManager.authenticate(
            { email },
            password,
            function (error, user) {
              if (error != null) {
                return done(error)
              }
              if (user != null) {
                // async actions
                done(null, user)
              } else {
                AuthenticationController._recordFailedLogin()
                logger.log({ email }, 'failed log in')
                done(null, false, {
                  text: req.i18n.translate('email_or_password_wrong_try_again'),
                  type: 'error',
                })
              }
            }
          )
        })
      }
    )
  },

  /**
   * Notify on IP change and record the latest login IP for the user.
   */
  ipMatchCheck(req, user) {
    if (req.ip !== user.lastLoginIp) {
      NotificationsBuilder.ipMatcherAffiliation(user._id).create(req.ip)
    }
    return UserUpdater.updateUser(user._id.toString(), {
      $set: { lastLoginIp: req.ip },
    })
  },

  /**
   * Middleware factory: require a logged-in session, otherwise 401 (JSON
   * clients) or redirect to login/register.
   */
  requireLogin() {
    const doRequest = function (req, res, next) {
      if (next == null) {
        next = function () {}
      }
      if (!SessionManager.isUserLoggedIn(req.session)) {
        if (acceptsJson(req)) return send401WithChallenge(res)
        return AuthenticationController._redirectToLoginOrRegisterPage(req, res)
      } else {
        req.user = SessionManager.getSessionUser(req.session)
        return next()
      }
    }

    return doRequest
  },

  /**
   * Middleware factory: authenticate via OAuth2 bearer token and attach
   * token/user details to the request.
   */
  requireOauth() {
    // require this here because module may not be included in some versions
    const Oauth2Server = require('../../../../modules/oauth2-server/app/src/Oauth2Server')
    return function (req, res, next) {
      if (next == null) {
        next = function () {}
      }
      const request = new Oauth2Server.Request(req)
      const response = new Oauth2Server.Response(res)
      return Oauth2Server.server.authenticate(
        request,
        response,
        {},
        function (err, token) {
          if (err) {
            // use a 401 status code for malformed header for git-bridge
            if (
              err.code === 400 &&
              err.message === 'Invalid request: malformed authorization header'
            ) {
              err.code = 401
            }
            // send all other errors
            return res
              .status(err.code)
              .json({ error: err.name, error_description: err.message })
          }
          req.oauth = { access_token: token.accessToken }
          req.oauth_token = token
          req.oauth_user = token.user
          return next()
        }
      )
    }
  },

  validateUserSession: function () {
    // Middleware to check that the user's session is still good on key actions,
    // such as opening a project. Could be used to check that session has not
    // exceeded a maximum lifetime (req.session.session_created), or for session
    // hijacking checks (e.g. change of ip address, req.session.ip_address). For
    // now, just check that the session has been loaded from the session store
    // correctly.
    return function (req, res, next) {
      // check that the session store is returning valid results
      if (req.session && !SessionStoreManager.hasValidationToken(req)) {
        // force user to update session
        req.session.regenerate(() => {
          // need to destroy the existing session and generate a new one
          // otherwise they will already be logged in when they are redirected
          // to the login page
          if (acceptsJson(req)) return send401WithChallenge(res)
          AuthenticationController._redirectToLoginOrRegisterPage(req, res)
        })
      } else {
        next()
      }
    }
  },

  // Endpoints exempt from the global login requirement.
  _globalLoginWhitelist: [],
  addEndpointToLoginWhitelist(endpoint) {
    return AuthenticationController._globalLoginWhitelist.push(endpoint)
  },

  /**
   * Middleware: require either a whitelisted endpoint, basic-auth
   * credentials, or a logged-in session.
   */
  requireGlobalLogin(req, res, next) {
    if (
      AuthenticationController._globalLoginWhitelist.includes(
        req._parsedUrl.pathname
      )
    ) {
      return next()
    }

    if (req.headers.authorization != null) {
      AuthenticationController.requirePrivateApiAuth()(req, res, next)
    } else if (SessionManager.isUserLoggedIn(req.session)) {
      next()
    } else {
      logger.log(
        { url: req.url },
        'user trying to access endpoint not in global whitelist'
      )
      if (acceptsJson(req)) return send401WithChallenge(res)
      AuthenticationController.setRedirectInSession(req)
      res.redirect('/login')
    }
  },

  /**
   * Middleware: when adminDomains is configured, reject admin users whose
   * email is missing or outside the allowed domains.
   */
  validateAdmin(req, res, next) {
    const adminDomains = Settings.adminDomains
    if (
      !adminDomains ||
      !(Array.isArray(adminDomains) && adminDomains.length)
    ) {
      return next()
    }
    const user = SessionManager.getSessionUser(req.session)
    if (!(user && user.isAdmin)) {
      return next()
    }
    const email = user.email
    if (email == null) {
      return next(
        new OError('[ValidateAdmin] Admin user without email address', {
          userId: user._id,
        })
      )
    }
    if (!adminDomains.find(domain => email.endsWith(`@${domain}`))) {
      return next(
        new OError('[ValidateAdmin] Admin user with invalid email domain', {
          email: email,
          userId: user._id,
        })
      )
    }
    return next()
  },

  /**
   * Basic-auth middleware factory; compares passwords in constant time.
   */
  requireBasicAuth: function (userDetails) {
    return basicAuth(function (user, pass) {
      const expectedPassword = userDetails[user]
      const isValid =
        expectedPassword &&
        expectedPassword.length === pass.length &&
        crypto.timingSafeEqual(Buffer.from(expectedPassword), Buffer.from(pass))
      if (!isValid) {
        logger.err({ user }, 'invalid login details')
      }
      return isValid
    })
  },

  requirePrivateApiAuth() {
    return AuthenticationController.requireBasicAuth(Settings.httpAuthUsers)
  },

  /**
   * Remember where to send the user after login. Asset and socket paths
   * are never stored; the value is sanitised before being saved.
   */
  setRedirectInSession(req, value) {
    if (value == null) {
      value =
        Object.keys(req.query).length > 0
          ? `${req.path}?${querystring.stringify(req.query)}`
          : `${req.path}`
    }
    if (
      req.session != null &&
      !/^\/(socket.io|js|stylesheets|img)\/.*$/.test(value) &&
      !/^.*\.(png|jpeg|svg)$/.test(value)
    ) {
      const safePath = UrlHelper.getSafeRedirectPath(value)
      return (req.session.postLoginRedirect = safePath)
    }
  },

  _redirectToLoginOrRegisterPage(req, res) {
    if (
      req.query.zipUrl != null ||
      req.query.project_name != null ||
      req.path === '/user/subscription/new'
    ) {
      AuthenticationController._redirectToRegisterPage(req, res)
    } else {
      AuthenticationController._redirectToLoginPage(req, res)
    }
  },

  _redirectToLoginPage(req, res) {
    logger.log(
      { url: req.url },
      'user not logged in so redirecting to login page'
    )
    AuthenticationController.setRedirectInSession(req)
    const url = `/login?${querystring.stringify(req.query)}`
    res.redirect(url)
    Metrics.inc('security.login-redirect')
  },

  _redirectToReconfirmPage(req, res, user) {
    logger.log(
      { url: req.url },
      'user needs to reconfirm so redirecting to reconfirm page'
    )
    req.session.reconfirm_email = user != null ? user.email : undefined
    const redir = '/user/reconfirm'
    AsyncFormHelper.redirect(req, res, redir)
  },

  _redirectToRegisterPage(req, res) {
    logger.log(
      { url: req.url },
      'user not logged in so redirecting to register page'
    )
    AuthenticationController.setRedirectInSession(req)
    const url = `/register?${querystring.stringify(req.query)}`
    res.redirect(url)
    Metrics.inc('security.login-redirect')
  },

  /**
   * Bump lastLoggedIn/loginCount for the user and report the metric.
   * FIX: on update error the callback was previously invoked twice (once
   * with the error, then again after incrementing the success metric);
   * now we return after the error callback.
   */
  _recordSuccessfulLogin(userId, callback) {
    if (callback == null) {
      callback = function () {}
    }
    UserUpdater.updateUser(
      userId.toString(),
      {
        $set: { lastLoggedIn: new Date() },
        $inc: { loginCount: 1 },
      },
      function (error) {
        if (error != null) {
          return callback(error)
        }
        Metrics.inc('user.login.success')
        callback()
      }
    )
  },

  _recordFailedLogin(callback) {
    Metrics.inc('user.login.failed')
    if (callback) callback()
  },

  // Return the sanitised post-login redirect stored on the session, or null.
  _getRedirectFromSession(req) {
    let safePath
    const value = _.get(req, ['session', 'postLoginRedirect'])
    if (value) {
      safePath = UrlHelper.getSafeRedirectPath(value)
    }
    return safePath || null
  },

  _clearRedirectFromSession(req) {
    if (req.session != null) {
      delete req.session.postLoginRedirect
    }
  },
}
|
||||||
|
|
||||||
|
// Complete the session side of a login: attach the user via req.login,
// then destroy and regenerate the session so the client receives a fresh
// session ID (defends against session fixation), copy the surviving keys
// from the old session into the new one, persist it, and register the
// session with UserSessionsManager. Errors at each step are tagged with
// the user id and passed to the callback.
function _afterLoginSessionSetup(req, user, callback) {
  if (callback == null) {
    callback = function () {}
  }
  req.login(user, function (err) {
    if (err) {
      OError.tag(err, 'error from req.login', {
        user_id: user._id,
      })
      return callback(err)
    }
    // Regenerate the session to get a new sessionID (cookie value) to
    // protect against session fixation attacks
    const oldSession = req.session
    req.session.destroy(function (err) {
      if (err) {
        OError.tag(err, 'error when trying to destroy old session', {
          user_id: user._id,
        })
        return callback(err)
      }
      req.sessionStore.generate(req)
      // Note: the validation token is not writable, so it does not get
      // transferred to the new session below.
      // Copy everything except the csrf secret and temp data into the
      // freshly generated session.
      for (const key in oldSession) {
        const value = oldSession[key]
        if (key !== '__tmp' && key !== 'csrfSecret') {
          req.session[key] = value
        }
      }
      req.session.save(function (err) {
        if (err) {
          OError.tag(err, 'error saving regenerated session after login', {
            user_id: user._id,
          })
          return callback(err)
        }
        // Fire-and-forget: session tracking failures are ignored here.
        UserSessionsManager.trackSession(user, req.sessionID, function () {})
        callback(null)
      })
    })
  })
}
|
||||||
|
// Run the post-login side effects: set up login data (best effort — errors
// are only logged), reset the rate limiter, record the login, check the IP
// against the user's known IPs, and emit analytics events. Also marks the
// session as freshly logged in and stashes the request IP on the user.
function _loginAsyncHandlers(req, user) {
  UserHandler.setupLoginData(user, err => {
    if (err != null) {
      // Deliberately non-fatal: the login itself has already succeeded.
      logger.warn({ err }, 'error setting up login data')
    }
  })
  LoginRateLimiter.recordSuccessfulLogin(user.email)
  AuthenticationController._recordSuccessfulLogin(user._id)
  AuthenticationController.ipMatchCheck(req, user)
  Analytics.recordEvent(user._id, 'user-logged-in')
  Analytics.identifyUser(user._id, req.sessionID)
  logger.log(
    { email: user.email, user_id: user._id.toString() },
    'successful log in'
  )
  req.session.justLoggedIn = true
  // capture the request ip for use when creating the session
  return (user._login_req_ip = req.ip)
}
|
||||||
|
|
||||||
|
// Export the controller object (callback-style methods).
module.exports = AuthenticationController
|
|
@ -0,0 +1,9 @@
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
|
||||||
|
// Domain errors raised by authentication validation helpers
// (see AuthenticationManager.validateEmail / validatePassword).
// Both extend the project's BackwardCompatibleError base class.
class InvalidEmailError extends Errors.BackwardCompatibleError {}
class InvalidPasswordError extends Errors.BackwardCompatibleError {}

module.exports = {
  InvalidEmailError,
  InvalidPasswordError,
}
|
|
@ -0,0 +1,227 @@
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
const { User } = require('../../models/User')
|
||||||
|
const { db, ObjectId } = require('../../infrastructure/mongodb')
|
||||||
|
const bcrypt = require('bcrypt')
|
||||||
|
const EmailHelper = require('../Helpers/EmailHelper')
|
||||||
|
const {
|
||||||
|
InvalidEmailError,
|
||||||
|
InvalidPasswordError,
|
||||||
|
} = require('./AuthenticationErrors')
|
||||||
|
const util = require('util')
|
||||||
|
|
||||||
|
// bcrypt work factor; defaults to 12 when not configured in settings.
const BCRYPT_ROUNDS = Settings.security.bcryptRounds || 12
// bcrypt hash minor version passed to genSalt; defaults to 'a'.
const BCRYPT_MINOR_VERSION = Settings.security.bcryptMinorVersion || 'a'
|
||||||
|
|
||||||
|
// Translate a MongoDB write result into a boolean via the callback:
// true when exactly one document was modified, false otherwise.
// Never reports an error.
const _checkWriteResult = function (result, callback) {
  // for MongoDB
  const modifiedExactlyOne = Boolean(result) && result.modifiedCount === 1
  callback(null, modifiedExactlyOne)
}
|
||||||
|
|
||||||
|
// Core password handling: authentication against the stored bcrypt hash,
// password/email validation, and (re)hashing with the configured work factor.
const AuthenticationManager = {
  // Look up a user by `query` and verify `password` against their stored
  // bcrypt hash. Calls back with (null, user) on success, (null, null) when
  // the user is missing, has no hash, or the password does not match.
  // On a successful match the hash may be upgraded via checkRounds.
  authenticate(query, password, callback) {
    // Using Mongoose for legacy reasons here. The returned User instance
    // gets serialized into the session and there may be subtle differences
    // between the user returned by Mongoose vs mongodb (such as default values)
    User.findOne(query, (error, user) => {
      if (error) {
        return callback(error)
      }
      if (!user || !user.hashedPassword) {
        return callback(null, null)
      }
      bcrypt.compare(password, user.hashedPassword, function (error, match) {
        if (error) {
          return callback(error)
        }
        if (!match) {
          return callback(null, null)
        }
        AuthenticationManager.checkRounds(
          user,
          user.hashedPassword,
          password,
          function (err) {
            if (err) {
              return callback(err)
            }
            callback(null, user)
          }
        )
      })
    })
  },

  // Return null when `email` parses as a valid address, otherwise an
  // InvalidEmailError (synchronous).
  validateEmail(email) {
    const parsed = EmailHelper.parseEmail(email)
    if (!parsed) {
      return new InvalidEmailError({ message: 'email not valid' })
    }
    return null
  },

  // validates a password based on a similar set of rules to `complexPassword.js` on the frontend
  // note that `passfield.js` enforces more rules than this, but these are the most commonly set.
  // returns null on success, or an error object.
  validatePassword(password, email) {
    if (password == null) {
      return new InvalidPasswordError({
        message: 'password not set',
        info: { code: 'not_set' },
      })
    }

    // Length limits and the character whitelist can be tuned via
    // Settings.passwordStrengthOptions.
    let allowAnyChars, min, max
    if (Settings.passwordStrengthOptions) {
      allowAnyChars = Settings.passwordStrengthOptions.allowAnyChars === true
      if (Settings.passwordStrengthOptions.length) {
        min = Settings.passwordStrengthOptions.length.min
        max = Settings.passwordStrengthOptions.length.max
      }
    }
    allowAnyChars = !!allowAnyChars
    min = min || 6
    max = max || 72

    // we don't support passwords > 72 characters in length, because bcrypt truncates them
    if (max > 72) {
      max = 72
    }

    if (password.length < min) {
      return new InvalidPasswordError({
        message: 'password is too short',
        info: { code: 'too_short' },
      })
    }
    if (password.length > max) {
      return new InvalidPasswordError({
        message: 'password is too long',
        info: { code: 'too_long' },
      })
    }
    if (
      !allowAnyChars &&
      !AuthenticationManager._passwordCharactersAreValid(password)
    ) {
      return new InvalidPasswordError({
        message: 'password contains an invalid character',
        info: { code: 'invalid_character' },
      })
    }
    // Reject passwords containing the email address or its local part.
    if (typeof email === 'string' && email !== '') {
      const startOfEmail = email.split('@')[0]
      if (
        password.indexOf(email) !== -1 ||
        password.indexOf(startOfEmail) !== -1
      ) {
        return new InvalidPasswordError({
          message: 'password contains part of email address',
          info: { code: 'contains_email' },
        })
      }
    }
    return null
  },

  // Public entry point for changing a password; delegates to the v2 path.
  setUserPassword(user, password, callback) {
    AuthenticationManager.setUserPasswordInV2(user, password, callback)
  },

  // If the stored hash uses fewer rounds than the configured BCRYPT_ROUNDS,
  // rehash the (already verified) plaintext password at the new strength.
  checkRounds(user, hashedPassword, password, callback) {
    // Temporarily disable this function, TODO: re-enable this
    if (Settings.security.disableBcryptRoundsUpgrades) {
      return callback()
    }
    // check current number of rounds and rehash if necessary
    const currentRounds = bcrypt.getRounds(hashedPassword)
    if (currentRounds < BCRYPT_ROUNDS) {
      AuthenticationManager.setUserPassword(user, password, callback)
    } else {
      callback()
    }
  },

  // Hash `password` with a fresh salt at the configured rounds/minor
  // version. Calls back with (error, hash).
  hashPassword(password, callback) {
    bcrypt.genSalt(BCRYPT_ROUNDS, BCRYPT_MINOR_VERSION, function (error, salt) {
      if (error) {
        return callback(error)
      }
      bcrypt.hash(password, salt, callback)
    })
  },

  // Validate and hash `password`, then store it on the user document,
  // removing any legacy plaintext `password` field. Calls back with
  // (error, didUpdate) where didUpdate is true only when exactly one
  // document was modified.
  setUserPasswordInV2(user, password, callback) {
    if (!user || !user.email || !user._id) {
      return callback(new Error('invalid user object'))
    }
    const validationError = this.validatePassword(password, user.email)
    if (validationError) {
      return callback(validationError)
    }
    this.hashPassword(password, function (error, hash) {
      if (error) {
        return callback(error)
      }
      db.users.updateOne(
        {
          _id: ObjectId(user._id.toString()),
        },
        {
          $set: {
            hashedPassword: hash,
          },
          $unset: {
            password: true,
          },
        },
        function (updateError, result) {
          if (updateError) {
            return callback(updateError)
          }
          _checkWriteResult(result, callback)
        }
      )
    })
  },

  // Return true when every character of `password` appears in one of the
  // allowed character sets (configurable, with the defaults below).
  _passwordCharactersAreValid(password) {
    let digits, letters, lettersUp, symbols
    if (
      Settings.passwordStrengthOptions &&
      Settings.passwordStrengthOptions.chars
    ) {
      digits = Settings.passwordStrengthOptions.chars.digits
      letters = Settings.passwordStrengthOptions.chars.letters
      lettersUp = Settings.passwordStrengthOptions.chars.letters_up
      symbols = Settings.passwordStrengthOptions.chars.symbols
    }
    digits = digits || '1234567890'
    letters = letters || 'abcdefghijklmnopqrstuvwxyz'
    lettersUp = lettersUp || 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
    symbols = symbols || '@#$%^&*()-_=+[]{};:<>/?!£€.,'

    for (let charIndex = 0; charIndex <= password.length - 1; charIndex++) {
      if (
        digits.indexOf(password[charIndex]) === -1 &&
        letters.indexOf(password[charIndex]) === -1 &&
        lettersUp.indexOf(password[charIndex]) === -1 &&
        symbols.indexOf(password[charIndex]) === -1
      ) {
        return false
      }
    }
    return true
  },
}

// Promise-returning wrappers for the callback API above.
AuthenticationManager.promises = {
  authenticate: util.promisify(AuthenticationManager.authenticate),
  hashPassword: util.promisify(AuthenticationManager.hashPassword),
  setUserPassword: util.promisify(AuthenticationManager.setUserPassword),
}

module.exports = AuthenticationManager
|
|
@ -0,0 +1,46 @@
|
||||||
|
const _ = require('lodash')
|
||||||
|
|
||||||
|
// Helpers for reading and mutating the user record stored on an Express
// session (either directly at `session.user` or under `session.passport.user`).
const SessionManager = {
  // Return the user object from the session, preferring the direct `user`
  // key over the passport namespace. Returns null when neither is present.
  getSessionUser(session) {
    const directUser = _.get(session, ['user'])
    if (directUser) {
      return directUser
    }
    const passportUser = _.get(session, ['passport', 'user'])
    return passportUser || null
  },

  // Copy each property of `props` onto the session's user object, if one
  // exists. No-op (returns undefined) when there is no session user.
  setInSessionUser(session, props) {
    const sessionUser = SessionManager.getSessionUser(session)
    if (!sessionUser) {
      return
    }
    for (const key in props) {
      sessionUser[key] = props[key]
    }
    return null
  },

  // A session counts as logged in when it yields a user id that is not
  // null, undefined, or false.
  isUserLoggedIn(session) {
    const userId = SessionManager.getLoggedInUserId(session)
    return userId !== null && userId !== undefined && userId !== false
  },

  // Return the session user's `_id`, or null when no user is stored.
  getLoggedInUserId(session) {
    const user = SessionManager.getSessionUser(session)
    return user ? user._id : null
  },

  // Return the session user's v1 id, or null when absent.
  getLoggedInUserV1Id(session) {
    const user = SessionManager.getSessionUser(session)
    if (user == null || user.v1_id == null) {
      return null
    }
    return user.v1_id
  },
}

module.exports = SessionManager
|
|
@ -0,0 +1,295 @@
|
||||||
|
const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter')
|
||||||
|
const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler')
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const { User } = require('../../models/User')
|
||||||
|
const PrivilegeLevels = require('./PrivilegeLevels')
|
||||||
|
const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler')
|
||||||
|
const PublicAccessLevels = require('./PublicAccessLevels')
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
const { ObjectId } = require('mongodb')
|
||||||
|
const { promisifyAll } = require('../../util/promises')
|
||||||
|
|
||||||
|
// Decide what a user (or anonymous visitor) may do with a project, combining
// direct collaboration membership, site-admin status, legacy public access
// levels, and token-based sharing.
const AuthorizationManager = {
  // Synchronous: a user is "restricted" (limited project view) when they
  // have no access, or only read-only access gained via a token or
  // anonymously.
  isRestrictedUser(userId, privilegeLevel, isTokenMember) {
    if (privilegeLevel === PrivilegeLevels.NONE) {
      return true
    }
    return (
      privilegeLevel === PrivilegeLevels.READ_ONLY && (isTokenMember || !userId)
    )
  },

  // Async variant: resolve the privilege level and token membership for
  // the project, then apply isRestrictedUser. Calls back with
  // (err, isRestricted).
  isRestrictedUserForProject(userId, projectId, token, callback) {
    AuthorizationManager.getPrivilegeLevelForProject(
      userId,
      projectId,
      token,
      (err, privilegeLevel) => {
        if (err) {
          return callback(err)
        }
        CollaboratorsHandler.userIsTokenMember(
          userId,
          projectId,
          (err, isTokenMember) => {
            if (err) {
              return callback(err)
            }
            callback(
              null,
              AuthorizationManager.isRestrictedUser(
                userId,
                privilegeLevel,
                isTokenMember
              )
            )
          }
        )
      }
    )
  },

  // Fetch the project's public access level. Errors on an invalid id and
  // yields NotFoundError when the project does not exist.
  getPublicAccessLevel(projectId, callback) {
    if (!ObjectId.isValid(projectId)) {
      return callback(new Error('invalid project id'))
    }
    // Note, the Project property in the DB is `publicAccesLevel`, without the second `s`
    ProjectGetter.getProject(
      projectId,
      { publicAccesLevel: 1 },
      function (error, project) {
        if (error) {
          return callback(error)
        }
        if (!project) {
          return callback(
            new Errors.NotFoundError(`no project found with id ${projectId}`)
          )
        }
        callback(null, project.publicAccesLevel)
      }
    )
  },

  // Get the privilege level that the user has for the project
  // Returns:
  // * privilegeLevel: "owner", "readAndWrite", or "readOnly" if the user has
  //   access. false if the user does not have access
  // * becausePublic: true if the access level is only because the project is public.
  // * becauseSiteAdmin: true if access level is only because user is admin
  getPrivilegeLevelForProject(userId, projectId, token, callback) {
    if (userId) {
      AuthorizationManager.getPrivilegeLevelForProjectWithUser(
        userId,
        projectId,
        token,
        callback
      )
    } else {
      AuthorizationManager.getPrivilegeLevelForProjectWithoutUser(
        projectId,
        token,
        callback
      )
    }
  },

  // User is present, get their privilege level from database
  // Resolution order: direct collaborator level, then site-admin override
  // (treated as owner), then the legacy public access level.
  getPrivilegeLevelForProjectWithUser(userId, projectId, token, callback) {
    CollaboratorsGetter.getMemberIdPrivilegeLevel(
      userId,
      projectId,
      function (error, privilegeLevel) {
        if (error) {
          return callback(error)
        }
        if (privilegeLevel && privilegeLevel !== PrivilegeLevels.NONE) {
          // The user has direct access
          return callback(null, privilegeLevel, false, false)
        }
        AuthorizationManager.isUserSiteAdmin(userId, function (error, isAdmin) {
          if (error) {
            return callback(error)
          }
          if (isAdmin) {
            return callback(null, PrivilegeLevels.OWNER, false, true)
          }
          // Legacy public-access system
          // User is present (not anonymous), but does not have direct access
          AuthorizationManager.getPublicAccessLevel(
            projectId,
            function (err, publicAccessLevel) {
              if (err) {
                return callback(err)
              }
              if (publicAccessLevel === PublicAccessLevels.READ_ONLY) {
                return callback(null, PrivilegeLevels.READ_ONLY, true, false)
              }
              if (publicAccessLevel === PublicAccessLevels.READ_AND_WRITE) {
                return callback(
                  null,
                  PrivilegeLevels.READ_AND_WRITE,
                  true,
                  false
                )
              }
              callback(null, PrivilegeLevels.NONE, false, false)
            }
          )
        })
      }
    )
  },

  // User is Anonymous, Try Token-based access
  getPrivilegeLevelForProjectWithoutUser(projectId, token, callback) {
    AuthorizationManager.getPublicAccessLevel(
      projectId,
      function (err, publicAccessLevel) {
        if (err) {
          return callback(err)
        }
        if (publicAccessLevel === PublicAccessLevels.READ_ONLY) {
          // Legacy public read-only access for anonymous user
          return callback(null, PrivilegeLevels.READ_ONLY, true, false)
        }
        if (publicAccessLevel === PublicAccessLevels.READ_AND_WRITE) {
          // Legacy public read-write access for anonymous user
          return callback(null, PrivilegeLevels.READ_AND_WRITE, true, false)
        }
        if (publicAccessLevel === PublicAccessLevels.TOKEN_BASED) {
          return AuthorizationManager.getPrivilegeLevelForProjectWithToken(
            projectId,
            token,
            callback
          )
        }
        // Deny anonymous user access
        callback(null, PrivilegeLevels.NONE, false, false)
      }
    )
  },

  getPrivilegeLevelForProjectWithToken(projectId, token, callback) {
    // Anonymous users can have read-only access to token-based projects,
    // while read-write access must be logged in,
    // unless the `enableAnonymousReadAndWriteSharing` setting is enabled
    TokenAccessHandler.validateTokenForAnonymousAccess(
      projectId,
      token,
      function (err, isValidReadAndWrite, isValidReadOnly) {
        if (err) {
          return callback(err)
        }
        if (isValidReadOnly) {
          // Grant anonymous user read-only access
          return callback(null, PrivilegeLevels.READ_ONLY, false, false)
        }
        if (isValidReadAndWrite) {
          // Grant anonymous user read-and-write access
          return callback(null, PrivilegeLevels.READ_AND_WRITE, false, false)
        }
        // Deny anonymous access
        callback(null, PrivilegeLevels.NONE, false, false)
      }
    )
  },

  // True when the user holds any of owner / read-write / read-only.
  canUserReadProject(userId, projectId, token, callback) {
    AuthorizationManager.getPrivilegeLevelForProject(
      userId,
      projectId,
      token,
      function (error, privilegeLevel) {
        if (error) {
          return callback(error)
        }
        callback(
          null,
          [
            PrivilegeLevels.OWNER,
            PrivilegeLevels.READ_AND_WRITE,
            PrivilegeLevels.READ_ONLY,
          ].includes(privilegeLevel)
        )
      }
    )
  },

  // True when the user holds owner or read-write access (public access
  // included).
  canUserWriteProjectContent(userId, projectId, token, callback) {
    AuthorizationManager.getPrivilegeLevelForProject(
      userId,
      projectId,
      token,
      function (error, privilegeLevel) {
        if (error) {
          return callback(error)
        }
        callback(
          null,
          [PrivilegeLevels.OWNER, PrivilegeLevels.READ_AND_WRITE].includes(
            privilegeLevel
          )
        )
      }
    )
  },

  // True for owners, or for read-write collaborators whose access is NOT
  // merely due to the project being public.
  canUserWriteProjectSettings(userId, projectId, token, callback) {
    AuthorizationManager.getPrivilegeLevelForProject(
      userId,
      projectId,
      token,
      function (error, privilegeLevel, becausePublic) {
        if (error) {
          return callback(error)
        }
        if (privilegeLevel === PrivilegeLevels.OWNER) {
          return callback(null, true)
        }
        if (
          privilegeLevel === PrivilegeLevels.READ_AND_WRITE &&
          !becausePublic
        ) {
          return callback(null, true)
        }
        callback(null, false)
      }
    )
  },

  // True only for owners; also reports whether that is due to site-admin
  // status (second result argument).
  canUserAdminProject(userId, projectId, token, callback) {
    AuthorizationManager.getPrivilegeLevelForProject(
      userId,
      projectId,
      token,
      function (error, privilegeLevel, becausePublic, becauseSiteAdmin) {
        if (error) {
          return callback(error)
        }
        callback(
          null,
          privilegeLevel === PrivilegeLevels.OWNER,
          becauseSiteAdmin
        )
      }
    )
  },

  // Look up the user's isAdmin flag; false for missing userId or user.
  isUserSiteAdmin(userId, callback) {
    if (!userId) {
      return callback(null, false)
    }
    User.findOne({ _id: userId }, { isAdmin: 1 }, function (error, user) {
      if (error) {
        return callback(error)
      }
      callback(null, (user && user.isAdmin) === true)
    })
  },
}

module.exports = AuthorizationManager
// isRestrictedUser is synchronous, so it is excluded from promisification.
module.exports.promises = promisifyAll(AuthorizationManager, {
  without: 'isRestrictedUser',
})
|
|
@ -0,0 +1,272 @@
|
||||||
|
let AuthorizationMiddleware
|
||||||
|
const AuthorizationManager = require('./AuthorizationManager')
|
||||||
|
const async = require('async')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const { ObjectId } = require('mongodb')
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
const HttpErrorHandler = require('../Errors/HttpErrorHandler')
|
||||||
|
const AuthenticationController = require('../Authentication/AuthenticationController')
|
||||||
|
const SessionManager = require('../Authentication/SessionManager')
|
||||||
|
const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler')
|
||||||
|
|
||||||
|
module.exports = AuthorizationMiddleware = {
|
||||||
|
ensureUserCanReadMultipleProjects(req, res, next) {
|
||||||
|
const projectIds = (req.query.project_ids || '').split(',')
|
||||||
|
AuthorizationMiddleware._getUserId(req, function (error, userId) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
// Remove the projects we have access to. Note rejectSeries doesn't use
|
||||||
|
// errors in callbacks
|
||||||
|
async.rejectSeries(
|
||||||
|
projectIds,
|
||||||
|
function (projectId, cb) {
|
||||||
|
const token = TokenAccessHandler.getRequestToken(req, projectId)
|
||||||
|
AuthorizationManager.canUserReadProject(
|
||||||
|
userId,
|
||||||
|
projectId,
|
||||||
|
token,
|
||||||
|
function (error, canRead) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
cb(canRead)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
function (unauthorizedProjectIds) {
|
||||||
|
if (unauthorizedProjectIds.length > 0) {
|
||||||
|
return AuthorizationMiddleware.redirectToRestricted(req, res, next)
|
||||||
|
}
|
||||||
|
next()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
blockRestrictedUserFromProject(req, res, next) {
|
||||||
|
AuthorizationMiddleware._getUserAndProjectId(
|
||||||
|
req,
|
||||||
|
function (error, userId, projectId) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
const token = TokenAccessHandler.getRequestToken(req, projectId)
|
||||||
|
AuthorizationManager.isRestrictedUserForProject(
|
||||||
|
userId,
|
||||||
|
projectId,
|
||||||
|
token,
|
||||||
|
(err, isRestrictedUser) => {
|
||||||
|
if (err) {
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
if (isRestrictedUser) {
|
||||||
|
return res.sendStatus(403)
|
||||||
|
}
|
||||||
|
next()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
ensureUserCanReadProject(req, res, next) {
|
||||||
|
AuthorizationMiddleware._getUserAndProjectId(
|
||||||
|
req,
|
||||||
|
function (error, userId, projectId) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
const token = TokenAccessHandler.getRequestToken(req, projectId)
|
||||||
|
AuthorizationManager.canUserReadProject(
|
||||||
|
userId,
|
||||||
|
projectId,
|
||||||
|
token,
|
||||||
|
function (error, canRead) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
if (canRead) {
|
||||||
|
logger.log(
|
||||||
|
{ userId, projectId },
|
||||||
|
'allowing user read access to project'
|
||||||
|
)
|
||||||
|
return next()
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ userId, projectId },
|
||||||
|
'denying user read access to project'
|
||||||
|
)
|
||||||
|
HttpErrorHandler.forbidden(req, res)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
ensureUserCanWriteProjectSettings(req, res, next) {
|
||||||
|
AuthorizationMiddleware._getUserAndProjectId(
|
||||||
|
req,
|
||||||
|
function (error, userId, projectId) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
const token = TokenAccessHandler.getRequestToken(req, projectId)
|
||||||
|
AuthorizationManager.canUserWriteProjectSettings(
|
||||||
|
userId,
|
||||||
|
projectId,
|
||||||
|
token,
|
||||||
|
function (error, canWrite) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
if (canWrite) {
|
||||||
|
logger.log(
|
||||||
|
{ userId, projectId },
|
||||||
|
'allowing user write access to project settings'
|
||||||
|
)
|
||||||
|
return next()
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ userId, projectId },
|
||||||
|
'denying user write access to project settings'
|
||||||
|
)
|
||||||
|
HttpErrorHandler.forbidden(req, res)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
ensureUserCanWriteProjectContent(req, res, next) {
|
||||||
|
AuthorizationMiddleware._getUserAndProjectId(
|
||||||
|
req,
|
||||||
|
function (error, userId, projectId) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
const token = TokenAccessHandler.getRequestToken(req, projectId)
|
||||||
|
AuthorizationManager.canUserWriteProjectContent(
|
||||||
|
userId,
|
||||||
|
projectId,
|
||||||
|
token,
|
||||||
|
function (error, canWrite) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
if (canWrite) {
|
||||||
|
logger.log(
|
||||||
|
{ userId, projectId },
|
||||||
|
'allowing user write access to project content'
|
||||||
|
)
|
||||||
|
return next()
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ userId, projectId },
|
||||||
|
'denying user write access to project settings'
|
||||||
|
)
|
||||||
|
HttpErrorHandler.forbidden(req, res)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
ensureUserCanAdminProject(req, res, next) {
|
||||||
|
AuthorizationMiddleware._getUserAndProjectId(
|
||||||
|
req,
|
||||||
|
function (error, userId, projectId) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
const token = TokenAccessHandler.getRequestToken(req, projectId)
|
||||||
|
AuthorizationManager.canUserAdminProject(
|
||||||
|
userId,
|
||||||
|
projectId,
|
||||||
|
token,
|
||||||
|
function (error, canAdmin) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
if (canAdmin) {
|
||||||
|
logger.log(
|
||||||
|
{ userId, projectId },
|
||||||
|
'allowing user admin access to project'
|
||||||
|
)
|
||||||
|
return next()
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ userId, projectId },
|
||||||
|
'denying user admin access to project'
|
||||||
|
)
|
||||||
|
HttpErrorHandler.forbidden(req, res)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
ensureUserIsSiteAdmin(req, res, next) {
|
||||||
|
AuthorizationMiddleware._getUserId(req, function (error, userId) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
AuthorizationManager.isUserSiteAdmin(userId, function (error, isAdmin) {
|
||||||
|
if (error) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
if (isAdmin) {
|
||||||
|
logger.log({ userId }, 'allowing user admin access to site')
|
||||||
|
return next()
|
||||||
|
}
|
||||||
|
logger.log({ userId }, 'denying user admin access to site')
|
||||||
|
AuthorizationMiddleware.redirectToRestricted(req, res, next)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_getUserAndProjectId(req, callback) {
|
||||||
|
const projectId = req.params.project_id || req.params.Project_id
|
||||||
|
if (!projectId) {
|
||||||
|
return callback(new Error('Expected project_id in request parameters'))
|
||||||
|
}
|
||||||
|
if (!ObjectId.isValid(projectId)) {
|
||||||
|
return callback(
|
||||||
|
new Errors.NotFoundError(`invalid projectId: ${projectId}`)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
AuthorizationMiddleware._getUserId(req, function (error, userId) {
|
||||||
|
if (error) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
callback(null, userId, projectId)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_getUserId(req, callback) {
|
||||||
|
const userId =
|
||||||
|
SessionManager.getLoggedInUserId(req.session) ||
|
||||||
|
(req.oauth_user && req.oauth_user._id) ||
|
||||||
|
null
|
||||||
|
callback(null, userId)
|
||||||
|
},
|
||||||
|
|
||||||
|
redirectToRestricted(req, res, next) {
|
||||||
|
// TODO: move this to throwing ForbiddenError
|
||||||
|
res.redirect(
|
||||||
|
`/restricted?from=${encodeURIComponent(res.locals.currentUrl)}`
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
restricted(req, res, next) {
|
||||||
|
if (SessionManager.isUserLoggedIn(req.session)) {
|
||||||
|
return res.render('user/restricted', { title: 'restricted' })
|
||||||
|
}
|
||||||
|
const { from } = req.query
|
||||||
|
logger.log({ from }, 'redirecting to login')
|
||||||
|
if (from) {
|
||||||
|
AuthenticationController.setRedirectInSession(req, from)
|
||||||
|
}
|
||||||
|
res.redirect('/login')
|
||||||
|
},
|
||||||
|
}
|
|
@ -0,0 +1,8 @@
|
||||||
|
// Privilege levels a user can hold on a project.
const PrivilegeLevels = {
  NONE: false, // deliberately falsy, so it reads naturally in permission checks
  READ_ONLY: 'readOnly',
  READ_AND_WRITE: 'readAndWrite',
  OWNER: 'owner',
}

module.exports = PrivilegeLevels
|
|
@ -0,0 +1,6 @@
|
||||||
|
// Public access levels a project can be set to.
const PublicAccessLevels = {
  READ_ONLY: 'readOnly', // LEGACY
  READ_AND_WRITE: 'readAndWrite', // LEGACY
  PRIVATE: 'private',
  TOKEN_BASED: 'tokenBased',
}

module.exports = PublicAccessLevels
|
5
services/web/app/src/Features/Authorization/Sources.js
Normal file
5
services/web/app/src/Features/Authorization/Sources.js
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
// How a user came to be associated with a project.
const Sources = {
  INVITE: 'invite',
  TOKEN: 'token',
  OWNER: 'owner',
}

module.exports = Sources
|
|
@ -0,0 +1,56 @@
|
||||||
|
const BetaProgramHandler = require('./BetaProgramHandler')
const OError = require('@overleaf/o-error')
const UserGetter = require('../User/UserGetter')
const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const SessionManager = require('../Authentication/SessionManager')

// Shared implementation for the opt-in / opt-out endpoints: both validate
// the session user, delegate to the given handler method and redirect back
// to the beta participation page. (The two actions were previously
// copy-pasted; only the handler method and log message differ.)
function _setBetaStatus(req, res, next, handlerMethod, logMessage) {
  const userId = SessionManager.getLoggedInUserId(req.session)
  logger.log({ userId }, logMessage)
  if (userId == null) {
    return next(new Error('no user id in session'))
  }
  handlerMethod(userId, function (err) {
    if (err) {
      return next(err)
    }
    res.redirect('/beta/participate')
  })
}

const BetaProgramController = {
  // Flag the logged-in user as a beta-program participant.
  optIn(req, res, next) {
    _setBetaStatus(
      req,
      res,
      next,
      BetaProgramHandler.optIn,
      'user opting in to beta program'
    )
  },

  // Remove the beta-program flag from the logged-in user.
  optOut(req, res, next) {
    _setBetaStatus(
      req,
      res,
      next,
      BetaProgramHandler.optOut,
      'user opting out of beta program'
    )
  },

  // Render the beta participation page for the logged-in user.
  optInPage(req, res, next) {
    const userId = SessionManager.getLoggedInUserId(req.session)
    logger.log({ user_id: userId }, 'showing beta participation page for user')
    UserGetter.getUser(userId, function (err, user) {
      if (err) {
        OError.tag(err, 'error fetching user', {
          userId,
        })
        return next(err)
      }
      res.render('beta_program/opt_in', {
        title: 'sharelatex_beta_program',
        user,
        languages: Settings.languages,
      })
    })
  },
}

module.exports = BetaProgramController
|
|
@ -0,0 +1,28 @@
|
||||||
|
const { callbackify } = require('util')
|
||||||
|
const metrics = require('@overleaf/metrics')
|
||||||
|
const UserUpdater = require('../User/UserUpdater')
|
||||||
|
|
||||||
|
async function optIn(userId) {
|
||||||
|
await UserUpdater.promises.updateUser(userId, { $set: { betaProgram: true } })
|
||||||
|
metrics.inc('beta-program.opt-in')
|
||||||
|
}
|
||||||
|
|
||||||
|
async function optOut(userId) {
|
||||||
|
await UserUpdater.promises.updateUser(userId, {
|
||||||
|
$set: { betaProgram: false },
|
||||||
|
})
|
||||||
|
metrics.inc('beta-program.opt-out')
|
||||||
|
}
|
||||||
|
|
||||||
|
const BetaProgramHandler = {
|
||||||
|
optIn: callbackify(optIn),
|
||||||
|
|
||||||
|
optOut: callbackify(optOut),
|
||||||
|
}
|
||||||
|
|
||||||
|
BetaProgramHandler.promises = {
|
||||||
|
optIn,
|
||||||
|
optOut,
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = BetaProgramHandler
|
|
@ -0,0 +1,82 @@
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const url = require('url')
|
||||||
|
const settings = require('@overleaf/settings')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const V1Api = require('../V1/V1Api')
|
||||||
|
const sanitizeHtml = require('sanitize-html')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getBrandVariationById,
|
||||||
|
}
|
||||||
|
|
||||||
|
function getBrandVariationById(brandVariationId, callback) {
|
||||||
|
if (brandVariationId == null || brandVariationId === '') {
|
||||||
|
return callback(new Error('Branding variation id not provided'))
|
||||||
|
}
|
||||||
|
logger.log({ brandVariationId }, 'fetching brand variation details from v1')
|
||||||
|
V1Api.request(
|
||||||
|
{
|
||||||
|
uri: `/api/v2/brand_variations/${brandVariationId}`,
|
||||||
|
},
|
||||||
|
function (error, response, brandVariationDetails) {
|
||||||
|
if (error != null) {
|
||||||
|
OError.tag(error, 'error getting brand variation details', {
|
||||||
|
brandVariationId,
|
||||||
|
})
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
formatBrandVariationDetails(brandVariationDetails)
|
||||||
|
sanitizeBrandVariationDetails(brandVariationDetails)
|
||||||
|
callback(null, brandVariationDetails)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatBrandVariationDetails(details) {
|
||||||
|
if (details.export_url != null) {
|
||||||
|
details.export_url = setV1AsHostIfRelativeURL(details.export_url)
|
||||||
|
}
|
||||||
|
if (details.home_url != null) {
|
||||||
|
details.home_url = setV1AsHostIfRelativeURL(details.home_url)
|
||||||
|
}
|
||||||
|
if (details.logo_url != null) {
|
||||||
|
details.logo_url = setV1AsHostIfRelativeURL(details.logo_url)
|
||||||
|
}
|
||||||
|
if (details.journal_guidelines_url != null) {
|
||||||
|
details.journal_guidelines_url = setV1AsHostIfRelativeURL(
|
||||||
|
details.journal_guidelines_url
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (details.journal_cover_url != null) {
|
||||||
|
details.journal_cover_url = setV1AsHostIfRelativeURL(
|
||||||
|
details.journal_cover_url
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (details.submission_confirmation_page_logo_url != null) {
|
||||||
|
details.submission_confirmation_page_logo_url = setV1AsHostIfRelativeURL(
|
||||||
|
details.submission_confirmation_page_logo_url
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (details.publish_menu_icon != null) {
|
||||||
|
details.publish_menu_icon = setV1AsHostIfRelativeURL(
|
||||||
|
details.publish_menu_icon
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function sanitizeBrandVariationDetails(details) {
|
||||||
|
if (details.submit_button_html) {
|
||||||
|
details.submit_button_html = sanitizeHtml(
|
||||||
|
details.submit_button_html,
|
||||||
|
settings.modules.sanitize.options
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function setV1AsHostIfRelativeURL(urlString) {
|
||||||
|
// The first argument is the base URL to resolve against if the second argument is not absolute.
|
||||||
|
// As it only applies if the second argument is not absolute, we can use it to transform relative URLs into
|
||||||
|
// absolute ones using v1 as the host. If the URL is absolute (e.g. a filepicker one), then the base
|
||||||
|
// argument is just ignored
|
||||||
|
return url.resolve(settings.apis.v1.url, urlString)
|
||||||
|
}
|
64
services/web/app/src/Features/Captcha/CaptchaMiddleware.js
Normal file
64
services/web/app/src/Features/Captcha/CaptchaMiddleware.js
Normal file
|
@ -0,0 +1,64 @@
|
||||||
|
const request = require('request')
const logger = require('logger-sharelatex')
const Settings = require('@overleaf/settings')

// Express middleware factory: verifies a Google reCAPTCHA response before
// letting the request through. `action` selects a per-action kill switch
// (Settings.recaptcha.disabled[action]).
// Cleaned up from decaffeinate output: dropped the `let X; module.exports =
// X = {...}` pattern and the ternary-based null checks.
const CaptchaMiddleware = {
  validateCaptcha(action) {
    return function (req, res, next) {
      // No site key configured => captcha is not enabled on this instance.
      if (Settings.recaptcha == null || Settings.recaptcha.siteKey == null) {
        return next()
      }
      // Captcha explicitly disabled for this particular action.
      if (Settings.recaptcha.disabled[action]) {
        return next()
      }
      const response = req.body['g-recaptcha-response']
      const options = {
        form: {
          secret: Settings.recaptcha.secretKey,
          response,
        },
        json: true,
      }
      request.post(
        'https://www.google.com/recaptcha/api/siteverify',
        options,
        function (error, response, body) {
          if (error != null) {
            return next(error)
          }
          if (body == null || !body.success) {
            logger.warn(
              { statusCode: response.statusCode, body },
              'failed recaptcha siteverify request'
            )
            return res.status(400).send({
              errorReason: 'cannot_verify_user_not_robot',
              message: {
                text:
                  'Sorry, we could not verify that you are not a robot. Please check that Google reCAPTCHA is not being blocked by an ad blocker or firewall.',
              },
            })
          }
          next()
        }
      )
    }
  },
}

module.exports = CaptchaMiddleware
|
168
services/web/app/src/Features/Chat/ChatApiHandler.js
Normal file
168
services/web/app/src/Features/Chat/ChatApiHandler.js
Normal file
|
@ -0,0 +1,168 @@
|
||||||
|
/* eslint-disable
    camelcase,
*/
const OError = require('@overleaf/o-error')
const request = require('request')
const settings = require('@overleaf/settings')

// Thin HTTP client for the chat micro-service. Every public method issues
// one request against the chat API and forwards the (error, data) pair to
// the caller's callback. Cleaned up from decaffeinate output: the
// per-method no-op default callbacks were redundant (a missing callback is
// already defaulted in _apiRequest).
const ChatApiHandler = {
  _apiRequest(opts, callback) {
    if (callback == null) {
      callback = function () {}
    }
    return request(opts, function (error, response, data) {
      if (error != null) {
        return callback(error)
      }
      if (response.statusCode >= 200 && response.statusCode < 300) {
        return callback(null, data)
      }
      // Non-2xx: surface a tagged error carrying the request options.
      error = new OError(
        `chat api returned non-success code: ${response.statusCode}`,
        opts
      )
      error.statusCode = response.statusCode
      return callback(error)
    })
  },

  sendGlobalMessage(project_id, user_id, content, callback) {
    return ChatApiHandler._apiRequest(
      {
        url: `${settings.apis.chat.internal_url}/project/${project_id}/messages`,
        method: 'POST',
        json: { user_id, content },
      },
      callback
    )
  },

  getGlobalMessages(project_id, limit, before, callback) {
    // Pagination parameters are optional; only send what was provided.
    const qs = {}
    if (limit != null) {
      qs.limit = limit
    }
    if (before != null) {
      qs.before = before
    }

    return ChatApiHandler._apiRequest(
      {
        url: `${settings.apis.chat.internal_url}/project/${project_id}/messages`,
        method: 'GET',
        qs,
        json: true,
      },
      callback
    )
  },

  sendComment(project_id, thread_id, user_id, content, callback) {
    return ChatApiHandler._apiRequest(
      {
        url: `${settings.apis.chat.internal_url}/project/${project_id}/thread/${thread_id}/messages`,
        method: 'POST',
        json: { user_id, content },
      },
      callback
    )
  },

  getThreads(project_id, callback) {
    return ChatApiHandler._apiRequest(
      {
        url: `${settings.apis.chat.internal_url}/project/${project_id}/threads`,
        method: 'GET',
        json: true,
      },
      callback
    )
  },

  resolveThread(project_id, thread_id, user_id, callback) {
    return ChatApiHandler._apiRequest(
      {
        url: `${settings.apis.chat.internal_url}/project/${project_id}/thread/${thread_id}/resolve`,
        method: 'POST',
        json: { user_id },
      },
      callback
    )
  },

  reopenThread(project_id, thread_id, callback) {
    return ChatApiHandler._apiRequest(
      {
        url: `${settings.apis.chat.internal_url}/project/${project_id}/thread/${thread_id}/reopen`,
        method: 'POST',
      },
      callback
    )
  },

  deleteThread(project_id, thread_id, callback) {
    return ChatApiHandler._apiRequest(
      {
        url: `${settings.apis.chat.internal_url}/project/${project_id}/thread/${thread_id}`,
        method: 'DELETE',
      },
      callback
    )
  },

  editMessage(project_id, thread_id, message_id, content, callback) {
    return ChatApiHandler._apiRequest(
      {
        url: `${settings.apis.chat.internal_url}/project/${project_id}/thread/${thread_id}/messages/${message_id}/edit`,
        method: 'POST',
        json: {
          content,
        },
      },
      callback
    )
  },

  deleteMessage(project_id, thread_id, message_id, callback) {
    return ChatApiHandler._apiRequest(
      {
        url: `${settings.apis.chat.internal_url}/project/${project_id}/thread/${thread_id}/messages/${message_id}`,
        method: 'DELETE',
      },
      callback
    )
  },
}

module.exports = ChatApiHandler
|
134
services/web/app/src/Features/Chat/ChatController.js
Normal file
134
services/web/app/src/Features/Chat/ChatController.js
Normal file
|
@ -0,0 +1,134 @@
|
||||||
|
const ChatApiHandler = require('./ChatApiHandler')
const EditorRealTimeController = require('../Editor/EditorRealTimeController')
const SessionManager = require('../Authentication/SessionManager')
const UserInfoManager = require('../User/UserInfoManager')
const UserInfoController = require('../User/UserInfoController')
const async = require('async')

// HTTP endpoints for project chat. Cleaned up from decaffeinate output:
// dropped the redundant Array.from calls, the per-iteration IIFE closure
// (unnecessary with const loop variables) and the unused `_` local.
const ChatController = {
  // POST a chat message to the project's global thread and broadcast it to
  // connected editor clients.
  sendMessage(req, res, next) {
    const projectId = req.params.project_id
    const { content } = req.body
    const userId = SessionManager.getLoggedInUserId(req.session)
    if (userId == null) {
      return next(new Error('no logged-in user'))
    }
    ChatApiHandler.sendGlobalMessage(
      projectId,
      userId,
      content,
      function (err, message) {
        if (err != null) {
          return next(err)
        }
        UserInfoManager.getPersonalInfo(message.user_id, function (err, user) {
          if (err != null) {
            return next(err)
          }
          message.user = UserInfoController.formatPersonalInfo(user)
          EditorRealTimeController.emitToRoom(
            projectId,
            'new-chat-message',
            message
          )
          res.sendStatus(204)
        })
      }
    )
  },

  // GET a page of global chat messages, with each message's user field
  // populated from the user info service.
  getMessages(req, res, next) {
    const projectId = req.params.project_id
    const { query } = req
    ChatApiHandler.getGlobalMessages(
      projectId,
      query.limit,
      query.before,
      function (err, messages) {
        if (err != null) {
          return next(err)
        }
        ChatController._injectUserInfoIntoThreads(
          { global: { messages } },
          function (err) {
            if (err != null) {
              return next(err)
            }
            res.json(messages)
          }
        )
      }
    )
  },

  // Populate `user` (and `resolved_by_user`) on every message in `threads`
  // from the user info service. User ids are deduplicated first so each
  // user is only fetched once.
  _injectUserInfoIntoThreads(threads, callback) {
    if (callback == null) {
      callback = function () {}
    }
    // A plain object is used as a string-keyed set, so ObjectId and string
    // ids dedupe identically to the previous implementation.
    const userIds = {}
    for (const threadId of Object.keys(threads)) {
      const thread = threads[threadId]
      if (thread.resolved) {
        userIds[thread.resolved_by_user_id] = true
      }
      for (const message of thread.messages) {
        userIds[message.user_id] = true
      }
    }

    const users = {}
    const jobs = Object.keys(userIds).map(userId => cb =>
      UserInfoManager.getPersonalInfo(userId, function (error, user) {
        if (error != null) {
          return cb(error)
        }
        users[userId] = UserInfoController.formatPersonalInfo(user)
        cb()
      })
    )

    async.series(jobs, function (error) {
      if (error != null) {
        return callback(error)
      }
      for (const threadId of Object.keys(threads)) {
        const thread = threads[threadId]
        if (thread.resolved) {
          thread.resolved_by_user = users[thread.resolved_by_user_id]
        }
        for (const message of thread.messages) {
          message.user = users[message.user_id]
        }
      }
      callback(null, threads)
    })
  },
}

module.exports = ChatController
|
|
@ -0,0 +1,115 @@
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const HttpErrorHandler = require('../../Features/Errors/HttpErrorHandler')
|
||||||
|
const { ObjectId } = require('mongodb')
|
||||||
|
const CollaboratorsHandler = require('./CollaboratorsHandler')
|
||||||
|
const CollaboratorsGetter = require('./CollaboratorsGetter')
|
||||||
|
const OwnershipTransferHandler = require('./OwnershipTransferHandler')
|
||||||
|
const SessionManager = require('../Authentication/SessionManager')
|
||||||
|
const EditorRealTimeController = require('../Editor/EditorRealTimeController')
|
||||||
|
const TagsHandler = require('../Tags/TagsHandler')
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const { expressify } = require('../../util/promises')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
removeUserFromProject: expressify(removeUserFromProject),
|
||||||
|
removeSelfFromProject: expressify(removeSelfFromProject),
|
||||||
|
getAllMembers: expressify(getAllMembers),
|
||||||
|
setCollaboratorInfo: expressify(setCollaboratorInfo),
|
||||||
|
transferOwnership: expressify(transferOwnership),
|
||||||
|
}
|
||||||
|
|
||||||
|
async function removeUserFromProject(req, res, next) {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
const userId = req.params.user_id
|
||||||
|
await _removeUserIdFromProject(projectId, userId)
|
||||||
|
EditorRealTimeController.emitToRoom(projectId, 'project:membership:changed', {
|
||||||
|
members: true,
|
||||||
|
})
|
||||||
|
res.sendStatus(204)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function removeSelfFromProject(req, res, next) {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
const userId = SessionManager.getLoggedInUserId(req.session)
|
||||||
|
await _removeUserIdFromProject(projectId, userId)
|
||||||
|
res.sendStatus(204)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getAllMembers(req, res, next) {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
logger.log({ projectId }, 'getting all active members for project')
|
||||||
|
let members
|
||||||
|
try {
|
||||||
|
members = await CollaboratorsGetter.promises.getAllInvitedMembers(projectId)
|
||||||
|
} catch (err) {
|
||||||
|
throw OError.tag(err, 'error getting members for project', { projectId })
|
||||||
|
}
|
||||||
|
res.json({ members })
|
||||||
|
}
|
||||||
|
|
||||||
|
async function setCollaboratorInfo(req, res, next) {
|
||||||
|
try {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
const userId = req.params.user_id
|
||||||
|
const { privilegeLevel } = req.body
|
||||||
|
await CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel(
|
||||||
|
projectId,
|
||||||
|
userId,
|
||||||
|
privilegeLevel
|
||||||
|
)
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
projectId,
|
||||||
|
'project:membership:changed',
|
||||||
|
{ members: true }
|
||||||
|
)
|
||||||
|
res.sendStatus(204)
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof Errors.NotFoundError) {
|
||||||
|
HttpErrorHandler.notFound(req, res)
|
||||||
|
} else {
|
||||||
|
next(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function transferOwnership(req, res, next) {
|
||||||
|
const sessionUser = SessionManager.getSessionUser(req.session)
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
const toUserId = req.body.user_id
|
||||||
|
try {
|
||||||
|
await OwnershipTransferHandler.promises.transferOwnership(
|
||||||
|
projectId,
|
||||||
|
toUserId,
|
||||||
|
{
|
||||||
|
allowTransferToNonCollaborators: sessionUser.isAdmin,
|
||||||
|
sessionUserId: ObjectId(sessionUser._id),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
res.sendStatus(204)
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof Errors.ProjectNotFoundError) {
|
||||||
|
HttpErrorHandler.notFound(req, res, `project not found: ${projectId}`)
|
||||||
|
} else if (err instanceof Errors.UserNotFoundError) {
|
||||||
|
HttpErrorHandler.notFound(req, res, `user not found: ${toUserId}`)
|
||||||
|
} else if (err instanceof Errors.UserNotCollaboratorError) {
|
||||||
|
HttpErrorHandler.forbidden(
|
||||||
|
req,
|
||||||
|
res,
|
||||||
|
`user ${toUserId} should be a collaborator in project ${projectId} prior to ownership transfer`
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
next(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function _removeUserIdFromProject(projectId, userId) {
|
||||||
|
await CollaboratorsHandler.promises.removeUserFromProject(projectId, userId)
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
projectId,
|
||||||
|
'userRemovedFromProject',
|
||||||
|
userId
|
||||||
|
)
|
||||||
|
await TagsHandler.promises.removeProjectFromAllTags(userId, projectId)
|
||||||
|
}
|
|
@ -0,0 +1,47 @@
|
||||||
|
/* eslint-disable
    camelcase,
    max-len,
*/
const { Project } = require('../../models/Project')
const EmailHandler = require('../Email/EmailHandler')
const Settings = require('@overleaf/settings')

const CollaboratorsEmailHandler = {
  // Build the invite-acceptance URL, carrying the project name and the
  // inviter's first name as query parameters for the landing page.
  _buildInviteUrl(project, invite) {
    return (
      `${Settings.siteUrl}/project/${project._id}/invite/token/${invite.token}?` +
      [
        `project_name=${encodeURIComponent(project.name)}`,
        `user_first_name=${encodeURIComponent(project.owner_ref.first_name)}`,
      ].join('&')
    )
  },

  // Email `email` an invitation to join the project.
  // Fix over the original: database errors and a missing project are now
  // propagated to the callback instead of crashing with a TypeError on
  // `project.owner_ref`.
  notifyUserOfProjectInvite(project_id, email, invite, sendingUser, callback) {
    Project.findOne({ _id: project_id })
      .select('name owner_ref')
      .populate('owner_ref')
      .exec(function (err, project) {
        if (err) {
          return callback(err)
        }
        if (project == null) {
          return callback(new Error('project not found'))
        }
        const emailOptions = {
          to: email,
          replyTo: project.owner_ref.email,
          project: {
            name: project.name,
          },
          inviteUrl: CollaboratorsEmailHandler._buildInviteUrl(project, invite),
          owner: project.owner_ref,
          sendingUser_id: sendingUser._id,
        }
        EmailHandler.sendEmail('projectInvite', emailOptions, callback)
      })
  },
}

module.exports = CollaboratorsEmailHandler
|
|
@ -0,0 +1,269 @@
|
||||||
|
const { callbackify } = require('util')
|
||||||
|
const pLimit = require('p-limit')
|
||||||
|
const { ObjectId } = require('mongodb')
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const { Project } = require('../../models/Project')
|
||||||
|
const UserGetter = require('../User/UserGetter')
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const PublicAccessLevels = require('../Authorization/PublicAccessLevels')
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
const ProjectEditorHandler = require('../Project/ProjectEditorHandler')
|
||||||
|
const Sources = require('../Authorization/Sources')
|
||||||
|
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getMemberIdsWithPrivilegeLevels: callbackify(getMemberIdsWithPrivilegeLevels),
|
||||||
|
getMemberIds: callbackify(getMemberIds),
|
||||||
|
getInvitedMemberIds: callbackify(getInvitedMemberIds),
|
||||||
|
getInvitedMembersWithPrivilegeLevels: callbackify(
|
||||||
|
getInvitedMembersWithPrivilegeLevels
|
||||||
|
),
|
||||||
|
getInvitedMembersWithPrivilegeLevelsFromFields: callbackify(
|
||||||
|
getInvitedMembersWithPrivilegeLevelsFromFields
|
||||||
|
),
|
||||||
|
getMemberIdPrivilegeLevel: callbackify(getMemberIdPrivilegeLevel),
|
||||||
|
getInvitedCollaboratorCount: callbackify(getInvitedCollaboratorCount),
|
||||||
|
getProjectsUserIsMemberOf: callbackify(getProjectsUserIsMemberOf),
|
||||||
|
isUserInvitedMemberOfProject: callbackify(isUserInvitedMemberOfProject),
|
||||||
|
userIsTokenMember: callbackify(userIsTokenMember),
|
||||||
|
getAllInvitedMembers: callbackify(getAllInvitedMembers),
|
||||||
|
promises: {
|
||||||
|
getMemberIdsWithPrivilegeLevels,
|
||||||
|
getMemberIds,
|
||||||
|
getInvitedMemberIds,
|
||||||
|
getInvitedMembersWithPrivilegeLevels,
|
||||||
|
getInvitedMembersWithPrivilegeLevelsFromFields,
|
||||||
|
getMemberIdPrivilegeLevel,
|
||||||
|
getInvitedCollaboratorCount,
|
||||||
|
getProjectsUserIsMemberOf,
|
||||||
|
isUserInvitedMemberOfProject,
|
||||||
|
userIsTokenMember,
|
||||||
|
getAllInvitedMembers,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getMemberIdsWithPrivilegeLevels(projectId) {
|
||||||
|
const project = await ProjectGetter.promises.getProject(projectId, {
|
||||||
|
owner_ref: 1,
|
||||||
|
collaberator_refs: 1,
|
||||||
|
readOnly_refs: 1,
|
||||||
|
tokenAccessReadOnly_refs: 1,
|
||||||
|
tokenAccessReadAndWrite_refs: 1,
|
||||||
|
publicAccesLevel: 1,
|
||||||
|
})
|
||||||
|
if (!project) {
|
||||||
|
throw new Errors.NotFoundError(`no project found with id ${projectId}`)
|
||||||
|
}
|
||||||
|
const memberIds = _getMemberIdsWithPrivilegeLevelsFromFields(
|
||||||
|
project.owner_ref,
|
||||||
|
project.collaberator_refs,
|
||||||
|
project.readOnly_refs,
|
||||||
|
project.tokenAccessReadAndWrite_refs,
|
||||||
|
project.tokenAccessReadOnly_refs,
|
||||||
|
project.publicAccesLevel
|
||||||
|
)
|
||||||
|
return memberIds
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getMemberIds(projectId) {
|
||||||
|
const members = await getMemberIdsWithPrivilegeLevels(projectId)
|
||||||
|
return members.map(m => m.id)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getInvitedMemberIds(projectId) {
|
||||||
|
const members = await getMemberIdsWithPrivilegeLevels(projectId)
|
||||||
|
return members.filter(m => m.source !== Sources.TOKEN).map(m => m.id)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Invited (non-token) members with their privilege levels, resolved to full
 * user records via _loadMembers.
 */
async function getInvitedMembersWithPrivilegeLevels(projectId) {
  const allMembers = await getMemberIdsWithPrivilegeLevels(projectId)
  const invited = allMembers.filter(member => member.source !== Sources.TOKEN)
  return _loadMembers(invited)
}
|
||||||
|
|
||||||
|
/**
 * Like getInvitedMembersWithPrivilegeLevels, but operates on membership
 * fields the caller already has, avoiding a project lookup. Token-access
 * fields are deliberately empty so only invited members are produced.
 */
async function getInvitedMembersWithPrivilegeLevelsFromFields(
  ownerId,
  collaboratorIds,
  readOnlyIds
) {
  const invited = _getMemberIdsWithPrivilegeLevelsFromFields(
    ownerId,
    collaboratorIds,
    readOnlyIds,
    [], // no token read/write members
    [], // no token read-only members
    null // no public access level, so token branches are skipped
  )
  return _loadMembers(invited)
}
|
||||||
|
|
||||||
|
/**
 * Privilege level of a user on a project, or PrivilegeLevels.NONE when the
 * user is not a member (or no user id was given).
 */
async function getMemberIdPrivilegeLevel(userId, projectId) {
  // In future if the schema changes and getting all member ids is more
  // expensive (multiple documents) then optimise this.
  if (userId == null) {
    return PrivilegeLevels.NONE
  }
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  const match = members.find(member => member.id === userId.toString())
  return match != null ? match.privilegeLevel : PrivilegeLevels.NONE
}
|
||||||
|
|
||||||
|
/**
 * Number of invited collaborators on a project, excluding the owner.
 */
async function getInvitedCollaboratorCount(projectId) {
  const invitedCount = await _getInvitedMemberCount(projectId)
  return invitedCount - 1 // the owner is always counted; exclude them
}
|
||||||
|
|
||||||
|
/**
 * Whether the user is the owner or an invited member of the project.
 * Token-access (link sharing) membership does not count.
 */
async function isUserInvitedMemberOfProject(userId, projectId) {
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  return members.some(
    member =>
      member.id.toString() === userId.toString() &&
      member.source !== Sources.TOKEN
  )
}
|
||||||
|
|
||||||
|
/**
 * All projects the user belongs to (but does not own), grouped by how they
 * joined. Token-access membership only counts while the project's link
 * sharing (publicAccesLevel) is TOKEN_BASED. The four queries run with a
 * concurrency limit of two.
 *
 * @param {string|ObjectId} userId
 * @param {Object} fields mongo projection applied to every query
 * @returns {Promise<{readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly}>}
 */
async function getProjectsUserIsMemberOf(userId, fields) {
  const runLimited = pLimit(2)
  const [readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly] =
    await Promise.all([
      runLimited(() =>
        Project.find({ collaberator_refs: userId }, fields).exec()
      ),
      runLimited(() => Project.find({ readOnly_refs: userId }, fields).exec()),
      runLimited(() =>
        Project.find(
          {
            tokenAccessReadAndWrite_refs: userId,
            publicAccesLevel: PublicAccessLevels.TOKEN_BASED,
          },
          fields
        ).exec()
      ),
      runLimited(() =>
        Project.find(
          {
            tokenAccessReadOnly_refs: userId,
            publicAccesLevel: PublicAccessLevels.TOKEN_BASED,
          },
          fields
        ).exec()
      ),
    ])
  return { readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly }
}
|
||||||
|
|
||||||
|
/**
 * Invited members formatted for the editor's "members" view.
 *
 * @throws tagged error when the member lookup or view building fails
 */
async function getAllInvitedMembers(projectId) {
  try {
    const rawMembers = await getInvitedMembersWithPrivilegeLevels(projectId)
    const views = ProjectEditorHandler.buildOwnerAndMembersViews(rawMembers)
    return views.members
  } catch (err) {
    throw OError.tag(err, 'error getting members for project', { projectId })
  }
}
|
||||||
|
|
||||||
|
/**
 * True when the user has token (link-sharing) access to the project,
 * read-only or read/write.
 */
async function userIsTokenMember(userId, projectId) {
  userId = ObjectId(userId.toString())
  projectId = ObjectId(projectId.toString())
  const query = {
    _id: projectId,
    $or: [
      { tokenAccessReadOnly_refs: userId },
      { tokenAccessReadAndWrite_refs: userId },
    ],
  }
  // Only project the _id; we just need to know whether a match exists.
  const project = await Project.findOne(query, { _id: 1 }).exec()
  return project != null
}
|
||||||
|
|
||||||
|
/** Count of non-token members, including the project owner. */
async function _getInvitedMemberCount(projectId) {
  const members = await getMemberIdsWithPrivilegeLevels(projectId)
  return members.filter(member => member.source !== Sources.TOKEN).length
}
|
||||||
|
|
||||||
|
/**
 * Build the flat member list from a project's raw membership fields.
 * The owner always comes first; token-access members are only included
 * while link sharing is enabled (publicAccessLevel === TOKEN_BASED).
 *
 * @returns {Array<{id: string, privilegeLevel: string, source: string}>}
 */
function _getMemberIdsWithPrivilegeLevelsFromFields(
  ownerId,
  collaboratorIds,
  readOnlyIds,
  tokenAccessIds,
  tokenAccessReadOnlyIds,
  publicAccessLevel
) {
  const members = [
    {
      id: ownerId.toString(),
      privilegeLevel: PrivilegeLevels.OWNER,
      source: Sources.OWNER,
    },
  ]
  // Append every id in `ids` (tolerating a missing field) with the given
  // privilege level and membership source.
  const addAll = (ids, privilegeLevel, source) => {
    for (const memberId of ids || []) {
      members.push({ id: memberId.toString(), privilegeLevel, source })
    }
  }
  addAll(collaboratorIds, PrivilegeLevels.READ_AND_WRITE, Sources.INVITE)
  addAll(readOnlyIds, PrivilegeLevels.READ_ONLY, Sources.INVITE)
  if (publicAccessLevel === PublicAccessLevels.TOKEN_BASED) {
    addAll(tokenAccessIds, PrivilegeLevels.READ_AND_WRITE, Sources.TOKEN)
    addAll(tokenAccessReadOnlyIds, PrivilegeLevels.READ_ONLY, Sources.TOKEN)
  }
  return members
}
|
||||||
|
|
||||||
|
/**
 * Resolve member descriptors to user records (selected fields only),
 * pairing each with its privilege level. Members whose user record no
 * longer exists are dropped. Lookups run with a concurrency limit of three.
 */
async function _loadMembers(members) {
  const runLimited = pLimit(3)
  const projection = {
    _id: 1,
    email: 1,
    features: 1,
    first_name: 1,
    last_name: 1,
    signUpDate: 1,
  }
  const loaded = await Promise.all(
    members.map(member =>
      runLimited(async () => {
        const user = await UserGetter.promises.getUser(member.id, projection)
        return user == null
          ? null
          : { user, privilegeLevel: member.privilegeLevel }
      })
    )
  )
  return loaded.filter(result => result != null)
}
|
|
@ -0,0 +1,262 @@
|
||||||
|
const { callbackify } = require('util')
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const { Project } = require('../../models/Project')
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const ProjectHelper = require('../Project/ProjectHelper')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const ContactManager = require('../Contacts/ContactManager')
|
||||||
|
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
|
||||||
|
const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher')
|
||||||
|
const CollaboratorsGetter = require('./CollaboratorsGetter')
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
userIsTokenMember: callbackify(userIsTokenMember),
|
||||||
|
removeUserFromProject: callbackify(removeUserFromProject),
|
||||||
|
removeUserFromAllProjects: callbackify(removeUserFromAllProjects),
|
||||||
|
addUserIdToProject: callbackify(addUserIdToProject),
|
||||||
|
transferProjects: callbackify(transferProjects),
|
||||||
|
promises: {
|
||||||
|
userIsTokenMember,
|
||||||
|
removeUserFromProject,
|
||||||
|
removeUserFromAllProjects,
|
||||||
|
addUserIdToProject,
|
||||||
|
transferProjects,
|
||||||
|
setCollaboratorPrivilegeLevel,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Remove a user from all of a project's membership fields (collaborator,
 * read-only, token access) and from its archived/trashed user lists.
 *
 * @param {string|ObjectId} projectId
 * @param {string|ObjectId} userId
 * @throws tagged error on failure; a missing project now surfaces as a
 *         descriptive NotFoundError instead of an opaque TypeError
 */
async function removeUserFromProject(projectId, userId) {
  try {
    const project = await Project.findOne({ _id: projectId }).exec()
    if (!project) {
      // Previously `project.archived` on a null project threw a TypeError;
      // fail explicitly instead.
      throw new Errors.NotFoundError(`no project found with id ${projectId}`)
    }

    // Fields the user must be pulled from in every case
    const pull = {
      collaberator_refs: userId,
      readOnly_refs: userId,
      tokenAccessReadOnly_refs: userId,
      tokenAccessReadAndWrite_refs: userId,
      trashed: userId,
    }

    if (typeof project.archived === 'boolean') {
      // Deal with the old type of boolean value for archived, in order to
      // clear it: convert to the per-user array form, minus this user.
      let archived = ProjectHelper.calculateArchivedArray(
        project,
        userId,
        'ARCHIVE'
      )
      archived = archived.filter(id => id.toString() !== userId.toString())
      await Project.updateOne(
        { _id: projectId },
        { $set: { archived }, $pull: pull }
      )
    } else {
      // archived is already an array; pull the user from it as well
      await Project.updateOne(
        { _id: projectId },
        { $pull: { ...pull, archived: userId } }
      )
    }
  } catch (err) {
    throw OError.tag(err, 'problem removing user from project collaborators', {
      projectId,
      userId,
    })
  }
}
|
||||||
|
|
||||||
|
/**
 * Remove the user from every project they are a member of in any non-owner
 * role (invited or token access). Projects are processed sequentially.
 */
async function removeUserFromAllProjects(userId) {
  const memberships =
    await CollaboratorsGetter.promises.getProjectsUserIsMemberOf(userId, {
      _id: 1,
    })
  const allProjects = [
    ...memberships.readAndWrite,
    ...memberships.readOnly,
    ...memberships.tokenReadAndWrite,
    ...memberships.tokenReadOnly,
  ]
  for (const project of allProjects) {
    await removeUserFromProject(project._id, userId)
  }
}
|
||||||
|
|
||||||
|
/**
 * Add a user to a project with the given privilege level. No-op when the
 * user already has invited access. Records the contact relationship for the
 * inviter (when given) and flushes the project to TPDS in the background.
 *
 * @param {string|ObjectId} projectId
 * @param {string|ObjectId|null} addingUserId user performing the add, if any
 * @param {string|ObjectId} userId user being added
 * @param {string} privilegeLevel PrivilegeLevels.READ_AND_WRITE or READ_ONLY
 * @throws {Error} on any other privilege level
 */
async function addUserIdToProject(
  projectId,
  addingUserId,
  userId,
  privilegeLevel
) {
  const project = await ProjectGetter.promises.getProject(projectId, {
    collaberator_refs: 1,
    readOnly_refs: 1,
  })
  // Bail out early when the user already has some form of invited access
  const currentMemberIds = [
    ...(project.collaberator_refs || []),
    ...(project.readOnly_refs || []),
  ].map(ref => ref.toString())
  if (currentMemberIds.includes(userId.toString())) {
    return // User already in Project
  }

  let fieldUpdate
  if (privilegeLevel === PrivilegeLevels.READ_AND_WRITE) {
    fieldUpdate = { collaberator_refs: userId }
    logger.log({ privileges: 'readAndWrite', userId, projectId }, 'adding user')
  } else if (privilegeLevel === PrivilegeLevels.READ_ONLY) {
    fieldUpdate = { readOnly_refs: userId }
    logger.log({ privileges: 'readOnly', userId, projectId }, 'adding user')
  } else {
    throw new Error(`unknown privilegeLevel: ${privilegeLevel}`)
  }

  if (addingUserId) {
    // Fire-and-forget: remember that these two users collaborated
    ContactManager.addContact(addingUserId, userId)
  }

  await Project.updateOne({ _id: projectId }, { $addToSet: fieldUpdate }).exec()

  // Flush to TPDS in background to add files to collaborator's Dropbox
  TpdsProjectFlusher.promises.flushProjectToTpds(projectId).catch(err => {
    logger.error(
      { err, projectId, userId },
      'error flushing to TPDS after adding collaborator'
    )
  })
}
|
||||||
|
|
||||||
|
/**
 * Transfer project ownership and membership from one user to another, then
 * flush all affected projects to TPDS in the background.
 *
 * @param {string|ObjectId} fromUserId
 * @param {string|ObjectId} toUserId
 */
async function transferProjects(fromUserId, toUserId) {
  // Find all the projects this user is part of so we can flush them to TPDS
  const projects = await Project.find(
    {
      $or: [
        { owner_ref: fromUserId },
        { collaberator_refs: fromUserId },
        { readOnly_refs: fromUserId },
      ],
    },
    { _id: 1 }
  ).exec()
  const projectIds = projects.map(p => p._id)
  logger.log({ projectIds, fromUserId, toUserId }, 'transferring projects')

  await Project.updateMany(
    { owner_ref: fromUserId },
    { $set: { owner_ref: toUserId } }
  ).exec()

  // $addToSet and $pull cannot target the same field in a single update, so
  // each membership field takes two passes: add the new user, then remove
  // the old one.
  await Project.updateMany(
    { collaberator_refs: fromUserId },
    {
      $addToSet: { collaberator_refs: toUserId },
    }
  ).exec()
  await Project.updateMany(
    { collaberator_refs: fromUserId },
    {
      $pull: { collaberator_refs: fromUserId },
    }
  ).exec()

  await Project.updateMany(
    { readOnly_refs: fromUserId },
    {
      $addToSet: { readOnly_refs: toUserId },
    }
  ).exec()
  await Project.updateMany(
    { readOnly_refs: fromUserId },
    {
      $pull: { readOnly_refs: fromUserId },
    }
  ).exec()

  // Flush in background, no need to block on this
  _flushProjects(projectIds).catch(err => {
    logger.err(
      { err, projectIds, fromUserId, toUserId },
      // fixed typo: was 'tranferred'
      'error flushing transferred projects to TPDS'
    )
  })
}
|
||||||
|
|
||||||
|
/**
 * Move an existing collaborator between READ_AND_WRITE and READ_ONLY.
 *
 * @throws {OError} on an unknown privilege level
 * @throws {Errors.NotFoundError} when the user is not already a collaborator
 *         on the project (the update matches nothing)
 */
async function setCollaboratorPrivilegeLevel(projectId, userId, privilegeLevel) {
  // Make sure we're only updating the project if the user is already a
  // collaborator
  const query = {
    _id: projectId,
    $or: [{ collaberator_refs: userId }, { readOnly_refs: userId }],
  }
  let update
  if (privilegeLevel === PrivilegeLevels.READ_AND_WRITE) {
    update = {
      $pull: { readOnly_refs: userId },
      $addToSet: { collaberator_refs: userId },
    }
  } else if (privilegeLevel === PrivilegeLevels.READ_ONLY) {
    update = {
      $pull: { collaberator_refs: userId },
      $addToSet: { readOnly_refs: userId },
    }
  } else {
    throw new OError(`unknown privilege level: ${privilegeLevel}`)
  }
  const mongoResponse = await Project.updateOne(query, update).exec()
  if (mongoResponse.n === 0) {
    throw new Errors.NotFoundError('project or collaborator not found')
  }
}
|
||||||
|
|
||||||
|
/**
 * True when the user has token (link-sharing) access to the project.
 * A missing userId short-circuits to false without querying.
 */
async function userIsTokenMember(userId, projectId) {
  if (!userId) {
    return false
  }
  try {
    const query = {
      _id: projectId,
      $or: [
        { tokenAccessReadOnly_refs: userId },
        { tokenAccessReadAndWrite_refs: userId },
      ],
    }
    // Only project the _id; existence is all we need
    const project = await Project.findOne(query, { _id: 1 })
    return project != null
  } catch (err) {
    throw OError.tag(err, 'problem while checking if user is token member', {
      userId,
      projectId,
    })
  }
}
|
||||||
|
|
||||||
|
/** Flush the given projects to TPDS one at a time. */
async function _flushProjects(projectIds) {
  for (const projectId of projectIds) {
    await TpdsProjectFlusher.promises.flushProjectToTpds(projectId)
  }
}
|
|
@ -0,0 +1,392 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let CollaboratorsInviteController
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const LimitationsManager = require('../Subscription/LimitationsManager')
|
||||||
|
const UserGetter = require('../User/UserGetter')
|
||||||
|
const CollaboratorsGetter = require('./CollaboratorsGetter')
|
||||||
|
const CollaboratorsInviteHandler = require('./CollaboratorsInviteHandler')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
const EmailHelper = require('../Helpers/EmailHelper')
|
||||||
|
const EditorRealTimeController = require('../Editor/EditorRealTimeController')
|
||||||
|
const AnalyticsManager = require('../Analytics/AnalyticsManager')
|
||||||
|
const SessionManager = require('../Authentication/SessionManager')
|
||||||
|
const rateLimiter = require('../../infrastructure/RateLimiter')
|
||||||
|
|
||||||
|
module.exports = CollaboratorsInviteController = {
|
||||||
|
getAllInvites(req, res, next) {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
logger.log({ projectId }, 'getting all active invites for project')
|
||||||
|
return CollaboratorsInviteHandler.getAllInvites(
|
||||||
|
projectId,
|
||||||
|
function (err, invites) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error getting invites for project', {
|
||||||
|
projectId,
|
||||||
|
})
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
return res.json({ invites })
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
_checkShouldInviteEmail(email, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, shouldAllowInvite) {}
|
||||||
|
}
|
||||||
|
if (Settings.restrictInvitesToExistingAccounts === true) {
|
||||||
|
logger.log({ email }, 'checking if user exists with this email')
|
||||||
|
return UserGetter.getUserByAnyEmail(
|
||||||
|
email,
|
||||||
|
{ _id: 1 },
|
||||||
|
function (err, user) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
const userExists =
|
||||||
|
user != null && (user != null ? user._id : undefined) != null
|
||||||
|
return callback(null, userExists)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
return callback(null, true)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
_checkRateLimit(user_id, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error) {}
|
||||||
|
}
|
||||||
|
return LimitationsManager.allowedNumberOfCollaboratorsForUser(
|
||||||
|
user_id,
|
||||||
|
function (err, collabLimit) {
|
||||||
|
if (collabLimit == null) {
|
||||||
|
collabLimit = 1
|
||||||
|
}
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (collabLimit === -1) {
|
||||||
|
collabLimit = 20
|
||||||
|
}
|
||||||
|
collabLimit = collabLimit * 10
|
||||||
|
const opts = {
|
||||||
|
endpointName: 'invite-to-project-by-user-id',
|
||||||
|
timeInterval: 60 * 30,
|
||||||
|
subjectName: user_id,
|
||||||
|
throttle: collabLimit,
|
||||||
|
}
|
||||||
|
return rateLimiter.addCount(opts, callback)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
inviteToProject(req, res, next) {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
let { email } = req.body
|
||||||
|
const sendingUser = SessionManager.getSessionUser(req.session)
|
||||||
|
const sendingUserId = sendingUser._id
|
||||||
|
if (email === sendingUser.email) {
|
||||||
|
logger.log(
|
||||||
|
{ projectId, email, sendingUserId },
|
||||||
|
'cannot invite yourself to project'
|
||||||
|
)
|
||||||
|
return res.json({ invite: null, error: 'cannot_invite_self' })
|
||||||
|
}
|
||||||
|
logger.log({ projectId, email, sendingUserId }, 'inviting to project')
|
||||||
|
return LimitationsManager.canAddXCollaborators(
|
||||||
|
projectId,
|
||||||
|
1,
|
||||||
|
(error, allowed) => {
|
||||||
|
let privileges
|
||||||
|
if (error != null) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
if (!allowed) {
|
||||||
|
logger.log(
|
||||||
|
{ projectId, email, sendingUserId },
|
||||||
|
'not allowed to invite more users to project'
|
||||||
|
)
|
||||||
|
return res.json({ invite: null })
|
||||||
|
}
|
||||||
|
;({ email, privileges } = req.body)
|
||||||
|
email = EmailHelper.parseEmail(email)
|
||||||
|
if (email == null || email === '') {
|
||||||
|
logger.log(
|
||||||
|
{ projectId, email, sendingUserId },
|
||||||
|
'invalid email address'
|
||||||
|
)
|
||||||
|
return res.status(400).send({ errorReason: 'invalid_email' })
|
||||||
|
}
|
||||||
|
return CollaboratorsInviteController._checkRateLimit(
|
||||||
|
sendingUserId,
|
||||||
|
function (error, underRateLimit) {
|
||||||
|
if (error != null) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
if (!underRateLimit) {
|
||||||
|
return res.sendStatus(429)
|
||||||
|
}
|
||||||
|
return CollaboratorsInviteController._checkShouldInviteEmail(
|
||||||
|
email,
|
||||||
|
function (err, shouldAllowInvite) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(
|
||||||
|
err,
|
||||||
|
'error checking if we can invite this email address',
|
||||||
|
{
|
||||||
|
email,
|
||||||
|
projectId,
|
||||||
|
sendingUserId,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
if (!shouldAllowInvite) {
|
||||||
|
logger.log(
|
||||||
|
{ email, projectId, sendingUserId },
|
||||||
|
'not allowed to send an invite to this email address'
|
||||||
|
)
|
||||||
|
return res.json({
|
||||||
|
invite: null,
|
||||||
|
error: 'cannot_invite_non_user',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return CollaboratorsInviteHandler.inviteToProject(
|
||||||
|
projectId,
|
||||||
|
sendingUser,
|
||||||
|
email,
|
||||||
|
privileges,
|
||||||
|
function (err, invite) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error creating project invite', {
|
||||||
|
projectId,
|
||||||
|
email,
|
||||||
|
sendingUserId,
|
||||||
|
})
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ projectId, email, sendingUserId },
|
||||||
|
'invite created'
|
||||||
|
)
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
projectId,
|
||||||
|
'project:membership:changed',
|
||||||
|
{ invites: true }
|
||||||
|
)
|
||||||
|
return res.json({ invite })
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
revokeInvite(req, res, next) {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
const inviteId = req.params.invite_id
|
||||||
|
logger.log({ projectId, inviteId }, 'revoking invite')
|
||||||
|
return CollaboratorsInviteHandler.revokeInvite(
|
||||||
|
projectId,
|
||||||
|
inviteId,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error revoking invite', {
|
||||||
|
projectId,
|
||||||
|
inviteId,
|
||||||
|
})
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
projectId,
|
||||||
|
'project:membership:changed',
|
||||||
|
{ invites: true }
|
||||||
|
)
|
||||||
|
return res.sendStatus(201)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
resendInvite(req, res, next) {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
const inviteId = req.params.invite_id
|
||||||
|
logger.log({ projectId, inviteId }, 'resending invite')
|
||||||
|
const sendingUser = SessionManager.getSessionUser(req.session)
|
||||||
|
return CollaboratorsInviteController._checkRateLimit(
|
||||||
|
sendingUser._id,
|
||||||
|
function (error, underRateLimit) {
|
||||||
|
if (error != null) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
if (!underRateLimit) {
|
||||||
|
return res.sendStatus(429)
|
||||||
|
}
|
||||||
|
return CollaboratorsInviteHandler.resendInvite(
|
||||||
|
projectId,
|
||||||
|
sendingUser,
|
||||||
|
inviteId,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error resending invite', {
|
||||||
|
projectId,
|
||||||
|
inviteId,
|
||||||
|
})
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
return res.sendStatus(201)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
viewInvite(req, res, next) {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
const { token } = req.params
|
||||||
|
const _renderInvalidPage = function () {
|
||||||
|
logger.log(
|
||||||
|
{ projectId, token },
|
||||||
|
'invite not valid, rendering not-valid page'
|
||||||
|
)
|
||||||
|
return res.render('project/invite/not-valid', { title: 'Invalid Invite' })
|
||||||
|
}
|
||||||
|
// check if the user is already a member of the project
|
||||||
|
const currentUser = SessionManager.getSessionUser(req.session)
|
||||||
|
return CollaboratorsGetter.isUserInvitedMemberOfProject(
|
||||||
|
currentUser._id,
|
||||||
|
projectId,
|
||||||
|
function (err, isMember) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error checking if user is member of project', {
|
||||||
|
projectId,
|
||||||
|
})
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
if (isMember) {
|
||||||
|
logger.log(
|
||||||
|
{ projectId, userId: currentUser._id },
|
||||||
|
'user is already a member of this project, redirecting'
|
||||||
|
)
|
||||||
|
return res.redirect(`/project/${projectId}`)
|
||||||
|
}
|
||||||
|
// get the invite
|
||||||
|
return CollaboratorsInviteHandler.getInviteByToken(
|
||||||
|
projectId,
|
||||||
|
token,
|
||||||
|
function (err, invite) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error getting invite by token', {
|
||||||
|
projectId,
|
||||||
|
token,
|
||||||
|
})
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
// check if invite is gone, or otherwise non-existent
|
||||||
|
if (invite == null) {
|
||||||
|
logger.log({ projectId, token }, 'no invite found for this token')
|
||||||
|
return _renderInvalidPage()
|
||||||
|
}
|
||||||
|
// check the user who sent the invite exists
|
||||||
|
return UserGetter.getUser(
|
||||||
|
{ _id: invite.sendingUserId },
|
||||||
|
{ email: 1, first_name: 1, last_name: 1 },
|
||||||
|
function (err, owner) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error getting project owner', {
|
||||||
|
projectId,
|
||||||
|
})
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
if (owner == null) {
|
||||||
|
logger.log({ projectId }, 'no project owner found')
|
||||||
|
return _renderInvalidPage()
|
||||||
|
}
|
||||||
|
// fetch the project name
|
||||||
|
return ProjectGetter.getProject(
|
||||||
|
projectId,
|
||||||
|
{},
|
||||||
|
function (err, project) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error getting project', {
|
||||||
|
projectId,
|
||||||
|
})
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
if (project == null) {
|
||||||
|
logger.log({ projectId }, 'no project found')
|
||||||
|
return _renderInvalidPage()
|
||||||
|
}
|
||||||
|
// finally render the invite
|
||||||
|
return res.render('project/invite/show', {
|
||||||
|
invite,
|
||||||
|
project,
|
||||||
|
owner,
|
||||||
|
title: 'Project Invite',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
acceptInvite(req, res, next) {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
const { token } = req.params
|
||||||
|
const currentUser = SessionManager.getSessionUser(req.session)
|
||||||
|
logger.log(
|
||||||
|
{ projectId, userId: currentUser._id, token },
|
||||||
|
'got request to accept invite'
|
||||||
|
)
|
||||||
|
return CollaboratorsInviteHandler.acceptInvite(
|
||||||
|
projectId,
|
||||||
|
token,
|
||||||
|
currentUser,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error accepting invite by token', {
|
||||||
|
projectId,
|
||||||
|
token,
|
||||||
|
})
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
projectId,
|
||||||
|
'project:membership:changed',
|
||||||
|
{ invites: true, members: true }
|
||||||
|
)
|
||||||
|
AnalyticsManager.recordEvent(currentUser._id, 'project-invite-accept', {
|
||||||
|
projectId,
|
||||||
|
userId: currentUser._id,
|
||||||
|
})
|
||||||
|
if (req.xhr) {
|
||||||
|
return res.sendStatus(204) // Done async via project page notification
|
||||||
|
} else {
|
||||||
|
return res.redirect(`/project/${projectId}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
|
@ -0,0 +1,361 @@
|
||||||
|
/* eslint-disable
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
const { ProjectInvite } = require('../../models/ProjectInvite')
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const CollaboratorsEmailHandler = require('./CollaboratorsEmailHandler')
|
||||||
|
const CollaboratorsHandler = require('./CollaboratorsHandler')
|
||||||
|
const UserGetter = require('../User/UserGetter')
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
const Crypto = require('crypto')
|
||||||
|
const NotificationsBuilder = require('../Notifications/NotificationsBuilder')
|
||||||
|
const { promisifyAll } = require('../../util/promises')
|
||||||
|
|
||||||
|
const CollaboratorsInviteHandler = {
|
||||||
|
getAllInvites(projectId, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, invites) {}
|
||||||
|
}
|
||||||
|
logger.log({ projectId }, 'fetching invites for project')
|
||||||
|
return ProjectInvite.find({ projectId }, function (err, invites) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error getting invites from mongo', {
|
||||||
|
projectId,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ projectId, count: invites.length },
|
||||||
|
'found invites for project'
|
||||||
|
)
|
||||||
|
return callback(null, invites)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
getInviteCount(projectId, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, count) {}
|
||||||
|
}
|
||||||
|
logger.log({ projectId }, 'counting invites for project')
|
||||||
|
return ProjectInvite.countDocuments({ projectId }, function (err, count) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error getting invites from mongo', {
|
||||||
|
projectId,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
return callback(null, count)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_trySendInviteNotification(projectId, sendingUser, invite, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
const { email } = invite
|
||||||
|
return UserGetter.getUserByAnyEmail(
|
||||||
|
email,
|
||||||
|
{ _id: 1 },
|
||||||
|
function (err, existingUser) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error checking if user exists', {
|
||||||
|
projectId,
|
||||||
|
email,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (existingUser == null) {
|
||||||
|
logger.log({ projectId, email }, 'no existing user found, returning')
|
||||||
|
return callback(null)
|
||||||
|
}
|
||||||
|
return ProjectGetter.getProject(
|
||||||
|
projectId,
|
||||||
|
{ _id: 1, name: 1 },
|
||||||
|
function (err, project) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error getting project', {
|
||||||
|
projectId,
|
||||||
|
email,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (project == null) {
|
||||||
|
logger.log(
|
||||||
|
{ projectId },
|
||||||
|
'no project found while sending notification, returning'
|
||||||
|
)
|
||||||
|
return callback(null)
|
||||||
|
}
|
||||||
|
return NotificationsBuilder.projectInvite(
|
||||||
|
invite,
|
||||||
|
project,
|
||||||
|
sendingUser,
|
||||||
|
existingUser
|
||||||
|
).create(callback)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
_tryCancelInviteNotification(inviteId, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function () {}
|
||||||
|
}
|
||||||
|
return NotificationsBuilder.projectInvite(
|
||||||
|
{ _id: inviteId },
|
||||||
|
null,
|
||||||
|
null,
|
||||||
|
null
|
||||||
|
).read(callback)
|
||||||
|
},
|
||||||
|
|
||||||
|
_sendMessages(projectId, sendingUser, invite, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ projectId, inviteId: invite._id },
|
||||||
|
'sending notification and email for invite'
|
||||||
|
)
|
||||||
|
return CollaboratorsEmailHandler.notifyUserOfProjectInvite(
|
||||||
|
projectId,
|
||||||
|
invite.email,
|
||||||
|
invite,
|
||||||
|
sendingUser,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
return CollaboratorsInviteHandler._trySendInviteNotification(
|
||||||
|
projectId,
|
||||||
|
sendingUser,
|
||||||
|
invite,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
inviteToProject(projectId, sendingUser, email, privileges, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, invite) {}
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ projectId, sendingUserId: sendingUser._id, email, privileges },
|
||||||
|
'adding invite'
|
||||||
|
)
|
||||||
|
return Crypto.randomBytes(24, function (err, buffer) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error generating random token', {
|
||||||
|
projectId,
|
||||||
|
sendingUserId: sendingUser._id,
|
||||||
|
email,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
const token = buffer.toString('hex')
|
||||||
|
const invite = new ProjectInvite({
|
||||||
|
email,
|
||||||
|
token,
|
||||||
|
sendingUserId: sendingUser._id,
|
||||||
|
projectId,
|
||||||
|
privileges,
|
||||||
|
})
|
||||||
|
return invite.save(function (err, invite) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error saving token', {
|
||||||
|
projectId,
|
||||||
|
sendingUserId: sendingUser._id,
|
||||||
|
email,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
// Send email and notification in background
|
||||||
|
CollaboratorsInviteHandler._sendMessages(
|
||||||
|
projectId,
|
||||||
|
sendingUser,
|
||||||
|
invite,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
return logger.err(
|
||||||
|
{ err, projectId, email },
|
||||||
|
'error sending messages for invite'
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return callback(null, invite)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
revokeInvite(projectId, inviteId, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
logger.log({ projectId, inviteId }, 'removing invite')
|
||||||
|
return ProjectInvite.deleteOne(
|
||||||
|
{ projectId, _id: inviteId },
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error removing invite', {
|
||||||
|
projectId,
|
||||||
|
inviteId,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
CollaboratorsInviteHandler._tryCancelInviteNotification(
|
||||||
|
inviteId,
|
||||||
|
function () {}
|
||||||
|
)
|
||||||
|
return callback(null)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
resendInvite(projectId, sendingUser, inviteId, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
logger.log({ projectId, inviteId }, 'resending invite email')
|
||||||
|
return ProjectInvite.findOne(
|
||||||
|
{ _id: inviteId, projectId },
|
||||||
|
function (err, invite) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error finding invite', {
|
||||||
|
projectId,
|
||||||
|
inviteId,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (invite == null) {
|
||||||
|
logger.err(
|
||||||
|
{ err, projectId, inviteId },
|
||||||
|
'no invite found, nothing to resend'
|
||||||
|
)
|
||||||
|
return callback(null)
|
||||||
|
}
|
||||||
|
return CollaboratorsInviteHandler._sendMessages(
|
||||||
|
projectId,
|
||||||
|
sendingUser,
|
||||||
|
invite,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error resending invite messages', {
|
||||||
|
projectId,
|
||||||
|
inviteId,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
return callback(null)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
getInviteByToken(projectId, tokenString, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, invite) {}
|
||||||
|
}
|
||||||
|
logger.log({ projectId, tokenString }, 'fetching invite by token')
|
||||||
|
return ProjectInvite.findOne(
|
||||||
|
{ projectId, token: tokenString },
|
||||||
|
function (err, invite) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error fetching invite', {
|
||||||
|
projectId,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (invite == null) {
|
||||||
|
logger.err({ err, projectId, token: tokenString }, 'no invite found')
|
||||||
|
return callback(null, null)
|
||||||
|
}
|
||||||
|
return callback(null, invite)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
acceptInvite(projectId, tokenString, user, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
logger.log({ projectId, userId: user._id, tokenString }, 'accepting invite')
|
||||||
|
return CollaboratorsInviteHandler.getInviteByToken(
|
||||||
|
projectId,
|
||||||
|
tokenString,
|
||||||
|
function (err, invite) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error finding invite', {
|
||||||
|
projectId,
|
||||||
|
tokenString,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (!invite) {
|
||||||
|
err = new Errors.NotFoundError('no matching invite found')
|
||||||
|
logger.log(
|
||||||
|
{ err, projectId, tokenString },
|
||||||
|
'no matching invite found'
|
||||||
|
)
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
const inviteId = invite._id
|
||||||
|
return CollaboratorsHandler.addUserIdToProject(
|
||||||
|
projectId,
|
||||||
|
invite.sendingUserId,
|
||||||
|
user._id,
|
||||||
|
invite.privileges,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error adding user to project', {
|
||||||
|
projectId,
|
||||||
|
inviteId,
|
||||||
|
userId: user._id,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
// Remove invite
|
||||||
|
logger.log({ projectId, inviteId }, 'removing invite')
|
||||||
|
return ProjectInvite.deleteOne({ _id: inviteId }, function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error removing invite', {
|
||||||
|
projectId,
|
||||||
|
inviteId,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
CollaboratorsInviteHandler._tryCancelInviteNotification(
|
||||||
|
inviteId,
|
||||||
|
function () {}
|
||||||
|
)
|
||||||
|
return callback()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = CollaboratorsInviteHandler
|
||||||
|
module.exports.promises = promisifyAll(CollaboratorsInviteHandler)
|
|
@ -0,0 +1,130 @@
|
||||||
|
const CollaboratorsController = require('./CollaboratorsController')
|
||||||
|
const AuthenticationController = require('../Authentication/AuthenticationController')
|
||||||
|
const AuthorizationMiddleware = require('../Authorization/AuthorizationMiddleware')
|
||||||
|
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
|
||||||
|
const CollaboratorsInviteController = require('./CollaboratorsInviteController')
|
||||||
|
const RateLimiterMiddleware = require('../Security/RateLimiterMiddleware')
|
||||||
|
const CaptchaMiddleware = require('../Captcha/CaptchaMiddleware')
|
||||||
|
const AnalyticsRegistrationSourceMiddleware = require('../Analytics/AnalyticsRegistrationSourceMiddleware')
|
||||||
|
const { Joi, validate } = require('../../infrastructure/Validation')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
apply(webRouter, apiRouter) {
|
||||||
|
webRouter.post(
|
||||||
|
'/project/:Project_id/leave',
|
||||||
|
AuthenticationController.requireLogin(),
|
||||||
|
CollaboratorsController.removeSelfFromProject
|
||||||
|
)
|
||||||
|
|
||||||
|
webRouter.put(
|
||||||
|
'/project/:Project_id/users/:user_id',
|
||||||
|
AuthenticationController.requireLogin(),
|
||||||
|
validate({
|
||||||
|
params: Joi.object({
|
||||||
|
Project_id: Joi.objectId(),
|
||||||
|
user_id: Joi.objectId(),
|
||||||
|
}),
|
||||||
|
body: Joi.object({
|
||||||
|
privilegeLevel: Joi.string()
|
||||||
|
.valid(PrivilegeLevels.READ_ONLY, PrivilegeLevels.READ_AND_WRITE)
|
||||||
|
.required(),
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
AuthorizationMiddleware.ensureUserCanAdminProject,
|
||||||
|
CollaboratorsController.setCollaboratorInfo
|
||||||
|
)
|
||||||
|
|
||||||
|
webRouter.delete(
|
||||||
|
'/project/:Project_id/users/:user_id',
|
||||||
|
AuthenticationController.requireLogin(),
|
||||||
|
AuthorizationMiddleware.ensureUserCanAdminProject,
|
||||||
|
CollaboratorsController.removeUserFromProject
|
||||||
|
)
|
||||||
|
|
||||||
|
webRouter.get(
|
||||||
|
'/project/:Project_id/members',
|
||||||
|
AuthenticationController.requireLogin(),
|
||||||
|
AuthorizationMiddleware.ensureUserCanAdminProject,
|
||||||
|
CollaboratorsController.getAllMembers
|
||||||
|
)
|
||||||
|
|
||||||
|
webRouter.post(
|
||||||
|
'/project/:Project_id/transfer-ownership',
|
||||||
|
AuthenticationController.requireLogin(),
|
||||||
|
validate({
|
||||||
|
params: Joi.object({
|
||||||
|
Project_id: Joi.objectId(),
|
||||||
|
}),
|
||||||
|
body: Joi.object({
|
||||||
|
user_id: Joi.objectId(),
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
AuthorizationMiddleware.ensureUserCanAdminProject,
|
||||||
|
CollaboratorsController.transferOwnership
|
||||||
|
)
|
||||||
|
|
||||||
|
// invites
|
||||||
|
webRouter.post(
|
||||||
|
'/project/:Project_id/invite',
|
||||||
|
RateLimiterMiddleware.rateLimit({
|
||||||
|
endpointName: 'invite-to-project-by-project-id',
|
||||||
|
params: ['Project_id'],
|
||||||
|
maxRequests: 100,
|
||||||
|
timeInterval: 60 * 10,
|
||||||
|
}),
|
||||||
|
RateLimiterMiddleware.rateLimit({
|
||||||
|
endpointName: 'invite-to-project-by-ip',
|
||||||
|
ipOnly: true,
|
||||||
|
maxRequests: 100,
|
||||||
|
timeInterval: 60 * 10,
|
||||||
|
}),
|
||||||
|
CaptchaMiddleware.validateCaptcha('invite'),
|
||||||
|
AuthenticationController.requireLogin(),
|
||||||
|
AuthorizationMiddleware.ensureUserCanAdminProject,
|
||||||
|
CollaboratorsInviteController.inviteToProject
|
||||||
|
)
|
||||||
|
|
||||||
|
webRouter.get(
|
||||||
|
'/project/:Project_id/invites',
|
||||||
|
AuthenticationController.requireLogin(),
|
||||||
|
AuthorizationMiddleware.ensureUserCanAdminProject,
|
||||||
|
CollaboratorsInviteController.getAllInvites
|
||||||
|
)
|
||||||
|
|
||||||
|
webRouter.delete(
|
||||||
|
'/project/:Project_id/invite/:invite_id',
|
||||||
|
AuthenticationController.requireLogin(),
|
||||||
|
AuthorizationMiddleware.ensureUserCanAdminProject,
|
||||||
|
CollaboratorsInviteController.revokeInvite
|
||||||
|
)
|
||||||
|
|
||||||
|
webRouter.post(
|
||||||
|
'/project/:Project_id/invite/:invite_id/resend',
|
||||||
|
RateLimiterMiddleware.rateLimit({
|
||||||
|
endpointName: 'resend-invite',
|
||||||
|
params: ['Project_id'],
|
||||||
|
maxRequests: 200,
|
||||||
|
timeInterval: 60 * 10,
|
||||||
|
}),
|
||||||
|
AuthenticationController.requireLogin(),
|
||||||
|
AuthorizationMiddleware.ensureUserCanAdminProject,
|
||||||
|
CollaboratorsInviteController.resendInvite
|
||||||
|
)
|
||||||
|
|
||||||
|
webRouter.get(
|
||||||
|
'/project/:Project_id/invite/token/:token',
|
||||||
|
AnalyticsRegistrationSourceMiddleware.setSource('project-invite'),
|
||||||
|
AuthenticationController.requireLogin(),
|
||||||
|
CollaboratorsInviteController.viewInvite,
|
||||||
|
AnalyticsRegistrationSourceMiddleware.clearSource()
|
||||||
|
)
|
||||||
|
|
||||||
|
webRouter.post(
|
||||||
|
'/project/:Project_id/invite/token/:token/accept',
|
||||||
|
AnalyticsRegistrationSourceMiddleware.setSource('project-invite'),
|
||||||
|
AuthenticationController.requireLogin(),
|
||||||
|
CollaboratorsInviteController.acceptInvite,
|
||||||
|
AnalyticsRegistrationSourceMiddleware.clearSource()
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
|
@ -0,0 +1,124 @@
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const { Project } = require('../../models/Project')
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const UserGetter = require('../User/UserGetter')
|
||||||
|
const CollaboratorsHandler = require('./CollaboratorsHandler')
|
||||||
|
const EmailHandler = require('../Email/EmailHandler')
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
|
||||||
|
const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher')
|
||||||
|
const ProjectAuditLogHandler = require('../Project/ProjectAuditLogHandler')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
promises: { transferOwnership },
|
||||||
|
}
|
||||||
|
|
||||||
|
// Transfer ownership of a project to another user.
//
// Unless `options.allowTransferToNonCollaborators` is set, the new owner
// must already be a collaborator on the project. The transfer is recorded
// in the project audit log, the project is flushed to the third-party
// datastore, and both owners receive a confirmation email.
async function transferOwnership(projectId, newOwnerId, options = {}) {
  const { allowTransferToNonCollaborators, sessionUserId } = options

  // Fetch project and user in parallel
  const [project, newOwner] = await Promise.all([
    _getProject(projectId),
    _getUser(newOwnerId),
  ])

  // Exit early if the transferee is already the project owner
  const previousOwnerId = project.owner_ref
  if (previousOwnerId.equals(newOwnerId)) {
    return
  }

  // Check that user is already a collaborator
  if (
    !allowTransferToNonCollaborators &&
    !_userIsCollaborator(newOwner, project)
  ) {
    throw new Errors.UserNotCollaboratorError({ info: { userId: newOwnerId } })
  }

  // Audit first, then perform the transfer
  await ProjectAuditLogHandler.promises.addEntry(
    projectId,
    'transfer-ownership',
    sessionUserId,
    { previousOwnerId, newOwnerId }
  )
  await _transferOwnership(projectId, previousOwnerId, newOwnerId)

  // Flush project to TPDS
  await TpdsProjectFlusher.promises.flushProjectToTpds(projectId)

  // Send confirmation emails
  const previousOwner = await UserGetter.promises.getUser(previousOwnerId)
  await _sendEmails(project, previousOwner, newOwner)
}
|
||||||
|
|
||||||
|
// Load the project fields needed for an ownership transfer.
// Throws Errors.ProjectNotFoundError when the project does not exist.
async function _getProject(projectId) {
  const projection = {
    owner_ref: 1,
    collaberator_refs: 1, // (sic) historical misspelling in the schema
    name: 1,
  }
  const project = await ProjectGetter.promises.getProject(projectId, projection)
  if (project == null) {
    throw new Errors.ProjectNotFoundError({ info: { projectId } })
  }
  return project
}
|
||||||
|
|
||||||
|
// Load a user record; throws Errors.UserNotFoundError when absent.
async function _getUser(userId) {
  const user = await UserGetter.promises.getUser(userId)
  if (user == null) {
    throw new Errors.UserNotFoundError({ info: { userId } })
  }
  return user
}
|
||||||
|
|
||||||
|
// True when `user` appears in the project's collaborator list.
// (`collaberator_refs` is the historical, misspelled schema field name.)
function _userIsCollaborator(user, project) {
  const refs = project.collaberator_refs || []
  for (const ref of refs) {
    if (ref.equals(user._id)) {
      return true
    }
  }
  return false
}
|
||||||
|
|
||||||
|
// Perform the ownership swap: take the new owner out of the collaborator
// list, point owner_ref at them, then re-add the previous owner as a
// read/write collaborator.
async function _transferOwnership(projectId, previousOwnerId, newOwnerId) {
  await CollaboratorsHandler.promises.removeUserFromProject(
    projectId,
    newOwnerId
  )
  await Project.updateOne(
    { _id: projectId },
    { $set: { owner_ref: newOwnerId } }
  ).exec()
  await CollaboratorsHandler.promises.addUserIdToProject(
    projectId,
    newOwnerId,
    previousOwnerId,
    PrivilegeLevels.READ_AND_WRITE
  )
}
|
||||||
|
|
||||||
|
// Send ownership-transfer confirmation emails to both the previous and the
// new owner. When the previous owner record is missing (not supposed to
// happen), only a warning is logged and no email is sent.
async function _sendEmails(project, previousOwner, newOwner) {
  if (previousOwner == null) {
    // The previous owner didn't exist. This is not supposed to happen, but
    // since we're changing the owner anyway, we'll just warn.
    // Fix: previously this logged `previousOwner._id`, which throws a
    // TypeError because previousOwner is null on this branch.
    logger.warn(
      { projectId: project._id },
      'Project owner did not exist before ownership transfer'
    )
  } else {
    // Send confirmation emails to both parties in parallel
    await Promise.all([
      EmailHandler.promises.sendEmail(
        'ownershipTransferConfirmationPreviousOwner',
        {
          to: previousOwner.email,
          project,
          newOwner,
        }
      ),
      EmailHandler.promises.sendEmail('ownershipTransferConfirmationNewOwner', {
        to: newOwner.email,
        project,
        previousOwner,
      }),
    ])
  }
}
|
158
services/web/app/src/Features/Compile/ClsiCookieManager.js
Normal file
158
services/web/app/src/Features/Compile/ClsiCookieManager.js
Normal file
|
@ -0,0 +1,158 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let rclient_secondary
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
const request = require('request').defaults({ timeout: 30 * 1000 })
|
||||||
|
const RedisWrapper = require('../../infrastructure/RedisWrapper')
|
||||||
|
const rclient = RedisWrapper.client('clsi_cookie')
|
||||||
|
if (Settings.redis.clsi_cookie_secondary != null) {
|
||||||
|
rclient_secondary = RedisWrapper.client('clsi_cookie_secondary')
|
||||||
|
}
|
||||||
|
const Cookie = require('cookie')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
|
||||||
|
const clsiCookiesEnabled =
|
||||||
|
(Settings.clsiCookie != null ? Settings.clsiCookie.key : undefined) != null &&
|
||||||
|
Settings.clsiCookie.key.length !== 0
|
||||||
|
|
||||||
|
module.exports = function (backendGroup) {
|
||||||
|
return {
|
||||||
|
buildKey(project_id) {
|
||||||
|
if (backendGroup != null) {
|
||||||
|
return `clsiserver:${backendGroup}:${project_id}`
|
||||||
|
} else {
|
||||||
|
return `clsiserver:${project_id}`
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
_getServerId(project_id, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, serverId) {}
|
||||||
|
}
|
||||||
|
return rclient.get(this.buildKey(project_id), (err, serverId) => {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (serverId == null || serverId === '') {
|
||||||
|
return this._populateServerIdViaRequest(project_id, callback)
|
||||||
|
} else {
|
||||||
|
return callback(null, serverId)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_populateServerIdViaRequest(project_id, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, serverId) {}
|
||||||
|
}
|
||||||
|
const url = `${Settings.apis.clsi.url}/project/${project_id}/status`
|
||||||
|
return request.post(url, (err, res, body) => {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error getting initial server id for project', {
|
||||||
|
project_id,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
return this.setServerId(project_id, res, function (err, serverId) {
|
||||||
|
if (err != null) {
|
||||||
|
logger.warn(
|
||||||
|
{ err, project_id },
|
||||||
|
'error setting server id via populate request'
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return callback(err, serverId)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_parseServerIdFromResponse(response) {
|
||||||
|
const cookies = Cookie.parse(
|
||||||
|
(response.headers['set-cookie'] != null
|
||||||
|
? response.headers['set-cookie'][0]
|
||||||
|
: undefined) || ''
|
||||||
|
)
|
||||||
|
return cookies != null ? cookies[Settings.clsiCookie.key] : undefined
|
||||||
|
},
|
||||||
|
|
||||||
|
setServerId(project_id, response, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, serverId) {}
|
||||||
|
}
|
||||||
|
if (!clsiCookiesEnabled) {
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
const serverId = this._parseServerIdFromResponse(response)
|
||||||
|
if (serverId == null) {
|
||||||
|
// We don't get a cookie back if it hasn't changed
|
||||||
|
return rclient.expire(
|
||||||
|
this.buildKey(project_id),
|
||||||
|
Settings.clsiCookie.ttl,
|
||||||
|
err => callback(err, undefined)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (rclient_secondary != null) {
|
||||||
|
this._setServerIdInRedis(rclient_secondary, project_id, serverId)
|
||||||
|
}
|
||||||
|
return this._setServerIdInRedis(rclient, project_id, serverId, err =>
|
||||||
|
callback(err, serverId)
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
_setServerIdInRedis(rclient, project_id, serverId, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
rclient.setex(
|
||||||
|
this.buildKey(project_id),
|
||||||
|
Settings.clsiCookie.ttl,
|
||||||
|
serverId,
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
clearServerId(project_id, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
if (!clsiCookiesEnabled) {
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
return rclient.del(this.buildKey(project_id), callback)
|
||||||
|
},
|
||||||
|
|
||||||
|
getCookieJar(project_id, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, jar, clsiServerId) {}
|
||||||
|
}
|
||||||
|
if (!clsiCookiesEnabled) {
|
||||||
|
return callback(null, request.jar(), undefined)
|
||||||
|
}
|
||||||
|
return this._getServerId(project_id, (err, serverId) => {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error getting server id', {
|
||||||
|
project_id,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
const serverCookie = request.cookie(
|
||||||
|
`${Settings.clsiCookie.key}=${serverId}`
|
||||||
|
)
|
||||||
|
const jar = request.jar()
|
||||||
|
jar.setCookie(serverCookie, Settings.apis.clsi.url)
|
||||||
|
return callback(null, jar, serverId)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
86
services/web/app/src/Features/Compile/ClsiFormatChecker.js
Normal file
86
services/web/app/src/Features/Compile/ClsiFormatChecker.js
Normal file
|
@ -0,0 +1,86 @@
|
||||||
|
/* eslint-disable
|
||||||
|
max-len,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let ClsiFormatChecker
|
||||||
|
const _ = require('lodash')
|
||||||
|
const async = require('async')
|
||||||
|
const settings = require('@overleaf/settings')
|
||||||
|
|
||||||
|
module.exports = ClsiFormatChecker = {
|
||||||
|
checkRecoursesForProblems(resources, callback) {
|
||||||
|
const jobs = {
|
||||||
|
conflictedPaths(cb) {
|
||||||
|
return ClsiFormatChecker._checkForConflictingPaths(resources, cb)
|
||||||
|
},
|
||||||
|
|
||||||
|
sizeCheck(cb) {
|
||||||
|
return ClsiFormatChecker._checkDocsAreUnderSizeLimit(resources, cb)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return async.series(jobs, function (err, problems) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
problems = _.omitBy(problems, _.isEmpty)
|
||||||
|
|
||||||
|
if (_.isEmpty(problems)) {
|
||||||
|
return callback()
|
||||||
|
} else {
|
||||||
|
return callback(null, problems)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_checkForConflictingPaths(resources, callback) {
|
||||||
|
const paths = resources.map(resource => resource.path)
|
||||||
|
|
||||||
|
const conflicts = _.filter(paths, function (path) {
|
||||||
|
const matchingPaths = _.filter(
|
||||||
|
paths,
|
||||||
|
checkPath => checkPath.indexOf(path + '/') !== -1
|
||||||
|
)
|
||||||
|
|
||||||
|
return matchingPaths.length > 0
|
||||||
|
})
|
||||||
|
|
||||||
|
const conflictObjects = conflicts.map(conflict => ({ path: conflict }))
|
||||||
|
|
||||||
|
return callback(null, conflictObjects)
|
||||||
|
},
|
||||||
|
|
||||||
|
_checkDocsAreUnderSizeLimit(resources, callback) {
|
||||||
|
const sizeLimit = 1000 * 1000 * settings.compileBodySizeLimitMb
|
||||||
|
|
||||||
|
let totalSize = 0
|
||||||
|
|
||||||
|
let sizedResources = resources.map(function (resource) {
|
||||||
|
const result = { path: resource.path }
|
||||||
|
if (resource.content != null) {
|
||||||
|
result.size = resource.content.replace(/\n/g, '').length
|
||||||
|
result.kbSize = Math.ceil(result.size / 1000)
|
||||||
|
} else {
|
||||||
|
result.size = 0
|
||||||
|
}
|
||||||
|
totalSize += result.size
|
||||||
|
return result
|
||||||
|
})
|
||||||
|
|
||||||
|
const tooLarge = totalSize > sizeLimit
|
||||||
|
if (!tooLarge) {
|
||||||
|
return callback()
|
||||||
|
} else {
|
||||||
|
sizedResources = _.sortBy(sizedResources, 'size').reverse().slice(0, 10)
|
||||||
|
return callback(null, { resources: sizedResources, totalSize })
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
907
services/web/app/src/Features/Compile/ClsiManager.js
Normal file
907
services/web/app/src/Features/Compile/ClsiManager.js
Normal file
|
@ -0,0 +1,907 @@
|
||||||
|
const async = require('async')
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
const request = require('request')
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const ProjectEntityHandler = require('../Project/ProjectEntityHandler')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const Url = require('url')
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
|
||||||
|
const ClsiCookieManager = require('./ClsiCookieManager')(
|
||||||
|
Settings.apis.clsi != null ? Settings.apis.clsi.backendGroupName : undefined
|
||||||
|
)
|
||||||
|
const NewBackendCloudClsiCookieManager = require('./ClsiCookieManager')(
|
||||||
|
Settings.apis.clsi_new != null
|
||||||
|
? Settings.apis.clsi_new.backendGroupName
|
||||||
|
: undefined
|
||||||
|
)
|
||||||
|
const ClsiStateManager = require('./ClsiStateManager')
|
||||||
|
const _ = require('underscore')
|
||||||
|
const ClsiFormatChecker = require('./ClsiFormatChecker')
|
||||||
|
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
|
||||||
|
const Metrics = require('@overleaf/metrics')
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
|
||||||
|
const VALID_COMPILERS = ['pdflatex', 'latex', 'xelatex', 'lualatex']
|
||||||
|
|
||||||
|
const ClsiManager = {
|
||||||
|
sendRequest(projectId, userId, options, callback) {
|
||||||
|
if (options == null) {
|
||||||
|
options = {}
|
||||||
|
}
|
||||||
|
ClsiManager.sendRequestOnce(
|
||||||
|
projectId,
|
||||||
|
userId,
|
||||||
|
options,
|
||||||
|
(err, status, ...result) => {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (status === 'conflict') {
|
||||||
|
// Try again, with a full compile
|
||||||
|
return ClsiManager.sendRequestOnce(
|
||||||
|
projectId,
|
||||||
|
userId,
|
||||||
|
{ ...options, syncType: 'full' },
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
} else if (status === 'unavailable') {
|
||||||
|
return ClsiManager.sendRequestOnce(
|
||||||
|
projectId,
|
||||||
|
userId,
|
||||||
|
{ ...options, syncType: 'full', forceNewClsiServer: true },
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
}
|
||||||
|
callback(null, status, ...result)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
sendRequestOnce(projectId, userId, options, callback) {
|
||||||
|
if (options == null) {
|
||||||
|
options = {}
|
||||||
|
}
|
||||||
|
ClsiManager._buildRequest(projectId, options, (err, req) => {
|
||||||
|
if (err != null) {
|
||||||
|
if (err.message === 'no main file specified') {
|
||||||
|
return callback(null, 'validation-problems', null, null, {
|
||||||
|
mainFile: err.message,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
return callback(
|
||||||
|
OError.tag(err, 'Could not build request to CLSI', {
|
||||||
|
projectId,
|
||||||
|
options,
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ClsiManager._sendBuiltRequest(
|
||||||
|
projectId,
|
||||||
|
userId,
|
||||||
|
req,
|
||||||
|
options,
|
||||||
|
(err, status, ...result) => {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(
|
||||||
|
OError.tag(err, 'CLSI compile failed', { projectId, userId })
|
||||||
|
)
|
||||||
|
}
|
||||||
|
callback(null, status, ...result)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
// for public API requests where there is no project id
|
||||||
|
sendExternalRequest(submissionId, clsiRequest, options, callback) {
|
||||||
|
if (options == null) {
|
||||||
|
options = {}
|
||||||
|
}
|
||||||
|
ClsiManager._sendBuiltRequest(
|
||||||
|
submissionId,
|
||||||
|
null,
|
||||||
|
clsiRequest,
|
||||||
|
options,
|
||||||
|
(err, status, ...result) => {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(
|
||||||
|
OError.tag(err, 'CLSI compile failed', {
|
||||||
|
submissionId,
|
||||||
|
options,
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
callback(null, status, ...result)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
stopCompile(projectId, userId, options, callback) {
|
||||||
|
if (options == null) {
|
||||||
|
options = {}
|
||||||
|
}
|
||||||
|
const compilerUrl = this._getCompilerUrl(
|
||||||
|
options.compileGroup,
|
||||||
|
projectId,
|
||||||
|
userId,
|
||||||
|
'compile/stop'
|
||||||
|
)
|
||||||
|
const opts = {
|
||||||
|
url: compilerUrl,
|
||||||
|
method: 'POST',
|
||||||
|
}
|
||||||
|
ClsiManager._makeRequest(projectId, opts, callback)
|
||||||
|
},
|
||||||
|
|
||||||
|
deleteAuxFiles(projectId, userId, options, clsiserverid, callback) {
|
||||||
|
if (options == null) {
|
||||||
|
options = {}
|
||||||
|
}
|
||||||
|
const compilerUrl = this._getCompilerUrl(
|
||||||
|
options.compileGroup,
|
||||||
|
projectId,
|
||||||
|
userId
|
||||||
|
)
|
||||||
|
const opts = {
|
||||||
|
url: compilerUrl,
|
||||||
|
method: 'DELETE',
|
||||||
|
}
|
||||||
|
ClsiManager._makeRequestWithClsiServerId(
|
||||||
|
projectId,
|
||||||
|
opts,
|
||||||
|
clsiserverid,
|
||||||
|
clsiErr => {
|
||||||
|
// always clear the project state from the docupdater, even if there
|
||||||
|
// was a problem with the request to the clsi
|
||||||
|
DocumentUpdaterHandler.clearProjectState(projectId, docUpdaterErr => {
|
||||||
|
ClsiCookieManager.clearServerId(projectId, redisError => {
|
||||||
|
if (clsiErr) {
|
||||||
|
return callback(
|
||||||
|
OError.tag(clsiErr, 'Failed to delete aux files', { projectId })
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (docUpdaterErr) {
|
||||||
|
return callback(
|
||||||
|
OError.tag(
|
||||||
|
docUpdaterErr,
|
||||||
|
'Failed to clear project state in doc updater',
|
||||||
|
{ projectId }
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (redisError) {
|
||||||
|
// redis errors need wrapping as the instance may be shared
|
||||||
|
return callback(
|
||||||
|
OError(
|
||||||
|
'Failed to clear clsi persistence',
|
||||||
|
{ projectId },
|
||||||
|
redisError
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
callback()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
// Send an already-built compile request to the CLSI. When
// options.forceNewClsiServer is set, the sticky backend cookie is
// cleared first and the method retries itself once. Resources are
// pre-checked for known problems before POSTing; on success the
// callback receives (status, outputFiles, clsiServerId,
// validationProblems, stats, timings).
_sendBuiltRequest(projectId, userId, req, options, callback) {
  if (options == null) {
    options = {}
  }
  if (options.forceNewClsiServer) {
    // Clear clsi cookie, then try again
    return ClsiCookieManager.clearServerId(projectId, err => {
      if (err) {
        return callback(err)
      }
      options.forceNewClsiServer = false // backend has now been reset
      return ClsiManager._sendBuiltRequest(
        projectId,
        userId,
        req,
        options,
        callback
      )
    })
  }
  // NOTE: "Recourses" is the spelling used by ClsiFormatChecker's API
  ClsiFormatChecker.checkRecoursesForProblems(
    req.compile != null ? req.compile.resources : undefined,
    (err, validationProblems) => {
      if (err != null) {
        return callback(
          OError.tag(
            err,
            'could not check resources for potential problems before sending to clsi'
          )
        )
      }
      if (validationProblems != null) {
        logger.log(
          { projectId, validationProblems },
          'problems with users latex before compile was attempted'
        )
        // skip the compile entirely and report the problems instead
        return callback(
          null,
          'validation-problems',
          null,
          null,
          validationProblems
        )
      }
      ClsiManager._postToClsi(
        projectId,
        userId,
        req,
        options.compileGroup,
        (err, response, clsiServerId) => {
          if (err != null) {
            return callback(
              OError.tag(err, 'error sending request to clsi', {
                projectId,
                userId,
              })
            )
          }
          const outputFiles = ClsiManager._parseOutputFiles(
            projectId,
            response && response.compile && response.compile.outputFiles
          )
          const compile = (response && response.compile) || {}
          const status = compile.status
          const stats = compile.stats
          const timings = compile.timings
          // validation passed above, so there are none to report here
          const validationProblems = undefined
          callback(
            null,
            status,
            outputFiles,
            clsiServerId,
            validationProblems,
            stats,
            timings
          )
        }
      )
    }
  )
},
|
||||||
|
|
||||||
|
_makeRequestWithClsiServerId(projectId, opts, clsiserverid, callback) {
|
||||||
|
if (clsiserverid) {
|
||||||
|
// ignore cookies and newBackend, go straight to the clsi node
|
||||||
|
opts.qs = Object.assign({ clsiserverid }, opts.qs)
|
||||||
|
request(opts, (err, response, body) => {
|
||||||
|
if (err) {
|
||||||
|
return callback(
|
||||||
|
OError.tag(err, 'error making request to CLSI', { projectId })
|
||||||
|
)
|
||||||
|
}
|
||||||
|
callback(null, response, body)
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
ClsiManager._makeRequest(projectId, opts, callback)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
// Make a request to the current CLSI backend and then shadow the same
// request to a "new" backend (when configured) for comparison. The
// caller's callback fires as soon as the current backend responds; the
// async.series completion handler only logs status/timing differences
// between the two backends. Note the deliberate split between
// `callback` (the caller) and `cb` (the series accumulator).
_makeRequest(projectId, opts, callback) {
  async.series(
    {
      currentBackend(cb) {
        const startTime = new Date()
        ClsiCookieManager.getCookieJar(
          projectId,
          (err, jar, clsiServerId) => {
            if (err != null) {
              return callback(
                OError.tag(err, 'error getting cookie jar for CLSI request', {
                  projectId,
                })
              )
            }
            opts.jar = jar
            const timer = new Metrics.Timer('compile.currentBackend')
            request(opts, (err, response, body) => {
              if (err != null) {
                return callback(
                  OError.tag(err, 'error making request to CLSI', {
                    projectId,
                  })
                )
              }
              timer.done()
              Metrics.inc(
                `compile.currentBackend.response.${response.statusCode}`
              )
              ClsiCookieManager.setServerId(
                projectId,
                response,
                (err, newClsiServerId) => {
                  if (err != null) {
                    callback(
                      OError.tag(err, 'error setting server id', {
                        projectId,
                      })
                    )
                  } else {
                    // return as soon as the standard compile has returned
                    callback(
                      null,
                      response,
                      body,
                      newClsiServerId || clsiServerId
                    )
                  }
                  // also feed the series so the comparison step still runs
                  cb(err, {
                    response,
                    body,
                    finishTime: new Date() - startTime,
                  })
                }
              )
            })
          }
        )
      },
      newBackend(cb) {
        const startTime = new Date()
        ClsiManager._makeNewBackendRequest(
          projectId,
          opts,
          (err, response, body) => {
            if (err != null) {
              // shadow-request failures are logged, never surfaced
              logger.warn({ err }, 'Error making request to new CLSI backend')
            }
            if (response != null) {
              Metrics.inc(
                `compile.newBackend.response.${response.statusCode}`
              )
            }
            cb(err, {
              response,
              body,
              finishTime: new Date() - startTime,
            })
          }
        )
      },
    },
    (err, results) => {
      if (err != null) {
        // This was handled higher up
        return
      }
      if (results.newBackend != null && results.newBackend.response != null) {
        const currentStatusCode = results.currentBackend.response.statusCode
        const newStatusCode = results.newBackend.response.statusCode
        const statusCodeSame = newStatusCode === currentStatusCode
        const currentCompileTime = results.currentBackend.finishTime
        const newBackendCompileTime = results.newBackend.finishTime || 0
        const timeDifference = newBackendCompileTime - currentCompileTime
        logger.log(
          {
            statusCodeSame,
            timeDifference,
            currentCompileTime,
            newBackendCompileTime,
            projectId,
          },
          'both clsi requests returned'
        )
      }
    }
  )
},
|
||||||
|
|
||||||
|
// Mirror a request onto the "new" CLSI backend (Settings.apis.clsi_new)
// for comparison purposes. No-op (calls back with no arguments) when
// the new backend is not configured.
_makeNewBackendRequest(projectId, baseOpts, callback) {
  if (Settings.apis.clsi_new == null || Settings.apis.clsi_new.url == null) {
    return callback()
  }
  const opts = {
    ...baseOpts,
    // point the identical request at the new backend's host
    url: baseOpts.url.replace(
      Settings.apis.clsi.url,
      Settings.apis.clsi_new.url
    ),
  }
  NewBackendCloudClsiCookieManager.getCookieJar(projectId, (err, jar) => {
    if (err != null) {
      return callback(
        OError.tag(err, 'error getting cookie jar for CLSI request', {
          projectId,
        })
      )
    }
    opts.jar = jar
    const timer = new Metrics.Timer('compile.newBackend')
    request(opts, (err, response, body) => {
      timer.done()
      if (err != null) {
        return callback(
          OError.tag(err, 'error making request to new CLSI', {
            projectId,
            opts,
          })
        )
      }
      NewBackendCloudClsiCookieManager.setServerId(
        projectId,
        response,
        err => {
          if (err != null) {
            return callback(
              OError.tag(err, 'error setting server id on new backend', {
                projectId,
              })
            )
          }
          callback(null, response, body)
        }
      )
    })
  })
},
|
||||||
|
|
||||||
|
_getCompilerUrl(compileGroup, projectId, userId, action) {
|
||||||
|
const host = Settings.apis.clsi.url
|
||||||
|
let path = `/project/${projectId}`
|
||||||
|
if (userId != null) {
|
||||||
|
path += `/user/${userId}`
|
||||||
|
}
|
||||||
|
if (action != null) {
|
||||||
|
path += `/${action}`
|
||||||
|
}
|
||||||
|
return `${host}${path}`
|
||||||
|
},
|
||||||
|
|
||||||
|
_postToClsi(projectId, userId, req, compileGroup, callback) {
|
||||||
|
const compileUrl = this._getCompilerUrl(
|
||||||
|
compileGroup,
|
||||||
|
projectId,
|
||||||
|
userId,
|
||||||
|
'compile'
|
||||||
|
)
|
||||||
|
const opts = {
|
||||||
|
url: compileUrl,
|
||||||
|
json: req,
|
||||||
|
method: 'POST',
|
||||||
|
}
|
||||||
|
ClsiManager._makeRequest(
|
||||||
|
projectId,
|
||||||
|
opts,
|
||||||
|
(err, response, body, clsiServerId) => {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(
|
||||||
|
new OError('failed to make request to CLSI', {
|
||||||
|
projectId,
|
||||||
|
userId,
|
||||||
|
compileOptions: req.compile.options,
|
||||||
|
rootResourcePath: req.compile.rootResourcePath,
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (response.statusCode >= 200 && response.statusCode < 300) {
|
||||||
|
callback(null, body, clsiServerId)
|
||||||
|
} else if (response.statusCode === 413) {
|
||||||
|
callback(null, { compile: { status: 'project-too-large' } })
|
||||||
|
} else if (response.statusCode === 409) {
|
||||||
|
callback(null, { compile: { status: 'conflict' } })
|
||||||
|
} else if (response.statusCode === 423) {
|
||||||
|
callback(null, { compile: { status: 'compile-in-progress' } })
|
||||||
|
} else if (response.statusCode === 503) {
|
||||||
|
callback(null, { compile: { status: 'unavailable' } })
|
||||||
|
} else {
|
||||||
|
callback(
|
||||||
|
new OError(
|
||||||
|
`CLSI returned non-success code: ${response.statusCode}`,
|
||||||
|
{
|
||||||
|
projectId,
|
||||||
|
userId,
|
||||||
|
compileOptions: req.compile.options,
|
||||||
|
rootResourcePath: req.compile.rootResourcePath,
|
||||||
|
clsiResponse: body,
|
||||||
|
statusCode: response.statusCode,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
_parseOutputFiles(projectId, rawOutputFiles = []) {
|
||||||
|
const outputFiles = []
|
||||||
|
for (const file of rawOutputFiles) {
|
||||||
|
outputFiles.push({
|
||||||
|
path: file.path, // the clsi is now sending this to web
|
||||||
|
url: Url.parse(file.url).path, // the location of the file on the clsi, excluding the host part
|
||||||
|
type: file.type,
|
||||||
|
build: file.build,
|
||||||
|
contentId: file.contentId,
|
||||||
|
ranges: file.ranges,
|
||||||
|
size: file.size,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return outputFiles
|
||||||
|
},
|
||||||
|
|
||||||
|
// Build the JSON compile request for a project. When incremental
// compiles are enabled (or a syncType is given), tries the docupdater's
// in-memory docs first and only falls back to Mongo when the project
// state hash does not match or an error occurs; otherwise always builds
// from Mongo.
_buildRequest(projectId, options, callback) {
  if (options == null) {
    options = {}
  }
  ProjectGetter.getProject(
    projectId,
    { compiler: 1, rootDoc_id: 1, imageName: 1, rootFolder: 1 },
    (err, project) => {
      if (err != null) {
        return callback(
          OError.tag(err, 'failed to get project', { projectId })
        )
      }
      if (project == null) {
        return callback(
          new Errors.NotFoundError(`project does not exist: ${projectId}`)
        )
      }
      if (!VALID_COMPILERS.includes(project.compiler)) {
        // unknown compiler setting: fall back to a safe default
        project.compiler = 'pdflatex'
      }

      if (options.incrementalCompilesEnabled || options.syncType != null) {
        // new way, either incremental or full
        const timer = new Metrics.Timer('editor.compile-getdocs-redis')
        ClsiManager.getContentFromDocUpdaterIfMatch(
          projectId,
          project,
          options,
          (err, projectStateHash, docUpdaterDocs) => {
            timer.done()
            if (err != null) {
              logger.error({ err, projectId }, 'error checking project state')
              // note: we don't bail out when there's an error getting
              // incremental files from the docupdater, we just fall back
              // to a normal compile below
            }
            // see if we can send an incremental update to the CLSI
            if (
              docUpdaterDocs != null &&
              options.syncType !== 'full' &&
              err == null
            ) {
              Metrics.inc('compile-from-redis')
              ClsiManager._buildRequestFromDocupdater(
                projectId,
                options,
                project,
                projectStateHash,
                docUpdaterDocs,
                callback
              )
            } else {
              Metrics.inc('compile-from-mongo')
              ClsiManager._buildRequestFromMongo(
                projectId,
                options,
                project,
                projectStateHash,
                callback
              )
            }
          }
        )
      } else {
        // old way, always from mongo
        const timer = new Metrics.Timer('editor.compile-getdocs-mongo')
        ClsiManager._getContentFromMongo(projectId, (err, docs, files) => {
          timer.done()
          if (err != null) {
            return callback(
              OError.tag(err, 'failed to get contents from Mongo', {
                projectId,
              })
            )
          }
          ClsiManager._finaliseRequest(
            projectId,
            options,
            project,
            docs,
            files,
            callback
          )
        })
      }
    }
  )
},
|
||||||
|
|
||||||
|
getContentFromDocUpdaterIfMatch(projectId, project, options, callback) {
|
||||||
|
ClsiStateManager.computeHash(project, options, (err, projectStateHash) => {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(
|
||||||
|
OError.tag(err, 'Failed to compute project state hash', { projectId })
|
||||||
|
)
|
||||||
|
}
|
||||||
|
DocumentUpdaterHandler.getProjectDocsIfMatch(
|
||||||
|
projectId,
|
||||||
|
projectStateHash,
|
||||||
|
(err, docs) => {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(
|
||||||
|
OError.tag(err, 'Failed to get project documents', {
|
||||||
|
projectId,
|
||||||
|
projectStateHash,
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
callback(null, projectStateHash, docs)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
getOutputFileStream(projectId, userId, buildId, outputFilePath, callback) {
|
||||||
|
const url = `${Settings.apis.clsi.url}/project/${projectId}/user/${userId}/build/${buildId}/output/${outputFilePath}`
|
||||||
|
ClsiCookieManager.getCookieJar(projectId, (err, jar) => {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(
|
||||||
|
OError.tag(err, 'Failed to get cookie jar', {
|
||||||
|
projectId,
|
||||||
|
userId,
|
||||||
|
buildId,
|
||||||
|
outputFilePath,
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const options = { url, method: 'GET', timeout: 60 * 1000, jar }
|
||||||
|
const readStream = request(options)
|
||||||
|
callback(null, readStream)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
// Build an incremental compile request from the docupdater's in-memory
// docs. Only docs are sent (no files — those are already on the CLSI
// from a previous full sync), keyed by their project path.
_buildRequestFromDocupdater(
  projectId,
  options,
  project,
  projectStateHash,
  docUpdaterDocs,
  callback
) {
  ProjectEntityHandler.getAllDocPathsFromProject(project, (err, docPath) => {
    if (err != null) {
      return callback(
        OError.tag(err, 'Failed to get doc paths', { projectId })
      )
    }
    const docs = {}
    for (const doc of docUpdaterDocs || []) {
      const path = docPath[doc._id]
      docs[path] = doc
    }
    // send new docs but not files as those are already on the clsi
    options = _.clone(options)
    options.syncType = 'incremental'
    options.syncState = projectStateHash
    // create stub doc entries for any possible root docs, if not
    // present in the docupdater. This allows finaliseRequest to
    // identify the root doc.
    const possibleRootDocIds = [options.rootDoc_id, project.rootDoc_id]
    for (const rootDocId of possibleRootDocIds) {
      if (rootDocId != null && rootDocId in docPath) {
        const path = docPath[rootDocId]
        if (docs[path] == null) {
          // stub entry: has no `lines`, so _finaliseRequest records the
          // path without sending any content
          docs[path] = { _id: rootDocId, path }
        }
      }
    }
    ClsiManager._finaliseRequest(
      projectId,
      options,
      project,
      docs,
      [],
      callback
    )
  })
},
|
||||||
|
|
||||||
|
_buildRequestFromMongo(
|
||||||
|
projectId,
|
||||||
|
options,
|
||||||
|
project,
|
||||||
|
projectStateHash,
|
||||||
|
callback
|
||||||
|
) {
|
||||||
|
ClsiManager._getContentFromMongo(projectId, (err, docs, files) => {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(
|
||||||
|
OError.tag(err, 'failed to get project contents from Mongo', {
|
||||||
|
projectId,
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
options = {
|
||||||
|
...options,
|
||||||
|
syncType: 'full',
|
||||||
|
syncState: projectStateHash,
|
||||||
|
}
|
||||||
|
ClsiManager._finaliseRequest(
|
||||||
|
projectId,
|
||||||
|
options,
|
||||||
|
project,
|
||||||
|
docs,
|
||||||
|
files,
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
// Flush pending edits to Mongo, then load the project's docs and files.
// Yields (docs, files), each defaulting to an empty object.
_getContentFromMongo(projectId, callback) {
  DocumentUpdaterHandler.flushProjectToMongo(projectId, err => {
    if (err != null) {
      return callback(
        OError.tag(err, 'failed to flush project to Mongo', { projectId })
      )
    }
    ProjectEntityHandler.getAllDocs(projectId, (err, docs) => {
      if (err != null) {
        return callback(
          OError.tag(err, 'failed to get project docs', { projectId })
        )
      }
      ProjectEntityHandler.getAllFiles(projectId, (err, files) => {
        if (err != null) {
          return callback(
            OError.tag(err, 'failed to get project files', { projectId })
          )
        }
        if (files == null) {
          files = {}
        }
        callback(null, docs || {}, files || {})
      })
    })
  })
},
|
||||||
|
|
||||||
|
// Assemble the final CLSI request body from docs and files.
// Root resource resolution, in priority order: options.rootDoc_id
// override > project.rootDoc_id > a doc literally named 'main.tex' >
// the only doc in a single-doc project. Errors with
// 'no main file specified' when none of these apply (sendRequestOnce
// converts that message into a validation problem).
_finaliseRequest(projectId, options, project, docs, files, callback) {
  const resources = []
  let rootResourcePath = null
  let rootResourcePathOverride = null
  let hasMainFile = false
  let numberOfDocsInProject = 0

  for (let path in docs) {
    const doc = docs[path]
    path = path.replace(/^\//, '') // Remove leading /
    numberOfDocsInProject++
    if (doc.lines != null) {
      // add doc to resources unless it is just a stub entry
      resources.push({
        path,
        content: doc.lines.join('\n'),
      })
    }
    if (
      project.rootDoc_id != null &&
      doc._id.toString() === project.rootDoc_id.toString()
    ) {
      rootResourcePath = path
    }
    if (
      options.rootDoc_id != null &&
      doc._id.toString() === options.rootDoc_id.toString()
    ) {
      rootResourcePathOverride = path
    }
    if (path === 'main.tex') {
      hasMainFile = true
    }
  }

  if (rootResourcePathOverride != null) {
    rootResourcePath = rootResourcePathOverride
  }
  if (rootResourcePath == null) {
    if (hasMainFile) {
      rootResourcePath = 'main.tex'
    } else if (numberOfDocsInProject === 1) {
      // only one file, must be the main document
      for (const path in docs) {
        // Remove leading /
        rootResourcePath = path.replace(/^\//, '')
      }
    } else {
      return callback(new OError('no main file specified', { projectId }))
    }
  }

  // files are sent by URL (fetched by the CLSI from the filestore),
  // with the creation time as a cache-busting "modified" stamp
  for (let path in files) {
    const file = files[path]
    path = path.replace(/^\//, '') // Remove leading /
    resources.push({
      path,
      url: `${Settings.apis.filestore.url}/project/${project._id}/file/${file._id}`,
      modified: file.created != null ? file.created.getTime() : undefined,
    })
  }

  callback(null, {
    compile: {
      options: {
        compiler: project.compiler,
        timeout: options.timeout,
        imageName: project.imageName,
        draft: !!options.draft,
        check: options.check,
        syncType: options.syncType,
        syncState: options.syncState,
        compileGroup: options.compileGroup,
        enablePdfCaching:
          (Settings.enablePdfCaching && options.enablePdfCaching) || false,
      },
      rootResourcePath,
      resources,
    },
  })
},
|
||||||
|
|
||||||
|
// Ask the CLSI for a word count of the given file (or the project's
// root resource when `file` is falsy), pinned to a specific CLSI node
// when clsiserverid is provided.
wordCount(projectId, userId, file, options, clsiserverid, callback) {
  ClsiManager._buildRequest(projectId, options, (err, req) => {
    if (err != null) {
      return callback(
        OError.tag(err, 'Failed to build CLSI request', {
          projectId,
          options,
        })
      )
    }
    // default to the root document when no file was requested
    const filename = file || req.compile.rootResourcePath
    const wordCountUrl = ClsiManager._getCompilerUrl(
      options.compileGroup,
      projectId,
      userId,
      'wordcount'
    )
    const opts = {
      url: wordCountUrl,
      qs: {
        file: filename,
        image: req.compile.options.imageName,
      },
      method: 'GET',
    }
    ClsiManager._makeRequestWithClsiServerId(
      projectId,
      opts,
      clsiserverid,
      (err, response, body) => {
        if (err != null) {
          return callback(
            OError.tag(err, 'CLSI request failed', { projectId })
          )
        }
        if (response.statusCode >= 200 && response.statusCode < 300) {
          callback(null, body)
        } else {
          callback(
            new OError(
              `CLSI returned non-success code: ${response.statusCode}`,
              {
                projectId,
                clsiResponse: body,
                statusCode: response.statusCode,
              }
            )
          )
        }
      }
    )
  })
},
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = ClsiManager
|
81
services/web/app/src/Features/Compile/ClsiStateManager.js
Normal file
81
services/web/app/src/Features/Compile/ClsiStateManager.js
Normal file
|
@ -0,0 +1,81 @@
|
||||||
|
/* eslint-disable
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS101: Remove unnecessary use of Array.from
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS205: Consider reworking code to avoid use of IIFEs
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let ClsiStateManager
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const crypto = require('crypto')
|
||||||
|
const ProjectEntityHandler = require('../Project/ProjectEntityHandler')
|
||||||
|
|
||||||
|
// The "state" of a project is a hash of the relevant attributes in the
|
||||||
|
// project object in this case we only need the rootFolder.
|
||||||
|
//
|
||||||
|
// The idea is that it will change if any doc or file is
|
||||||
|
// created/renamed/deleted, and also if the content of any file (not
|
||||||
|
// doc) changes.
|
||||||
|
//
|
||||||
|
// When the hash changes the full set of files on the CLSI will need to
|
||||||
|
// be updated. If it doesn't change then we can overwrite changed docs
|
||||||
|
// in place on the clsi, getting them from the docupdater.
|
||||||
|
//
|
||||||
|
// The docupdater is responsible for setting the key in redis, and
|
||||||
|
// unsetting it if it removes any documents from the doc updater.
|
||||||
|
|
||||||
|
// SHA-1 hex digest of a UTF-8 string; used as the project "state" hash.
function buildState(s) {
  const hasher = crypto.createHash('sha1')
  hasher.update(s, 'utf8')
  return hasher.digest('hex')
}
|
||||||
|
|
||||||
|
module.exports = ClsiStateManager = {
|
||||||
|
computeHash(project, options, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, hash) {}
|
||||||
|
}
|
||||||
|
return ProjectEntityHandler.getAllEntitiesFromProject(
|
||||||
|
project,
|
||||||
|
function (err, docs, files) {
|
||||||
|
const fileList = Array.from(files || []).map(
|
||||||
|
f => `${f.file._id}:${f.file.rev}:${f.file.created}:${f.path}`
|
||||||
|
)
|
||||||
|
const docList = Array.from(docs || []).map(
|
||||||
|
d => `${d.doc._id}:${d.path}`
|
||||||
|
)
|
||||||
|
const sortedEntityList = [
|
||||||
|
...Array.from(docList),
|
||||||
|
...Array.from(fileList),
|
||||||
|
].sort()
|
||||||
|
// ignore the isAutoCompile options as it doesn't affect the
|
||||||
|
// output, but include all other options e.g. draft
|
||||||
|
const optionsList = (() => {
|
||||||
|
const result = []
|
||||||
|
const object = options || {}
|
||||||
|
for (const key in object) {
|
||||||
|
const value = object[key]
|
||||||
|
if (!['isAutoCompile'].includes(key)) {
|
||||||
|
result.push(`option ${key}:${value}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
})()
|
||||||
|
const sortedOptionsList = optionsList.sort()
|
||||||
|
const hash = buildState(
|
||||||
|
[
|
||||||
|
...Array.from(sortedEntityList),
|
||||||
|
...Array.from(sortedOptionsList),
|
||||||
|
].join('\n')
|
||||||
|
)
|
||||||
|
return callback(null, hash)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
582
services/web/app/src/Features/Compile/CompileController.js
Normal file
582
services/web/app/src/Features/Compile/CompileController.js
Normal file
|
@ -0,0 +1,582 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let CompileController
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const Metrics = require('@overleaf/metrics')
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const CompileManager = require('./CompileManager')
|
||||||
|
const ClsiManager = require('./ClsiManager')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const request = require('request')
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
const SessionManager = require('../Authentication/SessionManager')
|
||||||
|
const RateLimiter = require('../../infrastructure/RateLimiter')
|
||||||
|
const ClsiCookieManager = require('./ClsiCookieManager')(
|
||||||
|
Settings.apis.clsi != null ? Settings.apis.clsi.backendGroupName : undefined
|
||||||
|
)
|
||||||
|
const Path = require('path')
|
||||||
|
|
||||||
|
const COMPILE_TIMEOUT_MS = 10 * 60 * 1000
|
||||||
|
|
||||||
|
// Look up the TeX Live image name configured for a project.
// Calls back with the lookup error, an Error when the project does not
// exist, or (null, imageName) on success.
function getImageNameForProject(projectId, callback) {
  ProjectGetter.getProject(projectId, { imageName: 1 }, (err, project) => {
    if (err) {
      return callback(err)
    }
    if (!project) {
      return callback(new Error('project not found'))
    }
    return callback(null, project.imageName)
  })
}
|
||||||
|
|
||||||
|
module.exports = CompileController = {
|
||||||
|
// POST /project/:Project_id/compile — start a compile for the logged-in
// user. Compile options are assembled from the query string
// (auto_compile, enable_pdf_caching) and the request body; the response
// carries the compile status, output file list and routing info.
compile(req, res, next) {
  // compiles can be slow; extend the default response timeout
  res.setTimeout(COMPILE_TIMEOUT_MS)
  const project_id = req.params.Project_id
  const isAutoCompile = !!req.query.auto_compile
  const enablePdfCaching = !!req.query.enable_pdf_caching
  const user_id = SessionManager.getLoggedInUserId(req.session)
  const options = {
    isAutoCompile,
    enablePdfCaching,
  }

  if (req.body.rootDoc_id) {
    options.rootDoc_id = req.body.rootDoc_id
  } else if (
    req.body.settingsOverride &&
    req.body.settingsOverride.rootDoc_id
  ) {
    // Can be removed after deploy
    options.rootDoc_id = req.body.settingsOverride.rootDoc_id
  }
  if (req.body.compiler) {
    options.compiler = req.body.compiler
  }
  if (req.body.draft) {
    options.draft = req.body.draft
  }
  // only whitelisted check modes are forwarded
  if (['validate', 'error', 'silent'].includes(req.body.check)) {
    options.check = req.body.check
  }
  if (req.body.incrementalCompilesEnabled) {
    options.incrementalCompilesEnabled = true
  }

  CompileManager.compile(
    project_id,
    user_id,
    options,
    (
      error,
      status,
      outputFiles,
      clsiServerId,
      limits,
      validationProblems,
      stats,
      timings
    ) => {
      if (error) {
        Metrics.inc('compile-error')
        return next(error)
      }
      Metrics.inc('compile-status', 1, { status: status })
      res.json({
        status,
        outputFiles,
        compileGroup: limits != null ? limits.compileGroup : undefined,
        clsiServerId,
        validationProblems,
        stats,
        timings,
        pdfDownloadDomain: Settings.pdfDownloadDomain,
      })
    }
  )
},
|
||||||
|
|
||||||
|
stopCompile(req, res, next) {
|
||||||
|
if (next == null) {
|
||||||
|
next = function (error) {}
|
||||||
|
}
|
||||||
|
const project_id = req.params.Project_id
|
||||||
|
const user_id = SessionManager.getLoggedInUserId(req.session)
|
||||||
|
return CompileManager.stopCompile(project_id, user_id, function (error) {
|
||||||
|
if (error != null) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
return res.status(200).send()
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
// Used for submissions through the public API
|
||||||
|
// Compile a one-off submission through the public API (no project or
// session — identified only by req.params.submission_id). Options are
// picked out of the request body with null-safe guards, then forwarded
// to the external CLSI request path. Responds with a JSON summary of
// the compile result.
compileSubmission(req, res, next) {
  if (next == null) {
    next = function (error) {}
  }
  // Compiles can be slow; extend the response timeout accordingly.
  res.setTimeout(COMPILE_TIMEOUT_MS)
  const { submission_id } = req.params
  const options = {}
  if ((req.body != null ? req.body.rootResourcePath : undefined) != null) {
    options.rootResourcePath = req.body.rootResourcePath
  }
  if (req.body != null ? req.body.compiler : undefined) {
    options.compiler = req.body.compiler
  }
  if (req.body != null ? req.body.draft : undefined) {
    options.draft = req.body.draft
  }
  // Only accept a known syntax-check mode; anything else is ignored.
  if (
    ['validate', 'error', 'silent'].includes(
      req.body != null ? req.body.check : undefined
    )
  ) {
    options.check = req.body.check
  }
  // Fall back to the instance-wide defaults when the caller does not
  // specify a compile group or timeout.
  options.compileGroup =
    (req.body != null ? req.body.compileGroup : undefined) ||
    Settings.defaultFeatures.compileGroup
  options.timeout =
    (req.body != null ? req.body.timeout : undefined) ||
    Settings.defaultFeatures.compileTimeout
  return ClsiManager.sendExternalRequest(
    submission_id,
    req.body,
    options,
    function (error, status, outputFiles, clsiServerId, validationProblems) {
      if (error != null) {
        return next(error)
      }
      res.contentType('application/json')
      return res.status(200).send(
        JSON.stringify({
          status,
          outputFiles,
          clsiServerId,
          validationProblems,
        })
      )
    }
  )
},
|
||||||
|
|
||||||
|
_compileAsUser(req, callback) {
|
||||||
|
// callback with user_id if per-user, undefined otherwise
|
||||||
|
if (!Settings.disablePerUserCompiles) {
|
||||||
|
const user_id = SessionManager.getLoggedInUserId(req.session)
|
||||||
|
return callback(null, user_id)
|
||||||
|
} else {
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
}, // do a per-project compile, not per-user
|
||||||
|
|
||||||
|
_downloadAsUser(req, callback) {
|
||||||
|
// callback with user_id if per-user, undefined otherwise
|
||||||
|
if (!Settings.disablePerUserCompiles) {
|
||||||
|
const user_id = SessionManager.getLoggedInUserId(req.session)
|
||||||
|
return callback(null, user_id)
|
||||||
|
} else {
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
}, // do a per-project compile, not per-user
|
||||||
|
|
||||||
|
downloadPdf(req, res, next) {
|
||||||
|
if (next == null) {
|
||||||
|
next = function (error) {}
|
||||||
|
}
|
||||||
|
Metrics.inc('pdf-downloads')
|
||||||
|
const project_id = req.params.Project_id
|
||||||
|
const isPdfjsPartialDownload =
|
||||||
|
req.query != null ? req.query.pdfng : undefined
|
||||||
|
const rateLimit = function (callback) {
|
||||||
|
if (isPdfjsPartialDownload) {
|
||||||
|
return callback(null, true)
|
||||||
|
} else {
|
||||||
|
const rateLimitOpts = {
|
||||||
|
endpointName: 'full-pdf-download',
|
||||||
|
throttle: 1000,
|
||||||
|
subjectName: req.ip,
|
||||||
|
timeInterval: 60 * 60,
|
||||||
|
}
|
||||||
|
return RateLimiter.addCount(rateLimitOpts, callback)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ProjectGetter.getProject(
|
||||||
|
project_id,
|
||||||
|
{ name: 1 },
|
||||||
|
function (err, project) {
|
||||||
|
res.contentType('application/pdf')
|
||||||
|
const filename = `${CompileController._getSafeProjectName(project)}.pdf`
|
||||||
|
|
||||||
|
if (req.query.popupDownload) {
|
||||||
|
res.setContentDisposition('attachment', { filename })
|
||||||
|
} else {
|
||||||
|
res.setContentDisposition('', { filename })
|
||||||
|
}
|
||||||
|
|
||||||
|
return rateLimit(function (err, canContinue) {
|
||||||
|
if (err != null) {
|
||||||
|
logger.err({ err }, 'error checking rate limit for pdf download')
|
||||||
|
return res.sendStatus(500)
|
||||||
|
} else if (!canContinue) {
|
||||||
|
logger.log(
|
||||||
|
{ project_id, ip: req.ip },
|
||||||
|
'rate limit hit downloading pdf'
|
||||||
|
)
|
||||||
|
return res.sendStatus(500)
|
||||||
|
} else {
|
||||||
|
return CompileController._downloadAsUser(
|
||||||
|
req,
|
||||||
|
function (error, user_id) {
|
||||||
|
const url = CompileController._getFileUrl(
|
||||||
|
project_id,
|
||||||
|
user_id,
|
||||||
|
req.params.build_id,
|
||||||
|
'output.pdf'
|
||||||
|
)
|
||||||
|
return CompileController.proxyToClsi(
|
||||||
|
project_id,
|
||||||
|
url,
|
||||||
|
req,
|
||||||
|
res,
|
||||||
|
next
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
_getSafeProjectName(project) {
|
||||||
|
const wordRegExp = /\W/g
|
||||||
|
const safeProjectName = project.name.replace(wordRegExp, '_')
|
||||||
|
return safeProjectName
|
||||||
|
},
|
||||||
|
|
||||||
|
// Delete the auxiliary compile files (logs, .aux, cached output) for
// this project on the CLSI, scoped to the requesting user's container.
// `clsiserverid` (optional query param) pins the request to a specific
// CLSI backend.
deleteAuxFiles(req, res, next) {
  const project_id = req.params.Project_id
  const { clsiserverid } = req.query
  return CompileController._compileAsUser(req, function (error, user_id) {
    if (error != null) {
      return next(error)
    }
    CompileManager.deleteAuxFiles(
      project_id,
      user_id,
      clsiserverid,
      function (error) {
        if (error != null) {
          return next(error)
        }
        return res.sendStatus(200)
      }
    )
  })
},
|
||||||
|
|
||||||
|
// this is only used by templates, so is not called with a user_id
|
||||||
|
compileAndDownloadPdf(req, res, next) {
|
||||||
|
const { project_id } = req.params
|
||||||
|
// pass user_id as null, since templates are an "anonymous" compile
|
||||||
|
return CompileManager.compile(project_id, null, {}, function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
logger.err(
|
||||||
|
{ err, project_id },
|
||||||
|
'something went wrong compile and downloading pdf'
|
||||||
|
)
|
||||||
|
res.sendStatus(500)
|
||||||
|
}
|
||||||
|
const url = `/project/${project_id}/output/output.pdf`
|
||||||
|
return CompileController.proxyToClsi(project_id, url, req, res, next)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
// Proxy a single compile output file (log, aux file, pdf, …) from the
// CLSI to the client, scoped to the requesting user's compile
// container and optionally to a specific build.
getFileFromClsi(req, res, next) {
  if (next == null) {
    next = function (error) {}
  }
  const project_id = req.params.Project_id
  return CompileController._downloadAsUser(req, function (error, user_id) {
    if (error != null) {
      return next(error)
    }
    // Compose the per-project/per-user/per-build output URL on the CLSI.
    const url = CompileController._getFileUrl(
      project_id,
      user_id,
      req.params.build_id,
      req.params.file
    )
    return CompileController.proxyToClsi(project_id, url, req, res, next)
  })
},
|
||||||
|
|
||||||
|
// Proxy a compile output file for a public-API submission. There is no
// session user, so the URL is built with a null user id and the compile
// group is taken from the request body (falling back to the default).
getFileFromClsiWithoutUser(req, res, next) {
  if (next == null) {
    next = function (error) {}
  }
  const { submission_id } = req.params
  const url = CompileController._getFileUrl(
    submission_id,
    null,
    req.params.build_id,
    req.params.file
  )
  const limits = {
    compileGroup:
      (req.body != null ? req.body.compileGroup : undefined) ||
      Settings.defaultFeatures.compileGroup,
  }
  // Skip the per-project limit lookup — pass the limits explicitly.
  return CompileController.proxyToClsiWithLimits(
    submission_id,
    url,
    limits,
    req,
    res,
    next
  )
},
|
||||||
|
|
||||||
|
// compute a GET file url for a given project, user (optional), build (optional) and file
|
||||||
|
_getFileUrl(project_id, user_id, build_id, file) {
|
||||||
|
let url
|
||||||
|
if (user_id != null && build_id != null) {
|
||||||
|
url = `/project/${project_id}/user/${user_id}/build/${build_id}/output/${file}`
|
||||||
|
} else if (user_id != null) {
|
||||||
|
url = `/project/${project_id}/user/${user_id}/output/${file}`
|
||||||
|
} else if (build_id != null) {
|
||||||
|
url = `/project/${project_id}/build/${build_id}/output/${file}`
|
||||||
|
} else {
|
||||||
|
url = `/project/${project_id}/output/${file}`
|
||||||
|
}
|
||||||
|
return url
|
||||||
|
},
|
||||||
|
|
||||||
|
// compute a POST url for a project, user (optional) and action
|
||||||
|
_getUrl(project_id, user_id, action) {
|
||||||
|
let path = `/project/${project_id}`
|
||||||
|
if (user_id != null) {
|
||||||
|
path += `/user/${user_id}`
|
||||||
|
}
|
||||||
|
return `${path}/${action}`
|
||||||
|
},
|
||||||
|
|
||||||
|
// SyncTeX "pdf -> code": given a page number and (h, v) coordinates in
// the PDF, proxy the lookup to the CLSI's sync/pdf endpoint. The three
// query parameters are validated against strict numeric formats before
// anything is forwarded.
proxySyncPdf(req, res, next) {
  if (next == null) {
    next = function (error) {}
  }
  const project_id = req.params.Project_id
  const { page, h, v } = req.query
  // page must be a non-negative integer; h and v signed decimals.
  if (!(page != null ? page.match(/^\d+$/) : undefined)) {
    return next(new Error('invalid page parameter'))
  }
  if (!(h != null ? h.match(/^-?\d+\.\d+$/) : undefined)) {
    return next(new Error('invalid h parameter'))
  }
  if (!(v != null ? v.match(/^-?\d+\.\d+$/) : undefined)) {
    return next(new Error('invalid v parameter'))
  }
  // whether this request is going to a per-user container
  return CompileController._compileAsUser(req, function (error, user_id) {
    if (error != null) {
      return next(error)
    }
    // The CLSI needs the same TeXLive image the compile used so the
    // synctex output matches.
    getImageNameForProject(project_id, (error, imageName) => {
      if (error) return next(error)

      const url = CompileController._getUrl(project_id, user_id, 'sync/pdf')
      const destination = { url, qs: { page, h, v, imageName } }
      return CompileController.proxyToClsi(
        project_id,
        destination,
        req,
        res,
        next
      )
    })
  })
},
|
||||||
|
|
||||||
|
// SyncTeX "code -> pdf": given a source file, line and column, proxy
// the lookup to the CLSI's sync/code endpoint. The file parameter is
// checked to be a simple relative path and line/column must be
// integers.
proxySyncCode(req, res, next) {
  if (next == null) {
    next = function (error) {}
  }
  const project_id = req.params.Project_id
  const { file, line, column } = req.query
  if (file == null) {
    return next(new Error('missing file parameter'))
  }
  // Check that we are dealing with a simple file path (this is not
  // strictly needed because synctex uses this parameter as a label
  // to look up in the synctex output, and does not open the file
  // itself). Since we have valid synctex paths like foo/./bar we
  // allow those by replacing /./ with /
  const testPath = file.replace('/./', '/')
  // Path.resolve collapses any ../ traversal; a mismatch means the
  // path tried to escape the project root.
  if (Path.resolve('/', testPath) !== `/${testPath}`) {
    return next(new Error('invalid file parameter'))
  }
  if (!(line != null ? line.match(/^\d+$/) : undefined)) {
    return next(new Error('invalid line parameter'))
  }
  if (!(column != null ? column.match(/^\d+$/) : undefined)) {
    return next(new Error('invalid column parameter'))
  }
  return CompileController._compileAsUser(req, function (error, user_id) {
    if (error != null) {
      return next(error)
    }
    // Use the same TeXLive image the project compiles with.
    getImageNameForProject(project_id, (error, imageName) => {
      if (error) return next(error)

      const url = CompileController._getUrl(project_id, user_id, 'sync/code')
      const destination = { url, qs: { file, line, column, imageName } }
      return CompileController.proxyToClsi(
        project_id,
        destination,
        req,
        res,
        next
      )
    })
  })
},
|
||||||
|
|
||||||
|
// Proxy an arbitrary request to the CLSI for this project. The compile
// limits are either taken from the query string (compileGroup) or
// looked up per project, then delegated to proxyToClsiWithLimits.
proxyToClsi(project_id, url, req, res, next) {
  if (next == null) {
    next = function (error) {}
  }
  if (req.query != null ? req.query.compileGroup : undefined) {
    // Caller already knows its compile group — skip the lookup.
    return CompileController.proxyToClsiWithLimits(
      project_id,
      url,
      { compileGroup: req.query.compileGroup },
      req,
      res,
      next
    )
  } else {
    return CompileManager.getProjectCompileLimits(
      project_id,
      function (error, limits) {
        if (error != null) {
          return next(error)
        }
        return CompileController.proxyToClsiWithLimits(
          project_id,
          url,
          limits,
          req,
          res,
          next
        )
      }
    )
  }
},
|
||||||
|
|
||||||
|
// Core CLSI proxy: builds the outgoing request (cookie jar or pinned
// clsiserverid, query string, selected headers) and pipes the CLSI
// response straight back to the client. `url` may be a plain string or
// a { url, qs } object.
// NOTE(review): `limits` is not referenced in this body — presumably
// kept for interface compatibility with proxyToClsi; confirm.
proxyToClsiWithLimits(project_id, url, limits, req, res, next) {
  if (next == null) {
    next = function (error) {}
  }
  _getPersistenceOptions(req, project_id, (err, persistenceOptions) => {
    let qs
    if (err != null) {
      OError.tag(err, 'error getting cookie jar for clsi request')
      return next(err)
    }
    // expand any url parameter passed in as {url:..., qs:...}
    if (typeof url === 'object') {
      ;({ url, qs } = url)
    }
    const compilerUrl = Settings.apis.clsi.url
    url = `${compilerUrl}${url}`
    const oneMinute = 60 * 1000
    // the base request
    const options = {
      url,
      method: req.method,
      timeout: oneMinute,
      ...persistenceOptions,
    }
    // add any provided query string
    if (qs != null) {
      options.qs = Object.assign(options.qs || {}, qs)
    }
    // if we have a build parameter, pass it through to the clsi
    if (
      (req.query != null ? req.query.pdfng : undefined) &&
      (req.query != null ? req.query.build : undefined) != null
    ) {
      // only for new pdf viewer
      if (options.qs == null) {
        options.qs = {}
      }
      options.qs.build = req.query.build
    }
    // if we are byte serving pdfs, pass through If-* and Range headers
    // do not send any others, there's a proxying loop if Host: is passed!
    if (req.query != null ? req.query.pdfng : undefined) {
      const newHeaders = {}
      for (const h in req.headers) {
        const v = req.headers[h]
        if (/^(If-|Range)/i.test(h)) {
          newHeaders[h] = req.headers[h]
        }
      }
      options.headers = newHeaders
    }
    const proxy = request(options)
    proxy.pipe(res)
    // Log proxy failures; the response may already be partially sent,
    // so no error response is attempted here.
    return proxy.on('error', error =>
      logger.warn({ err: error, url }, 'CLSI proxy error')
    )
  })
},
|
||||||
|
|
||||||
|
// Run the CLSI word count for this project (optionally for a specific
// file via ?file=...) and return the raw JSON body from the CLSI.
wordCount(req, res, next) {
  const project_id = req.params.Project_id
  const file = req.query.file || false
  const { clsiserverid } = req.query
  return CompileController._compileAsUser(req, function (error, user_id) {
    if (error != null) {
      return next(error)
    }
    CompileManager.wordCount(
      project_id,
      user_id,
      file,
      clsiserverid,
      function (error, body) {
        if (error != null) {
          return next(error)
        }
        res.contentType('application/json')
        return res.send(body)
      }
    )
  })
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Work out how to keep CLSI requests sticky to the same backend:
// either the client supplied an explicit clsiserverid (forwarded as a
// query parameter) or we fall back to the project's cookie jar.
function _getPersistenceOptions(req, projectId, callback) {
  const { clsiserverid } = req.query
  if (clsiserverid && typeof clsiserverid === 'string') {
    return callback(null, { qs: { clsiserverid } })
  }
  ClsiCookieManager.getCookieJar(projectId, (err, jar) =>
    callback(err, { jar })
  )
}
|
292
services/web/app/src/Features/Compile/CompileManager.js
Normal file
292
services/web/app/src/Features/Compile/CompileManager.js
Normal file
|
@ -0,0 +1,292 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS101: Remove unnecessary use of Array.from
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS103: Rewrite code to no longer use __guard__
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let CompileManager
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
const RedisWrapper = require('../../infrastructure/RedisWrapper')
|
||||||
|
const rclient = RedisWrapper.client('clsi_recently_compiled')
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const ProjectRootDocManager = require('../Project/ProjectRootDocManager')
|
||||||
|
const UserGetter = require('../User/UserGetter')
|
||||||
|
const ClsiManager = require('./ClsiManager')
|
||||||
|
const Metrics = require('@overleaf/metrics')
|
||||||
|
const rateLimiter = require('../../infrastructure/RateLimiter')
|
||||||
|
|
||||||
|
module.exports = CompileManager = {
|
||||||
|
// Orchestrate a full compile: debounce recent compiles, apply the
// global and per-compile-group autocompile rate limits, make sure the
// project has a root document, merge the owner's compile limits into
// `options`, then hand off to the CLSI. The callback receives
// (error, status, outputFiles, clsiServerId, limits,
//  validationProblems, stats, timings); throttled compiles short-
// circuit with status 'too-recently-compiled' or 'autocompile-backoff'
// and an empty outputFiles list.
compile(project_id, user_id, options, _callback) {
  if (options == null) {
    options = {}
  }
  if (_callback == null) {
    _callback = function (error) {}
  }
  // Wrap the callback so the compile timer is always recorded.
  const timer = new Metrics.Timer('editor.compile')
  const callback = function (...args) {
    timer.done()
    return _callback(...Array.from(args || []))
  }

  return CompileManager._checkIfRecentlyCompiled(
    project_id,
    user_id,
    function (error, recentlyCompiled) {
      if (error != null) {
        return callback(error)
      }
      if (recentlyCompiled) {
        return callback(null, 'too-recently-compiled', [])
      }

      // Global ('everyone') autocompile rate limit.
      return CompileManager._checkIfAutoCompileLimitHasBeenHit(
        options.isAutoCompile,
        'everyone',
        function (err, canCompile) {
          if (!canCompile) {
            return callback(null, 'autocompile-backoff', [])
          }

          return ProjectRootDocManager.ensureRootDocumentIsSet(
            project_id,
            function (error) {
              if (error != null) {
                return callback(error)
              }
              return CompileManager.getProjectCompileLimits(
                project_id,
                function (error, limits) {
                  if (error != null) {
                    return callback(error)
                  }
                  // Merge the owner's limits (timeout, compileGroup)
                  // into the compile options.
                  for (const key in limits) {
                    const value = limits[key]
                    options[key] = value
                  }
                  // Put a lower limit on autocompiles for free users, based on compileGroup
                  return CompileManager._checkCompileGroupAutoCompileLimit(
                    options.isAutoCompile,
                    limits.compileGroup,
                    function (err, canCompile) {
                      if (!canCompile) {
                        return callback(null, 'autocompile-backoff', [])
                      }
                      // only pass user_id down to clsi if this is a per-user compile
                      const compileAsUser = Settings.disablePerUserCompiles
                        ? undefined
                        : user_id
                      return ClsiManager.sendRequest(
                        project_id,
                        compileAsUser,
                        options,
                        function (
                          error,
                          status,
                          outputFiles,
                          clsiServerId,
                          validationProblems,
                          stats,
                          timings
                        ) {
                          if (error != null) {
                            return callback(error)
                          }
                          return callback(
                            null,
                            status,
                            outputFiles,
                            clsiServerId,
                            limits,
                            validationProblems,
                            stats,
                            timings
                          )
                        }
                      )
                    }
                  )
                }
              )
            }
          )
        }
      )
    }
  )
},
|
||||||
|
|
||||||
|
stopCompile(project_id, user_id, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error) {}
|
||||||
|
}
|
||||||
|
return CompileManager.getProjectCompileLimits(
|
||||||
|
project_id,
|
||||||
|
function (error, limits) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
return ClsiManager.stopCompile(project_id, user_id, limits, callback)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
// Delete a project's auxiliary compile files on the CLSI. Limits are
// resolved first so the delete goes to the correct compile group;
// `clsiserverid` optionally pins the request to a specific backend.
deleteAuxFiles(project_id, user_id, clsiserverid, callback) {
  if (callback == null) {
    callback = function (error) {}
  }
  return CompileManager.getProjectCompileLimits(
    project_id,
    function (error, limits) {
      if (error != null) {
        return callback(error)
      }
      ClsiManager.deleteAuxFiles(
        project_id,
        user_id,
        limits,
        clsiserverid,
        callback
      )
    }
  )
},
|
||||||
|
|
||||||
|
getProjectCompileLimits(project_id, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, limits) {}
|
||||||
|
}
|
||||||
|
return ProjectGetter.getProject(
|
||||||
|
project_id,
|
||||||
|
{ owner_ref: 1 },
|
||||||
|
function (error, project) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
return UserGetter.getUser(
|
||||||
|
project.owner_ref,
|
||||||
|
{ alphaProgram: 1, betaProgram: 1, features: 1 },
|
||||||
|
function (err, owner) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
const ownerFeatures = (owner && owner.features) || {}
|
||||||
|
// put alpha users into their own compile group
|
||||||
|
if (owner && owner.alphaProgram) {
|
||||||
|
ownerFeatures.compileGroup = 'alpha'
|
||||||
|
}
|
||||||
|
return callback(null, {
|
||||||
|
timeout:
|
||||||
|
ownerFeatures.compileTimeout ||
|
||||||
|
Settings.defaultFeatures.compileTimeout,
|
||||||
|
compileGroup:
|
||||||
|
ownerFeatures.compileGroup ||
|
||||||
|
Settings.defaultFeatures.compileGroup,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
COMPILE_DELAY: 1, // seconds — minimum gap between compiles per (project, user)
// Debounce compiles: atomically claim a short-lived Redis key for this
// (project, user) pair. SET ... EX <delay> NX succeeds ('OK') only if
// no compile claimed the key within COMPILE_DELAY seconds, so a
// non-'OK' reply means "recently compiled".
_checkIfRecentlyCompiled(project_id, user_id, callback) {
  if (callback == null) {
    callback = function (error, recentlyCompiled) {}
  }
  const key = `compile:${project_id}:${user_id}`
  return rclient.set(
    key,
    true,
    'EX',
    this.COMPILE_DELAY,
    'NX',
    function (error, ok) {
      if (error != null) {
        return callback(error)
      }
      if (ok === 'OK') {
        // Key was set: no compile in the last COMPILE_DELAY seconds.
        return callback(null, false)
      } else {
        return callback(null, true)
      }
    }
  )
},
|
||||||
|
|
||||||
|
_checkCompileGroupAutoCompileLimit(isAutoCompile, compileGroup, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, canCompile) {}
|
||||||
|
}
|
||||||
|
if (!isAutoCompile) {
|
||||||
|
return callback(null, true)
|
||||||
|
}
|
||||||
|
if (compileGroup === 'standard') {
|
||||||
|
// apply extra limits to the standard compile group
|
||||||
|
return CompileManager._checkIfAutoCompileLimitHasBeenHit(
|
||||||
|
isAutoCompile,
|
||||||
|
compileGroup,
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
Metrics.inc(`auto-compile-${compileGroup}`)
|
||||||
|
return callback(null, true)
|
||||||
|
}
|
||||||
|
}, // always allow priority group users to compile
|
||||||
|
|
||||||
|
// Check the shared autocompile rate limit for a compile group. Manual
// compiles always pass. On rate-limiter errors the compile is treated
// as NOT allowed (fail closed), and a metric is recorded whenever the
// limit blocks a compile.
_checkIfAutoCompileLimitHasBeenHit(isAutoCompile, compileGroup, callback) {
  if (callback == null) {
    callback = function (err, canCompile) {}
  }
  if (!isAutoCompile) {
    return callback(null, true)
  }
  Metrics.inc(`auto-compile-${compileGroup}`)
  const opts = {
    endpointName: 'auto_compile',
    timeInterval: 20,
    subjectName: compileGroup,
    // Per-group throttle from settings, defaulting to 25 per interval.
    throttle: Settings.rateLimit.autoCompile[compileGroup] || 25,
  }
  return rateLimiter.addCount(opts, function (err, canCompile) {
    if (err != null) {
      // Fail closed: if the limiter is unavailable, deny the compile.
      canCompile = false
    }
    if (!canCompile) {
      Metrics.inc(`auto-compile-${compileGroup}-limited`)
    }
    return callback(err, canCompile)
  })
},
|
||||||
|
|
||||||
|
// Run the CLSI word count for a project (optionally for a single
// file). Limits are resolved first so the request reaches the correct
// compile group; `clsiserverid` optionally pins the backend.
wordCount(project_id, user_id, file, clsiserverid, callback) {
  if (callback == null) {
    callback = function (error) {}
  }
  return CompileManager.getProjectCompileLimits(
    project_id,
    function (error, limits) {
      if (error != null) {
        return callback(error)
      }
      ClsiManager.wordCount(
        project_id,
        user_id,
        file,
        limits,
        clsiserverid,
        callback
      )
    }
  )
},
|
||||||
|
}
|
93
services/web/app/src/Features/Contacts/ContactController.js
Normal file
93
services/web/app/src/Features/Contacts/ContactController.js
Normal file
|
@ -0,0 +1,93 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS101: Remove unnecessary use of Array.from
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let ContactsController
|
||||||
|
const SessionManager = require('../Authentication/SessionManager')
|
||||||
|
const ContactManager = require('./ContactManager')
|
||||||
|
const UserGetter = require('../User/UserGetter')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const Modules = require('../../infrastructure/Modules')
|
||||||
|
|
||||||
|
module.exports = ContactsController = {
|
||||||
|
// Return the logged-in user's contact list as JSON: fetch up to 50
// contact ids from the contacts service, hydrate them via UserGetter,
// restore the service's ordering, drop holding accounts, format each
// entry, and append any module-provided contacts.
getContacts(req, res, next) {
  const user_id = SessionManager.getLoggedInUserId(req.session)
  return ContactManager.getContactIds(
    user_id,
    { limit: 50 },
    function (error, contact_ids) {
      if (error != null) {
        return next(error)
      }
      return UserGetter.getUsers(
        contact_ids,
        {
          email: 1,
          first_name: 1,
          last_name: 1,
          holdingAccount: 1,
        },
        function (error, contacts) {
          if (error != null) {
            return next(error)
          }

          // UserGetter.getUsers may not preserve order so put them back in order
          const positions = {}
          for (let i = 0; i < contact_ids.length; i++) {
            const contact_id = contact_ids[i]
            positions[contact_id] = i
          }
          contacts.sort(
            (a, b) =>
              positions[a._id != null ? a._id.toString() : undefined] -
              positions[b._id != null ? b._id.toString() : undefined]
          )

          // Don't count holding accounts to discourage users from repeating mistakes (mistyped or wrong emails, etc)
          contacts = contacts.filter(c => !c.holdingAccount)

          contacts = contacts.map(ContactsController._formatContact)

          // Let installed modules contribute additional contacts.
          return Modules.hooks.fire(
            'getContacts',
            user_id,
            contacts,
            function (error, additional_contacts) {
              if (error != null) {
                return next(error)
              }
              contacts = contacts.concat(
                ...Array.from(additional_contacts || [])
              )
              return res.send({
                contacts,
              })
            }
          )
        }
      )
    }
  )
},
|
||||||
|
|
||||||
|
_formatContact(contact) {
|
||||||
|
return {
|
||||||
|
id: contact._id != null ? contact._id.toString() : undefined,
|
||||||
|
email: contact.email || '',
|
||||||
|
first_name: contact.first_name || '',
|
||||||
|
last_name: contact.last_name || '',
|
||||||
|
type: 'user',
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
91
services/web/app/src/Features/Contacts/ContactManager.js
Normal file
91
services/web/app/src/Features/Contacts/ContactManager.js
Normal file
|
@ -0,0 +1,91 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let ContactManager
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const request = require('request')
|
||||||
|
const settings = require('@overleaf/settings')
|
||||||
|
|
||||||
|
module.exports = ContactManager = {
|
||||||
|
getContactIds(user_id, options, callback) {
|
||||||
|
if (options == null) {
|
||||||
|
options = { limits: 50 }
|
||||||
|
}
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, contacts) {}
|
||||||
|
}
|
||||||
|
const url = `${settings.apis.contacts.url}/user/${user_id}/contacts`
|
||||||
|
return request.get(
|
||||||
|
{
|
||||||
|
url,
|
||||||
|
qs: options,
|
||||||
|
json: true,
|
||||||
|
jar: false,
|
||||||
|
},
|
||||||
|
function (error, res, data) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||||
|
return callback(
|
||||||
|
null,
|
||||||
|
(data != null ? data.contact_ids : undefined) || []
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
error = new OError(
|
||||||
|
`contacts api responded with non-success code: ${res.statusCode}`,
|
||||||
|
{ user_id }
|
||||||
|
)
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
// Record that `user_id` collaborated with `contact_id` by POSTing to
// the contacts service. Calls back with the service's updated
// contact_ids list; non-2xx replies become an OError carrying both ids.
addContact(user_id, contact_id, callback) {
  if (callback == null) {
    callback = function (error) {}
  }
  const url = `${settings.apis.contacts.url}/user/${user_id}/contacts`
  return request.post(
    {
      url,
      json: {
        contact_id,
      },
      jar: false,
    },
    function (error, res, data) {
      if (error != null) {
        return callback(error)
      }
      if (res.statusCode >= 200 && res.statusCode < 300) {
        return callback(
          null,
          (data != null ? data.contact_ids : undefined) || []
        )
      } else {
        error = new OError(
          `contacts api responded with non-success code: ${res.statusCode}`,
          {
            user_id,
            contact_id,
          }
        )
        return callback(error)
      }
    }
  )
},
|
||||||
|
}
|
28
services/web/app/src/Features/Contacts/ContactRouter.js
Normal file
28
services/web/app/src/Features/Contacts/ContactRouter.js
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
const AuthenticationController = require('../Authentication/AuthenticationController')
|
||||||
|
const SessionManager = require('../Authentication/SessionManager')
|
||||||
|
const ContactController = require('./ContactController')
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
|
||||||
|
/**
 * Choose the authentication middleware for the contacts endpoint.
 *
 * When anonymous read/write sharing is enabled, anonymous users get an
 * empty contact list instead of a login redirect; otherwise a login is
 * required as usual.
 *
 * @returns {Function} an Express middleware
 */
function contactsAuthenticationMiddleware() {
  if (Settings.allowAnonymousReadAndWriteSharing) {
    return (req, res, next) => {
      if (!SessionManager.isUserLoggedIn(req.session)) {
        // Anonymous users have no contacts; respond instead of redirecting.
        return res.send({ contacts: [] })
      }
      next()
    }
  }
  return AuthenticationController.requireLogin()
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
apply(webRouter) {
|
||||||
|
webRouter.get(
|
||||||
|
'/user/contacts',
|
||||||
|
contactsAuthenticationMiddleware(),
|
||||||
|
ContactController.getContacts
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
56
services/web/app/src/Features/Cooldown/CooldownManager.js
Normal file
56
services/web/app/src/Features/Cooldown/CooldownManager.js
Normal file
|
@ -0,0 +1,56 @@
|
||||||
|
/* eslint-disable
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let CooldownManager
|
||||||
|
const RedisWrapper = require('../../infrastructure/RedisWrapper')
|
||||||
|
const rclient = RedisWrapper.client('cooldown')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
|
||||||
|
const COOLDOWN_IN_SECONDS = 60 * 10
|
||||||
|
|
||||||
|
module.exports = CooldownManager = {
|
||||||
|
_buildKey(projectId) {
|
||||||
|
return `Cooldown:{${projectId}}`
|
||||||
|
},
|
||||||
|
|
||||||
|
putProjectOnCooldown(projectId, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ projectId },
|
||||||
|
`[Cooldown] putting project on cooldown for ${COOLDOWN_IN_SECONDS} seconds`
|
||||||
|
)
|
||||||
|
return rclient.set(
|
||||||
|
CooldownManager._buildKey(projectId),
|
||||||
|
'1',
|
||||||
|
'EX',
|
||||||
|
COOLDOWN_IN_SECONDS,
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
isProjectOnCooldown(projectId, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, isOnCooldown) {}
|
||||||
|
}
|
||||||
|
return rclient.get(
|
||||||
|
CooldownManager._buildKey(projectId),
|
||||||
|
function (err, result) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
return callback(null, result === '1')
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
40
services/web/app/src/Features/Cooldown/CooldownMiddleware.js
Normal file
40
services/web/app/src/Features/Cooldown/CooldownMiddleware.js
Normal file
|
@ -0,0 +1,40 @@
|
||||||
|
/* eslint-disable
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let CooldownMiddleware
|
||||||
|
const CooldownManager = require('./CooldownManager')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
|
||||||
|
module.exports = CooldownMiddleware = {
|
||||||
|
freezeProject(req, res, next) {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
if (projectId == null) {
|
||||||
|
return next(new Error('[Cooldown] No projectId parameter on route'))
|
||||||
|
}
|
||||||
|
return CooldownManager.isProjectOnCooldown(
|
||||||
|
projectId,
|
||||||
|
function (err, projectIsOnCooldown) {
|
||||||
|
if (err != null) {
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
if (projectIsOnCooldown) {
|
||||||
|
logger.log(
|
||||||
|
{ projectId },
|
||||||
|
'[Cooldown] project is on cooldown, denying request'
|
||||||
|
)
|
||||||
|
return res.sendStatus(429)
|
||||||
|
}
|
||||||
|
return next()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
303
services/web/app/src/Features/Docstore/DocstoreManager.js
Normal file
303
services/web/app/src/Features/Docstore/DocstoreManager.js
Normal file
|
@ -0,0 +1,303 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
const request = require('request').defaults({ jar: false })
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const settings = require('@overleaf/settings')
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
const { promisifyAll } = require('../../util/promises')
|
||||||
|
|
||||||
|
const TIMEOUT = 30 * 1000 // request timeout
|
||||||
|
|
||||||
|
/**
 * HTTP client for the docstore service (settings.apis.docstore.url).
 *
 * All methods use node-style callbacks; a promisified API is also
 * exported as `module.exports.promises` (see bottom of file).
 */
const DocstoreManager = {
  /**
   * Soft-delete a doc, recording its name and deletion time.
   *
   * @param {string} project_id
   * @param {string} doc_id
   * @param {string} name - filename to record on the deleted doc
   * @param {Date} deletedAt
   * @param {Function} [callback] - called with (error)
   */
  deleteDoc(project_id, doc_id, name, deletedAt, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const url = `${settings.apis.docstore.url}/project/${project_id}/doc/${doc_id}`
    const docMetaData = { deleted: true, deletedAt, name }
    const options = { url, json: docMetaData, timeout: TIMEOUT }
    request.patch(options, (error, res) => {
      if (error != null) {
        return callback(error)
      }
      if (res.statusCode >= 200 && res.statusCode < 300) {
        return callback(null)
      }
      if (res.statusCode === 404) {
        const notFound = new Errors.NotFoundError({
          message: 'tried to delete doc not in docstore',
          info: { project_id, doc_id },
        })
        return callback(notFound) // maybe suppress the error when delete doc which is not present?
      }
      return callback(
        new OError(
          `docstore api responded with non-success code: ${res.statusCode}`,
          { project_id, doc_id }
        )
      )
    })
  },

  /**
   * Fetch all (non-deleted) docs for a project.
   * @param {string} project_id
   * @param {Function} [callback] - called with (error, docs)
   */
  getAllDocs(project_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const url = `${settings.apis.docstore.url}/project/${project_id}/doc`
    return request.get(
      { url, timeout: TIMEOUT, json: true },
      (error, res, docs) => {
        if (error != null) {
          return callback(error)
        }
        if (res.statusCode >= 200 && res.statusCode < 300) {
          return callback(null, docs)
        }
        return callback(
          new OError(
            `docstore api responded with non-success code: ${res.statusCode}`,
            { project_id }
          )
        )
      }
    )
  },

  /**
   * Fetch all deleted docs for a project.
   * @param {string} project_id
   * @param {Function} callback - called with (error, docs)
   */
  getAllDeletedDocs(project_id, callback) {
    const url = `${settings.apis.docstore.url}/project/${project_id}/doc-deleted`
    request.get({ url, timeout: TIMEOUT, json: true }, (error, res, docs) => {
      if (error) {
        return callback(
          OError.tag(error, 'could not get deleted docs from docstore')
        )
      }
      if (res.statusCode === 200) {
        return callback(null, docs)
      }
      return callback(
        new OError(
          `docstore api responded with non-success code: ${res.statusCode}`,
          { project_id }
        )
      )
    })
  },

  /**
   * Fetch the tracked-changes/comment ranges for every doc in a project.
   * @param {string} project_id
   * @param {Function} [callback] - called with (error, docs)
   */
  getAllRanges(project_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const url = `${settings.apis.docstore.url}/project/${project_id}/ranges`
    return request.get(
      { url, timeout: TIMEOUT, json: true },
      (error, res, docs) => {
        if (error != null) {
          return callback(error)
        }
        if (res.statusCode >= 200 && res.statusCode < 300) {
          return callback(null, docs)
        }
        return callback(
          new OError(
            `docstore api responded with non-success code: ${res.statusCode}`,
            { project_id }
          )
        )
      }
    )
  },

  /**
   * Fetch a single doc.
   *
   * @param {string} project_id
   * @param {string} doc_id
   * @param {Object} [options] - { include_deleted: true } to also find
   *   soft-deleted docs; may be omitted (callback passed in its place)
   * @param {Function} [callback] - called with
   *   (error, lines, rev, version, ranges)
   */
  getDoc(project_id, doc_id, options, callback) {
    if (options == null) {
      options = {}
    }
    if (callback == null) {
      callback = function (error, lines, rev, version) {}
    }
    // Support the two-argument call style getDoc(project_id, doc_id, cb).
    if (typeof options === 'function') {
      callback = options
      options = {}
    }
    let url = `${settings.apis.docstore.url}/project/${project_id}/doc/${doc_id}`
    if (options.include_deleted) {
      url += '?include_deleted=true'
    }
    return request.get(
      { url, timeout: TIMEOUT, json: true },
      (error, res, doc) => {
        if (error != null) {
          return callback(error)
        }
        if (res.statusCode >= 200 && res.statusCode < 300) {
          logger.log(
            { doc_id, project_id, version: doc.version, rev: doc.rev },
            'got doc from docstore api'
          )
          return callback(null, doc.lines, doc.rev, doc.version, doc.ranges)
        }
        if (res.statusCode === 404) {
          return callback(
            new Errors.NotFoundError({
              message: 'doc not found in docstore',
              info: { project_id, doc_id },
            })
          )
        }
        return callback(
          new OError(
            `docstore api responded with non-success code: ${res.statusCode}`,
            { project_id, doc_id }
          )
        )
      }
    )
  },

  /**
   * Check whether a doc has been soft-deleted.
   * @param {string} project_id
   * @param {string} doc_id
   * @param {Function} callback - called with (error, deleted)
   */
  isDocDeleted(project_id, doc_id, callback) {
    const url = `${settings.apis.docstore.url}/project/${project_id}/doc/${doc_id}/deleted`
    request.get({ url, timeout: TIMEOUT, json: true }, (err, res, body) => {
      if (err) {
        return callback(err)
      }
      if (res.statusCode === 200) {
        return callback(null, body.deleted)
      }
      if (res.statusCode === 404) {
        return callback(
          new Errors.NotFoundError({
            message: 'doc does not exist in project',
            info: { project_id, doc_id },
          })
        )
      }
      return callback(
        new OError(
          `docstore api responded with non-success code: ${res.statusCode}`,
          { project_id, doc_id }
        )
      )
    })
  },

  /**
   * Write a doc's lines/version/ranges back to the docstore.
   *
   * @param {string} project_id
   * @param {string} doc_id
   * @param {string[]} lines
   * @param {number} version
   * @param {Object} ranges
   * @param {Function} [callback] - called with (error, modified, rev)
   */
  updateDoc(project_id, doc_id, lines, version, ranges, callback) {
    if (callback == null) {
      callback = function (error, modified, rev) {}
    }
    const url = `${settings.apis.docstore.url}/project/${project_id}/doc/${doc_id}`
    return request.post(
      { url, timeout: TIMEOUT, json: { lines, version, ranges } },
      (error, res, result) => {
        if (error != null) {
          return callback(error)
        }
        if (res.statusCode >= 200 && res.statusCode < 300) {
          logger.log(
            { project_id, doc_id },
            'update doc in docstore url finished'
          )
          return callback(null, result.modified, result.rev)
        }
        return callback(
          new OError(
            `docstore api responded with non-success code: ${res.statusCode}`,
            { project_id, doc_id }
          )
        )
      }
    )
  },

  // Move a project's docs to cold storage.
  archiveProject(project_id, callback) {
    DocstoreManager._operateOnProject(project_id, 'archive', callback)
  },

  // Restore a project's docs from cold storage.
  unarchiveProject(project_id, callback) {
    DocstoreManager._operateOnProject(project_id, 'unarchive', callback)
  },

  // Permanently delete a project's docs.
  destroyProject(project_id, callback) {
    DocstoreManager._operateOnProject(project_id, 'destroy', callback)
  },

  /**
   * POST a project-level maintenance action (archive/unarchive/destroy).
   * @param {string} project_id
   * @param {string} method - path segment naming the action
   * @param {Function} callback - called with (error)
   */
  _operateOnProject(project_id, method, callback) {
    const url = `${settings.apis.docstore.url}/project/${project_id}/${method}`
    logger.log({ project_id }, `calling ${method} for project in docstore`)
    // use default timeout for archiving/unarchiving/destroying
    request.post(url, (err, res, docs) => {
      if (err != null) {
        OError.tag(err, `error calling ${method} project in docstore`, {
          project_id,
        })
        return callback(err)
      }
      if (res.statusCode >= 200 && res.statusCode < 300) {
        return callback()
      }
      const error = new Error(
        `docstore api responded with non-success code: ${res.statusCode}`
      )
      logger.warn(
        { err: error, project_id },
        `error calling ${method} project in docstore`
      )
      return callback(error)
    })
  },
}

module.exports = DocstoreManager
// Promise-based API; getDoc/updateDoc resolve to named multi-result objects.
module.exports.promises = promisifyAll(DocstoreManager, {
  multiResult: {
    getDoc: ['lines', 'rev', 'version', 'ranges'],
    updateDoc: ['modified', 'rev'],
  },
})
|
|
@ -0,0 +1,393 @@
|
||||||
|
const request = require('request').defaults({ timeout: 30 * 100 })
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const settings = require('@overleaf/settings')
|
||||||
|
const _ = require('underscore')
|
||||||
|
const async = require('async')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const metrics = require('@overleaf/metrics')
|
||||||
|
const { promisify } = require('util')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
flushProjectToMongo,
|
||||||
|
flushMultipleProjectsToMongo,
|
||||||
|
flushProjectToMongoAndDelete,
|
||||||
|
flushDocToMongo,
|
||||||
|
deleteDoc,
|
||||||
|
getDocument,
|
||||||
|
setDocument,
|
||||||
|
getProjectDocsIfMatch,
|
||||||
|
clearProjectState,
|
||||||
|
acceptChanges,
|
||||||
|
deleteThread,
|
||||||
|
resyncProjectHistory,
|
||||||
|
updateProjectStructure,
|
||||||
|
promises: {
|
||||||
|
flushProjectToMongo: promisify(flushProjectToMongo),
|
||||||
|
flushMultipleProjectsToMongo: promisify(flushMultipleProjectsToMongo),
|
||||||
|
flushProjectToMongoAndDelete: promisify(flushProjectToMongoAndDelete),
|
||||||
|
flushDocToMongo: promisify(flushDocToMongo),
|
||||||
|
deleteDoc: promisify(deleteDoc),
|
||||||
|
getDocument: promisify(getDocument),
|
||||||
|
setDocument: promisify(setDocument),
|
||||||
|
getProjectDocsIfMatch: promisify(getProjectDocsIfMatch),
|
||||||
|
clearProjectState: promisify(clearProjectState),
|
||||||
|
acceptChanges: promisify(acceptChanges),
|
||||||
|
deleteThread: promisify(deleteThread),
|
||||||
|
resyncProjectHistory: promisify(resyncProjectHistory),
|
||||||
|
updateProjectStructure: promisify(updateProjectStructure),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
function flushProjectToMongo(projectId, callback) {
|
||||||
|
_makeRequest(
|
||||||
|
{
|
||||||
|
path: `/project/${projectId}/flush`,
|
||||||
|
method: 'POST',
|
||||||
|
},
|
||||||
|
projectId,
|
||||||
|
'flushing.mongo.project',
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function flushMultipleProjectsToMongo(projectIds, callback) {
|
||||||
|
const jobs = projectIds.map(projectId => callback => {
|
||||||
|
flushProjectToMongo(projectId, callback)
|
||||||
|
})
|
||||||
|
async.series(jobs, callback)
|
||||||
|
}
|
||||||
|
|
||||||
|
function flushProjectToMongoAndDelete(projectId, callback) {
|
||||||
|
_makeRequest(
|
||||||
|
{
|
||||||
|
path: `/project/${projectId}`,
|
||||||
|
method: 'DELETE',
|
||||||
|
},
|
||||||
|
projectId,
|
||||||
|
'flushing.mongo.project',
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function flushDocToMongo(projectId, docId, callback) {
|
||||||
|
_makeRequest(
|
||||||
|
{
|
||||||
|
path: `/project/${projectId}/doc/${docId}/flush`,
|
||||||
|
method: 'POST',
|
||||||
|
},
|
||||||
|
projectId,
|
||||||
|
'flushing.mongo.doc',
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function deleteDoc(projectId, docId, callback) {
|
||||||
|
_makeRequest(
|
||||||
|
{
|
||||||
|
path: `/project/${projectId}/doc/${docId}`,
|
||||||
|
method: 'DELETE',
|
||||||
|
},
|
||||||
|
projectId,
|
||||||
|
'delete.mongo.doc',
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function getDocument(projectId, docId, fromVersion, callback) {
|
||||||
|
_makeRequest(
|
||||||
|
{
|
||||||
|
path: `/project/${projectId}/doc/${docId}?fromVersion=${fromVersion}`,
|
||||||
|
json: true,
|
||||||
|
},
|
||||||
|
projectId,
|
||||||
|
'get-document',
|
||||||
|
function (error, doc) {
|
||||||
|
if (error) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
callback(null, doc.lines, doc.version, doc.ranges, doc.ops)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function setDocument(projectId, docId, userId, docLines, source, callback) {
|
||||||
|
_makeRequest(
|
||||||
|
{
|
||||||
|
path: `/project/${projectId}/doc/${docId}`,
|
||||||
|
method: 'POST',
|
||||||
|
json: {
|
||||||
|
lines: docLines,
|
||||||
|
source,
|
||||||
|
user_id: userId,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
projectId,
|
||||||
|
'set-document',
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function getProjectDocsIfMatch(projectId, projectStateHash, callback) {
|
||||||
|
// If the project state hasn't changed, we can get all the latest
|
||||||
|
// docs from redis via the docupdater. Otherwise we will need to
|
||||||
|
// fall back to getting them from mongo.
|
||||||
|
const timer = new metrics.Timer('get-project-docs')
|
||||||
|
const url = `${settings.apis.documentupdater.url}/project/${projectId}/get_and_flush_if_old?state=${projectStateHash}`
|
||||||
|
request.post(url, function (error, res, body) {
|
||||||
|
timer.done()
|
||||||
|
if (error) {
|
||||||
|
OError.tag(error, 'error getting project docs from doc updater', {
|
||||||
|
url,
|
||||||
|
projectId,
|
||||||
|
})
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
if (res.statusCode === 409) {
|
||||||
|
// HTTP response code "409 Conflict"
|
||||||
|
// Docupdater has checked the projectStateHash and found that
|
||||||
|
// it has changed. This means that the docs currently in redis
|
||||||
|
// aren't the only change to the project and the full set of
|
||||||
|
// docs/files should be retreived from docstore/filestore
|
||||||
|
// instead.
|
||||||
|
callback()
|
||||||
|
} else if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||||
|
let docs
|
||||||
|
try {
|
||||||
|
docs = JSON.parse(body)
|
||||||
|
} catch (error1) {
|
||||||
|
return callback(OError.tag(error1))
|
||||||
|
}
|
||||||
|
callback(null, docs)
|
||||||
|
} else {
|
||||||
|
callback(
|
||||||
|
new OError(
|
||||||
|
`doc updater returned a non-success status code: ${res.statusCode}`,
|
||||||
|
{
|
||||||
|
projectId,
|
||||||
|
url,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function clearProjectState(projectId, callback) {
|
||||||
|
_makeRequest(
|
||||||
|
{
|
||||||
|
path: `/project/${projectId}/clearState`,
|
||||||
|
method: 'POST',
|
||||||
|
},
|
||||||
|
projectId,
|
||||||
|
'clear-project-state',
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function acceptChanges(projectId, docId, changeIds, callback) {
|
||||||
|
_makeRequest(
|
||||||
|
{
|
||||||
|
path: `/project/${projectId}/doc/${docId}/change/accept`,
|
||||||
|
json: { change_ids: changeIds },
|
||||||
|
method: 'POST',
|
||||||
|
},
|
||||||
|
projectId,
|
||||||
|
'accept-changes',
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function deleteThread(projectId, docId, threadId, callback) {
|
||||||
|
_makeRequest(
|
||||||
|
{
|
||||||
|
path: `/project/${projectId}/doc/${docId}/comment/${threadId}`,
|
||||||
|
method: 'DELETE',
|
||||||
|
},
|
||||||
|
projectId,
|
||||||
|
'delete-thread',
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function resyncProjectHistory(
|
||||||
|
projectId,
|
||||||
|
projectHistoryId,
|
||||||
|
docs,
|
||||||
|
files,
|
||||||
|
callback
|
||||||
|
) {
|
||||||
|
_makeRequest(
|
||||||
|
{
|
||||||
|
path: `/project/${projectId}/history/resync`,
|
||||||
|
json: { docs, files, projectHistoryId },
|
||||||
|
method: 'POST',
|
||||||
|
},
|
||||||
|
projectId,
|
||||||
|
'resync-project-history',
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function updateProjectStructure(
|
||||||
|
projectId,
|
||||||
|
projectHistoryId,
|
||||||
|
userId,
|
||||||
|
changes,
|
||||||
|
callback
|
||||||
|
) {
|
||||||
|
if (
|
||||||
|
settings.apis.project_history == null ||
|
||||||
|
!settings.apis.project_history.sendProjectStructureOps
|
||||||
|
) {
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
|
||||||
|
const {
|
||||||
|
deletes: docDeletes,
|
||||||
|
adds: docAdds,
|
||||||
|
renames: docRenames,
|
||||||
|
} = _getUpdates('doc', changes.oldDocs, changes.newDocs)
|
||||||
|
const {
|
||||||
|
deletes: fileDeletes,
|
||||||
|
adds: fileAdds,
|
||||||
|
renames: fileRenames,
|
||||||
|
} = _getUpdates('file', changes.oldFiles, changes.newFiles)
|
||||||
|
const updates = [].concat(
|
||||||
|
docDeletes,
|
||||||
|
fileDeletes,
|
||||||
|
docAdds,
|
||||||
|
fileAdds,
|
||||||
|
docRenames,
|
||||||
|
fileRenames
|
||||||
|
)
|
||||||
|
const projectVersion =
|
||||||
|
changes && changes.newProject && changes.newProject.version
|
||||||
|
|
||||||
|
if (updates.length < 1) {
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
|
||||||
|
if (projectVersion == null) {
|
||||||
|
logger.warn(
|
||||||
|
{ projectId, changes, projectVersion },
|
||||||
|
'did not receive project version in changes'
|
||||||
|
)
|
||||||
|
return callback(new Error('did not receive project version in changes'))
|
||||||
|
}
|
||||||
|
|
||||||
|
_makeRequest(
|
||||||
|
{
|
||||||
|
path: `/project/${projectId}`,
|
||||||
|
json: {
|
||||||
|
updates,
|
||||||
|
userId,
|
||||||
|
version: projectVersion,
|
||||||
|
projectHistoryId,
|
||||||
|
},
|
||||||
|
method: 'POST',
|
||||||
|
},
|
||||||
|
projectId,
|
||||||
|
'update-project-structure',
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function _makeRequest(options, projectId, metricsKey, callback) {
|
||||||
|
const timer = new metrics.Timer(metricsKey)
|
||||||
|
request(
|
||||||
|
{
|
||||||
|
url: `${settings.apis.documentupdater.url}${options.path}`,
|
||||||
|
json: options.json,
|
||||||
|
method: options.method || 'GET',
|
||||||
|
},
|
||||||
|
function (error, res, body) {
|
||||||
|
timer.done()
|
||||||
|
if (error) {
|
||||||
|
logger.warn(
|
||||||
|
{ error, projectId },
|
||||||
|
'error making request to document updater'
|
||||||
|
)
|
||||||
|
callback(error)
|
||||||
|
} else if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||||
|
callback(null, body)
|
||||||
|
} else {
|
||||||
|
error = new Error(
|
||||||
|
`document updater returned a failure status code: ${res.statusCode}`
|
||||||
|
)
|
||||||
|
logger.warn(
|
||||||
|
{ error, projectId },
|
||||||
|
`document updater returned failure status code: ${res.statusCode}`
|
||||||
|
)
|
||||||
|
callback(error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function _getUpdates(entityType, oldEntities, newEntities) {
|
||||||
|
if (!oldEntities) {
|
||||||
|
oldEntities = []
|
||||||
|
}
|
||||||
|
if (!newEntities) {
|
||||||
|
newEntities = []
|
||||||
|
}
|
||||||
|
const deletes = []
|
||||||
|
const adds = []
|
||||||
|
const renames = []
|
||||||
|
|
||||||
|
const oldEntitiesHash = _.indexBy(oldEntities, entity =>
|
||||||
|
entity[entityType]._id.toString()
|
||||||
|
)
|
||||||
|
const newEntitiesHash = _.indexBy(newEntities, entity =>
|
||||||
|
entity[entityType]._id.toString()
|
||||||
|
)
|
||||||
|
|
||||||
|
// Send deletes before adds (and renames) to keep a 1:1 mapping between
|
||||||
|
// paths and ids
|
||||||
|
//
|
||||||
|
// When a file is replaced, we first delete the old file and then add the
|
||||||
|
// new file. If the 'add' operation is sent to project history before the
|
||||||
|
// 'delete' then we would have two files with the same path at that point
|
||||||
|
// in time.
|
||||||
|
for (const id in oldEntitiesHash) {
|
||||||
|
const oldEntity = oldEntitiesHash[id]
|
||||||
|
const newEntity = newEntitiesHash[id]
|
||||||
|
|
||||||
|
if (newEntity == null) {
|
||||||
|
// entity deleted
|
||||||
|
deletes.push({
|
||||||
|
type: `rename-${entityType}`,
|
||||||
|
id,
|
||||||
|
pathname: oldEntity.path,
|
||||||
|
newPathname: '',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const id in newEntitiesHash) {
|
||||||
|
const newEntity = newEntitiesHash[id]
|
||||||
|
const oldEntity = oldEntitiesHash[id]
|
||||||
|
|
||||||
|
if (oldEntity == null) {
|
||||||
|
// entity added
|
||||||
|
adds.push({
|
||||||
|
type: `add-${entityType}`,
|
||||||
|
id,
|
||||||
|
pathname: newEntity.path,
|
||||||
|
docLines: newEntity.docLines,
|
||||||
|
url: newEntity.url,
|
||||||
|
hash: newEntity.file != null ? newEntity.file.hash : undefined,
|
||||||
|
})
|
||||||
|
} else if (newEntity.path !== oldEntity.path) {
|
||||||
|
// entity renamed
|
||||||
|
renames.push({
|
||||||
|
type: `rename-${entityType}`,
|
||||||
|
id,
|
||||||
|
pathname: oldEntity.path,
|
||||||
|
newPathname: newEntity.path,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { deletes, adds, renames }
|
||||||
|
}
|
132
services/web/app/src/Features/Documents/DocumentController.js
Normal file
132
services/web/app/src/Features/Documents/DocumentController.js
Normal file
|
@ -0,0 +1,132 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS103: Rewrite code to no longer use __guard__
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const ProjectLocator = require('../Project/ProjectLocator')
|
||||||
|
const ProjectEntityHandler = require('../Project/ProjectEntityHandler')
|
||||||
|
const ProjectEntityUpdateHandler = require('../Project/ProjectEntityUpdateHandler')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const _ = require('lodash')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getDocument(req, res, next) {
|
||||||
|
if (next == null) {
|
||||||
|
next = function (error) {}
|
||||||
|
}
|
||||||
|
const project_id = req.params.Project_id
|
||||||
|
const { doc_id } = req.params
|
||||||
|
const plain =
|
||||||
|
__guard__(req != null ? req.query : undefined, x => x.plain) === 'true'
|
||||||
|
return ProjectGetter.getProject(
|
||||||
|
project_id,
|
||||||
|
{ rootFolder: true, overleaf: true },
|
||||||
|
function (error, project) {
|
||||||
|
if (error != null) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
if (project == null) {
|
||||||
|
return res.sendStatus(404)
|
||||||
|
}
|
||||||
|
return ProjectLocator.findElement(
|
||||||
|
{ project, element_id: doc_id, type: 'doc' },
|
||||||
|
function (error, doc, path) {
|
||||||
|
if (error != null) {
|
||||||
|
OError.tag(error, 'error finding element for getDocument', {
|
||||||
|
doc_id,
|
||||||
|
project_id,
|
||||||
|
})
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
return ProjectEntityHandler.getDoc(
|
||||||
|
project_id,
|
||||||
|
doc_id,
|
||||||
|
function (error, lines, rev, version, ranges) {
|
||||||
|
if (error != null) {
|
||||||
|
OError.tag(
|
||||||
|
error,
|
||||||
|
'error finding doc contents for getDocument',
|
||||||
|
{
|
||||||
|
doc_id,
|
||||||
|
project_id,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
if (plain) {
|
||||||
|
res.type('text/plain')
|
||||||
|
return res.send(lines.join('\n'))
|
||||||
|
} else {
|
||||||
|
const projectHistoryId = _.get(project, 'overleaf.history.id')
|
||||||
|
const projectHistoryType = _.get(
|
||||||
|
project,
|
||||||
|
'overleaf.history.display'
|
||||||
|
)
|
||||||
|
? 'project-history'
|
||||||
|
: undefined // for backwards compatibility, don't send anything if the project is still on track-changes
|
||||||
|
return res.json({
|
||||||
|
lines,
|
||||||
|
version,
|
||||||
|
ranges,
|
||||||
|
pathname: path.fileSystem,
|
||||||
|
projectHistoryId,
|
||||||
|
projectHistoryType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
setDocument(req, res, next) {
|
||||||
|
if (next == null) {
|
||||||
|
next = function (error) {}
|
||||||
|
}
|
||||||
|
const project_id = req.params.Project_id
|
||||||
|
const { doc_id } = req.params
|
||||||
|
const { lines, version, ranges, lastUpdatedAt, lastUpdatedBy } = req.body
|
||||||
|
return ProjectEntityUpdateHandler.updateDocLines(
|
||||||
|
project_id,
|
||||||
|
doc_id,
|
||||||
|
lines,
|
||||||
|
version,
|
||||||
|
ranges,
|
||||||
|
lastUpdatedAt,
|
||||||
|
lastUpdatedBy,
|
||||||
|
function (error) {
|
||||||
|
if (error != null) {
|
||||||
|
OError.tag(error, 'error finding element for getDocument', {
|
||||||
|
doc_id,
|
||||||
|
project_id,
|
||||||
|
})
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ doc_id, project_id },
|
||||||
|
'finished receiving set document request from api (docupdater)'
|
||||||
|
)
|
||||||
|
return res.sendStatus(200)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply `transform` to `value` only when it is neither undefined nor null;
// otherwise yield undefined. (decaffeinate-generated null-safe accessor.)
function __guard__(value, transform) {
  if (typeof value === 'undefined' || value === null) {
    return undefined
  }
  return transform(value)
}
|
78
services/web/app/src/Features/Documents/DocumentHelper.js
Normal file
78
services/web/app/src/Features/Documents/DocumentHelper.js
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
/* eslint-disable
    max-len,
*/
// Helpers for inspecting LaTeX source: extracting a document title and
// detecting a \documentclass declaration. Only a bounded prefix of the
// content is ever scanned, to keep CPU/memory use predictable on huge files.

// Default number of characters scanned when `content` is a string.
const DEFAULT_MAX_CONTENT_TO_SCAN = 30000

// \title{...} or \Title*{...} — capture group 1 is the title text.
const TITLE_WITH_CURLY_BRACES = /\\[tT]itle\*?\s*{([^}]+)}/
// \title[...] — short-title variant; capture group 1 is the title text.
const TITLE_WITH_SQUARE_BRACES = /\\[tT]itle\s*\[([^\]]+)\]/

const DocumentHelper = {
  /**
   * Find the first \title{...} (or \title[...]) in the document and return
   * its content with common TeX markup stripped.
   *
   * @param {string|string[]} content - raw TeX source, or pre-split lines
   * @param {number} [maxContentToScan=30000] - max chars scanned when
   *   `content` is a string (ignored for arrays)
   * @returns {string|null} the de-TeXed title, or null if none was found
   */
  getTitleFromTexContent(content, maxContentToScan) {
    if (maxContentToScan == null) {
      maxContentToScan = DEFAULT_MAX_CONTENT_TO_SCAN
    }
    for (const line of DocumentHelper._getLinesFromContent(
      content,
      maxContentToScan
    )) {
      const match =
        line.match(TITLE_WITH_CURLY_BRACES) ||
        line.match(TITLE_WITH_SQUARE_BRACES)
      if (match) {
        return DocumentHelper.detex(match[1])
      }
    }
    return null
  },

  /**
   * Return true if the scanned prefix of the content contains a
   * \documentclass declaration at the start of a line.
   *
   * @param {string|string[]} content - raw TeX source, or pre-split lines
   * @param {number} [maxContentToScan=30000] - max chars scanned when
   *   `content` is a string
   * @returns {boolean}
   */
  contentHasDocumentclass(content, maxContentToScan) {
    if (maxContentToScan == null) {
      maxContentToScan = DEFAULT_MAX_CONTENT_TO_SCAN
    }
    for (const line of DocumentHelper._getLinesFromContent(
      content,
      maxContentToScan
    )) {
      // We've had problems with this regex locking up CPU.
      // Previously /.*\\documentclass/ would totally lock up on lines of 500kb (data text files :()
      // This regex will only look from the start of the line, including whitespace so will return quickly
      // regardless of line length.
      if (line.match(/^\s*\\documentclass/)) {
        return true
      }
    }
    return false
  },

  /**
   * Strip common TeX markup from a string, producing plain text suitable
   * for display (e.g. as a project name).
   *
   * @param {string} string
   * @returns {string}
   */
  detex(string) {
    return string
      .replace(/\\LaTeX/g, 'LaTeX')
      .replace(/\\TeX/g, 'TeX')
      .replace(/\\TikZ/g, 'TikZ')
      .replace(/\\BibTeX/g, 'BibTeX')
      .replace(/\\\[[A-Za-z0-9. ]*\]/g, ' ') // line spacing
      .replace(/\\(?:[a-zA-Z]+|.|)/g, '') // TeX control sequences
      .replace(/{}|~/g, ' ')
      .replace(/[${}]/g, '')
      .replace(/ +/g, ' ')
      .trim()
  },

  /**
   * Normalize `content` into an iterable of lines. Strings are truncated to
   * `maxContentToScan` chars and split; arrays are returned unchanged.
   *
   * @param {string|string[]} content
   * @param {number} maxContentToScan
   * @returns {string[]}
   */
  _getLinesFromContent(content, maxContentToScan) {
    if (typeof content === 'string') {
      return content.substring(0, maxContentToScan).split('\n')
    } else {
      return content
    }
  },
}

// Guard keeps this block loadable in ESM contexts; in Node CJS (the only
// runtime this file targets) `module` is always defined and the export is
// identical to before.
if (typeof module !== 'undefined' && module.exports !== undefined) {
  module.exports = DocumentHelper
}
|
|
@ -0,0 +1,82 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let ProjectDownloadsController
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const Metrics = require('@overleaf/metrics')
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const ProjectZipStreamManager = require('./ProjectZipStreamManager')
|
||||||
|
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
|
||||||
|
|
||||||
|
module.exports = ProjectDownloadsController = {
|
||||||
|
downloadProject(req, res, next) {
|
||||||
|
const project_id = req.params.Project_id
|
||||||
|
Metrics.inc('zip-downloads')
|
||||||
|
return DocumentUpdaterHandler.flushProjectToMongo(
|
||||||
|
project_id,
|
||||||
|
function (error) {
|
||||||
|
if (error != null) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
return ProjectGetter.getProject(
|
||||||
|
project_id,
|
||||||
|
{ name: true },
|
||||||
|
function (error, project) {
|
||||||
|
if (error != null) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
return ProjectZipStreamManager.createZipStreamForProject(
|
||||||
|
project_id,
|
||||||
|
function (error, stream) {
|
||||||
|
if (error != null) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
res.setContentDisposition('attachment', {
|
||||||
|
filename: `${project.name}.zip`,
|
||||||
|
})
|
||||||
|
res.contentType('application/zip')
|
||||||
|
return stream.pipe(res)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
downloadMultipleProjects(req, res, next) {
|
||||||
|
const project_ids = req.query.project_ids.split(',')
|
||||||
|
Metrics.inc('zip-downloads-multiple')
|
||||||
|
return DocumentUpdaterHandler.flushMultipleProjectsToMongo(
|
||||||
|
project_ids,
|
||||||
|
function (error) {
|
||||||
|
if (error != null) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
return ProjectZipStreamManager.createZipStreamForMultipleProjects(
|
||||||
|
project_ids,
|
||||||
|
function (error, stream) {
|
||||||
|
if (error != null) {
|
||||||
|
return next(error)
|
||||||
|
}
|
||||||
|
res.setContentDisposition('attachment', {
|
||||||
|
filename: `Overleaf Projects (${project_ids.length} items).zip`,
|
||||||
|
})
|
||||||
|
res.contentType('application/zip')
|
||||||
|
return stream.pipe(res)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
|
@ -0,0 +1,182 @@
|
||||||
|
/* eslint-disable
    camelcase,
    node/handle-callback-err,
    max-len,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// Builds zip archive streams of project contents (docs from Mongo, binary
// files from the file store). NOTE the streaming contract used throughout:
// each createZipStream* method invokes its callback with the archive stream
// BEFORE appending entries, so the caller can start piping immediately;
// entries are then appended asynchronously and the archive is finalized last.
let ProjectZipStreamManager
const archiver = require('archiver')
const async = require('async')
const logger = require('logger-sharelatex')
const ProjectEntityHandler = require('../Project/ProjectEntityHandler')
const ProjectGetter = require('../Project/ProjectGetter')
const FileStoreHandler = require('../FileStore/FileStoreHandler')

module.exports = ProjectZipStreamManager = {
  // Create a zip stream containing one nested `<name>.zip` per project id.
  // The stream is handed to `callback` immediately (see module note above).
  createZipStreamForMultipleProjects(project_ids, callback) {
    // We'll build up a zip file that contains multiple zip files

    if (callback == null) {
      callback = function (error, stream) {}
    }
    const archive = archiver('zip')
    // Archive errors arrive after the callback has fired, so they can only
    // be logged here — they are not propagated to the caller.
    archive.on('error', err =>
      logger.err(
        { err, project_ids },
        'something went wrong building archive of project'
      )
    )
    callback(null, archive)

    const jobs = []
    for (const project_id of Array.from(project_ids || [])) {
      // IIFE capture of project_id is a decaffeinate artifact (redundant
      // with `const`, but harmless); kept as-is.
      ;(project_id =>
        jobs.push(callback =>
          ProjectGetter.getProject(
            project_id,
            { name: true },
            function (error, project) {
              if (error != null) {
                return callback(error)
              }
              logger.log(
                { project_id, name: project.name },
                'appending project to zip stream'
              )
              return ProjectZipStreamManager.createZipStreamForProject(
                project_id,
                function (error, stream) {
                  if (error != null) {
                    return callback(error)
                  }
                  archive.append(stream, { name: `${project.name}.zip` })
                  // Wait for the nested stream to finish before moving on to
                  // the next project (jobs run via async.series below).
                  return stream.on('end', function () {
                    logger.log(
                      { project_id, name: project.name },
                      'zip stream ended'
                    )
                    return callback()
                  })
                }
              )
            }
          )
        ))(project_id)
    }

    // Projects are appended one at a time; finalize once all are in.
    return async.series(jobs, function () {
      logger.log(
        { project_ids },
        'finished creating zip stream of multiple projects'
      )
      return archive.finalize()
    })
  },

  // Create a zip stream of a single project's docs and files. The stream is
  // handed to `callback` immediately; errors while appending are logged and
  // the archive is finalized regardless.
  createZipStreamForProject(project_id, callback) {
    if (callback == null) {
      callback = function (error, stream) {}
    }
    const archive = archiver('zip')
    // return stream immediately before we start adding things to it
    archive.on('error', err =>
      logger.err(
        { err, project_id },
        'something went wrong building archive of project'
      )
    )
    callback(null, archive)
    return this.addAllDocsToArchive(project_id, archive, error => {
      if (error != null) {
        logger.error(
          { err: error, project_id },
          'error adding docs to zip stream'
        )
      }
      return this.addAllFilesToArchive(project_id, archive, error => {
        if (error != null) {
          logger.error(
            { err: error, project_id },
            'error adding files to zip stream'
          )
        }
        return archive.finalize()
      })
    })
  },

  // Append every doc in the project to `archive`, one entry per doc path
  // (leading '/' stripped), with the doc lines joined by newlines.
  addAllDocsToArchive(project_id, archive, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return ProjectEntityHandler.getAllDocs(project_id, function (error, docs) {
      if (error != null) {
        return callback(error)
      }
      const jobs = []
      for (const path in docs) {
        const doc = docs[path]
        // IIFE capture of loop variables (decaffeinate artifact); also lets
        // the job mutate its own copy of `path`.
        ;(function (path, doc) {
          if (path[0] === '/') {
            path = path.slice(1)
          }
          return jobs.push(function (callback) {
            logger.log({ project_id }, 'Adding doc')
            archive.append(doc.lines.join('\n'), { name: path })
            return callback()
          })
        })(path, doc)
      }
      return async.series(jobs, callback)
    })
  },

  // Append every binary file in the project to `archive`, streaming each
  // from the file store. Up to 5 file streams are fetched concurrently.
  addAllFilesToArchive(project_id, archive, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return ProjectEntityHandler.getAllFiles(
      project_id,
      function (error, files) {
        if (error != null) {
          return callback(error)
        }
        const jobs = []
        for (const path in files) {
          const file = files[path]
          ;((path, file) =>
            jobs.push(callback =>
              FileStoreHandler.getFileStream(
                project_id,
                file._id,
                {},
                function (error, stream) {
                  if (error != null) {
                    logger.warn(
                      { err: error, project_id, file_id: file._id },
                      'something went wrong adding file to zip archive'
                    )
                    return callback(error)
                  }
                  if (path[0] === '/') {
                    path = path.slice(1)
                  }
                  archive.append(stream, { name: path })
                  // Only signal completion once the file stream has drained.
                  return stream.on('end', () => callback())
                }
              )
            ))(path, file)
        }
        return async.parallelLimit(jobs, 5, callback)
      }
    )
  },
}
|
727
services/web/app/src/Features/Editor/EditorController.js
Normal file
727
services/web/app/src/Features/Editor/EditorController.js
Normal file
|
@ -0,0 +1,727 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
no-dupe-keys,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const Metrics = require('@overleaf/metrics')
|
||||||
|
const ProjectEntityUpdateHandler = require('../Project/ProjectEntityUpdateHandler')
|
||||||
|
const ProjectOptionsHandler = require('../Project/ProjectOptionsHandler')
|
||||||
|
const ProjectDetailsHandler = require('../Project/ProjectDetailsHandler')
|
||||||
|
const ProjectDeleter = require('../Project/ProjectDeleter')
|
||||||
|
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
|
||||||
|
const EditorRealTimeController = require('./EditorRealTimeController')
|
||||||
|
const async = require('async')
|
||||||
|
const PublicAccessLevels = require('../Authorization/PublicAccessLevels')
|
||||||
|
const _ = require('underscore')
|
||||||
|
const { promisifyAll } = require('../../util/promises')
|
||||||
|
|
||||||
|
const EditorController = {
|
||||||
|
addDoc(project_id, folder_id, docName, docLines, source, user_id, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, doc) {}
|
||||||
|
}
|
||||||
|
return EditorController.addDocWithRanges(
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
docName,
|
||||||
|
docLines,
|
||||||
|
{},
|
||||||
|
source,
|
||||||
|
user_id,
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
addDocWithRanges(
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
docName,
|
||||||
|
docLines,
|
||||||
|
docRanges,
|
||||||
|
source,
|
||||||
|
user_id,
|
||||||
|
callback
|
||||||
|
) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, doc) {}
|
||||||
|
}
|
||||||
|
docName = docName.trim()
|
||||||
|
Metrics.inc('editor.add-doc')
|
||||||
|
return ProjectEntityUpdateHandler.addDocWithRanges(
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
docName,
|
||||||
|
docLines,
|
||||||
|
docRanges,
|
||||||
|
user_id,
|
||||||
|
(err, doc, folder_id) => {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error adding doc without lock', {
|
||||||
|
project_id,
|
||||||
|
docName,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'reciveNewDoc',
|
||||||
|
folder_id,
|
||||||
|
doc,
|
||||||
|
source,
|
||||||
|
user_id
|
||||||
|
)
|
||||||
|
return callback(err, doc)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
addFile(
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
fileName,
|
||||||
|
fsPath,
|
||||||
|
linkedFileData,
|
||||||
|
source,
|
||||||
|
user_id,
|
||||||
|
callback
|
||||||
|
) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, file) {}
|
||||||
|
}
|
||||||
|
fileName = fileName.trim()
|
||||||
|
Metrics.inc('editor.add-file')
|
||||||
|
return ProjectEntityUpdateHandler.addFile(
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
fileName,
|
||||||
|
fsPath,
|
||||||
|
linkedFileData,
|
||||||
|
user_id,
|
||||||
|
(err, fileRef, folder_id) => {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error adding file without lock', {
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
fileName,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'reciveNewFile',
|
||||||
|
folder_id,
|
||||||
|
fileRef,
|
||||||
|
source,
|
||||||
|
linkedFileData,
|
||||||
|
user_id
|
||||||
|
)
|
||||||
|
return callback(err, fileRef)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
upsertDoc(
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
docName,
|
||||||
|
docLines,
|
||||||
|
source,
|
||||||
|
user_id,
|
||||||
|
callback
|
||||||
|
) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
return ProjectEntityUpdateHandler.upsertDoc(
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
docName,
|
||||||
|
docLines,
|
||||||
|
source,
|
||||||
|
user_id,
|
||||||
|
function (err, doc, didAddNewDoc) {
|
||||||
|
if (didAddNewDoc) {
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'reciveNewDoc',
|
||||||
|
folder_id,
|
||||||
|
doc,
|
||||||
|
source,
|
||||||
|
user_id
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return callback(err, doc)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
upsertFile(
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
fileName,
|
||||||
|
fsPath,
|
||||||
|
linkedFileData,
|
||||||
|
source,
|
||||||
|
user_id,
|
||||||
|
callback
|
||||||
|
) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, file) {}
|
||||||
|
}
|
||||||
|
return ProjectEntityUpdateHandler.upsertFile(
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
fileName,
|
||||||
|
fsPath,
|
||||||
|
linkedFileData,
|
||||||
|
user_id,
|
||||||
|
function (err, newFile, didAddFile, existingFile) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (!didAddFile) {
|
||||||
|
// replacement, so remove the existing file from the client
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'removeEntity',
|
||||||
|
existingFile._id,
|
||||||
|
source
|
||||||
|
)
|
||||||
|
}
|
||||||
|
// now add the new file on the client
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'reciveNewFile',
|
||||||
|
folder_id,
|
||||||
|
newFile,
|
||||||
|
source,
|
||||||
|
linkedFileData,
|
||||||
|
user_id
|
||||||
|
)
|
||||||
|
return callback(null, newFile)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
upsertDocWithPath(
|
||||||
|
project_id,
|
||||||
|
elementPath,
|
||||||
|
docLines,
|
||||||
|
source,
|
||||||
|
user_id,
|
||||||
|
callback
|
||||||
|
) {
|
||||||
|
return ProjectEntityUpdateHandler.upsertDocWithPath(
|
||||||
|
project_id,
|
||||||
|
elementPath,
|
||||||
|
docLines,
|
||||||
|
source,
|
||||||
|
user_id,
|
||||||
|
function (err, doc, didAddNewDoc, newFolders, lastFolder) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
return EditorController._notifyProjectUsersOfNewFolders(
|
||||||
|
project_id,
|
||||||
|
newFolders,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (didAddNewDoc) {
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'reciveNewDoc',
|
||||||
|
lastFolder._id,
|
||||||
|
doc,
|
||||||
|
source,
|
||||||
|
user_id
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
upsertFileWithPath(
|
||||||
|
project_id,
|
||||||
|
elementPath,
|
||||||
|
fsPath,
|
||||||
|
linkedFileData,
|
||||||
|
source,
|
||||||
|
user_id,
|
||||||
|
callback
|
||||||
|
) {
|
||||||
|
return ProjectEntityUpdateHandler.upsertFileWithPath(
|
||||||
|
project_id,
|
||||||
|
elementPath,
|
||||||
|
fsPath,
|
||||||
|
linkedFileData,
|
||||||
|
user_id,
|
||||||
|
function (
|
||||||
|
err,
|
||||||
|
newFile,
|
||||||
|
didAddFile,
|
||||||
|
existingFile,
|
||||||
|
newFolders,
|
||||||
|
lastFolder
|
||||||
|
) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
return EditorController._notifyProjectUsersOfNewFolders(
|
||||||
|
project_id,
|
||||||
|
newFolders,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (!didAddFile) {
|
||||||
|
// replacement, so remove the existing file from the client
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'removeEntity',
|
||||||
|
existingFile._id,
|
||||||
|
source
|
||||||
|
)
|
||||||
|
}
|
||||||
|
// now add the new file on the client
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'reciveNewFile',
|
||||||
|
lastFolder._id,
|
||||||
|
newFile,
|
||||||
|
source,
|
||||||
|
linkedFileData,
|
||||||
|
user_id
|
||||||
|
)
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
addFolder(project_id, folder_id, folderName, source, userId, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, folder) {}
|
||||||
|
}
|
||||||
|
folderName = folderName.trim()
|
||||||
|
Metrics.inc('editor.add-folder')
|
||||||
|
return ProjectEntityUpdateHandler.addFolder(
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
folderName,
|
||||||
|
(err, folder, folder_id) => {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'could not add folder', {
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
folderName,
|
||||||
|
source,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
return EditorController._notifyProjectUsersOfNewFolder(
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
folder,
|
||||||
|
userId,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
return callback(null, folder)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
mkdirp(project_id, path, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, newFolders, lastFolder) {}
|
||||||
|
}
|
||||||
|
logger.log({ project_id, path }, "making directories if they don't exist")
|
||||||
|
return ProjectEntityUpdateHandler.mkdirp(
|
||||||
|
project_id,
|
||||||
|
path,
|
||||||
|
(err, newFolders, lastFolder) => {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'could not mkdirp', {
|
||||||
|
project_id,
|
||||||
|
path,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return EditorController._notifyProjectUsersOfNewFolders(
|
||||||
|
project_id,
|
||||||
|
newFolders,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
return callback(null, newFolders, lastFolder)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
deleteEntity(project_id, entity_id, entityType, source, userId, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error) {}
|
||||||
|
}
|
||||||
|
Metrics.inc('editor.delete-entity')
|
||||||
|
return ProjectEntityUpdateHandler.deleteEntity(
|
||||||
|
project_id,
|
||||||
|
entity_id,
|
||||||
|
entityType,
|
||||||
|
userId,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'could not delete entity', {
|
||||||
|
project_id,
|
||||||
|
entity_id,
|
||||||
|
entityType,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ project_id, entity_id, entityType },
|
||||||
|
'telling users entity has been deleted'
|
||||||
|
)
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'removeEntity',
|
||||||
|
entity_id,
|
||||||
|
source
|
||||||
|
)
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
deleteEntityWithPath(project_id, path, source, user_id, callback) {
|
||||||
|
return ProjectEntityUpdateHandler.deleteEntityWithPath(
|
||||||
|
project_id,
|
||||||
|
path,
|
||||||
|
user_id,
|
||||||
|
function (err, entity_id) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'removeEntity',
|
||||||
|
entity_id,
|
||||||
|
source
|
||||||
|
)
|
||||||
|
return callback(null, entity_id)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
updateProjectDescription(project_id, description, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function () {}
|
||||||
|
}
|
||||||
|
logger.log({ project_id, description }, 'updating project description')
|
||||||
|
return ProjectDetailsHandler.setProjectDescription(
|
||||||
|
project_id,
|
||||||
|
description,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(
|
||||||
|
err,
|
||||||
|
'something went wrong setting the project description',
|
||||||
|
{
|
||||||
|
project_id,
|
||||||
|
description,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'projectDescriptionUpdated',
|
||||||
|
description
|
||||||
|
)
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
deleteProject(project_id, callback) {
|
||||||
|
Metrics.inc('editor.delete-project')
|
||||||
|
return ProjectDeleter.deleteProject(project_id, callback)
|
||||||
|
},
|
||||||
|
|
||||||
|
renameEntity(project_id, entity_id, entityType, newName, userId, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error) {}
|
||||||
|
}
|
||||||
|
Metrics.inc('editor.rename-entity')
|
||||||
|
return ProjectEntityUpdateHandler.renameEntity(
|
||||||
|
project_id,
|
||||||
|
entity_id,
|
||||||
|
entityType,
|
||||||
|
newName,
|
||||||
|
userId,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error renaming entity', {
|
||||||
|
project_id,
|
||||||
|
entity_id,
|
||||||
|
entityType,
|
||||||
|
newName,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (newName.length > 0) {
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'reciveEntityRename',
|
||||||
|
entity_id,
|
||||||
|
newName
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
moveEntity(project_id, entity_id, folder_id, entityType, userId, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error) {}
|
||||||
|
}
|
||||||
|
Metrics.inc('editor.move-entity')
|
||||||
|
return ProjectEntityUpdateHandler.moveEntity(
|
||||||
|
project_id,
|
||||||
|
entity_id,
|
||||||
|
folder_id,
|
||||||
|
entityType,
|
||||||
|
userId,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error moving entity', {
|
||||||
|
project_id,
|
||||||
|
entity_id,
|
||||||
|
folder_id,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'reciveEntityMove',
|
||||||
|
entity_id,
|
||||||
|
folder_id
|
||||||
|
)
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
renameProject(project_id, newName, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
return ProjectDetailsHandler.renameProject(
|
||||||
|
project_id,
|
||||||
|
newName,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error renaming project', {
|
||||||
|
project_id,
|
||||||
|
newName,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'projectNameUpdated',
|
||||||
|
newName
|
||||||
|
)
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
setCompiler(project_id, compiler, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
return ProjectOptionsHandler.setCompiler(
|
||||||
|
project_id,
|
||||||
|
compiler,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'compilerUpdated',
|
||||||
|
compiler
|
||||||
|
)
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
setImageName(project_id, imageName, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
return ProjectOptionsHandler.setImageName(
|
||||||
|
project_id,
|
||||||
|
imageName,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'imageNameUpdated',
|
||||||
|
imageName
|
||||||
|
)
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
setSpellCheckLanguage(project_id, languageCode, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
return ProjectOptionsHandler.setSpellCheckLanguage(
|
||||||
|
project_id,
|
||||||
|
languageCode,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'spellCheckLanguageUpdated',
|
||||||
|
languageCode
|
||||||
|
)
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
setPublicAccessLevel(project_id, newAccessLevel, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
return ProjectDetailsHandler.setPublicAccessLevel(
|
||||||
|
project_id,
|
||||||
|
newAccessLevel,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'project:publicAccessLevel:changed',
|
||||||
|
{ newAccessLevel }
|
||||||
|
)
|
||||||
|
if (newAccessLevel === PublicAccessLevels.TOKEN_BASED) {
|
||||||
|
return ProjectDetailsHandler.ensureTokensArePresent(
|
||||||
|
project_id,
|
||||||
|
function (err, tokens) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'project:tokens:changed',
|
||||||
|
{ tokens }
|
||||||
|
)
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
setRootDoc(project_id, newRootDocID, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err) {}
|
||||||
|
}
|
||||||
|
return ProjectEntityUpdateHandler.setRootDoc(
|
||||||
|
project_id,
|
||||||
|
newRootDocID,
|
||||||
|
function (err) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'rootDocUpdated',
|
||||||
|
newRootDocID
|
||||||
|
)
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
// Notify clients about a batch of newly created folders, one at a time and
// in order (async.eachSeries), so events arrive in creation order.
_notifyProjectUsersOfNewFolders(project_id, folders, callback) {
  if (callback == null) {
    callback = function (error) {}
  }
  return async.eachSeries(
    folders,
    (folder, cb) =>
      EditorController._notifyProjectUsersOfNewFolder(
        project_id,
        folder.parentFolder_id,
        folder,
        null, // no originating user for bulk-created folders
        cb
      ),
    callback
  )
},
|
||||||
|
|
||||||
|
_notifyProjectUsersOfNewFolder(
|
||||||
|
project_id,
|
||||||
|
folder_id,
|
||||||
|
folder,
|
||||||
|
userId,
|
||||||
|
callback
|
||||||
|
) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error) {}
|
||||||
|
}
|
||||||
|
EditorRealTimeController.emitToRoom(
|
||||||
|
project_id,
|
||||||
|
'reciveNewFolder',
|
||||||
|
folder_id,
|
||||||
|
folder,
|
||||||
|
userId
|
||||||
|
)
|
||||||
|
return callback()
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
EditorController.promises = promisifyAll(EditorController)
|
||||||
|
module.exports = EditorController
|
304
services/web/app/src/Features/Editor/EditorHttpController.js
Normal file
304
services/web/app/src/Features/Editor/EditorHttpController.js
Normal file
|
@ -0,0 +1,304 @@
|
||||||
|
const ProjectDeleter = require('../Project/ProjectDeleter')
|
||||||
|
const EditorController = require('./EditorController')
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const AuthorizationManager = require('../Authorization/AuthorizationManager')
|
||||||
|
const ProjectEditorHandler = require('../Project/ProjectEditorHandler')
|
||||||
|
const Metrics = require('@overleaf/metrics')
|
||||||
|
const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter')
|
||||||
|
const CollaboratorsInviteHandler = require('../Collaborators/CollaboratorsInviteHandler')
|
||||||
|
const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler')
|
||||||
|
const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
|
||||||
|
const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler')
|
||||||
|
const SessionManager = require('../Authentication/SessionManager')
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
const HttpErrorHandler = require('../Errors/HttpErrorHandler')
|
||||||
|
const ProjectEntityUpdateHandler = require('../Project/ProjectEntityUpdateHandler')
|
||||||
|
const DocstoreManager = require('../Docstore/DocstoreManager')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const { expressify } = require('../../util/promises')
|
||||||
|
|
||||||
|
// HTTP-facing entry points. Async handlers are wrapped with expressify so
// promise rejections are forwarded to the Express error handler.
module.exports = {
  joinProject: expressify(joinProject),
  addDoc: expressify(addDoc),
  addFolder: expressify(addFolder),
  renameEntity: expressify(renameEntity),
  moveEntity: expressify(moveEntity),
  deleteDoc: expressify(deleteDoc),
  deleteFile: expressify(deleteFile),
  deleteFolder: expressify(deleteFolder),
  deleteEntity: expressify(deleteEntity),
  convertDocToFile: expressify(convertDocToFile),
  // exported for unit tests
  _nameIsAcceptableLength,
}
|
||||||
|
|
||||||
|
// Language codes that are currently unsupported by the spell checker.
// joinProject blanks the project's spellCheckLanguage at load time when it
// matches one of these, while leaving the stored value in the db untouched.
const unsupportedSpellcheckLanguages = [
  'am',
  'hy',
  'bn',
  'gu',
  'he',
  'hi',
  'hu',
  'is',
  'kn',
  'ml',
  'mr',
  'or',
  'ss',
  'ta',
  'te',
  'uk',
  'uz',
  'zu',
  'fi',
]
|
||||||
|
|
||||||
|
// Entry point used by the real-time service when a user joins a project.
// Responds with the project model view, the user's privilege level and the
// restricted-user flag, after applying visibility restrictions.
async function joinProject(req, res, next) {
  const projectId = req.params.Project_id
  let userId = req.query.user_id
  if (userId === 'anonymous-user') {
    // anonymous access is represented internally as a null user id
    userId = null
  }
  Metrics.inc('editor.join-project')
  const {
    project,
    privilegeLevel,
    isRestrictedUser,
  } = await _buildJoinProjectView(req, projectId, userId)
  if (!project) {
    // user has no access to this project at all
    return res.sendStatus(403)
  }
  // Hide access tokens if this is not the project owner
  TokenAccessHandler.protectTokens(project, privilegeLevel)
  // Hide sensitive data if the user is restricted
  if (isRestrictedUser) {
    project.owner = { _id: project.owner._id }
    project.members = []
  }
  // Only show the 'renamed or deleted' message once
  if (project.deletedByExternalDataSource) {
    await ProjectDeleter.promises.unmarkAsDeletedByExternalSource(projectId)
  }
  // disable spellchecking for currently unsupported spell check languages
  // preserve the value in the db so they can use it again once we add back
  // support.
  if (
    unsupportedSpellcheckLanguages.indexOf(project.spellCheckLanguage) !== -1
  ) {
    project.spellCheckLanguage = ''
  }
  res.json({
    project,
    privilegeLevel,
    isRestrictedUser,
  })
}
|
||||||
|
|
||||||
|
// Load the project and compute the calling user's privilege level for it.
// Returns { project: null, privilegeLevel: null, isRestrictedUser: false }
// when the user has no access; throws NotFoundError when the project does
// not exist.
async function _buildJoinProjectView(req, projectId, userId) {
  const project = await ProjectGetter.promises.getProjectWithoutDocLines(
    projectId
  )
  if (project == null) {
    throw new Errors.NotFoundError('project not found')
  }
  let deletedDocsFromDocstore = []
  try {
    deletedDocsFromDocstore = await DocstoreManager.promises.getAllDeletedDocs(
      projectId
    )
  } catch (err) {
    // The query in docstore is not optimized at this time and fails for
    // projects with many very large, deleted documents.
    // Not serving the user with deletedDocs from docstore may cause a minor
    // UI issue with deleted files that are no longer available for restore.
    logger.warn(
      { err, projectId },
      'soft-failure when fetching deletedDocs from docstore'
    )
  }
  const members = await CollaboratorsGetter.promises.getInvitedMembersWithPrivilegeLevels(
    projectId
  )
  // A token in the request may grant read/read-write access on its own.
  const token = TokenAccessHandler.getRequestToken(req, projectId)
  const privilegeLevel = await AuthorizationManager.promises.getPrivilegeLevelForProject(
    userId,
    projectId,
    token
  )
  if (privilegeLevel == null || privilegeLevel === PrivilegeLevels.NONE) {
    // no access: signal to the caller with a null project
    return { project: null, privilegeLevel: null, isRestrictedUser: false }
  }
  const invites = await CollaboratorsInviteHandler.promises.getAllInvites(
    projectId
  )
  const isTokenMember = await CollaboratorsHandler.promises.userIsTokenMember(
    userId,
    projectId
  )
  const isRestrictedUser = AuthorizationManager.isRestrictedUser(
    userId,
    privilegeLevel,
    isTokenMember
  )
  return {
    project: ProjectEditorHandler.buildProjectModelView(
      project,
      members,
      invites,
      deletedDocsFromDocstore
    ),
    privilegeLevel,
    isRestrictedUser,
  }
}
|
||||||
|
|
||||||
|
// A doc/file/folder name is acceptable when it is present, non-empty and
// shorter than 150 characters.
function _nameIsAcceptableLength(name) {
  if (name == null) {
    return false
  }
  return name.length > 0 && name.length < 150
}
|
||||||
|
|
||||||
|
// Create a new (empty) doc in the project on behalf of the logged-in user.
// Responds with the created doc, or 400 for a bad name / too many files.
async function addDoc(req, res, next) {
  const { Project_id: projectId } = req.params
  const { name, parent_folder_id: parentFolderId } = req.body
  const userId = SessionManager.getLoggedInUserId(req.session)

  if (!_nameIsAcceptableLength(name)) {
    return res.sendStatus(400)
  }
  try {
    const doc = await EditorController.promises.addDoc(
      projectId,
      parentFolderId,
      name,
      [],
      'editor',
      userId
    )
    res.json(doc)
  } catch (err) {
    if (err.message !== 'project_has_too_many_files') {
      return next(err)
    }
    res.status(400).json(req.i18n.translate('project_has_too_many_files'))
  }
}
|
||||||
|
|
||||||
|
// Create a folder inside the project on behalf of the logged-in user.
// Responds with the new folder, or 400 for bad names / file-count limits.
async function addFolder(req, res, next) {
  const projectId = req.params.Project_id
  const { name } = req.body
  const parentFolderId = req.body.parent_folder_id
  const userId = SessionManager.getLoggedInUserId(req.session)
  if (!_nameIsAcceptableLength(name)) {
    return res.sendStatus(400)
  }
  try {
    const doc = await EditorController.promises.addFolder(
      projectId,
      parentFolderId,
      name,
      'editor',
      userId
    )
    res.json(doc)
  } catch (err) {
    // translate known domain errors into 400s; anything else goes to the
    // Express error handler
    if (err.message === 'project_has_too_many_files') {
      res.status(400).json(req.i18n.translate('project_has_too_many_files'))
    } else if (err.message === 'invalid element name') {
      res.status(400).json(req.i18n.translate('invalid_file_name'))
    } else {
      next(err)
    }
  }
}
|
||||||
|
|
||||||
|
// Rename a doc/file/folder; 400 when the new name is empty or too long,
// 204 on success.
async function renameEntity(req, res, next) {
  const {
    Project_id: projectId,
    entity_id: entityId,
    entity_type: entityType,
  } = req.params
  const { name } = req.body
  if (!_nameIsAcceptableLength(name)) {
    return res.sendStatus(400)
  }
  const userId = SessionManager.getLoggedInUserId(req.session)
  await EditorController.promises.renameEntity(
    projectId,
    entityId,
    entityType,
    name,
    userId
  )
  res.sendStatus(204)
}
|
||||||
|
|
||||||
|
// Move a doc/file/folder into another folder within the same project.
// Responds 204 on success.
async function moveEntity(req, res, next) {
  const projectId = req.params.Project_id
  const entityId = req.params.entity_id
  const entityType = req.params.entity_type
  const folderId = req.body.folder_id
  const userId = SessionManager.getLoggedInUserId(req.session)
  await EditorController.promises.moveEntity(
    projectId,
    entityId,
    folderId,
    entityType,
    userId
  )
  res.sendStatus(204)
}
|
||||||
|
|
||||||
|
// Thin wrappers that fix the entity type on the request before delegating
// to the shared deleteEntity handler.
async function deleteDoc(req, res, next) {
  req.params.entity_type = 'doc'
  await deleteEntity(req, res, next)
}

async function deleteFile(req, res, next) {
  req.params.entity_type = 'file'
  await deleteEntity(req, res, next)
}

async function deleteFolder(req, res, next) {
  req.params.entity_type = 'folder'
  await deleteEntity(req, res, next)
}
|
||||||
|
|
||||||
|
// Delete a doc/file/folder from the project; 204 on success.
async function deleteEntity(req, res, next) {
  const {
    Project_id: projectId,
    entity_id: entityId,
    entity_type: entityType,
  } = req.params
  const userId = SessionManager.getLoggedInUserId(req.session)
  await EditorController.promises.deleteEntity(
    projectId,
    entityId,
    entityType,
    'editor',
    userId
  )
  res.sendStatus(204)
}
|
||||||
|
|
||||||
|
// Convert an existing doc into a binary file entity. Responds with the new
// file id, "not found" when the doc is missing, or "unprocessable entity"
// when the doc has comments or tracked changes (which cannot be preserved).
async function convertDocToFile(req, res, next) {
  const projectId = req.params.Project_id
  const docId = req.params.entity_id
  const { userId } = req.body
  try {
    const fileRef = await ProjectEntityUpdateHandler.promises.convertDocToFile(
      projectId,
      docId,
      userId
    )
    res.json({ fileId: fileRef._id.toString() })
  } catch (err) {
    if (err instanceof Errors.NotFoundError) {
      return HttpErrorHandler.notFound(req, res, 'Document not found')
    } else if (err instanceof Errors.DocHasRangesError) {
      return HttpErrorHandler.unprocessableEntity(
        req,
        res,
        'Document has comments or tracked changes'
      )
    } else {
      // unknown failure: let expressify forward it to the error handler
      throw err
    }
  }
}
|
|
@ -0,0 +1,51 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS101: Remove unnecessary use of Array.from
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let EditorRealTimeController
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
const Metrics = require('@overleaf/metrics')
|
||||||
|
const RedisWrapper = require('../../infrastructure/RedisWrapper')
|
||||||
|
const rclient = RedisWrapper.client('pubsub')
|
||||||
|
const os = require('os')
|
||||||
|
const crypto = require('crypto')
|
||||||
|
|
||||||
|
const HOST = os.hostname()
|
||||||
|
const RND = crypto.randomBytes(4).toString('hex') // generate a random key for this process
|
||||||
|
let COUNT = 0
|
||||||
|
|
||||||
|
module.exports = EditorRealTimeController = {
|
||||||
|
emitToRoom(room_id, message, ...payload) {
|
||||||
|
// create a unique message id using a counter
|
||||||
|
const message_id = `web:${HOST}:${RND}-${COUNT++}`
|
||||||
|
var channel
|
||||||
|
if (room_id === 'all' || !Settings.publishOnIndividualChannels) {
|
||||||
|
channel = 'editor-events'
|
||||||
|
} else {
|
||||||
|
channel = `editor-events:${room_id}`
|
||||||
|
}
|
||||||
|
const blob = JSON.stringify({
|
||||||
|
room_id,
|
||||||
|
message,
|
||||||
|
payload,
|
||||||
|
_id: message_id,
|
||||||
|
})
|
||||||
|
Metrics.summary('redis.publish.editor-events', blob.length, {
|
||||||
|
status: message,
|
||||||
|
})
|
||||||
|
return rclient.publish(channel, blob)
|
||||||
|
},
|
||||||
|
|
||||||
|
emitToAll(message, ...payload) {
|
||||||
|
return this.emitToRoom('all', message, ...Array.from(payload))
|
||||||
|
},
|
||||||
|
}
|
84
services/web/app/src/Features/Editor/EditorRouter.js
Normal file
84
services/web/app/src/Features/Editor/EditorRouter.js
Normal file
|
@ -0,0 +1,84 @@
|
||||||
|
const EditorHttpController = require('./EditorHttpController')
|
||||||
|
const AuthenticationController = require('../Authentication/AuthenticationController')
|
||||||
|
const AuthorizationMiddleware = require('../Authorization/AuthorizationMiddleware')
|
||||||
|
const RateLimiterMiddleware = require('../Security/RateLimiterMiddleware')
|
||||||
|
const { Joi, validate } = require('../../infrastructure/Validation')
|
||||||
|
|
||||||
|
// Registers the editor HTTP routes. Middleware order matters: authorization
// runs before rate limiting, which runs before the handler.
module.exports = {
  apply(webRouter, apiRouter) {
    // Create a doc (rate limited per project).
    webRouter.post(
      '/project/:Project_id/doc',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      RateLimiterMiddleware.rateLimit({
        endpointName: 'add-doc-to-project',
        params: ['Project_id'],
        maxRequests: 30,
        timeInterval: 60,
      }),
      EditorHttpController.addDoc
    )
    // Create a folder (rate limited per project).
    webRouter.post(
      '/project/:Project_id/folder',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      RateLimiterMiddleware.rateLimit({
        endpointName: 'add-folder-to-project',
        params: ['Project_id'],
        maxRequests: 60,
        timeInterval: 60,
      }),
      EditorHttpController.addFolder
    )

    // Rename / move any entity type (doc, file or folder).
    webRouter.post(
      '/project/:Project_id/:entity_type/:entity_id/rename',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.renameEntity
    )
    webRouter.post(
      '/project/:Project_id/:entity_type/:entity_id/move',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.moveEntity
    )

    // Deletion routes, one per entity type.
    webRouter.delete(
      '/project/:Project_id/file/:entity_id',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.deleteFile
    )
    webRouter.delete(
      '/project/:Project_id/doc/:entity_id',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.deleteDoc
    )
    webRouter.delete(
      '/project/:Project_id/folder/:entity_id',
      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
      EditorHttpController.deleteFolder
    )
    // Private (service-to-service) API: convert a doc into a file.
    apiRouter.post(
      '/project/:Project_id/doc/:entity_id/convert-to-file',
      AuthenticationController.requirePrivateApiAuth(),
      validate({
        body: Joi.object({
          userId: Joi.objectId().required(),
        }),
      }),
      EditorHttpController.convertDocToFile
    )

    // Called by the real-time API to load up the current project state.
    // This is a post request because it's more than just a getting of data. We take actions
    // whenever a user joins a project, like updating the deleted status.
    apiRouter.post(
      '/project/:Project_id/join',
      AuthenticationController.requirePrivateApiAuth(),
      RateLimiterMiddleware.rateLimit({
        endpointName: 'join-project',
        params: ['Project_id'],
        maxRequests: 45,
        timeInterval: 60,
      }),
      EditorHttpController.joinProject
    )
  },
}
|
41
services/web/app/src/Features/Email/Bodies/NoCTAEmailBody.js
Normal file
41
services/web/app/src/Features/Email/Bodies/NoCTAEmailBody.js
Normal file
|
@ -0,0 +1,41 @@
|
||||||
|
const _ = require('underscore')

// Underscore template for the HTML body of emails that have no
// call-to-action button. Template params: `title` (optional heading),
// `greeting` (optional), `message` (array of paragraph strings).
// NOTE(review): `<%= %>` interpolates without HTML-escaping — callers are
// expected to supply pre-sanitized content; verify before reusing.
module.exports = _.template(`\
<table class="row" style="border-collapse: collapse; border-spacing: 0; display: table; padding: 0; position: relative; text-align: left; vertical-align: top; width: 100%;">
<tbody>
<tr style="padding: 0; vertical-align: top;">
<th class="small-12 columns" style="line-height: 1.3; margin: 0 auto; padding: 0; padding-bottom: 16px; padding-left: 16px; padding-right: 16px; text-align: left; width: 564px;">
<table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3;">
<tr style="padding: 0; text-align: left; vertical-align: top;">
<th style="margin: 0; padding: 0; text-align: left;">
<% if (title) { %>
<h3 class="force-overleaf-style" style="margin: 0; color: #5D6879; font-family: Georgia, serif; font-size: 24px; font-weight: normal; line-height: 1.3; padding: 0; text-align: left; word-wrap: normal;">
<%= title %>
</h3>
<% } %>
</th>
<tr>
<td>
<p style="height: 20px; margin: 0; padding: 0;"> </p>

<% if (greeting) { %>
<p style="margin: 0 0 10px 0; padding: 0;">
<%= greeting %>
</p>
<% } %>

<% (message).forEach(function(paragraph) { %>
<p class="force-overleaf-style" style="margin: 0 0 10px 0; padding: 0;">
<%= paragraph %>
</p>
<% }) %>
</td>
</tr>
</tr>
</table>
</th>
</tr>
</tbody>
</table>
\
`)
|
96
services/web/app/src/Features/Email/Bodies/cta-email.js
Normal file
96
services/web/app/src/Features/Email/Bodies/cta-email.js
Normal file
|
@ -0,0 +1,96 @@
|
||||||
|
const _ = require('underscore')

// Underscore template for the HTML body of call-to-action emails. Template
// params: `title` (optional), `greeting` (optional), `message` (array of
// paragraphs), `ctaText`/`ctaURL` (the button), `secondaryMessage`
// (optional array), `gmailGoToAction` (optional schema.org ViewAction
// metadata) and `StringHelper` (for JSON-LD serialization).
// NOTE(review): `<%= %>` interpolates without HTML-escaping — callers are
// expected to supply pre-sanitized content; verify before reusing.
module.exports = _.template(`\
<table class="row" style="border-collapse: collapse; border-spacing: 0; display: table; padding: 0; position: relative; text-align: left; vertical-align: top; width: 100%;">
<tbody>
<tr style="padding: 0; vertical-align: top;">
<th class="small-12 columns" style="line-height: 1.3; margin: 0 auto; padding: 0; padding-bottom: 16px; padding-left: 16px; padding-right: 16px; text-align: left;">
<table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3;">
<tr style="padding: 0; text-align: left; vertical-align: top;">
<th style="margin: 0; padding: 0; text-align: left;">
<% if (title) { %>
<h3 class="force-overleaf-style" style="margin: 0; color: #5D6879; font-family: Georgia, serif; font-size: 24px; font-weight: normal; line-height: 1.3; padding: 0; text-align: left; word-wrap: normal;">
<%= title %>
</h3>
<% } %>
</th>
<tr>
<td>
<p style="height: 20px; margin: 0; padding: 0;"> </p>

<% if (greeting) { %>
<p style="margin: 0 0 10px 0; padding: 0;">
<%= greeting %>
</p>
<% } %>

<% (message).forEach(function(paragraph) { %>
<p class="force-overleaf-style" style="margin: 0 0 10px 0; padding: 0;">
<%= paragraph %>
</p>
<% }) %>

<p style="margin: 0; padding: 0;"> </p>

<table style="border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; padding: 0; text-align: center; vertical-align: top; width: auto;">
<tr style="padding: 0; text-align: left; vertical-align: top;">
<td style="-moz-hyphens: auto; -webkit-hyphens: auto; border-collapse: collapse !important; border-radius: 9999px; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
<table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;">
<tr style="padding: 0; text-align: left; vertical-align: top;">
<td style="-moz-hyphens: auto; -webkit-hyphens: auto; background: #4F9C45; border: none; border-collapse: collapse !important; border-radius: 9999px; color: #fefefe; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
<a href="<%= ctaURL %>" style="border: 0 solid #4F9C45; border-radius: 9999px; color: #fefefe; display: inline-block; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: bold; line-height: 1.3; margin: 0; padding: 8px 16px 8px 16px; text-align: left; text-decoration: none;">
<%= ctaText %>
</a>
</td>
</tr>
</table>
</td>
</tr>
</table>

<% if (secondaryMessage && secondaryMessage.length > 0) { %>
<p style="margin: 0; padding: 0;"> </p>

<% (secondaryMessage).forEach(function(paragraph) { %>
<p class="force-overleaf-style">
<%= paragraph %>
</p>
<% }) %>
<% } %>

<p style="margin: 0; padding: 0;"> </p>

<p class="force-overleaf-style" style="font-size: 12px;">
If the button above does not appear, please copy and paste this link into your browser's address bar:
</p>

<p class="force-overleaf-style" style="font-size: 12px;">
<%= ctaURL %>
</p>
</td>
</tr>
</tr>
</table>
</th>
</tr>
</tbody>
</table>
<% if (gmailGoToAction) { %>
<script type="application/ld+json">
<%=
StringHelper.stringifyJsonForScript({
"@context": "http://schema.org",
"@type": "EmailMessage",
"potentialAction": {
"@type": "ViewAction",
"target": gmailGoToAction.target,
"url": gmailGoToAction.target,
"name": gmailGoToAction.name
},
"description": gmailGoToAction.description
})
%>
</script>
<% } %>
\
`)
|
559
services/web/app/src/Features/Email/EmailBuilder.js
Normal file
559
services/web/app/src/Features/Email/EmailBuilder.js
Normal file
|
@ -0,0 +1,559 @@
|
||||||
|
const _ = require('underscore')
|
||||||
|
const settings = require('@overleaf/settings')
|
||||||
|
const moment = require('moment')
|
||||||
|
const EmailMessageHelper = require('./EmailMessageHelper')
|
||||||
|
const StringHelper = require('../Helpers/StringHelper')
|
||||||
|
const BaseWithHeaderEmailLayout = require('./Layouts/BaseWithHeaderEmailLayout')
|
||||||
|
const SpamSafe = require('./SpamSafe')
|
||||||
|
const ctaEmailBody = require('./Bodies/cta-email')
|
||||||
|
const NoCTAEmailBody = require('./Bodies/NoCTAEmailBody')
|
||||||
|
|
||||||
|
/**
 * Render the plain-text (text/plain) body of an email.
 *
 * Paragraph groups are separated by CRLF pairs, matching the previous
 * string-concatenation implementation byte-for-byte.
 *
 * @param {Object} content - template content accessors (greeting, message,
 *   secondaryMessage, ctaText, ctaURL); each takes (opts, isPlainText)
 * @param {Object} opts - per-email options passed through to the accessors
 * @param {boolean} ctaEmail - whether to include the call-to-action line
 * @returns {string} the assembled plain-text body
 */
function _emailBodyPlainText(content, opts, ctaEmail) {
  const sections = [`${content.greeting(opts, true)}`]
  sections.push(`${content.message(opts, true).join('\r\n\r\n')}`)

  if (ctaEmail) {
    sections.push(
      `${content.ctaText(opts, true)}: ${content.ctaURL(opts, true)}`
    )
  }

  // Evaluate secondaryMessage once (the previous code called it up to
  // three times per email).
  const secondaryMessage = content.secondaryMessage(opts, true)
  if (secondaryMessage && secondaryMessage.length > 0) {
    sections.push(`${secondaryMessage.join('\r\n\r\n')}`)
  }

  sections.push(
    `Regards,\r\nThe ${settings.appName} Team - ${settings.siteUrl}`
  )

  if (
    settings.email &&
    settings.email.template &&
    settings.email.template.customFooter
  ) {
    sections.push(settings.email.template.customFooter)
  }

  return sections.join('\r\n\r\n')
}
|
||||||
|
|
||||||
|
// Build a template descriptor (subject/layout/plain-text/HTML renderers)
// for call-to-action (button) emails.
// NOTE(review): mutates the passed-in `content` object by filling in
// defaults for the optional accessors — confirm callers do not reuse it.
function ctaTemplate(content) {
  if (
    !content.ctaURL ||
    !content.ctaText ||
    !content.message ||
    !content.subject
  ) {
    throw new Error('missing required CTA email content')
  }
  // Optional accessors default to no-ops / empty values.
  if (!content.title) {
    content.title = () => {}
  }
  if (!content.greeting) {
    content.greeting = () => 'Hi,'
  }
  if (!content.secondaryMessage) {
    content.secondaryMessage = () => []
  }
  if (!content.gmailGoToAction) {
    content.gmailGoToAction = () => {}
  }
  return {
    subject(opts) {
      return content.subject(opts)
    },
    layout: BaseWithHeaderEmailLayout,
    plainTextTemplate(opts) {
      return _emailBodyPlainText(content, opts, true)
    },
    compiledTemplate(opts) {
      return ctaEmailBody({
        title: content.title(opts),
        greeting: content.greeting(opts),
        message: content.message(opts),
        secondaryMessage: content.secondaryMessage(opts),
        ctaText: content.ctaText(opts),
        ctaURL: content.ctaURL(opts),
        gmailGoToAction: content.gmailGoToAction(opts),
        StringHelper,
      })
    },
  }
}
|
||||||
|
|
||||||
|
// Build a template descriptor for emails without a call-to-action button.
// NOTE(review): mutates `content` to default the greeting accessor.
function NoCTAEmailTemplate(content) {
  if (content.greeting == null) {
    content.greeting = () => 'Hi,'
  }
  if (!content.message) {
    throw new Error('missing message')
  }
  return {
    subject(opts) {
      return content.subject(opts)
    },
    layout: BaseWithHeaderEmailLayout,
    // Plain-text alternative; template-literal content is flush-left on
    // purpose so no indentation leaks into the email body.
    plainTextTemplate(opts) {
      return `\
${content.greeting(opts)}

${content.message(opts, true).join('\r\n\r\n')}

Regards,
The ${settings.appName} Team - ${settings.siteUrl}\
`
    },
    compiledTemplate(opts) {
      return NoCTAEmailBody({
        // title is optional and may be absent entirely
        title:
          typeof content.title === 'function' ? content.title(opts) : undefined,
        greeting: content.greeting(opts),
        message: content.message(opts),
        StringHelper,
      })
    },
  }
}
|
||||||
|
|
||||||
|
// Assemble a complete email (subject, html, text) from a named template in
// the `templates` registry.
// NOTE(review): mutates `opts` (adds siteUrl and the rendered body) before
// passing it to the layout — confirm callers do not reuse the object.
function buildEmail(templateName, opts) {
  const template = templates[templateName]
  opts.siteUrl = settings.siteUrl
  opts.body = template.compiledTemplate(opts)
  return {
    subject: template.subject(opts),
    html: template.layout(opts),
    // plain-text part is optional on a template
    text: template.plainTextTemplate && template.plainTextTemplate(opts),
  }
}
|
||||||
|
|
||||||
|
const templates = {}
|
||||||
|
|
||||||
|
templates.registered = ctaTemplate({
|
||||||
|
subject() {
|
||||||
|
return `Activate your ${settings.appName} Account`
|
||||||
|
},
|
||||||
|
message(opts) {
|
||||||
|
return [
|
||||||
|
`Congratulations, you've just had an account created for you on ${
|
||||||
|
settings.appName
|
||||||
|
} with the email address '${_.escape(opts.to)}'.`,
|
||||||
|
'Click here to set your password and log in:',
|
||||||
|
]
|
||||||
|
},
|
||||||
|
secondaryMessage() {
|
||||||
|
return [
|
||||||
|
`If you have any questions or problems, please contact ${settings.adminEmail}`,
|
||||||
|
]
|
||||||
|
},
|
||||||
|
ctaText() {
|
||||||
|
return 'Set password'
|
||||||
|
},
|
||||||
|
ctaURL(opts) {
|
||||||
|
return opts.setNewPasswordUrl
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
templates.canceledSubscription = ctaTemplate({
|
||||||
|
subject() {
|
||||||
|
return `${settings.appName} thoughts`
|
||||||
|
},
|
||||||
|
message() {
|
||||||
|
return [
|
||||||
|
`We are sorry to see you cancelled your ${settings.appName} premium subscription. Would you mind giving us some feedback on what the site is lacking at the moment via this quick survey?`,
|
||||||
|
]
|
||||||
|
},
|
||||||
|
secondaryMessage() {
|
||||||
|
return ['Thank you in advance!']
|
||||||
|
},
|
||||||
|
ctaText() {
|
||||||
|
return 'Leave Feedback'
|
||||||
|
},
|
||||||
|
ctaURL(opts) {
|
||||||
|
return 'https://docs.google.com/forms/d/e/1FAIpQLSfa7z_s-cucRRXm70N4jEcSbFsZeb0yuKThHGQL8ySEaQzF0Q/viewform?usp=sf_link'
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
// Confirmation that a previously-cancelled subscription is active again.
templates.reactivatedSubscription = ctaTemplate({
  subject: () => `Subscription Reactivated - ${settings.appName}`,
  message: () => ['Your subscription was reactivated successfully.'],
  ctaText: () => 'View Subscription Dashboard',
  ctaURL: () => `${settings.siteUrl}/user/subscription`,
})
|
||||||
|
// Password-reset email; CTA goes to the set-new-password flow.
templates.passwordResetRequested = ctaTemplate({
  subject: () => `Password Reset - ${settings.appName}`,
  title: () => 'Password Reset',
  message: () => [
    `We got a request to reset your ${settings.appName} password.`,
  ],
  secondaryMessage: () => [
    "If you ignore this message, your password won't be changed.",
    "If you didn't request a password reset, let us know.",
  ],
  ctaText: () => 'Reset password',
  ctaURL: opts => opts.setNewPasswordUrl,
})
|
||||||
|
// Asks the user to confirm a newly-added secondary email address.
templates.confirmEmail = ctaTemplate({
  subject() {
    return `Confirm Email - ${settings.appName}`
  },
  title() {
    return 'Confirm Email'
  },
  message(opts) {
    return [
      // Escape the (user-controlled) address so it cannot inject HTML
      // into the email body — matches the `registered` template, which
      // already escapes `opts.to`.
      `Please confirm that you have added a new email, ${_.escape(
        opts.to
      )}, to your ${settings.appName} account.`,
    ]
  },
  secondaryMessage() {
    return [
      'If you did not request this, you can simply ignore this message.',
      `If you have any questions or trouble confirming your email address, please get in touch with our support team at ${settings.adminEmail}.`,
    ]
  },
  ctaText() {
    return 'Confirm Email'
  },
  ctaURL(opts) {
    return opts.confirmEmailUrl
  },
})
|
||||||
|
// Invitation to collaborate on a project. Every user-controlled value
// is passed through SpamSafe and HTML-escaped before interpolation.
// Subject and title are identical, so they share one helper.
const projectInviteHeading = opts =>
  `${_.escape(
    SpamSafe.safeProjectName(opts.project.name, 'New Project')
  )} - shared by ${_.escape(
    SpamSafe.safeEmail(opts.owner.email, 'a collaborator')
  )}`

templates.projectInvite = ctaTemplate({
  subject: projectInviteHeading,
  title: projectInviteHeading,
  message: opts => [
    `${_.escape(
      SpamSafe.safeEmail(opts.owner.email, 'a collaborator')
    )} wants to share ${_.escape(
      SpamSafe.safeProjectName(opts.project.name, 'a new project')
    )} with you.`,
  ],
  ctaText: () => 'View project',
  ctaURL: opts => opts.inviteUrl,
  // Gmail "Go-To Action" markup shown next to the message in the inbox.
  gmailGoToAction: opts => ({
    target: opts.inviteUrl,
    name: 'View project',
    description: `Join ${_.escape(
      SpamSafe.safeProjectName(opts.project.name, 'project')
    )} at ${settings.appName}`,
  }),
})
|
||||||
|
// Periodic request to re-confirm an existing email address.
templates.reconfirmEmail = ctaTemplate({
  subject() {
    return `Reconfirm Email - ${settings.appName}`
  },
  title() {
    return 'Reconfirm Email'
  },
  message(opts) {
    return [
      // Escape the (user-controlled) address so it cannot inject HTML
      // into the email body — consistent with the `registered` template.
      `Please reconfirm your email address, ${_.escape(
        opts.to
      )}, on your ${settings.appName} account.`,
    ]
  },
  secondaryMessage() {
    return [
      'If you did not request this, you can simply ignore this message.',
      `If you have any questions or trouble confirming your email address, please get in touch with our support team at ${settings.adminEmail}.`,
    ]
  },
  ctaText() {
    return 'Reconfirm Email'
  },
  ctaURL(opts) {
    return opts.confirmEmailUrl
  },
})
|
||||||
|
// Invitation to join a (group subscription) team. Subject and title
// are identical, so they share one helper.
const verifyEmailToJoinTeamHeading = opts =>
  `${_.escape(
    _formatUserNameAndEmail(opts.inviter, 'A collaborator')
  )} has invited you to join a team on ${settings.appName}`

templates.verifyEmailToJoinTeam = ctaTemplate({
  subject: verifyEmailToJoinTeamHeading,
  title: verifyEmailToJoinTeamHeading,
  message: () => [
    `Please click the button below to join the team and enjoy the benefits of an upgraded ${settings.appName} account.`,
  ],
  ctaText: () => 'Join now',
  ctaURL: opts => opts.acceptInviteUrl,
})
|
||||||
|
// Simple smoke-test email used to verify the email pipeline works.
templates.testEmail = ctaTemplate({
  subject: () => `A Test Email from ${settings.appName}`,
  title: () => `A Test Email from ${settings.appName}`,
  greeting: () => 'Hi,',
  message: () => [`This is a test Email from ${settings.appName}`],
  ctaText: () => `Open ${settings.appName}`,
  ctaURL: () => settings.siteUrl,
})
|
||||||
|
// Tells the previous owner that their requested ownership transfer has
// been carried out. Purely informational, so no CTA.
templates.ownershipTransferConfirmationPreviousOwner = NoCTAEmailTemplate({
  subject: () => `Project ownership transfer - ${settings.appName}`,
  title(opts) {
    const safeName = _.escape(
      SpamSafe.safeProjectName(opts.project.name, 'Your project')
    )
    return `${safeName} - Owner change`
  },
  message(opts, isPlainText) {
    const newOwnerLabel = _.escape(
      _formatUserNameAndEmail(opts.newOwner, 'a collaborator')
    )
    const safeName = _.escape(
      SpamSafe.safeProjectName(opts.project.name, 'your project')
    )
    // Bold the project name in HTML output only.
    const shownName = isPlainText ? safeName : `<b>${safeName}</b>`
    return [
      `As per your request, we have made ${newOwnerLabel} the owner of ${shownName}.`,
      `If you haven't asked to change the owner of ${shownName}, please get in touch with us via ${settings.adminEmail}.`,
    ]
  },
})
|
||||||
|
// Tells the new owner that a project has been transferred to them;
// CTA links straight to the project.
templates.ownershipTransferConfirmationNewOwner = ctaTemplate({
  subject: () => `Project ownership transfer - ${settings.appName}`,
  title(opts) {
    const safeName = _.escape(
      SpamSafe.safeProjectName(opts.project.name, 'Your project')
    )
    return `${safeName} - Owner change`
  },
  message(opts, isPlainText) {
    const previousOwnerLabel = _.escape(
      _formatUserNameAndEmail(opts.previousOwner, 'A collaborator')
    )
    const safeName = _.escape(
      SpamSafe.safeProjectName(opts.project.name, 'a project')
    )
    // Bold the project name in HTML output only.
    const emphasizedName = isPlainText ? safeName : `<b>${safeName}</b>`
    return [
      `${previousOwnerLabel} has made you the owner of ${emphasizedName}. You can now manage ${safeName} sharing settings.`,
    ]
  },
  ctaText: () => 'View project',
  ctaURL: opts => `${settings.siteUrl}/project/${opts.project._id.toString()}`,
})
|
||||||
|
// One-off onboarding email sent shortly after signup; a digest of
// links into docs, templates and the feedback survey. No CTA button.
templates.userOnboardingEmail = NoCTAEmailTemplate({
  subject(opts) {
    return `Getting more out of ${settings.appName}`
  },
  greeting(opts) {
    return ''
  },
  title(opts) {
    return `Getting more out of ${settings.appName}`
  },
  message(opts, isPlainText) {
    // Links render as <a> tags in HTML or "text (url)" in plain text.
    const learnLatexLink = EmailMessageHelper.displayLink(
      'Learn LaTeX in 30 minutes',
      `${settings.siteUrl}/learn/latex/Learn_LaTeX_in_30_minutes?utm_source=overleaf&utm_medium=email&utm_campaign=onboarding`,
      isPlainText
    )
    const templatesLinks = EmailMessageHelper.displayLink(
      'Find a beautiful template',
      `${settings.siteUrl}/latex/templates?utm_source=overleaf&utm_medium=email&utm_campaign=onboarding`,
      isPlainText
    )
    const collaboratorsLink = EmailMessageHelper.displayLink(
      'Work with your collaborators',
      `${settings.siteUrl}/learn/how-to/Sharing_a_project?utm_source=overleaf&utm_medium=email&utm_campaign=onboarding`,
      isPlainText
    )
    const siteLink = EmailMessageHelper.displayLink(
      'www.overleaf.com',
      settings.siteUrl,
      isPlainText
    )
    const userSettingsLink = EmailMessageHelper.displayLink(
      'here',
      `${settings.siteUrl}/user/settings`,
      isPlainText
    )
    const onboardingSurveyLink = EmailMessageHelper.displayLink(
      'Join our user feedback programme',
      'https://forms.gle/DB7pdk2B1VFQqVVB9',
      isPlainText
    )
    return [
      `Thanks for signing up for ${settings.appName} recently. We hope you've been finding it useful! Here are some key features to help you get the most out of the service:`,
      `${learnLatexLink}: In this tutorial we provide a quick and easy first introduction to LaTeX with no prior knowledge required. By the time you are finished, you will have written your first LaTeX document!`,
      `${templatesLinks}: If you're looking for a template or example to get started, we've a large selection available in our template gallery, including CVs, project reports, journal articles and more.`,
      // typo fix: "projecs" -> "projects"
      `${collaboratorsLink}: One of the key features of Overleaf is the ability to share projects and collaborate on them with other users. Find out how to share your projects with your colleagues in this quick how-to guide.`,
      `${onboardingSurveyLink} to help us make Overleaf even better!`,
      'Thanks again for using Overleaf :)',
      `John`,
      `Dr John Hammersley <br />Co-founder & CEO <br />${siteLink}<hr>`,
      `Don't want onboarding emails like this from us? Don't worry, this is the only one. If you've previously subscribed to emails about product offers and company news and events, you can unsubscribe ${userSettingsLink}.`,
    ]
  },
})
|
||||||
|
// Security notification (login from a new device, account change, …).
// `opts.action` is a short phrase, `opts.actionDescribed` a full
// sentence fragment; both are supplied by the caller.
templates.securityAlert = NoCTAEmailTemplate({
  subject(opts) {
    return `Overleaf security note: ${opts.action}`
  },
  title(opts) {
    // Capitalise the first letter of the action for the heading.
    return opts.action.charAt(0).toUpperCase() + opts.action.slice(1)
  },
  message(opts, isPlainText) {
    const dateFormatted = moment().format('dddd D MMMM YYYY')
    const timeFormatted = moment().format('HH:mm')
    const helpLink = EmailMessageHelper.displayLink(
      'quick guide',
      `${settings.siteUrl}/learn/how-to/Keeping_your_account_secure`,
      isPlainText
    )

    // Caller-provided text may contain markup; sanitize it for the
    // current output format.
    const actionDescribed = EmailMessageHelper.cleanHTML(
      opts.actionDescribed,
      isPlainText
    )

    // Sanitize any extra message lines without mutating the caller's
    // opts object (previously this assigned opts.message = []).
    const extraMessages = (opts.message || []).map(m => {
      return EmailMessageHelper.cleanHTML(m, isPlainText)
    })

    return [
      `We are writing to let you know that ${actionDescribed} on ${dateFormatted} at ${timeFormatted} GMT.`,
      ...extraMessages,
      `If this was you, you can ignore this email.`,
      `If this was not you, we recommend getting in touch with our support team at ${settings.adminEmail} to report this as potentially suspicious activity on your account.`,
      `We also encourage you to read our ${helpLink} to keeping your ${settings.appName} account safe.`,
    ]
  },
})
|
||||||
|
// Sent to users whose institutional (SAML) login link had to be
// cleared due to a bug; asks them to relink via settings.
templates.SAMLDataCleared = ctaTemplate({
  subject: () => `Institutional Login No Longer Linked - ${settings.appName}`,
  title: () => 'Institutional Login No Longer Linked',
  message: () => [
    `We're writing to let you know that due to a bug on our end, we've had to temporarily disable logging into your ${settings.appName} through your institution.`,
    `To get it going again, you'll need to relink your institutional email address to your ${settings.appName} account via your settings.`,
  ],
  secondaryMessage: () => [
    `If you ordinarily log in to your ${settings.appName} account through your institution, you may need to set or reset your password to regain access to your account first.`,
    'This bug did not affect the security of any accounts, but it may have affected license entitlements for a small number of users. We are sorry for any inconvenience that this may cause for you.',
    `If you have any questions, please get in touch with our support team at ${settings.adminEmail} or by replying to this email.`,
  ],
  ctaText: () => 'Update my Emails and Affiliations',
  ctaURL: () => `${settings.siteUrl}/user/settings`,
})
|
||||||
|
/**
 * Format a user as "First Last (email)", degrading gracefully when the
 * name or email fails the SpamSafe checks.
 *
 * @param {Object} user - expected to carry first_name, last_name, email
 * @param {string} placeholder - fallback when nothing safe is available
 * @returns {string}
 */
function _formatUserNameAndEmail(user, placeholder) {
  if (user.first_name && user.last_name) {
    const fullName = `${user.first_name} ${user.last_name}`
    if (SpamSafe.isSafeUserName(fullName)) {
      // Append the email only when it also passes the spam check.
      return SpamSafe.isSafeEmail(user.email)
        ? `${fullName} (${user.email})`
        : fullName
    }
  }
  return SpamSafe.safeEmail(user.email, placeholder)
}
|
||||||
|
// Public API: the template registry, the two template factories
// (defined earlier in this file), and the renderer used by EmailHandler.
module.exports = {
  templates,
  ctaTemplate,
  NoCTAEmailTemplate,
  buildEmail,
}
24
services/web/app/src/Features/Email/EmailHandler.js
Normal file
24
services/web/app/src/Features/Email/EmailHandler.js
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
const { callbackify } = require('util')
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
const EmailBuilder = require('./EmailBuilder')
|
||||||
|
const EmailSender = require('./EmailSender')
|
||||||
|
|
||||||
|
const EMAIL_SETTINGS = Settings.email || {}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
sendEmail: callbackify(sendEmail),
|
||||||
|
promises: {
|
||||||
|
sendEmail,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
async function sendEmail(emailType, opts) {
|
||||||
|
const email = EmailBuilder.buildEmail(emailType, opts)
|
||||||
|
if (email.type === 'lifecycle' && !EMAIL_SETTINGS.lifecycle) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
opts.html = email.html
|
||||||
|
opts.text = email.text
|
||||||
|
opts.subject = email.subject
|
||||||
|
await EmailSender.promises.sendEmail(opts)
|
||||||
|
}
|
27
services/web/app/src/Features/Email/EmailMessageHelper.js
Normal file
27
services/web/app/src/Features/Email/EmailMessageHelper.js
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
const sanitizeHtml = require('sanitize-html')
|
||||||
|
const sanitizeOptions = {
|
||||||
|
html: {
|
||||||
|
allowedTags: ['span', 'b', 'br', 'i'],
|
||||||
|
allowedAttributes: {
|
||||||
|
span: ['style', 'class'],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
plainText: {
|
||||||
|
allowedTags: [],
|
||||||
|
allowedAttributes: {},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
function cleanHTML(text, isPlainText) {
|
||||||
|
if (!isPlainText) return sanitizeHtml(text, sanitizeOptions.html)
|
||||||
|
return sanitizeHtml(text, sanitizeOptions.plainText)
|
||||||
|
}
|
||||||
|
|
||||||
|
function displayLink(text, url, isPlainText) {
|
||||||
|
return isPlainText ? `${text} (${url})` : `<a href="${url}">${text}</a>`
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
cleanHTML,
|
||||||
|
displayLink,
|
||||||
|
}
|
20
services/web/app/src/Features/Email/EmailOptionsHelper.js
Normal file
20
services/web/app/src/Features/Email/EmailOptionsHelper.js
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
// Pick "a" or "an" based on the provider name's leading letter.
function _getIndefiniteArticle(providerName) {
  return /^[aeiou]/i.test(providerName) ? 'an' : 'a'
}

/**
 * Build the options for a "third-party account linked/unlinked"
 * security-alert email.
 *
 * @param {boolean} accountLinked - true if the account was just linked
 * @param {string} providerName - e.g. "Google", "Orcid"
 * @param {string} email - the affected account's email address
 * @returns {{to: string, action: string, actionDescribed: string}}
 */
function linkOrUnlink(accountLinked, providerName, email) {
  const indefiniteArticle = _getIndefiniteArticle(providerName)
  if (accountLinked) {
    return {
      to: email,
      action: `${providerName} account linked`,
      actionDescribed: `${indefiniteArticle} ${providerName} account was linked to your account ${email}`,
    }
  }
  return {
    to: email,
    action: `${providerName} account no longer linked`,
    actionDescribed: `${indefiniteArticle} ${providerName} account was unlinked from your account ${email}`,
  }
}
|
||||||
|
// Only the builder is public; _getIndefiniteArticle stays private.
module.exports = {
  linkOrUnlink,
}
117
services/web/app/src/Features/Email/EmailSender.js
Normal file
117
services/web/app/src/Features/Email/EmailSender.js
Normal file
|
@ -0,0 +1,117 @@
|
||||||
|
const { callbackify } = require('util')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const metrics = require('@overleaf/metrics')
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
const nodemailer = require('nodemailer')
|
||||||
|
const sesTransport = require('nodemailer-ses-transport')
|
||||||
|
const mandrillTransport = require('nodemailer-mandrill-transport')
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const RateLimiter = require('../../infrastructure/RateLimiter')
|
||||||
|
const _ = require('underscore')
|
||||||
|
|
||||||
|
// Email-related settings; tolerate a missing `email` section.
const EMAIL_SETTINGS = Settings.email || {}

module.exports = {
  sendEmail: callbackify(sendEmail),
  promises: {
    sendEmail,
  },
}

// The transport client is created once at module load time.
const client = getClient()
|
/**
 * Construct the nodemailer transport from Settings.email.parameters.
 * Supports AWS SES, Mandrill and generic SMTP; falls back to a log-only
 * stub when no transport is configured (so callers never crash).
 *
 * @returns {Object} object exposing an async `sendMail(options)` method
 * @throws {OError} if the deprecated sendgridApiKey option is set
 */
function getClient() {
  if (!EMAIL_SETTINGS.parameters) {
    logger.warn(
      'Email transport and/or parameters not defined. No emails will be sent.'
    )
    // No-op stub: log what would have been sent and succeed.
    return {
      async sendMail(options) {
        logger.log({ options }, 'Would send email if enabled.')
      },
    }
  }

  const emailParameters = EMAIL_SETTINGS.parameters
  if (emailParameters.AWSAccessKeyID || EMAIL_SETTINGS.driver === 'ses') {
    logger.log('using aws ses for email')
    return nodemailer.createTransport(sesTransport(emailParameters))
  }
  if (emailParameters.sendgridApiKey) {
    throw new OError(
      'sendgridApiKey configuration option is deprecated, use SMTP instead'
    )
  }
  if (emailParameters.MandrillApiKey) {
    logger.log('using mandril for email')
    return nodemailer.createTransport(
      mandrillTransport({
        auth: {
          apiKey: emailParameters.MandrillApiKey,
        },
      })
    )
  }

  logger.log('using smtp for email')
  // Only forward the connection options nodemailer's SMTP transport knows.
  const smtp = _.pick(
    emailParameters,
    'host',
    'port',
    'secure',
    'auth',
    'ignoreTLS',
    'logger',
    'name'
  )
  return nodemailer.createTransport(smtp)
}
|
||||||
|
/**
 * Send a single email through the configured transport, applying the
 * per-user rate limit first.
 *
 * @param {Object} options - to, subject, html, text, optional replyTo,
 *   and optional sendingUser_id (used for rate limiting)
 * @throws {OError} 'rate limit hit sending email' when throttled, or
 *   'error sending message' (with cause) when the transport fails
 */
async function sendEmail(options) {
  // The rate-limit check runs outside the try/catch below so its
  // specific error is surfaced directly instead of being re-wrapped
  // as a generic 'error sending message' (which masked the cause).
  const canContinue = await checkCanSendEmail(options)
  if (!canContinue) {
    logger.log(
      {
        sendingUser_id: options.sendingUser_id,
        to: options.to,
        subject: options.subject,
        canContinue,
      },
      'rate limit hit for sending email, not sending'
    )
    throw new OError('rate limit hit sending email')
  }

  metrics.inc('email')
  const sendMailOptions = {
    to: options.to,
    from: EMAIL_SETTINGS.fromAddress || '',
    subject: options.subject,
    html: options.html,
    text: options.text,
    replyTo: options.replyTo || EMAIL_SETTINGS.replyToAddress,
    socketTimeout: 30 * 1000,
  }
  if (EMAIL_SETTINGS.textEncoding != null) {
    sendMailOptions.textEncoding = EMAIL_SETTINGS.textEncoding
  }

  try {
    await client.sendMail(sendMailOptions)
  } catch (err) {
    // Wrap transport failures, preserving the original as the cause.
    throw new OError('error sending message').withCause(err)
  }
}
|
||||||
|
/**
 * Per-user rate limit: at most 100 user-triggered emails per 3 hours.
 * Emails without a sendingUser_id (system emails) are never throttled.
 *
 * @param {Object} options - read for its sendingUser_id
 * @returns {Promise<boolean>} true when sending is allowed
 */
async function checkCanSendEmail(options) {
  if (options.sendingUser_id == null) {
    // email not sent from user, not rate limited
    return true
  }
  return RateLimiter.promises.addCount({
    endpointName: 'send_email',
    timeInterval: 60 * 60 * 3,
    subjectName: options.sendingUser_id,
    throttle: 100,
  })
}
|
|
@ -0,0 +1,390 @@
|
||||||
|
const _ = require('underscore')
|
||||||
|
const settings = require('@overleaf/settings')
|
||||||
|
|
||||||
|
module.exports = _.template(`\
|
||||||
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
|
||||||
|
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
|
||||||
|
|
||||||
|
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en" style="Margin: 0; background: #E4E8EE !important; margin: 0; min-height: 100%; padding: 0;">
|
||||||
|
<head>
|
||||||
|
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
|
||||||
|
<meta name="viewport" content="width=device-width">
|
||||||
|
<style>.button td {
|
||||||
|
border-radius: 9999px; }
|
||||||
|
|
||||||
|
.force-overleaf-style a,
|
||||||
|
.force-overleaf-style a[href] {
|
||||||
|
color: #138A07 !important;
|
||||||
|
text-decoration: none !important;
|
||||||
|
-moz-hyphens: none;
|
||||||
|
-ms-hyphens: none;
|
||||||
|
-webkit-hyphens: none;
|
||||||
|
hyphens: none; }
|
||||||
|
.force-overleaf-style a:visited,
|
||||||
|
.force-overleaf-style a[href]:visited {
|
||||||
|
color: #138A07; }
|
||||||
|
.force-overleaf-style a:hover,
|
||||||
|
.force-overleaf-style a[href]:hover {
|
||||||
|
color: #3d7935; }
|
||||||
|
.force-overleaf-style a:active,
|
||||||
|
.force-overleaf-style a[href]:active {
|
||||||
|
color: #3d7935; }
|
||||||
|
</style>
|
||||||
|
<style>@media only screen {
|
||||||
|
html {
|
||||||
|
min-height: 100%;
|
||||||
|
background: #f6f6f6;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media only screen and (max-width: 596px) {
|
||||||
|
.small-float-center {
|
||||||
|
margin: 0 auto !important;
|
||||||
|
float: none !important;
|
||||||
|
text-align: center !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.small-text-center {
|
||||||
|
text-align: center !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.small-text-left {
|
||||||
|
text-align: left !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.small-text-right {
|
||||||
|
text-align: right !important;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media only screen and (max-width: 596px) {
|
||||||
|
.hide-for-large {
|
||||||
|
display: block !important;
|
||||||
|
width: auto !important;
|
||||||
|
overflow: visible !important;
|
||||||
|
max-height: none !important;
|
||||||
|
font-size: inherit !important;
|
||||||
|
line-height: inherit !important;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media only screen and (max-width: 596px) {
|
||||||
|
table.body table.container .hide-for-large,
|
||||||
|
table.body table.container .row.hide-for-large {
|
||||||
|
display: table !important;
|
||||||
|
width: 100% !important;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media only screen and (max-width: 596px) {
|
||||||
|
table.body table.container .callout-inner.hide-for-large {
|
||||||
|
display: table-cell !important;
|
||||||
|
width: 100% !important;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media only screen and (max-width: 596px) {
|
||||||
|
table.body table.container .show-for-large {
|
||||||
|
display: none !important;
|
||||||
|
width: 0;
|
||||||
|
mso-hide: all;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media only screen and (max-width: 596px) {
|
||||||
|
table.body img {
|
||||||
|
width: auto;
|
||||||
|
height: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body center {
|
||||||
|
min-width: 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body .container {
|
||||||
|
width: 95% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body .columns,
|
||||||
|
table.body .column {
|
||||||
|
height: auto !important;
|
||||||
|
-moz-box-sizing: border-box;
|
||||||
|
-webkit-box-sizing: border-box;
|
||||||
|
box-sizing: border-box;
|
||||||
|
padding-left: 16px !important;
|
||||||
|
padding-right: 16px !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body .columns .column,
|
||||||
|
table.body .columns .columns,
|
||||||
|
table.body .column .column,
|
||||||
|
table.body .column .columns {
|
||||||
|
padding-left: 0 !important;
|
||||||
|
padding-right: 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body .collapse .columns,
|
||||||
|
table.body .collapse .column {
|
||||||
|
padding-left: 0 !important;
|
||||||
|
padding-right: 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.small-1,
|
||||||
|
th.small-1 {
|
||||||
|
display: inline-block !important;
|
||||||
|
width: 8.33333% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.small-2,
|
||||||
|
th.small-2 {
|
||||||
|
display: inline-block !important;
|
||||||
|
width: 16.66667% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.small-3,
|
||||||
|
th.small-3 {
|
||||||
|
display: inline-block !important;
|
||||||
|
width: 25% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.small-4,
|
||||||
|
th.small-4 {
|
||||||
|
display: inline-block !important;
|
||||||
|
width: 33.33333% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.small-5,
|
||||||
|
th.small-5 {
|
||||||
|
display: inline-block !important;
|
||||||
|
width: 41.66667% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.small-6,
|
||||||
|
th.small-6 {
|
||||||
|
display: inline-block !important;
|
||||||
|
width: 50% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.small-7,
|
||||||
|
th.small-7 {
|
||||||
|
display: inline-block !important;
|
||||||
|
width: 58.33333% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.small-8,
|
||||||
|
th.small-8 {
|
||||||
|
display: inline-block !important;
|
||||||
|
width: 66.66667% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.small-9,
|
||||||
|
th.small-9 {
|
||||||
|
display: inline-block !important;
|
||||||
|
width: 75% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.small-10,
|
||||||
|
th.small-10 {
|
||||||
|
display: inline-block !important;
|
||||||
|
width: 83.33333% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.small-11,
|
||||||
|
th.small-11 {
|
||||||
|
display: inline-block !important;
|
||||||
|
width: 91.66667% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.small-12,
|
||||||
|
th.small-12 {
|
||||||
|
display: inline-block !important;
|
||||||
|
width: 100% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.columns td.small-12,
|
||||||
|
.column td.small-12,
|
||||||
|
.columns th.small-12,
|
||||||
|
.column th.small-12 {
|
||||||
|
display: block !important;
|
||||||
|
width: 100% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body td.small-offset-1,
|
||||||
|
table.body th.small-offset-1 {
|
||||||
|
margin-left: 8.33333% !important;
|
||||||
|
Margin-left: 8.33333% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body td.small-offset-2,
|
||||||
|
table.body th.small-offset-2 {
|
||||||
|
margin-left: 16.66667% !important;
|
||||||
|
Margin-left: 16.66667% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body td.small-offset-3,
|
||||||
|
table.body th.small-offset-3 {
|
||||||
|
margin-left: 25% !important;
|
||||||
|
Margin-left: 25% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body td.small-offset-4,
|
||||||
|
table.body th.small-offset-4 {
|
||||||
|
margin-left: 33.33333% !important;
|
||||||
|
Margin-left: 33.33333% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body td.small-offset-5,
|
||||||
|
table.body th.small-offset-5 {
|
||||||
|
margin-left: 41.66667% !important;
|
||||||
|
Margin-left: 41.66667% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body td.small-offset-6,
|
||||||
|
table.body th.small-offset-6 {
|
||||||
|
margin-left: 50% !important;
|
||||||
|
Margin-left: 50% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body td.small-offset-7,
|
||||||
|
table.body th.small-offset-7 {
|
||||||
|
margin-left: 58.33333% !important;
|
||||||
|
Margin-left: 58.33333% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body td.small-offset-8,
|
||||||
|
table.body th.small-offset-8 {
|
||||||
|
margin-left: 66.66667% !important;
|
||||||
|
Margin-left: 66.66667% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body td.small-offset-9,
|
||||||
|
table.body th.small-offset-9 {
|
||||||
|
margin-left: 75% !important;
|
||||||
|
Margin-left: 75% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body td.small-offset-10,
|
||||||
|
table.body th.small-offset-10 {
|
||||||
|
margin-left: 83.33333% !important;
|
||||||
|
Margin-left: 83.33333% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body td.small-offset-11,
|
||||||
|
table.body th.small-offset-11 {
|
||||||
|
margin-left: 91.66667% !important;
|
||||||
|
Margin-left: 91.66667% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body table.columns td.expander,
|
||||||
|
table.body table.columns th.expander {
|
||||||
|
display: none !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body .right-text-pad,
|
||||||
|
table.body .text-pad-right {
|
||||||
|
padding-left: 10px !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.body .left-text-pad,
|
||||||
|
table.body .text-pad-left {
|
||||||
|
padding-right: 10px !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.menu {
|
||||||
|
width: 100% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.menu td,
|
||||||
|
table.menu th {
|
||||||
|
width: auto !important;
|
||||||
|
display: inline-block !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.menu.vertical td,
|
||||||
|
table.menu.vertical th,
|
||||||
|
table.menu.small-vertical td,
|
||||||
|
table.menu.small-vertical th {
|
||||||
|
display: block !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.menu[align="center"] {
|
||||||
|
width: auto !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.button.small-expand,
|
||||||
|
table.button.small-expanded {
|
||||||
|
width: 100% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.button.small-expand table,
|
||||||
|
table.button.small-expanded table {
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.button.small-expand table a,
|
||||||
|
table.button.small-expanded table a {
|
||||||
|
text-align: center !important;
|
||||||
|
width: 100% !important;
|
||||||
|
padding-left: 0 !important;
|
||||||
|
padding-right: 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.button.small-expand center,
|
||||||
|
table.button.small-expanded center {
|
||||||
|
min-width: 0;
|
||||||
|
}
|
||||||
|
}</style>
|
||||||
|
</head>
|
||||||
|
<body leftmargin="0" topmargin="0" marginwidth="0" marginheight="0" bgcolor="#F6F6F6" style="-moz-box-sizing: border-box; -ms-text-size-adjust: 100%; -webkit-box-sizing: border-box; -webkit-text-size-adjust: 100%; Margin: 0; box-sizing: border-box; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; min-width: 100%; padding: 0; text-align: left; width: 100% !important;">
|
||||||
|
<!-- <span class="preheader"></span> -->
|
||||||
|
<table class="body" border="0" cellspacing="0" cellpadding="0" width="100%" height="100%" style="Margin: 0; background: #E4E8EE; border-collapse: collapse; border-spacing: 0; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; height: 100%; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;">
|
||||||
|
<tr style="padding: 0; text-align: left; vertical-align: top;">
|
||||||
|
<td class="body-cell" align="center" valign="top" bgcolor="#F6F6F6" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; background: #E4E8EE !important; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; padding-bottom: 20px; text-align: left; vertical-align: top; word-wrap: break-word;">
|
||||||
|
<center data-parsed="" style="min-width: 580px; width: 100%;">
|
||||||
|
|
||||||
|
<table align="center" class="wrapper header float-center" style="Margin: 0 auto; background: #1E2530; border-bottom: none; border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; padding: 0; text-align: center; vertical-align: top; width: 100%;"><tr style="padding: 0; text-align: left; vertical-align: top;"><td class="wrapper-inner" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 20px; text-align: left; vertical-align: top; word-wrap: break-word;">
|
||||||
|
<table align="center" class="container" style="Margin: 0 auto; background: transparent; border-collapse: collapse; border-spacing: 0; margin: 0 auto; padding: 0; text-align: inherit; vertical-align: top; width: 580px;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
|
||||||
|
<table class="row collapse" style="border-collapse: collapse; border-spacing: 0; display: table; padding: 0; position: relative; text-align: left; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;">
|
||||||
|
<th class="small-12 large-12 columns first last" style="Margin: 0 auto; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0 auto; padding: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; text-align: left; width: 588px;"><table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tr style="padding: 0; text-align: left; vertical-align: top;"><th style="Margin: 0; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; padding: 0; text-align: left;">
|
||||||
|
<h1 style="Margin: 0; Margin-bottom: px; color: #FFFFFF; font-family: Georgia, serif; font-size: 30px; font-weight: normal; line-height: 1.3; margin: 0; margin-bottom: px; padding: 0; text-align: left; word-wrap: normal;">
|
||||||
|
${settings.appName}
|
||||||
|
</h1>
|
||||||
|
</th>
|
||||||
|
<th class="expander" style="Margin: 0; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; padding: 0 !important; text-align: left; visibility: hidden; width: 0;"></th></tr></table></th>
|
||||||
|
</tr></tbody></table>
|
||||||
|
</td></tr></tbody></table>
|
||||||
|
</td></tr></table>
|
||||||
|
<table class="spacer float-center" style="Margin: 0 auto; border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; padding: 0; text-align: center; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td height="20px" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 20px; font-weight: normal; hyphens: auto; line-height: 20px; margin: 0; mso-line-height-rule: exactly; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> </td></tr></tbody></table>
|
||||||
|
<table align="center" class="container main float-center" style="Margin: 0 auto; Margin-top: 10px; background: #FFFFFF; border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; margin-top: 10px; padding: 0; text-align: center; vertical-align: top; width: 580px;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
|
||||||
|
<table class="spacer" style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td height="20px" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 20px; font-weight: normal; hyphens: auto; line-height: 20px; margin: 0; mso-line-height-rule: exactly; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> </td></tr></tbody></table>
|
||||||
|
|
||||||
|
<%= body %>
|
||||||
|
|
||||||
|
<table class="wrapper secondary" align="center" style="background: #E4E8EE; border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tr style="padding: 0; text-align: left; vertical-align: top;"><td class="wrapper-inner" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;">
|
||||||
|
<table class="spacer" style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td height="10px" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 10px; font-weight: normal; hyphens: auto; line-height: 10px; margin: 0; mso-line-height-rule: exactly; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> </td></tr></tbody></table>
|
||||||
|
<p style="Margin: 0; Margin-bottom: 10px; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; margin-bottom: 10px; padding: 0; text-align: left;"><small style="color: #5D6879; font-size: 80%;">
|
||||||
|
${
|
||||||
|
settings.email &&
|
||||||
|
settings.email.template &&
|
||||||
|
settings.email.template.customFooter
|
||||||
|
? `${settings.email.template.customFooter}<br>`
|
||||||
|
: ''
|
||||||
|
}${settings.appName} • <a href="${
|
||||||
|
settings.siteUrl
|
||||||
|
}" style="Margin: 0; color: #0F7A06; font-family: Helvetica, Arial, sans-serif; font-weight: normal; line-height: 1.3; margin: 0; padding: 0; text-align: left; text-decoration: none;">${
|
||||||
|
settings.siteUrl
|
||||||
|
}</a>
|
||||||
|
</small></p>
|
||||||
|
</td></tr></table>
|
||||||
|
</td></tr></tbody></table>
|
||||||
|
|
||||||
|
</center>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
<!-- prevent Gmail on iOS font size manipulation -->
|
||||||
|
<div style="display:none; white-space:nowrap; font:15px courier; line-height:0;"> </div>
|
||||||
|
</body>
|
||||||
|
</html>\
|
||||||
|
`)
|
56
services/web/app/src/Features/Email/SpamSafe.js
Normal file
56
services/web/app/src/Features/Email/SpamSafe.js
Normal file
|
@ -0,0 +1,56 @@
|
||||||
|
// Helpers to decide whether user-supplied strings (names, project titles,
// email addresses) are safe to embed in outgoing emails, falling back to a
// neutral alternative otherwise.
//
// The regexes use native Unicode property escapes (ES2018 `u` flag), which
// replace the previous `xregexp` dependency:
// \p{L} = any letter in any language
// \p{N} = any kind of numeric character
// \p{Script=Han} = any Han (CJK) character
// https://www.regular-expressions.info/unicode.html#prop is a good resource
// for more obscure regex features.

const HAN_REGEX = /\p{Script=Han}/u
const SAFE_REGEX = /^[\p{L}\p{N}\s\-_!'&()]+$/u
const EMAIL_REGEX = /^[\p{L}\p{N}.+_-]+@[\w.-]+$/u

const SpamSafe = {
  // A user name is safe if it only contains letters/digits/whitespace and a
  // small set of punctuation, and is at most 30 characters long.
  isSafeUserName(name) {
    return SAFE_REGEX.test(name) && name.length <= 30
  },

  // Project names allow up to 100 characters, but only 30 when they contain
  // Han characters (presumably because each glyph carries more content —
  // only the tighter limit is visible in the code).
  isSafeProjectName(name) {
    if (HAN_REGEX.test(name)) {
      return SAFE_REGEX.test(name) && name.length <= 30
    }
    return SAFE_REGEX.test(name) && name.length <= 100
  },

  // A conservative email check: unicode letters/digits plus `.+_-` in the
  // local part, word characters plus `.-` in the domain, max 40 chars.
  isSafeEmail(email) {
    return EMAIL_REGEX.test(email) && email.length <= 40
  },

  // Return `name` when it passes isSafeUserName, otherwise `alternative`.
  // `project` is unused; it is kept only for backward compatibility with
  // callers that still pass it (the dead default-assignment was removed).
  safeUserName(name, alternative, project) {
    if (SpamSafe.isSafeUserName(name)) {
      return name
    }
    return alternative
  },

  // Return `name` when it passes isSafeProjectName, otherwise `alternative`.
  safeProjectName(name, alternative) {
    if (SpamSafe.isSafeProjectName(name)) {
      return name
    }
    return alternative
  },

  // Return `email` when it passes isSafeEmail, otherwise `alternative`.
  safeEmail(email, alternative) {
    if (SpamSafe.isSafeEmail(email)) {
      return email
    }
    return alternative
  },
}
|
||||||
|
|
||||||
|
module.exports = SpamSafe
|
103
services/web/app/src/Features/Errors/ErrorController.js
Normal file
103
services/web/app/src/Features/Errors/ErrorController.js
Normal file
|
@ -0,0 +1,103 @@
|
||||||
|
let ErrorController
|
||||||
|
const Errors = require('./Errors')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const SessionManager = require('../Authentication/SessionManager')
|
||||||
|
const SamlLogHandler = require('../SamlLog/SamlLogHandler')
|
||||||
|
const HttpErrorHandler = require('./HttpErrorHandler')
|
||||||
|
|
||||||
|
module.exports = ErrorController = {
  // Render the standard 404 page.
  notFound(req, res) {
    res.status(404)
    res.render('general/404', { title: 'page_not_found' })
  },

  // Render the "restricted" page used for 403 responses.
  forbidden(req, res) {
    res.status(403)
    res.render('user/restricted')
  },

  // Render the standard 500 page.
  serverError(req, res) {
    res.status(500)
    res.render('general/500', { title: 'Server Error' })
  },

  // Top-level error handler for web (HTML) routes. Express recognises
  // error-handling middleware by its four-argument signature, so `next`
  // must stay in the parameter list even though it is unused here.
  // NOTE: the branch order below is significant — keep more specific
  // checks before more general ones when adding branches.
  handleError(error, req, res, next) {
    const user = SessionManager.getSessionUser(req.session)
    // log errors related to SAML flow
    if (req.session && req.session.saml) {
      SamlLogHandler.log(req.session.saml.universityId, req.sessionID, {
        error: {
          message: error && error.message,
          stack: error && error.stack,
        },
        body: req.body,
        path: req.path,
        query: req.query,
        saml: req.session.saml,
        user_id: user && user._id,
      })
    }
    if (error.code === 'EBADCSRFTOKEN') {
      // CSRF token validation failure -> bare 403
      // (EBADCSRFTOKEN is the code used by the csurf middleware — confirm)
      logger.warn(
        { err: error, url: req.url, method: req.method, user },
        'invalid csrf'
      )
      res.sendStatus(403)
    } else if (error instanceof Errors.NotFoundError) {
      logger.warn({ err: error, url: req.url }, 'not found error')
      ErrorController.notFound(req, res)
    } else if (
      error instanceof URIError &&
      error.message.match(/^Failed to decode param/)
    ) {
      // Malformed percent-encoding in the URL; responds 400 but reuses
      // the 500 template with a different title.
      logger.warn({ err: error, url: req.url }, 'Express URIError')
      res.status(400)
      res.render('general/500', { title: 'Invalid Error' })
    } else if (error instanceof Errors.ForbiddenError) {
      logger.error({ err: error }, 'forbidden error')
      ErrorController.forbidden(req, res)
    } else if (error instanceof Errors.TooManyRequestsError) {
      logger.warn({ err: error, url: req.url }, 'too many requests error')
      res.sendStatus(429)
    } else if (error instanceof Errors.InvalidError) {
      // 400 with the error message as the response body.
      logger.warn({ err: error, url: req.url }, 'invalid error')
      res.status(400)
      res.send(error.message)
    } else if (error instanceof Errors.InvalidNameError) {
      logger.warn({ err: error, url: req.url }, 'invalid name error')
      res.status(400)
      res.send(error.message)
    } else if (error instanceof Errors.SAMLSessionDataMissing) {
      logger.warn(
        { err: error, url: req.url },
        'missing SAML session data error'
      )
      HttpErrorHandler.badRequest(req, res, error.message)
    } else {
      // Unrecognised error: log at error level and fall back to the
      // generic 500 page.
      logger.error(
        { err: error, url: req.url, method: req.method, user },
        'error passed to top level next middleware'
      )
      ErrorController.serverError(req, res)
    }
  },

  // Error handler for API (machine-facing) routes: replies with bare
  // status codes only, never HTML.
  handleApiError(error, req, res, next) {
    if (error instanceof Errors.NotFoundError) {
      logger.warn({ err: error, url: req.url }, 'not found error')
      res.sendStatus(404)
    } else if (
      error instanceof URIError &&
      error.message.match(/^Failed to decode param/)
    ) {
      // Malformed percent-encoding in the URL.
      logger.warn({ err: error, url: req.url }, 'Express URIError')
      res.sendStatus(400)
    } else {
      logger.error(
        { err: error, url: req.url, method: req.method },
        'error passed to top level next middleware'
      )
      res.sendStatus(500)
    }
  },
}
|
227
services/web/app/src/Features/Errors/Errors.js
Normal file
227
services/web/app/src/Features/Errors/Errors.js
Normal file
|
@ -0,0 +1,227 @@
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const settings = require('@overleaf/settings')
|
||||||
|
|
||||||
|
// Legacy error base class: inherits OError while staying compatible with
// call sites that pass a plain message string instead of an options object.
class BackwardCompatibleError extends OError {
  /**
   * @param {string | {message: string, info?: Object}} [messageOrOptions]
   *   either the message itself, or an object carrying `message` and an
   *   optional `info` payload forwarded to OError.
   */
  constructor(messageOrOptions) {
    if (typeof messageOrOptions === 'string') {
      super(messageOrOptions)
    } else if (messageOrOptions) {
      super(messageOrOptions.message, messageOrOptions.info)
    } else {
      super()
    }
  }
}
|
||||||
|
|
||||||
|
// Transitional helper for the OError v3 migration: keeps the v2-style
// signature where the 2nd argument is an object containing `info`.
class OErrorV2CompatibleError extends OError {
  constructor(message, options) {
    if (!options) {
      super(message)
    } else {
      super(message, options.info)
    }
  }
}
|
||||||
|
|
||||||
|
// Resource lookup failed (mapped to HTTP 404 by ErrorController).
class NotFoundError extends BackwardCompatibleError {}

// Caller may not perform the action (mapped to HTTP 403 by ErrorController).
class ForbiddenError extends BackwardCompatibleError {}

// A required external service is not configured.
class ServiceNotConfiguredError extends BackwardCompatibleError {}

// Rate limit exceeded (mapped to HTTP 429 by ErrorController).
class TooManyRequestsError extends BackwardCompatibleError {}

// A user-supplied name failed validation (mapped to HTTP 400).
class InvalidNameError extends BackwardCompatibleError {}

// The file's type is not supported.
class UnsupportedFileTypeError extends BackwardCompatibleError {}

// The file exceeds the allowed size.
class FileTooLargeError extends BackwardCompatibleError {}

// Export records are in a form the exporter cannot handle.
class UnsupportedExportRecordsError extends BackwardCompatibleError {}

// v1 history has not been synced for this project.
class V1HistoryNotSyncedError extends BackwardCompatibleError {}

// Project history is disabled for this project.
class ProjectHistoryDisabledError extends BackwardCompatibleError {}

// Connecting to the v1 backend failed.
class V1ConnectionError extends BackwardCompatibleError {}

// The user's email address has not been confirmed.
class UnconfirmedEmailError extends BackwardCompatibleError {}

// Attempt to add an email address that is already registered.
class EmailExistsError extends OErrorV2CompatibleError {
  constructor(options) {
    super('Email already exists', options)
  }
}

// Generic invalid-input error (mapped to HTTP 400 by ErrorController).
class InvalidError extends BackwardCompatibleError {}

// Entity does not exist in v2 (semantics inferred from name — confirm).
class NotInV2Error extends BackwardCompatibleError {}

// Entity is a ShareLaTeX record in v2 (inferred from name — confirm).
class SLInV2Error extends BackwardCompatibleError {}
|
||||||
|
|
||||||
|
// The institution (SAML) identity is already linked to a registered user.
// `i18nKey` exposes a translation key for the user-facing message.
class SAMLIdentityExistsError extends OError {
  get i18nKey() {
    return 'institution_account_tried_to_add_already_registered'
  }
}

// The institution identity has already been linked to this account.
class SAMLAlreadyLinkedError extends OError {
  get i18nKey() {
    return 'institution_account_tried_to_add_already_linked'
  }
}

// The email being added is not affiliated with the institution.
class SAMLEmailNotAffiliatedError extends OError {
  get i18nKey() {
    return 'institution_account_tried_to_add_not_affiliated'
  }
}

// The email is affiliated with a different institution.
class SAMLEmailAffiliatedWithAnotherInstitutionError extends OError {
  get i18nKey() {
    return 'institution_account_tried_to_add_affiliated_with_another_institution'
  }
}
|
||||||
|
|
||||||
|
/**
 * Raised when the SAML login/linking flow reaches a step whose required
 * session data is missing or incomplete. Picks a user-facing message from
 * whatever partial data is available, and sets `tryAgain` to indicate
 * whether retrying the flow could help.
 */
class SAMLSessionDataMissing extends BackwardCompatibleError {
  constructor(arg) {
    super(arg)

    // `arg` may carry the (partial) SAML session; normalise to an object
    // so the checks below can read properties safely.
    const samlSession =
      typeof arg === 'object' && arg !== null && arg.samlSession
        ? arg.samlSession
        : {}
    // By default the user may retry the flow.
    this.tryAgain = true
    const {
      universityId,
      universityName,
      externalUserId,
      institutionEmail,
    } = samlSession

    if (
      !universityId &&
      !universityName &&
      !externalUserId &&
      !institutionEmail
    ) {
      this.message = 'Missing session data.'
    } else if (
      // `samlSession` is always an object here, so the former truthiness
      // check on it was redundant and has been removed.
      !institutionEmail &&
      samlSession.userEmailAttributeUnreliable
    ) {
      // The IdP deliberately withholds the email: retrying won't help.
      this.tryAgain = false
      this.message = `Your account settings at your institution prevent us from accessing your email address. You will need to make your email address public at your institution in order to link with ${settings.appName}. Please contact your IT department if you have any questions.`
    } else if (!institutionEmail) {
      this.message =
        'Unable to confirm your institutional email address. The institutional identity provider did not provide an email address in the expected attribute. Please contact us if this keeps happening.'
    }
  }
}
|
||||||
|
|
||||||
|
// Raised when a third-party (provider + external id) identity is already
// linked to a different account.
class ThirdPartyIdentityExistsError extends BackwardCompatibleError {
  constructor(arg) {
    super(arg)
    // Fall back to the default message when none was supplied via `arg`.
    this.message =
      this.message ||
      'provider and external id already linked to another account'
  }
}
|
||||||
|
|
||||||
|
// Raised when no account matches the given third-party provider and
// external user id.
class ThirdPartyUserNotFoundError extends BackwardCompatibleError {
  constructor(arg) {
    super(arg)
    // Fall back to the default message when none was supplied via `arg`.
    this.message =
      this.message || 'user not found for provider and external id'
  }
}
|
||||||
|
|
||||||
|
// Attempted to delete a user who administers a subscription.
class SubscriptionAdminDeletionError extends OErrorV2CompatibleError {
  constructor(options) {
    super('subscription admins cannot be deleted', options)
  }
}

// Project lookup failed.
class ProjectNotFoundError extends OErrorV2CompatibleError {
  constructor(options) {
    super('project not found', options)
  }
}

// User lookup failed.
class UserNotFoundError extends OErrorV2CompatibleError {
  constructor(options) {
    super('user not found', options)
  }
}

// The given user is not a collaborator on the project.
class UserNotCollaboratorError extends OErrorV2CompatibleError {
  constructor(options) {
    super('user not a collaborator', options)
  }
}

// The document has ranges that block the requested operation
// (exact semantics of "ranges" not visible in this file).
class DocHasRangesError extends OErrorV2CompatibleError {
  constructor(options) {
    super('document has ranges', options)
  }
}

// The search query was rejected as invalid.
class InvalidQueryError extends OErrorV2CompatibleError {
  constructor(options) {
    super('invalid search query', options)
  }
}

// Generic institution-affiliation failure.
class AffiliationError extends OError {}

// The email address is not a valid institutional address; `i18nKey`
// exposes the translation key for the user-facing message.
class InvalidInstitutionalEmailError extends OError {
  get i18nKey() {
    return 'invalid_institutional_email'
  }
}
|
||||||
|
|
||||||
|
// Export the base classes alongside every concrete error type so that
// callers can both construct/throw them and `instanceof`-check them.
module.exports = {
  OError,
  BackwardCompatibleError,
  NotFoundError,
  ForbiddenError,
  ServiceNotConfiguredError,
  TooManyRequestsError,
  InvalidNameError,
  UnsupportedFileTypeError,
  FileTooLargeError,
  UnsupportedExportRecordsError,
  V1HistoryNotSyncedError,
  ProjectHistoryDisabledError,
  V1ConnectionError,
  UnconfirmedEmailError,
  EmailExistsError,
  InvalidError,
  NotInV2Error,
  SAMLIdentityExistsError,
  SAMLAlreadyLinkedError,
  SAMLEmailNotAffiliatedError,
  SAMLEmailAffiliatedWithAnotherInstitutionError,
  SAMLSessionDataMissing,
  SLInV2Error,
  ThirdPartyIdentityExistsError,
  ThirdPartyUserNotFoundError,
  SubscriptionAdminDeletionError,
  ProjectNotFoundError,
  UserNotFoundError,
  UserNotCollaboratorError,
  DocHasRangesError,
  InvalidQueryError,
  AffiliationError,
  InvalidInstitutionalEmailError,
}
|
161
services/web/app/src/Features/Errors/HttpErrorHandler.js
Normal file
161
services/web/app/src/Features/Errors/HttpErrorHandler.js
Normal file
|
@ -0,0 +1,161 @@
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
|
||||||
|
/**
 * Send a JSON error body on `res`. When `message` is provided it is merged
 * into `info` (overriding any `message` key already there, which also
 * triggers a warning); when `message` is null/undefined, `info` is sent
 * unchanged.
 */
function renderJSONError(res, message, info = {}) {
  if (info.message) {
    logger.warn(
      info,
      `http error info shouldn't contain a 'message' field, will be overridden`
    )
  }
  const payload = message == null ? info : { ...info, message }
  res.json(payload)
}
|
||||||
|
|
||||||
|
/**
 * Respond to a 5xx error in the representation the client accepts:
 * the HTML 500 page, a JSON error body, or a plain-text fallback.
 */
function handleGeneric500Error(req, res, statusCode, message) {
  res.status(statusCode)
  const format = req.accepts(['html', 'json'])
  if (format === 'html') {
    return res.render('general/500', { title: 'Server Error' })
  }
  if (format === 'json') {
    return renderJSONError(res, message)
  }
  return res.send('internal server error')
}
|
||||||
|
|
||||||
|
/**
 * Respond to a 4xx error in the representation the client accepts:
 * the HTML 400 page (with `message` interpolated), a JSON error body
 * built from `message` and `info`, or a plain-text fallback.
 */
function handleGeneric400Error(req, res, statusCode, message, info = {}) {
  res.status(statusCode)
  const format = req.accepts(['html', 'json'])
  if (format === 'html') {
    return res.render('general/400', { title: 'Client Error', message })
  }
  if (format === 'json') {
    return renderJSONError(res, message, info)
  }
  return res.send('client error')
}
|
||||||
|
|
||||||
|
let HttpErrorHandler
|
||||||
|
module.exports = HttpErrorHandler = {
|
||||||
|
handleErrorByStatusCode(req, res, error, statusCode) {
|
||||||
|
const is400Error = statusCode >= 400 && statusCode < 500
|
||||||
|
const is500Error = statusCode >= 500 && statusCode < 600
|
||||||
|
|
||||||
|
if (is400Error) {
|
||||||
|
logger.warn(error)
|
||||||
|
} else if (is500Error) {
|
||||||
|
logger.error(error)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (statusCode === 403) {
|
||||||
|
HttpErrorHandler.forbidden(req, res)
|
||||||
|
} else if (statusCode === 404) {
|
||||||
|
HttpErrorHandler.notFound(req, res)
|
||||||
|
} else if (statusCode === 409) {
|
||||||
|
HttpErrorHandler.conflict(req, res, '')
|
||||||
|
} else if (statusCode === 422) {
|
||||||
|
HttpErrorHandler.unprocessableEntity(req, res)
|
||||||
|
} else if (is400Error) {
|
||||||
|
handleGeneric400Error(req, res, statusCode)
|
||||||
|
} else if (is500Error) {
|
||||||
|
handleGeneric500Error(req, res, statusCode)
|
||||||
|
} else {
|
||||||
|
logger.error(
|
||||||
|
{ err: error, statusCode },
|
||||||
|
`unable to handle error with status code ${statusCode}`
|
||||||
|
)
|
||||||
|
res.sendStatus(500)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
badRequest(req, res, message, info = {}) {
|
||||||
|
handleGeneric400Error(req, res, 400, message, info)
|
||||||
|
},
|
||||||
|
|
||||||
|
conflict(req, res, message, info = {}) {
|
||||||
|
res.status(409)
|
||||||
|
switch (req.accepts(['html', 'json'])) {
|
||||||
|
case 'html':
|
||||||
|
return res.render('general/400', {
|
||||||
|
title: 'Client Error',
|
||||||
|
message: message,
|
||||||
|
})
|
||||||
|
case 'json':
|
||||||
|
return renderJSONError(res, message, info)
|
||||||
|
default:
|
||||||
|
return res.send('conflict')
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
forbidden(req, res, message = 'restricted', info = {}) {
|
||||||
|
res.status(403)
|
||||||
|
switch (req.accepts(['html', 'json'])) {
|
||||||
|
case 'html':
|
||||||
|
return res.render('user/restricted', { title: 'restricted' })
|
||||||
|
case 'json':
|
||||||
|
return renderJSONError(res, message, info)
|
||||||
|
default:
|
||||||
|
return res.send('restricted')
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
notFound(req, res, message = 'not found', info = {}) {
|
||||||
|
res.status(404)
|
||||||
|
switch (req.accepts(['html', 'json'])) {
|
||||||
|
case 'html':
|
||||||
|
return res.render('general/404', { title: 'page_not_found' })
|
||||||
|
case 'json':
|
||||||
|
return renderJSONError(res, message, info)
|
||||||
|
default:
|
||||||
|
return res.send('not found')
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
unprocessableEntity(req, res, message = 'unprocessable entity', info = {}) {
|
||||||
|
res.status(422)
|
||||||
|
switch (req.accepts(['html', 'json'])) {
|
||||||
|
case 'html':
|
||||||
|
return res.render('general/400', {
|
||||||
|
title: 'Client Error',
|
||||||
|
message: message,
|
||||||
|
})
|
||||||
|
case 'json':
|
||||||
|
return renderJSONError(res, message, info)
|
||||||
|
default:
|
||||||
|
return res.send('unprocessable entity')
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
legacyInternal(req, res, message, error) {
|
||||||
|
logger.error(error)
|
||||||
|
handleGeneric500Error(req, res, 500, message)
|
||||||
|
},
|
||||||
|
|
||||||
|
maintenance(req, res) {
|
||||||
|
// load balancer health checks require a success response for /
|
||||||
|
if (req.url === '/') {
|
||||||
|
res.status(200)
|
||||||
|
} else {
|
||||||
|
res.status(503)
|
||||||
|
}
|
||||||
|
let message = `${Settings.appName} is currently down for maintenance.`
|
||||||
|
if (Settings.statusPageUrl) {
|
||||||
|
message += ` Please check https://${Settings.statusPageUrl} for updates.`
|
||||||
|
}
|
||||||
|
switch (req.accepts(['html', 'json'])) {
|
||||||
|
case 'html':
|
||||||
|
return res.render('general/closed', { title: 'maintenance' })
|
||||||
|
case 'json':
|
||||||
|
return renderJSONError(res, message, {})
|
||||||
|
default:
|
||||||
|
return res.send(message)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
127
services/web/app/src/Features/Exports/ExportsController.js
Normal file
127
services/web/app/src/Features/Exports/ExportsController.js
Normal file
|
@ -0,0 +1,127 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
max-len,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
const ExportsHandler = require('./ExportsHandler')
|
||||||
|
const SessionManager = require('../Authentication/SessionManager')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
exportProject(req, res, next) {
|
||||||
|
const { project_id, brand_variation_id } = req.params
|
||||||
|
const user_id = SessionManager.getLoggedInUserId(req.session)
|
||||||
|
const export_params = {
|
||||||
|
project_id,
|
||||||
|
brand_variation_id,
|
||||||
|
user_id,
|
||||||
|
}
|
||||||
|
|
||||||
|
if (req.body) {
|
||||||
|
if (req.body.firstName) {
|
||||||
|
export_params.first_name = req.body.firstName.trim()
|
||||||
|
}
|
||||||
|
if (req.body.lastName) {
|
||||||
|
export_params.last_name = req.body.lastName.trim()
|
||||||
|
}
|
||||||
|
// additional parameters for gallery exports
|
||||||
|
if (req.body.title) {
|
||||||
|
export_params.title = req.body.title.trim()
|
||||||
|
}
|
||||||
|
if (req.body.description) {
|
||||||
|
export_params.description = req.body.description.trim()
|
||||||
|
}
|
||||||
|
if (req.body.author) {
|
||||||
|
export_params.author = req.body.author.trim()
|
||||||
|
}
|
||||||
|
if (req.body.license) {
|
||||||
|
export_params.license = req.body.license.trim()
|
||||||
|
}
|
||||||
|
if (req.body.showSource != null) {
|
||||||
|
export_params.show_source = req.body.showSource
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ExportsHandler.exportProject(
|
||||||
|
export_params,
|
||||||
|
function (err, export_data) {
|
||||||
|
if (err != null) {
|
||||||
|
if (err.forwardResponse != null) {
|
||||||
|
logger.log(
|
||||||
|
{ responseError: err.forwardResponse },
|
||||||
|
'forwarding response'
|
||||||
|
)
|
||||||
|
const statusCode = err.forwardResponse.status || 500
|
||||||
|
return res.status(statusCode).json(err.forwardResponse)
|
||||||
|
} else {
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{
|
||||||
|
user_id,
|
||||||
|
project_id,
|
||||||
|
brand_variation_id,
|
||||||
|
export_v1_id: export_data.v1_id,
|
||||||
|
},
|
||||||
|
'exported project'
|
||||||
|
)
|
||||||
|
return res.json({
|
||||||
|
export_v1_id: export_data.v1_id,
|
||||||
|
message: export_data.message,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
exportStatus(req, res) {
|
||||||
|
const { export_id } = req.params
|
||||||
|
return ExportsHandler.fetchExport(export_id, function (err, export_json) {
|
||||||
|
let json
|
||||||
|
if (err != null) {
|
||||||
|
json = {
|
||||||
|
status_summary: 'failed',
|
||||||
|
status_detail: err.toString,
|
||||||
|
}
|
||||||
|
res.json({ export_json: json })
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
const parsed_export = JSON.parse(export_json)
|
||||||
|
json = {
|
||||||
|
status_summary: parsed_export.status_summary,
|
||||||
|
status_detail: parsed_export.status_detail,
|
||||||
|
partner_submission_id: parsed_export.partner_submission_id,
|
||||||
|
v2_user_email: parsed_export.v2_user_email,
|
||||||
|
v2_user_first_name: parsed_export.v2_user_first_name,
|
||||||
|
v2_user_last_name: parsed_export.v2_user_last_name,
|
||||||
|
title: parsed_export.title,
|
||||||
|
token: parsed_export.token,
|
||||||
|
}
|
||||||
|
return res.json({ export_json: json })
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
exportDownload(req, res, next) {
|
||||||
|
const { type, export_id } = req.params
|
||||||
|
|
||||||
|
SessionManager.getLoggedInUserId(req.session)
|
||||||
|
return ExportsHandler.fetchDownload(
|
||||||
|
export_id,
|
||||||
|
type,
|
||||||
|
function (err, export_file_url) {
|
||||||
|
if (err != null) {
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return res.redirect(export_file_url)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
288
services/web/app/src/Features/Exports/ExportsHandler.js
Normal file
288
services/web/app/src/Features/Exports/ExportsHandler.js
Normal file
|
@ -0,0 +1,288 @@
|
||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS103: Rewrite code to no longer use __guard__
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let ExportsHandler, self
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const ProjectGetter = require('../Project/ProjectGetter')
|
||||||
|
const ProjectHistoryHandler = require('../Project/ProjectHistoryHandler')
|
||||||
|
const ProjectLocator = require('../Project/ProjectLocator')
|
||||||
|
const ProjectRootDocManager = require('../Project/ProjectRootDocManager')
|
||||||
|
const UserGetter = require('../User/UserGetter')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
let settings = require('@overleaf/settings')
|
||||||
|
const async = require('async')
|
||||||
|
let request = require('request')
|
||||||
|
request = request.defaults()
|
||||||
|
settings = require('@overleaf/settings')
|
||||||
|
|
||||||
|
module.exports = ExportsHandler = self = {
|
||||||
|
exportProject(export_params, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, export_data) {}
|
||||||
|
}
|
||||||
|
return self._buildExport(export_params, function (err, export_data) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
return self._requestExport(export_data, function (err, body) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
export_data.v1_id = body.exportId
|
||||||
|
export_data.message = body.message
|
||||||
|
// TODO: possibly store the export data in Mongo
|
||||||
|
return callback(null, export_data)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_buildExport(export_params, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, export_data) {}
|
||||||
|
}
|
||||||
|
const {
|
||||||
|
project_id,
|
||||||
|
user_id,
|
||||||
|
brand_variation_id,
|
||||||
|
title,
|
||||||
|
description,
|
||||||
|
author,
|
||||||
|
license,
|
||||||
|
show_source,
|
||||||
|
} = export_params
|
||||||
|
const jobs = {
|
||||||
|
project(cb) {
|
||||||
|
return ProjectGetter.getProject(project_id, cb)
|
||||||
|
},
|
||||||
|
// TODO: when we update async, signature will change from (cb, results) to (results, cb)
|
||||||
|
rootDoc: [
|
||||||
|
'project',
|
||||||
|
(cb, results) =>
|
||||||
|
ProjectRootDocManager.ensureRootDocumentIsValid(
|
||||||
|
project_id,
|
||||||
|
function (error) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
return ProjectLocator.findRootDoc(
|
||||||
|
{ project: results.project, project_id },
|
||||||
|
cb
|
||||||
|
)
|
||||||
|
}
|
||||||
|
),
|
||||||
|
],
|
||||||
|
user(cb) {
|
||||||
|
return UserGetter.getUser(
|
||||||
|
user_id,
|
||||||
|
{ first_name: 1, last_name: 1, email: 1, overleaf: 1 },
|
||||||
|
cb
|
||||||
|
)
|
||||||
|
},
|
||||||
|
historyVersion(cb) {
|
||||||
|
return ProjectHistoryHandler.ensureHistoryExistsForProject(
|
||||||
|
project_id,
|
||||||
|
function (error) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
return self._requestVersion(project_id, cb)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return async.auto(jobs, function (err, results) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error building project export', {
|
||||||
|
project_id,
|
||||||
|
user_id,
|
||||||
|
brand_variation_id,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
const { project, rootDoc, user, historyVersion } = results
|
||||||
|
if (!rootDoc || rootDoc[1] == null) {
|
||||||
|
err = new OError('cannot export project without root doc', {
|
||||||
|
project_id,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (export_params.first_name && export_params.last_name) {
|
||||||
|
user.first_name = export_params.first_name
|
||||||
|
user.last_name = export_params.last_name
|
||||||
|
}
|
||||||
|
|
||||||
|
const export_data = {
|
||||||
|
project: {
|
||||||
|
id: project_id,
|
||||||
|
rootDocPath: rootDoc[1] != null ? rootDoc[1].fileSystem : undefined,
|
||||||
|
historyId: __guard__(
|
||||||
|
project.overleaf != null ? project.overleaf.history : undefined,
|
||||||
|
x => x.id
|
||||||
|
),
|
||||||
|
historyVersion,
|
||||||
|
v1ProjectId:
|
||||||
|
project.overleaf != null ? project.overleaf.id : undefined,
|
||||||
|
metadata: {
|
||||||
|
compiler: project.compiler,
|
||||||
|
imageName: project.imageName,
|
||||||
|
title,
|
||||||
|
description,
|
||||||
|
author,
|
||||||
|
license,
|
||||||
|
showSource: show_source,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
user: {
|
||||||
|
id: user_id,
|
||||||
|
firstName: user.first_name,
|
||||||
|
lastName: user.last_name,
|
||||||
|
email: user.email,
|
||||||
|
orcidId: null, // until v2 gets ORCID
|
||||||
|
v1UserId: user.overleaf != null ? user.overleaf.id : undefined,
|
||||||
|
},
|
||||||
|
destination: {
|
||||||
|
brandVariationId: brand_variation_id,
|
||||||
|
},
|
||||||
|
options: {
|
||||||
|
callbackUrl: null,
|
||||||
|
}, // for now, until we want v1 to call us back
|
||||||
|
}
|
||||||
|
return callback(null, export_data)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_requestExport(export_data, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, export_v1_id) {}
|
||||||
|
}
|
||||||
|
return request.post(
|
||||||
|
{
|
||||||
|
url: `${settings.apis.v1.url}/api/v1/sharelatex/exports`,
|
||||||
|
auth: { user: settings.apis.v1.user, pass: settings.apis.v1.pass },
|
||||||
|
json: export_data,
|
||||||
|
},
|
||||||
|
function (err, res, body) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error making request to v1 export', {
|
||||||
|
export: export_data,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
} else if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||||
|
return callback(null, body)
|
||||||
|
} else {
|
||||||
|
logger.warn(
|
||||||
|
{ export: export_data },
|
||||||
|
`v1 export returned failure; forwarding: ${body}`
|
||||||
|
)
|
||||||
|
// pass the v1 error along for the publish modal to handle
|
||||||
|
return callback({ forwardResponse: body })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
_requestVersion(project_id, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, export_v1_id) {}
|
||||||
|
}
|
||||||
|
return request.get(
|
||||||
|
{
|
||||||
|
url: `${settings.apis.project_history.url}/project/${project_id}/version`,
|
||||||
|
json: true,
|
||||||
|
},
|
||||||
|
function (err, res, body) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error making request to project history', {
|
||||||
|
project_id,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
} else if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||||
|
return callback(null, body.version)
|
||||||
|
} else {
|
||||||
|
err = new OError(
|
||||||
|
`project history version returned a failure status code: ${res.statusCode}`,
|
||||||
|
{ project_id }
|
||||||
|
)
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
fetchExport(export_id, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, export_json) {}
|
||||||
|
}
|
||||||
|
return request.get(
|
||||||
|
{
|
||||||
|
url: `${settings.apis.v1.url}/api/v1/sharelatex/exports/${export_id}`,
|
||||||
|
auth: { user: settings.apis.v1.user, pass: settings.apis.v1.pass },
|
||||||
|
},
|
||||||
|
function (err, res, body) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error making request to v1 export', {
|
||||||
|
export: export_id,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
} else if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||||
|
return callback(null, body)
|
||||||
|
} else {
|
||||||
|
err = new OError(
|
||||||
|
`v1 export returned a failure status code: ${res.statusCode}`,
|
||||||
|
{ export: export_id }
|
||||||
|
)
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
fetchDownload(export_id, type, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, file_url) {}
|
||||||
|
}
|
||||||
|
return request.get(
|
||||||
|
{
|
||||||
|
url: `${settings.apis.v1.url}/api/v1/sharelatex/exports/${export_id}/${type}_url`,
|
||||||
|
auth: { user: settings.apis.v1.user, pass: settings.apis.v1.pass },
|
||||||
|
},
|
||||||
|
function (err, res, body) {
|
||||||
|
if (err != null) {
|
||||||
|
OError.tag(err, 'error making request to v1 export', {
|
||||||
|
export: export_id,
|
||||||
|
})
|
||||||
|
return callback(err)
|
||||||
|
} else if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||||
|
return callback(null, body)
|
||||||
|
} else {
|
||||||
|
err = new OError(
|
||||||
|
`v1 export returned a failure status code: ${res.statusCode}`,
|
||||||
|
{ export: export_id }
|
||||||
|
)
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
function __guard__(value, transform) {
|
||||||
|
return typeof value !== 'undefined' && value !== null
|
||||||
|
? transform(value)
|
||||||
|
: undefined
|
||||||
|
}
|
61
services/web/app/src/Features/FileStore/FileHashManager.js
Normal file
61
services/web/app/src/Features/FileStore/FileHashManager.js
Normal file
|
@ -0,0 +1,61 @@
|
||||||
|
/* eslint-disable
|
||||||
|
node/handle-callback-err,
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let FileHashManager
|
||||||
|
const crypto = require('crypto')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const fs = require('fs')
|
||||||
|
const _ = require('underscore')
|
||||||
|
|
||||||
|
module.exports = FileHashManager = {
|
||||||
|
computeHash(filePath, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, hashValue) {}
|
||||||
|
}
|
||||||
|
callback = _.once(callback) // avoid double callbacks
|
||||||
|
|
||||||
|
// taken from v1/history/storage/lib/blob_hash.js
|
||||||
|
const getGitBlobHeader = byteLength => `blob ${byteLength}` + '\x00'
|
||||||
|
|
||||||
|
const getByteLengthOfFile = cb =>
|
||||||
|
fs.stat(filePath, function (err, stats) {
|
||||||
|
if (err != null) {
|
||||||
|
return cb(err)
|
||||||
|
}
|
||||||
|
return cb(null, stats.size)
|
||||||
|
})
|
||||||
|
|
||||||
|
return getByteLengthOfFile(function (err, byteLength) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
const input = fs.createReadStream(filePath)
|
||||||
|
input.on('error', function (err) {
|
||||||
|
logger.warn({ filePath, err }, 'error opening file in computeHash')
|
||||||
|
return callback(err)
|
||||||
|
})
|
||||||
|
|
||||||
|
const hash = crypto.createHash('sha1')
|
||||||
|
hash.setEncoding('hex')
|
||||||
|
hash.update(getGitBlobHeader(byteLength))
|
||||||
|
hash.on('readable', function () {
|
||||||
|
const result = hash.read()
|
||||||
|
if (result != null) {
|
||||||
|
return callback(null, result.toString('hex'))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return input.pipe(hash)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
}
|
|
@ -0,0 +1,87 @@
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
|
||||||
|
const FileStoreHandler = require('./FileStoreHandler')
|
||||||
|
const ProjectLocator = require('../Project/ProjectLocator')
|
||||||
|
const Errors = require('../Errors/Errors')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getFile(req, res) {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
const fileId = req.params.File_id
|
||||||
|
const queryString = req.query
|
||||||
|
const userAgent = req.get('User-Agent')
|
||||||
|
ProjectLocator.findElement(
|
||||||
|
{ project_id: projectId, element_id: fileId, type: 'file' },
|
||||||
|
function (err, file) {
|
||||||
|
if (err) {
|
||||||
|
logger.err(
|
||||||
|
{ err, projectId, fileId, queryString },
|
||||||
|
'error finding element for downloading file'
|
||||||
|
)
|
||||||
|
return res.sendStatus(500)
|
||||||
|
}
|
||||||
|
FileStoreHandler.getFileStream(
|
||||||
|
projectId,
|
||||||
|
fileId,
|
||||||
|
queryString,
|
||||||
|
function (err, stream) {
|
||||||
|
if (err) {
|
||||||
|
logger.err(
|
||||||
|
{ err, projectId, fileId, queryString },
|
||||||
|
'error getting file stream for downloading file'
|
||||||
|
)
|
||||||
|
return res.sendStatus(500)
|
||||||
|
}
|
||||||
|
// mobile safari will try to render html files, prevent this
|
||||||
|
if (isMobileSafari(userAgent) && isHtml(file)) {
|
||||||
|
res.setHeader('Content-Type', 'text/plain')
|
||||||
|
}
|
||||||
|
res.setContentDisposition('attachment', { filename: file.name })
|
||||||
|
stream.pipe(res)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
getFileHead(req, res) {
|
||||||
|
const projectId = req.params.Project_id
|
||||||
|
const fileId = req.params.File_id
|
||||||
|
FileStoreHandler.getFileSize(projectId, fileId, (err, fileSize) => {
|
||||||
|
if (err) {
|
||||||
|
if (err instanceof Errors.NotFoundError) {
|
||||||
|
res.status(404).end()
|
||||||
|
} else {
|
||||||
|
logger.err({ err, projectId, fileId }, 'error getting file size')
|
||||||
|
res.status(500).end()
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
res.set('Content-Length', fileSize)
|
||||||
|
res.status(200).end()
|
||||||
|
})
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Does the file look like an HTML document, judging by its extension?
function isHtml(file) {
  return ['.html', '.htm', '.xhtml'].some(ext => fileEndsWith(file, ext))
}

// True when the file's name ends with `ext` and is strictly longer than it
// (so a bare extension like '.html' does not match).
function fileEndsWith(file, ext) {
  const name = file.name
  if (name == null || name.length <= ext.length) {
    return false
  }
  return name.endsWith(ext)
}

// Truthy when the user agent string identifies an iPhone or iPad browser.
function isMobileSafari(userAgent) {
  return (
    userAgent && (userAgent.includes('iPhone') || userAgent.includes('iPad'))
  )
}
|
262
services/web/app/src/Features/FileStore/FileStoreHandler.js
Normal file
262
services/web/app/src/Features/FileStore/FileStoreHandler.js
Normal file
|
@ -0,0 +1,262 @@
|
||||||
|
const _ = require('underscore')
const logger = require('logger-sharelatex')
const fs = require('fs')
const request = require('request')
const settings = require('@overleaf/settings')
const Async = require('async')
const FileHashManager = require('./FileHashManager')
const { File } = require('../../models/File')
const Errors = require('../Errors/Errors')
const OError = require('@overleaf/o-error')
const { promisifyAll } = require('../../util/promises')

const ONE_MIN_IN_MS = 60 * 1000
const FIVE_MINS_IN_MS = ONE_MIN_IN_MS * 5

const FileStoreHandler = {
  RETRY_ATTEMPTS: 3,

  /**
   * Upload a file from local disk to the filestore, retrying on failure.
   * Rejects symlinks and anything that is not a regular file.
   * Calls back with (err, url, fileRef).
   */
  uploadFileFromDisk(projectId, fileArgs, fsPath, callback) {
    fs.lstat(fsPath, function (err, stat) {
      if (err) {
        logger.warn({ err, projectId, fileArgs, fsPath }, 'error stating file')
        // BUG FIX: previously fell through after invoking the callback,
        // which could fire it a second time below with a stat error
        return callback(err)
      }
      if (!stat) {
        logger.warn(
          { projectId, fileArgs, fsPath },
          'stat is not available, can not check file from disk'
        )
        return callback(new Error('error getting stat, not available'))
      }
      if (!stat.isFile()) {
        logger.log(
          { projectId, fileArgs, fsPath },
          'tried to upload symlink, not continuing'
        )
        return callback(new Error('can not upload symlink'))
      }
      Async.retry(
        FileStoreHandler.RETRY_ATTEMPTS,
        (cb, results) =>
          FileStoreHandler._doUploadFileFromDisk(
            projectId,
            fileArgs,
            fsPath,
            cb
          ),
        function (err, result) {
          if (err) {
            OError.tag(err, 'Error uploading file, retries failed', {
              projectId,
              fileArgs,
            })
            return callback(err)
          }
          callback(err, result.url, result.fileRef)
        }
      )
    })
  },

  /**
   * One upload attempt: hash the file, then stream it to the filestore with
   * the hash in a custom header so the filestore can verify it.
   * Calls back once with (err) or (null, { url, fileRef }).
   */
  _doUploadFileFromDisk(projectId, fileArgs, fsPath, callback) {
    const callbackOnce = _.once(callback)

    FileHashManager.computeHash(fsPath, function (err, hashValue) {
      if (err) {
        return callbackOnce(err)
      }
      const fileRef = new File(Object.assign({}, fileArgs, { hash: hashValue }))
      const fileId = fileRef._id
      const readStream = fs.createReadStream(fsPath)
      readStream.on('error', function (err) {
        logger.warn(
          { err, projectId, fileId, fsPath },
          'something went wrong on the read stream of uploadFileFromDisk'
        )
        callbackOnce(err)
      })
      readStream.on('open', function () {
        const url = FileStoreHandler._buildUrl(projectId, fileId)
        const opts = {
          method: 'post',
          uri: url,
          timeout: FIVE_MINS_IN_MS,
          headers: {
            'X-File-Hash-From-Web': hashValue,
          }, // send the hash to the filestore as a custom header so it can be checked
        }
        const writeStream = request(opts)
        writeStream.on('error', function (err) {
          logger.warn(
            { err, projectId, fileId, fsPath },
            'something went wrong on the write stream of uploadFileFromDisk'
          )
          callbackOnce(err)
        })
        writeStream.on('response', function (response) {
          if (![200, 201].includes(response.statusCode)) {
            const uploadErr = new OError(
              `non-ok response from filestore for upload: ${response.statusCode}`,
              { statusCode: response.statusCode }
            )
            return callbackOnce(uploadErr)
          }
          callbackOnce(null, { url, fileRef })
        }) // have to pass back an object because async.retry only accepts a single result argument
        readStream.pipe(writeStream)
      })
    })
  },

  /**
   * Open a read stream from the filestore for a file. Supports optional
   * `format` and byte-`range` query parameters. Calls back immediately
   * with the (not yet consumed) stream.
   */
  getFileStream(projectId, fileId, query, callback) {
    let queryString = ''
    if (query != null && query.format != null) {
      queryString = `?format=${query.format}`
    }
    const opts = {
      method: 'get',
      uri: `${this._buildUrl(projectId, fileId)}${queryString}`,
      timeout: FIVE_MINS_IN_MS,
      headers: {},
    }
    if (query != null && query.range != null) {
      const rangeText = query.range
      // only forward well-formed "start-end" ranges
      if (rangeText && rangeText.match != null && rangeText.match(/\d+-\d+/)) {
        opts.headers.range = `bytes=${query.range}`
      }
    }
    const readStream = request(opts)
    readStream.on('error', err =>
      logger.err(
        { err, projectId, fileId, query, opts },
        'error in file stream'
      )
    )
    return callback(null, readStream)
  },

  /**
   * HEAD the filestore for a file and call back with its size (the
   * Content-Length header value). 404 maps to Errors.NotFoundError.
   */
  getFileSize(projectId, fileId, callback) {
    const url = this._buildUrl(projectId, fileId)
    request.head(url, (err, res) => {
      if (err) {
        OError.tag(err, 'failed to get file size from filestore', {
          projectId,
          fileId,
        })
        return callback(err)
      }
      if (res.statusCode === 404) {
        return callback(new Errors.NotFoundError('file not found in filestore'))
      }
      if (res.statusCode !== 200) {
        logger.warn(
          { projectId, fileId, statusCode: res.statusCode },
          'filestore returned non-200 response'
        )
        return callback(new Error('filestore returned non-200 response'))
      }
      const fileSize = res.headers['content-length']
      callback(null, fileSize)
    })
  },

  /**
   * Ask the filestore to delete a single file. Deletion failures are
   * logged and passed to the callback.
   */
  deleteFile(projectId, fileId, callback) {
    logger.log({ projectId, fileId }, 'telling file store to delete file')
    const opts = {
      method: 'delete',
      uri: this._buildUrl(projectId, fileId),
      timeout: FIVE_MINS_IN_MS,
    }
    return request(opts, function (err, response) {
      if (err) {
        logger.warn(
          { err, projectId, fileId },
          'something went wrong deleting file from filestore'
        )
      }
      return callback(err)
    })
  },

  /**
   * Ask the filestore to delete everything it holds for a project.
   */
  deleteProject(projectId, callback) {
    request(
      {
        method: 'delete',
        uri: this._buildUrl(projectId),
        timeout: FIVE_MINS_IN_MS,
      },
      err => {
        if (err) {
          return callback(
            OError.tag(
              err,
              'something went wrong deleting a project in filestore',
              { projectId }
            )
          )
        }
        callback()
      }
    )
  },

  /**
   * Ask the filestore to copy a file between projects (server-side copy).
   * Calls back with the destination URI on success.
   */
  copyFile(oldProjectId, oldFileId, newProjectId, newFileId, callback) {
    logger.log(
      { oldProjectId, oldFileId, newProjectId, newFileId },
      'telling filestore to copy a file'
    )
    const opts = {
      method: 'put',
      json: {
        source: {
          project_id: oldProjectId,
          file_id: oldFileId,
        },
      },
      uri: this._buildUrl(newProjectId, newFileId),
      timeout: FIVE_MINS_IN_MS,
    }
    return request(opts, function (err, response) {
      if (err) {
        OError.tag(
          err,
          'something went wrong telling filestore api to copy file',
          {
            oldProjectId,
            oldFileId,
            newProjectId,
            newFileId,
          }
        )
        return callback(err)
      } else if (response.statusCode >= 200 && response.statusCode < 300) {
        // successful response
        return callback(null, opts.uri)
      } else {
        err = new OError(
          `non-ok response from filestore for copyFile: ${response.statusCode}`,
          {
            uri: opts.uri,
            statusCode: response.statusCode,
          }
        )
        return callback(err)
      }
    })
  },

  // Build the filestore URL for a project, or for a file when fileId given.
  _buildUrl(projectId, fileId) {
    return (
      `${settings.apis.filestore.url}/project/${projectId}` +
      (fileId ? `/file/${fileId}` : '')
    )
  },
}

module.exports = FileStoreHandler
module.exports.promises = promisifyAll(FileStoreHandler, {
  multiResult: {
    uploadFileFromDisk: ['url', 'fileRef'],
  },
})
|
|
@ -0,0 +1,125 @@
|
||||||
|
const RedisWrapper = require('../../infrastructure/RedisWrapper')
|
||||||
|
const rclient = RedisWrapper.client('health_check')
|
||||||
|
const settings = require('@overleaf/settings')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const UserGetter = require('../User/UserGetter')
|
||||||
|
const {
|
||||||
|
SmokeTestFailure,
|
||||||
|
runSmokeTests,
|
||||||
|
} = require('./../../../../test/smoke/src/SmokeTests')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
check(req, res, next) {
|
||||||
|
if (!settings.siteIsOpen || !settings.editorIsOpen) {
|
||||||
|
// always return successful health checks when site is closed
|
||||||
|
res.contentType('application/json')
|
||||||
|
res.sendStatus(200)
|
||||||
|
} else {
|
||||||
|
// detach from express for cleaner stack traces
|
||||||
|
setTimeout(() => runSmokeTestsDetached(req, res).catch(next))
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
checkActiveHandles(req, res, next) {
|
||||||
|
if (!(settings.maxActiveHandles > 0) || !process._getActiveHandles) {
|
||||||
|
return next()
|
||||||
|
}
|
||||||
|
const activeHandlesCount = (process._getActiveHandles() || []).length
|
||||||
|
if (activeHandlesCount > settings.maxActiveHandles) {
|
||||||
|
logger.err(
|
||||||
|
{ activeHandlesCount, maxActiveHandles: settings.maxActiveHandles },
|
||||||
|
'exceeded max active handles, failing health check'
|
||||||
|
)
|
||||||
|
return res.sendStatus(500)
|
||||||
|
} else {
|
||||||
|
logger.debug(
|
||||||
|
{ activeHandlesCount, maxActiveHandles: settings.maxActiveHandles },
|
||||||
|
'active handles are below maximum'
|
||||||
|
)
|
||||||
|
next()
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
checkApi(req, res, next) {
|
||||||
|
rclient.healthCheck(err => {
|
||||||
|
if (err) {
|
||||||
|
logger.err({ err }, 'failed api redis health check')
|
||||||
|
return res.sendStatus(500)
|
||||||
|
}
|
||||||
|
UserGetter.getUserEmail(settings.smokeTest.userId, (err, email) => {
|
||||||
|
if (err) {
|
||||||
|
logger.err({ err }, 'failed api mongo health check')
|
||||||
|
return res.sendStatus(500)
|
||||||
|
}
|
||||||
|
if (email == null) {
|
||||||
|
logger.err({ err }, 'failed api mongo health check (no email)')
|
||||||
|
return res.sendStatus(500)
|
||||||
|
}
|
||||||
|
res.sendStatus(200)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
checkRedis(req, res, next) {
|
||||||
|
return rclient.healthCheck(function (error) {
|
||||||
|
if (error != null) {
|
||||||
|
logger.err({ err: error }, 'failed redis health check')
|
||||||
|
return res.sendStatus(500)
|
||||||
|
} else {
|
||||||
|
return res.sendStatus(200)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
checkMongo(req, res, next) {
|
||||||
|
return UserGetter.getUserEmail(
|
||||||
|
settings.smokeTest.userId,
|
||||||
|
function (err, email) {
|
||||||
|
if (err != null) {
|
||||||
|
logger.err({ err }, 'mongo health check failed, error present')
|
||||||
|
return res.sendStatus(500)
|
||||||
|
} else if (email == null) {
|
||||||
|
logger.err(
|
||||||
|
{ err },
|
||||||
|
'mongo health check failed, no emai present in find result'
|
||||||
|
)
|
||||||
|
return res.sendStatus(500)
|
||||||
|
} else {
|
||||||
|
return res.sendStatus(200)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Serialize `blob` as human-readable JSON (2-space indent, trailing newline).
function prettyJSON(blob) {
  const serialized = JSON.stringify(blob, null, 2)
  return `${serialized}\n`
}
|
||||||
|
// Run the smoke tests outside the express request cycle, recording timing
// stats, and send a JSON report unless the client has gone away.
async function runSmokeTestsDetached(req, res) {
  const isAborted = () => req.aborted
  const stats = { start: new Date(), steps: [] }
  let status
  let response
  try {
    try {
      await runSmokeTests({ isAborted, stats })
    } finally {
      // record duration whether the tests passed or failed
      stats.end = new Date()
      stats.duration = stats.end - stats.start
    }
    status = 200
    response = { stats }
  } catch (e) {
    // wrap unexpected (non-smoke-test) errors so reporting is uniform
    const failure =
      e instanceof SmokeTestFailure
        ? e
        : new SmokeTestFailure('low level error', {}, e)
    logger.err({ err: failure, stats }, 'health check failed')
    status = 500
    response = { stats, error: failure.message }
  }
  if (isAborted()) return
  res.contentType('application/json')
  res.status(status).send(prettyJSON(response))
}
|
17
services/web/app/src/Features/Helpers/AsyncFormHelper.js
Normal file
17
services/web/app/src/Features/Helpers/AsyncFormHelper.js
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
const {
|
||||||
|
acceptsJson,
|
||||||
|
} = require('../../infrastructure/RequestContentTypeDetection')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
redirect,
|
||||||
|
}
|
||||||
|
|
||||||
|
// redirect the request via headers or JSON response depending on the request
|
||||||
|
// format
|
||||||
|
function redirect(req, res, redir) {
|
||||||
|
if (acceptsJson(req)) {
|
||||||
|
res.json({ redir })
|
||||||
|
} else {
|
||||||
|
res.redirect(redir)
|
||||||
|
}
|
||||||
|
}
|
19
services/web/app/src/Features/Helpers/AuthorizationHelper.js
Normal file
19
services/web/app/src/Features/Helpers/AuthorizationHelper.js
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
const { UserSchema } = require('../../models/User')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
hasAnyStaffAccess,
|
||||||
|
}
|
||||||
|
|
||||||
|
function hasAnyStaffAccess(user) {
|
||||||
|
if (user.isAdmin) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if (!user.staffAccess) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const key of Object.keys(UserSchema.obj.staffAccess)) {
|
||||||
|
if (user.staffAccess[key]) return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
29
services/web/app/src/Features/Helpers/EmailHelper.js
Normal file
29
services/web/app/src/Features/Helpers/EmailHelper.js
Normal file
|
@ -0,0 +1,29 @@
|
||||||
|
// eslint-disable-next-line no-useless-escape
const EMAIL_REGEXP = /^([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/

// Return the domain part of an address, or null when the address is invalid.
function getDomain(email) {
  const parsed = parseEmail(email)
  return parsed ? parsed.split('@').pop() : null
}

// Normalize (trim + lowercase) and validate an email address.
// Returns the normalized address, or null when the input is missing, longer
// than the 254-character limit, or does not match EMAIL_REGEXP.
function parseEmail(email) {
  if (email == null || email.length > 254) {
    return null
  }
  const normalized = email.trim().toLowerCase()

  const match = normalized.match(EMAIL_REGEXP)
  if (match == null || match[0] == null) {
    return null
  }

  return match[0]
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getDomain,
|
||||||
|
parseEmail,
|
||||||
|
}
|
9
services/web/app/src/Features/Helpers/FeatureFlag.js
Normal file
9
services/web/app/src/Features/Helpers/FeatureFlag.js
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
// Decide whether a feature should be shown: a `?name=true|false` query
// parameter overrides the variant flag; otherwise the flag must be exactly
// the boolean `true`.
function shouldDisplayFeature(req, name, variantFlag) {
  const queryOverride = req.query && req.query[name]
  if (queryOverride) {
    return queryOverride === 'true'
  }
  return variantFlag === true
}
|
||||||
|
|
||||||
|
module.exports = { shouldDisplayFeature }
|
51
services/web/app/src/Features/Helpers/Mongo.js
Normal file
51
services/web/app/src/Features/Helpers/Mongo.js
Normal file
|
@ -0,0 +1,51 @@
|
||||||
|
const OError = require('@overleaf/o-error')
|
||||||
|
const { ObjectId } = require('mongodb')
|
||||||
|
const { ObjectId: MongooseObjectId } = require('mongoose').mongo
|
||||||
|
|
||||||
|
function _getObjectIdInstance(id) {
|
||||||
|
if (typeof id === 'string') {
|
||||||
|
return ObjectId(id)
|
||||||
|
} else if (id instanceof ObjectId) {
|
||||||
|
return id
|
||||||
|
} else if (id instanceof MongooseObjectId) {
|
||||||
|
return ObjectId(id.toString())
|
||||||
|
} else {
|
||||||
|
throw new OError('unexpected object id', { id })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeQuery(query) {
|
||||||
|
if (!query) {
|
||||||
|
throw new Error('no query provided')
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
typeof query === 'string' ||
|
||||||
|
query instanceof ObjectId ||
|
||||||
|
query instanceof MongooseObjectId
|
||||||
|
) {
|
||||||
|
return { _id: _getObjectIdInstance(query) }
|
||||||
|
} else if (typeof query._id === 'string') {
|
||||||
|
query._id = ObjectId(query._id)
|
||||||
|
return query
|
||||||
|
} else {
|
||||||
|
return query
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeMultiQuery(query) {
|
||||||
|
if (Array.isArray(query)) {
|
||||||
|
return { _id: { $in: query.map(id => _getObjectIdInstance(id)) } }
|
||||||
|
} else {
|
||||||
|
return normalizeQuery(query)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function isObjectIdInstance(id) {
|
||||||
|
return id instanceof ObjectId || id instanceof MongooseObjectId
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
isObjectIdInstance,
|
||||||
|
normalizeQuery,
|
||||||
|
normalizeMultiQuery,
|
||||||
|
}
|
60
services/web/app/src/Features/Helpers/NewLogsUI.js
Normal file
60
services/web/app/src/Features/Helpers/NewLogsUI.js
Normal file
|
@ -0,0 +1,60 @@
|
||||||
|
const { ObjectId } = require('mongodb')
|
||||||
|
const Settings = require('@overleaf/settings')
|
||||||
|
|
||||||
|
const EXISTING_UI = { newLogsUI: false, subvariant: null }
const NEW_UI_WITH_POPUP = {
  newLogsUI: true,
  subvariant: 'new-logs-ui-with-popup',
}
const NEW_UI_WITHOUT_POPUP = {
  newLogsUI: true,
  subvariant: 'new-logs-ui-without-popup',
}

// Map a user percentile in [0, 100) onto a logs-UI variant.
// Current split: 33% new UI with pop-up, 33% without, 34% existing UI.
// The original 5% roll-out partitions ([0,5) and [5,10)) are preserved so
// early users keep their group; [10,38) and [38,66) each add 28p.p. to the
// corresponding new-UI variant (5% + 28% = 33%).
function _getVariantForPercentile(percentile) {
  if (percentile < 5) {
    // original "with pop-up" roll-out group (5%)
    return NEW_UI_WITH_POPUP
  }
  if (percentile < 10) {
    // original "without pop-up" roll-out group (5%)
    return NEW_UI_WITHOUT_POPUP
  }
  if (percentile < 38) {
    // extra 28% of users getting the "with pop-up" variant
    return NEW_UI_WITH_POPUP
  }
  if (percentile < 66) {
    // extra 28% of users getting the "without pop-up" variant
    return NEW_UI_WITHOUT_POPUP
  }
  return EXISTING_UI
}
|
||||||
|
|
||||||
|
// Pick the logs-UI variant for a user. Only applies in the hosted (SaaS)
// product; alpha-program users always get the new UI with pop-up, everyone
// else is bucketed by a percentile derived from their id's creation time.
function getNewLogsUIVariantForUser(user) {
  const { _id: userId, alphaProgram: isAlphaUser } = user
  const isSaaS = Boolean(Settings.overleaf)

  if (!userId || !isSaaS) {
    return EXISTING_UI
  }

  // Stable per-user bucket: seconds of the id timestamp, modulo 100.
  const userIdAsPercentile = (ObjectId(userId).getTimestamp() / 1000) % 100

  if (!isAlphaUser) {
    return _getVariantForPercentile(userIdAsPercentile)
  }
  return NEW_UI_WITH_POPUP
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getNewLogsUIVariantForUser,
|
||||||
|
}
|
|
@ -0,0 +1,46 @@
|
||||||
|
const pug = require('pug-runtime')

const SPLIT_REGEX = /<(\d+)>(.*?)<\/\1>/g

// Render a translation string containing numbered component placeholders
// (e.g. '<0>INNER</0>') into escaped HTML, substituting each index with the
// matching entry of `components` (a tag name string, or { name, attrs }).
// Nested placeholders are handled by recursing on the inner chunk.
function render(locale, components) {
  const out = []
  const emitText = text => {
    // Skip empty chunks; escape everything else for safe HTML output.
    if (!text) return
    out.push(pug.escape(text))
  }

  // 'PRE<0>INNER</0>POST' -> ['PRE', '0', 'INNER', 'POST']
  // '<0>INNER</0>' -> ['', '0', 'INNER', '']
  // '<0></0>' -> ['', '0', '', '']
  // '<0>INNER</0><0>INNER2</0>' -> ['', '0', 'INNER', '', '0', 'INNER2', '']
  // '<0><1>INNER</1></0>' -> ['', '0', '<1>INNER</1>', '']
  // 'PLAIN TEXT' -> ['PLAIN TEXT']
  // NOTE: a test suite is verifying these cases: SafeHTMLSubstituteTests
  const parts = locale.split(SPLIT_REGEX)

  // Leading plain-text chunk ('PRE' above).
  emitText(parts.shift())

  while (parts.length) {
    // Each batch is three chunks: [index, inner content, trailing text].
    const [idx, inner, trailing] = parts.splice(0, 3)

    const component = components[idx]
    const componentName =
      typeof component === 'string' ? component : component.name
    // pug is doing any necessary escaping on attribute values
    const attributes = (component.attrs && pug.attrs(component.attrs)) || ''
    out.push(
      `<${componentName + attributes}>`,
      ...render(inner, components),
      `</${componentName}>`
    )
    emitText(trailing)
  }
  return out.join('')
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
SPLIT_REGEX,
|
||||||
|
render,
|
||||||
|
}
|
30
services/web/app/src/Features/Helpers/StringHelper.js
Normal file
30
services/web/app/src/Features/Helpers/StringHelper.js
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
/* eslint-disable
|
||||||
|
max-len,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
let StringHelper
|
||||||
|
const JSON_ESCAPE_REGEXP = /[\u2028\u2029&><]/g
|
||||||
|
|
||||||
|
const JSON_ESCAPE = {
|
||||||
|
'&': '\\u0026',
|
||||||
|
'>': '\\u003e',
|
||||||
|
'<': '\\u003c',
|
||||||
|
'\u2028': '\\u2028',
|
||||||
|
'\u2029': '\\u2029',
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = StringHelper = {
|
||||||
|
// stringifies and escapes a json object for use in a script. This ensures that &, < and > characters are escaped,
|
||||||
|
// along with quotes. This ensures that the string can be safely rendered into HTML. See rationale at:
|
||||||
|
// https://api.rubyonrails.org/classes/ERB/Util.html#method-c-json_escape
|
||||||
|
// and implementation lifted from:
|
||||||
|
// https://github.com/ember-fastboot/fastboot/blob/cafd96c48564d8384eb83dc908303dba8ece10fd/src/ember-app.js#L496-L510
|
||||||
|
stringifyJsonForScript(object) {
|
||||||
|
return JSON.stringify(object).replace(
|
||||||
|
JSON_ESCAPE_REGEXP,
|
||||||
|
match => JSON_ESCAPE[match]
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue