Mirror of https://github.com/overleaf/overleaf.git

Merge pull request #696 from overleaf/msm-monorepo-merge: "Merge docker-image repository"
Commit 9829f0e4dc, 44 changed files with 1343 additions and 8 deletions

server-ce/.dockerignore (new file, +3)
@@ -0,0 +1,3 @@
.DS_Store
.git/
node_modules/

server-ce/.editorconfig (new file, +9)
@@ -0,0 +1,9 @@
root = true

[*]
charset = utf-8
indent_style = space
indent_size = 2
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

server-ce/.gitignore (vendored, 7 changed lines)
@@ -1,6 +1,9 @@
 /config
 config-local
-node_modules
+node_modules/
+api-data
+versions/
+
 web
 document-updater
@@ -20,4 +23,4 @@ tmp
 db.sqlite
 
 .DS_Store
 .vagrant

server-ce/Dockerfile (new file, +101)
@@ -0,0 +1,101 @@
# ---------------------------------------------
# Overleaf Community Edition (overleaf/overleaf)
# ---------------------------------------------

FROM sharelatex/sharelatex-base:latest

ENV SHARELATEX_CONFIG /etc/sharelatex/settings.coffee


# Add required source files
# -------------------------
ADD ${baseDir}/bin /var/www/sharelatex/bin
ADD ${baseDir}/doc /var/www/sharelatex/doc
ADD ${baseDir}/migrations /var/www/sharelatex/migrations
ADD ${baseDir}/tasks /var/www/sharelatex/tasks
ADD ${baseDir}/Gruntfile.coffee /var/www/sharelatex/Gruntfile.coffee
ADD ${baseDir}/package.json /var/www/sharelatex/package.json
ADD ${baseDir}/npm-shrinkwrap.json /var/www/sharelatex/npm-shrinkwrap.json
ADD ${baseDir}/services.js /var/www/sharelatex/config/services.js


# Copy build dependencies
# -----------------------
ADD ${baseDir}/git-revision.sh /var/www/git-revision.sh
ADD ${baseDir}/services.js /var/www/sharelatex/config/services.js


# Checkout services
# -----------------
RUN cd /var/www/sharelatex \
&&  npm install \
&&  grunt install \
  \
# Cleanup not needed artifacts
# ----------------------------
&&  rm -rf /root/.cache /root/.npm $(find /tmp/ -mindepth 1 -maxdepth 1) \
# Stores the version installed for each service
# ---------------------------------------------
&&  cd /var/www \
&&  ./git-revision.sh > revisions.txt \
  \
# Cleanup the git history
# -------------------
&&  rm -rf $(find /var/www/sharelatex -name .git)

# Install npm dependencies
# ------------------------
RUN cd /var/www/sharelatex \
&&  bash ./bin/install-services \
  \
# Cleanup not needed artifacts
# ----------------------------
&&  rm -rf /root/.cache /root/.npm $(find /tmp/ -mindepth 1 -maxdepth 1)

# Compile CoffeeScript
# --------------------
RUN cd /var/www/sharelatex \
&&  bash ./bin/compile-services

# Links CLSI sycntex to its default location
# ------------------------------------------
RUN ln -s /var/www/sharelatex/clsi/bin/synctex /opt/synctex


# Copy runit service startup scripts to its location
# --------------------------------------------------
ADD ${baseDir}/runit /etc/service


# Configure nginx
# ---------------
ADD ${baseDir}/nginx/nginx.conf /etc/nginx/nginx.conf
ADD ${baseDir}/nginx/sharelatex.conf /etc/nginx/sites-enabled/sharelatex.conf


# Configure log rotation
# ----------------------
ADD ${baseDir}/logrotate/sharelatex /etc/logrotate.d/sharelatex


# Copy Phusion Image startup scripts to its location
# --------------------------------------------------
COPY ${baseDir}/init_scripts/ /etc/my_init.d/

# Copy app settings files
# -----------------------
COPY ${baseDir}/settings.coffee /etc/sharelatex/settings.coffee

# Set Environment Variables
# --------------------------------
ENV WEB_API_USER "sharelatex"

ENV SHARELATEX_APP_NAME "Overleaf Community Edition"


EXPOSE 80

WORKDIR /

ENTRYPOINT ["/sbin/my_init"]

server-ce/Dockerfile-base (new file, +75)
@@ -0,0 +1,75 @@
# --------------------------------------------------
# Overleaf Base Image (sharelatex/sharelatex-base)
# --------------------------------------------------

FROM phusion/baseimage:0.11

ENV baseDir .


# Install dependencies
# --------------------
RUN apt-get update \
&&  apt-get install -y \
      build-essential wget net-tools unzip time imagemagick optipng strace nginx git python zlib1g-dev libpcre3-dev \
      qpdf \
      aspell aspell-en aspell-af aspell-am aspell-ar aspell-ar-large aspell-bg aspell-bn aspell-br aspell-ca aspell-cs aspell-cy aspell-da aspell-de aspell-el aspell-eo aspell-es aspell-et aspell-eu-es aspell-fa aspell-fo aspell-fr aspell-ga aspell-gl-minimos aspell-gu aspell-he aspell-hi aspell-hr aspell-hsb aspell-hu aspell-hy aspell-id aspell-is aspell-it aspell-kk aspell-kn aspell-ku aspell-lt aspell-lv aspell-ml aspell-mr aspell-nl aspell-nr aspell-ns aspell-pa aspell-pl aspell-pt aspell-pt-br aspell-ro aspell-ru aspell-sk aspell-sl aspell-ss aspell-st aspell-sv aspell-tl aspell-tn aspell-ts aspell-uk aspell-uz aspell-xh aspell-zu \
    \
# install Node.JS 10
&&  curl -sSL https://deb.nodesource.com/setup_10.x | bash - \
&&  apt-get install -y nodejs \
    \
&&  rm -rf \
# We are adding a custom nginx config in the main Dockerfile.
      /etc/nginx/nginx.conf \
      /etc/nginx/sites-enabled/default \
      /var/lib/apt/lists/*

# Install Grunt
# ------------
RUN npm install -g \
      grunt-cli \
&&  rm -rf /root/.npm

# Install TexLive
# ---------------
# CTAN mirrors occasionally fail, in that case install TexLive against an
# specific server, for example http://ctan.crest.fr
#
# # docker build \
#     --build-arg TEXLIVE_MIRROR=http://ctan.crest.fr/tex-archive/systems/texlive/tlnet \
#     -f Dockerfile-base -t sharelatex/sharelatex-base .
ARG TEXLIVE_MIRROR=http://mirror.ctan.org/systems/texlive/tlnet

ENV PATH "${PATH}:/usr/local/texlive/2019/bin/x86_64-linux"

RUN mkdir /install-tl-unx \
&&  curl -sSL \
      ${TEXLIVE_MIRROR}/install-tl-unx.tar.gz \
      | tar -xzC /install-tl-unx --strip-components=1 \
    \
&&  echo "tlpdbopt_autobackup 0" >> /install-tl-unx/texlive.profile \
&&  echo "tlpdbopt_install_docfiles 0" >> /install-tl-unx/texlive.profile \
&&  echo "tlpdbopt_install_srcfiles 0" >> /install-tl-unx/texlive.profile \
&&  echo "selected_scheme scheme-basic" >> /install-tl-unx/texlive.profile \
    \
&&  /install-tl-unx/install-tl \
      -profile /install-tl-unx/texlive.profile \
      -repository ${TEXLIVE_MIRROR} \
    \
&&  tlmgr install --repository ${TEXLIVE_MIRROR} \
      latexmk \
      texcount \
    \
&&  rm -rf /install-tl-unx


# Set up sharelatex user and home directory
# -----------------------------------------
RUN adduser --system --group --home /var/www/sharelatex --no-create-home sharelatex && \
    mkdir -p /var/lib/sharelatex && \
    chown www-data:www-data /var/lib/sharelatex && \
    mkdir -p /var/log/sharelatex && \
    chown www-data:www-data /var/log/sharelatex && \
    mkdir -p /var/lib/sharelatex/data/template_files && \
    chown www-data:www-data /var/lib/sharelatex/data/template_files

server-ce/Makefile (new file, +12)
@@ -0,0 +1,12 @@
# Makefile


build-base:
	docker build -f Dockerfile-base -t sharelatex/sharelatex-base .


build-community:
	docker build -f Dockerfile -t sharelatex/sharelatex .


PHONY: build-base build-community

README.md
@@ -8,9 +8,9 @@
 <p align="center">
 <a href="#key-features">Key Features</a> •
 <a href="https://github.com/overleaf/overleaf/wiki">Wiki</a> •
-<a href="https://www.overleaf.com/for/enterprises">Server Pro</a> •
+<a href="https://www.sharelatex.com/i/university/onsite">Server Pro</a> •
 <a href="#contributing">Contributing</a> •
-<a href="https://mailchi.mp/overleaf.com/community-edition-and-server-pro">Mailing List</a> •
+<a href="https://www.overleaf.com/for/enterprises">Mailing List</a> •
 <a href="#authors">Authors</a> •
 <a href="#license">License</a>
 </p>
@@ -54,6 +54,28 @@ This repository does not contain any code. It acts a wrapper and toolkit for man
 | **[tags](https://github.com/overleaf/tags)** | The backend API for managing project tags (folders). |
 | **[spelling](https://github.com/overleaf/spelling)** | An API for running server-side spelling checking on Overleaf documents. |
 
+
+## Overleaf Docker Image
+
+This repo contains two dockerfiles, `Dockerfile-base`, which builds the
+`sharelatex/sharelatex-base` image, and `Dockerfile` which builds the
+`sharelatex/sharelatex` (or "community") image.
+
+The Base image generally contains the basic dependencies like `wget` and
+`aspell`, plus `texlive`. We split this out because it's a pretty heavy set of
+dependencies, and it's nice to not have to rebuild all of that every time.
+
+The `sharelatex/sharelatex` image extends the base image and adds the actual Overleaf code
+and services.
+
+Use `make build-base` and `make build-community` to build these images.
+
+We use the [Phusion base-image](https://github.com/phusion/baseimage-docker)
+(which is extended by our `base` image) to provide us with a VM-like container
+in which to run the Overleaf services. Baseimage uses the `runit` service
+manager to manage services, and we add our init-scripts from the `./runit`
+folder.
+
+
 ## Contributing
 
 Please see the [CONTRIBUTING](https://github.com/overleaf/overleaf/blob/master/CONTRIBUTING.md) file for information on contributing to the development of Overleaf. See [our wiki](https://github.com/overleaf/overleaf/wiki/Developer-Guidelines) for information on setting up a development environment and how to recompile and run Overleaf after modifications.

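The images described in the new "Overleaf Docker Image" README section above are produced by the Makefile targets added in this commit. Below is a minimal sketch of building them and then starting the resulting container; the `docker run` flags, hostnames, and volume path are illustrative assumptions for the example and are not defined anywhere in this diff.

    # Build the heavy base image (TeX Live, aspell, Node.js 10), then the community image on top of it
    make build-base        # docker build -f Dockerfile-base -t sharelatex/sharelatex-base .
    make build-community   # docker build -f Dockerfile -t sharelatex/sharelatex .

    # Illustrative run: the community image exposes port 80, reads its settings from
    # /etc/sharelatex/settings.coffee, and keeps its data under /var/lib/sharelatex.
    # The Mongo/Redis values below are example hostnames, not defaults from this commit.
    docker run -d --name sharelatex \
      -p 80:80 \
      -v ~/sharelatex_data:/var/lib/sharelatex \
      -e SHARELATEX_MONGO_URL="mongodb://my-mongo-host/sharelatex" \
      -e SHARELATEX_REDIS_HOST="my-redis-host" \
      sharelatex/sharelatex
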
@@ -12,7 +12,7 @@ grep 'name:' config/services.js | \
 web)
 npm run webpack:production
 ;;
-chat|filestore|notifications)
+chat|filestore|notifications|tags)
 echo "$service doesn't require a compilation"
 ;;
 *)

server-ce/git-revision.sh (new executable file, +6)
@@ -0,0 +1,6 @@
#!/bin/sh

for gitDir in $(find "$PWD" -name .git); do
	echo -n "$(dirname ${gitDir}),"
	git --git-dir="$gitDir" rev-parse HEAD
done

server-ce/hotfix/2.0.1/Dockerfile (new file, +13)
@@ -0,0 +1,13 @@
FROM sharelatex/sharelatex:2.0.0


# Patch 1: Fixes project deletion (https://github.com/overleaf/overleaf/issues/644)
ADD disable_project_history.patch /etc/sharelatex/disable_project_history.patch
RUN cd /etc/sharelatex && \
    patch < disable_project_history.patch


# Patch 2: Fixes admin creation via CLI (https://github.com/overleaf/overleaf/issues/647)
ADD create_and_destroy_users.patch /var/www/sharelatex/tasks/create_and_destroy_users.patch
RUN cd /var/www/sharelatex/tasks/ && \
    patch < create_and_destroy_users.patch

server-ce/hotfix/2.0.1/create_and_destroy_users.patch (new file, +11)
@@ -0,0 +1,11 @@
--- CreateAndDestoryUsers.coffee
+++ CreateAndDestoryUsers.coffee
@@ -21,7 +21,7 @@ module.exports = (grunt) ->
 		user.save (error) ->
 			throw error if error?
 			ONE_WEEK = 7 * 24 * 60 * 60 # seconds
-			OneTimeTokenHandler.getNewToken user._id, { expiresIn: ONE_WEEK }, (err, token)->
+			OneTimeTokenHandler.getNewToken "password", { expiresIn: ONE_WEEK, email:user.email, user_id: user._id.toString() }, (err, token)->
 				return next(err) if err?
 
 				console.log ""

server-ce/hotfix/2.0.1/disable_project_history.patch (new file, +11)
@@ -0,0 +1,11 @@
--- settings.coffee
+++ settings.coffee
@@ -200,6 +200,8 @@ settings =
 		# is not available
 		v1:
 			url: ""
+	project_history:
+		enabled: false
 	references:{}
 	notifications:undefined
 

server-ce/hotfix/2.0.2/1-anon-upload.patch (new file, +60)
@@ -0,0 +1,60 @@
--- UploadsRouter.js
+++ UploadsRouter.js
@@ -1,13 +1,3 @@
-/* eslint-disable
-    no-unused-vars,
-*/
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS102: Remove unnecessary code created because of implicit returns
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
 const AuthorizationMiddleware = require('../Authorization/AuthorizationMiddleware')
 const AuthenticationController = require('../Authentication/AuthenticationController')
 const ProjectUploadController = require('./ProjectUploadController')
@@ -28,18 +18,30 @@ module.exports = {
       ProjectUploadController.uploadProject
     )
 
-    return webRouter.post(
-      '/Project/:Project_id/upload',
-      RateLimiterMiddleware.rateLimit({
-        endpointName: 'file-upload',
-        params: ['Project_id'],
-        maxRequests: 200,
-        timeInterval: 60 * 30
-      }),
-      AuthenticationController.requireLogin(),
-      AuthorizationMiddleware.ensureUserCanWriteProjectContent,
-      ProjectUploadController.multerMiddleware,
-      ProjectUploadController.uploadFile
-    )
+    const fileUploadEndpoint = '/Project/:Project_id/upload'
+    const fileUploadRateLimit = RateLimiterMiddleware.rateLimit({
+      endpointName: 'file-upload',
+      params: ['Project_id'],
+      maxRequests: 200,
+      timeInterval: 60 * 30
+    })
+    if (Settings.allowAnonymousReadAndWriteSharing) {
+      webRouter.post(
+        fileUploadEndpoint,
+        fileUploadRateLimit,
+        AuthorizationMiddleware.ensureUserCanWriteProjectContent,
+        ProjectUploadController.multerMiddleware,
+        ProjectUploadController.uploadFile
+      )
+    } else {
+      webRouter.post(
+        fileUploadEndpoint,
+        fileUploadRateLimit,
+        AuthenticationController.requireLogin(),
+        AuthorizationMiddleware.ensureUserCanWriteProjectContent,
+        ProjectUploadController.multerMiddleware,
+        ProjectUploadController.uploadFile
+      )
+    }
   }
 }

server-ce/hotfix/2.0.2/2-read-only-access.patch (new file, +11)
@@ -0,0 +1,11 @@
--- TokenAccessHandler.js
+++ TokenAccessHandler.js
@@ -255,7 +255,7 @@ const TokenAccessHandler = {
 
   getV1DocPublishedInfo(token, callback) {
     // default to allowing access
-    if (!Settings.apis || !Settings.apis.v1) {
+    if (!Settings.apis.v1 || !Settings.apis.v1.url) {
       return callback(null, { allow: true })
     }
     V1Api.request(

server-ce/hotfix/2.0.2/3-url-linking-1.patch (new file, +11)
@@ -0,0 +1,11 @@
--- Features.js
+++ Features.js
@@ -53,6 +53,8 @@ module.exports = Features = {
       return Settings.apis.references.url != null
     case 'saml':
       return Settings.enableSaml
+    case 'link-url':
+      return Settings.apis.linkedUrlProxy && Settings.apis.linkedUrlProxy.url
     default:
       throw new Error(`unknown feature: ${feature}`)
   }

server-ce/hotfix/2.0.2/4-url-linking-2.patch (new file, +20)
@@ -0,0 +1,20 @@
--- new-file-modal.pug
+++ new-file-modal.pug
@@ -21,11 +21,12 @@ script(type='text/ng-template', id='newFileModalTemplate')
 				i.fa.fa-fw.fa-folder-open
 				|
 				| From Another Project
-			li(ng-class="type == 'url' ? 'active' : null")
-				a(href, ng-click="type = 'url'")
-					i.fa.fa-fw.fa-globe
-					|
-					| From External URL
+			if hasFeature('link-url')
+				li(ng-class="type == 'url' ? 'active' : null")
+					a(href, ng-click="type = 'url'")
+						i.fa.fa-fw.fa-globe
+						|
+						| From External URL
 			!= moduleIncludes("newFileModal:selector", locals)
 
 		td(class="modal-new-file--body modal-new-file--body-{{type}}")

server-ce/hotfix/2.0.2/5-disable-analytics-1.patch (new file, +26)
@@ -0,0 +1,26 @@
--- AnalyticsController.js
+++ AnalyticsController.js
@@ -3,9 +3,13 @@ const Errors = require('../Errors/Errors')
 const AuthenticationController = require('../Authentication/AuthenticationController')
 const InstitutionsAPI = require('../Institutions/InstitutionsAPI')
 const GeoIpLookup = require('../../infrastructure/GeoIpLookup')
+const Features = require('../../infrastructure/Features')
 
 module.exports = {
   updateEditingSession(req, res, next) {
+    if (!Features.hasFeature('analytics')) {
+      return res.send(204)
+    }
     const userId = AuthenticationController.getLoggedInUserId(req)
     const { projectId } = req.params
     let countryCode = null
@@ -28,6 +32,9 @@ module.exports = {
   },
 
   recordEvent(req, res, next) {
+    if (!Features.hasFeature('analytics')) {
+      return res.send(204)
+    }
     const userId =
       AuthenticationController.getLoggedInUserId(req) || req.sessionID
     AnalyticsManager.recordEvent(userId, req.params.event, req.body, error =>

server-ce/hotfix/2.0.2/6-disable-analytics-2.patch (new file, +10)
@@ -0,0 +1,10 @@
--- Features.js
+++ Features.js
@@ -41,6 +41,7 @@ module.exports = Features = {
     case 'templates-server-pro':
       return Settings.overleaf == null
     case 'affiliations':
+    case 'analytics':
       // Checking both properties is needed for the time being to allow
       // enabling the feature in web-api and disabling in Server Pro
       // see https://github.com/overleaf/web-internal/pull/2127

server-ce/hotfix/2.0.2/Dockerfile (new file, +31)
@@ -0,0 +1,31 @@
FROM sharelatex/sharelatex:2.0.1


# Patch 1: Fixes anonymous link sharing
ADD 1-anon-upload.patch /var/www/sharelatex/web/app/src/Features/Uploads/1-anon-upload.patch
RUN cd /var/www/sharelatex/web/app/src/Features/Uploads/ && \
    patch < 1-anon-upload.patch


# Patch 2: Fixes read-only access
ADD 2-read-only-access.patch /var/www/sharelatex/web/app/src/Features/TokenAccess/3-read-only-access.patch
RUN cd /var/www/sharelatex/web/app/src/Features/TokenAccess/ && \
    patch < 3-read-only-access.patch


# Patch 3: Fixes url linking
ADD 3-url-linking-1.patch /var/www/sharelatex/web/app/src/infrastructure/6-url-linking-1.patch
RUN cd /var/www/sharelatex/web/app/src/infrastructure/ && \
    patch < 6-url-linking-1.patch
ADD 4-url-linking-2.patch /var/www/sharelatex/web/app/views/project/editor/7-url-linking-2.patch
RUN cd /var/www/sharelatex/web/app/views/project/editor/ && \
    patch < 7-url-linking-2.patch


# Patch 4: Disables analytics
ADD 5-disable-analytics-1.patch /var/www/sharelatex/web/app/src/Features/Analytics/8-disable-analytics-1.patch
RUN cd /var/www/sharelatex/web/app/src/Features/Analytics/ && \
    patch < 8-disable-analytics-1.patch
ADD 6-disable-analytics-2.patch /var/www/sharelatex/web/app/src/infrastructure/9-disable-analytics-2.patch
RUN cd /var/www/sharelatex/web/app/src/infrastructure/ && \
    patch < 9-disable-analytics-2.patch

server-ce/hotfix/2.1.1/Dockerfile (new file, +8)
@@ -0,0 +1,8 @@
FROM sharelatex/sharelatex:2.1.0

# Patch: defines recaptcha config to fix share-related issues
# - https://github.com/overleaf/overleaf/issues/684
ADD add-recaptcha-config.patch /etc/sharelatex/add-recaptcha-config.patch
RUN cd /etc/sharelatex/ && \
    patch < add-recaptcha-config.patch

server-ce/hotfix/2.1.1/add-recaptcha-config.patch (new file, +14)
@@ -0,0 +1,14 @@
--- a/settings.coffee
+++ b/settings.coffee
@@ -180,6 +180,11 @@ settings =
 	# cookie with a secure flag (recommended).
 	secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]?
 
+	recaptcha:
+		disabled:
+			invite: true
+			register: true
+
 	# If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
 	# then set this to true to allow it to correctly detect the forwarded IP
 	# address and http/https protocol information.

server-ce/init_scripts/00_make_sharelatex_data_dirs.sh (new executable file, +35)
@@ -0,0 +1,35 @@
#!/bin/sh
set -e

mkdir -p /var/lib/sharelatex/data
chown www-data:www-data /var/lib/sharelatex/data

mkdir -p /var/lib/sharelatex/data/user_files
chown www-data:www-data /var/lib/sharelatex/data/user_files

mkdir -p /var/lib/sharelatex/data/compiles
chown www-data:www-data /var/lib/sharelatex/data/compiles

mkdir -p /var/lib/sharelatex/data/cache
chown www-data:www-data /var/lib/sharelatex/data/cache

mkdir -p /var/lib/sharelatex/data/template_files
chown www-data:www-data /var/lib/sharelatex/data/template_files

mkdir -p /var/lib/sharelatex/tmp/dumpFolder
chown www-data:www-data /var/lib/sharelatex/tmp/dumpFolder

mkdir -p /var/lib/sharelatex/tmp
chown www-data:www-data /var/lib/sharelatex/tmp

mkdir -p /var/lib/sharelatex/tmp/uploads
chown www-data:www-data /var/lib/sharelatex/tmp/uploads

mkdir -p /var/lib/sharelatex/tmp/dumpFolder
chown www-data:www-data /var/lib/sharelatex/tmp/dumpFolder

if [ ! -e "/var/lib/sharelatex/data/db.sqlite" ]; then
	touch /var/lib/sharelatex/data/db.sqlite
fi

chown www-data:www-data /var/lib/sharelatex/data/db.sqlite

server-ce/init_scripts/00_regen_sharelatex_secrets.sh (new executable file, +20)
@@ -0,0 +1,20 @@
#!/bin/bash
set -e -o pipefail

# generate secrets and defines them as environment variables
# https://github.com/phusion/baseimage-docker#centrally-defining-your-own-environment-variables

WEB_API_PASSWORD_FILE=/etc/container_environment/WEB_API_PASSWORD
CRYPTO_RANDOM_FILE=/etc/container_environment/CRYPTO_RANDOM

if [ ! -f "$WEB_API_PASSWORD_FILE" ] || [ ! -f "$CRYPTO_RANDOM_FILE" ]; then

	echo "generating random secrets"

	SECRET=$(dd if=/dev/urandom bs=1 count=32 2>/dev/null | base64 -w 0 | rev | cut -b 2- | rev | tr -d '\n+/')
	echo ${SECRET} > ${WEB_API_PASSWORD_FILE}

	SECRET=$(dd if=/dev/urandom bs=1 count=32 2>/dev/null | base64 -w 0 | rev | cut -b 2- | rev | tr -d '\n+/')
	echo ${SECRET} > ${CRYPTO_RANDOM_FILE}
fi

server-ce/init_scripts/00_set_docker_host_ipaddress.sh (new executable file, +5)
@@ -0,0 +1,5 @@
#!/bin/bash
set -e -o pipefail

# See the bottom of http://stackoverflow.com/questions/24319662/from-inside-of-a-docker-container-how-do-i-connect-to-the-localhost-of-the-mach
echo "`route -n | awk '/UG[ \t]/{print $2}'` dockerhost" >> /etc/hosts

server-ce/init_scripts/98_check_db_access.sh (new executable file, +7)
@@ -0,0 +1,7 @@
#!/bin/sh
set -e

echo "Checking can connect to mongo and redis"
cd /var/www/sharelatex && grunt check:redis
cd /var/www/sharelatex && grunt check:mongo
echo "All checks passed"

server-ce/init_scripts/99_migrate.sh (new executable file, +8)
@@ -0,0 +1,8 @@
#!/bin/sh
set -e

which node
which grunt
ls -al /var/www/sharelatex/migrations
cd /var/www/sharelatex && grunt migrate -v
echo "All migrations finished"

server-ce/logrotate/sharelatex (new file, +9)
@@ -0,0 +1,9 @@
/var/log/sharelatex/*.log {
	daily
	missingok
	rotate 5
	compress
	copytruncate
	notifempty
	create 644 root adm
}

server-ce/nginx/nginx.conf (new file, +76)
@@ -0,0 +1,76 @@
daemon off;
user www-data;
worker_processes 4;
pid /run/nginx.pid;

events {
	worker_connections 768;
	# multi_accept on;
}

http {

	##
	# Basic Settings
	##

	sendfile on;
	tcp_nopush on;
	tcp_nodelay on;
	keepalive_timeout 65;
	types_hash_max_size 2048;
	# server_tokens off;

	# server_names_hash_bucket_size 64;
	# server_name_in_redirect off;

	include /etc/nginx/mime.types;
	default_type application/octet-stream;

	##
	# Logging Settings
	##

	access_log /var/log/nginx/access.log;
	error_log /var/log/nginx/error.log;

	##
	# Gzip Settings
	##

	gzip on;
	gzip_disable "msie6";

	client_max_body_size 50m;

	# gzip_vary on;
	# gzip_proxied any;
	# gzip_comp_level 6;
	# gzip_buffers 16 8k;
	# gzip_http_version 1.1;
	# gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript;

	##
	# nginx-naxsi config
	##
	# Uncomment it if you installed nginx-naxsi
	##

	#include /etc/nginx/naxsi_core.rules;

	##
	# nginx-passenger config
	##
	# Uncomment it if you installed nginx-passenger
	##

	#passenger_root /usr;
	#passenger_ruby /usr/bin/ruby;

	##
	# Virtual Host Configs
	##

	include /etc/nginx/conf.d/*.conf;
	include /etc/nginx/sites-enabled/*;
}

|
41
server-ce/nginx/sharelatex.conf
Normal file
41
server-ce/nginx/sharelatex.conf
Normal file
|
@ -0,0 +1,41 @@
|
||||||
|
server {
|
||||||
|
listen 80;
|
||||||
|
server_name _; # Catch all, see http://nginx.org/en/docs/http/server_names.html
|
||||||
|
|
||||||
|
root /var/www/sharelatex/web/public/;
|
||||||
|
|
||||||
|
location / {
|
||||||
|
proxy_pass http://127.0.0.1:3000;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection "upgrade";
|
||||||
|
proxy_set_header X-Forwarded-Host $host;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_read_timeout 3m;
|
||||||
|
proxy_send_timeout 3m;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /socket.io {
|
||||||
|
proxy_pass http://127.0.0.1:3026;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection "upgrade";
|
||||||
|
proxy_set_header X-Forwarded-Host $host;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_read_timeout 3m;
|
||||||
|
proxy_send_timeout 3m;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /stylesheets {
|
||||||
|
expires 1y;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /minjs {
|
||||||
|
expires 1y;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /img {
|
||||||
|
expires 1y;
|
||||||
|
}
|
||||||
|
}
|
server-ce/runit/chat-sharelatex/run (new executable file, +10)
@@ -0,0 +1,10 @@
#!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee

NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
	echo "running debug - chat"
	NODE_PARAMS="--inspect=0.0.0.0:30100"
fi

exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/chat/app.js >> /var/log/sharelatex/chat.log 2>&1

server-ce/runit/clsi-sharelatex/run (new executable file, +32)
@@ -0,0 +1,32 @@
#!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee

NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
	echo "running debug - clsi"
	NODE_PARAMS="--inspect=0.0.0.0:30130"
fi

# Set permissions on docker.sock if present,
# To enable sibling-containers (see entrypoint.sh in clsi project)
if [ -e '/var/run/docker.sock' ]; then
	echo ">> Setting permissions on docker socket"
	DOCKER_GROUP=$(stat -c '%g' /var/run/docker.sock)
	groupadd --non-unique --gid ${DOCKER_GROUP} dockeronhost
	usermod -aG dockeronhost www-data
fi

# Copies over CSLI synctex to the host mounted volume, so it
# can be subsequently mounted in TexLive containers on Sandbox Compilation
SYNCTEX=/var/lib/sharelatex/bin/synctex
if [ ! -f "$SYNCTEX" ]; then
	if [ "$DISABLE_SYNCTEX_BINARY_COPY" == "true" ]; then
		echo ">> Copy of synctex executable disabled by DISABLE_SYNCTEX_BINARY_COPY flag, feature may not work"
	else
		echo ">> Copying synctex executable to the host"
		mkdir -p $(dirname $SYNCTEX )
		cp /var/www/sharelatex/clsi/bin/synctex $SYNCTEX
	fi
fi

exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/clsi/app.js >> /var/log/sharelatex/clsi.log 2>&1

server-ce/runit/contacts-sharelatex/run (new executable file, +10)
@@ -0,0 +1,10 @@
#!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee

NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
	echo "running debug - contacts"
	NODE_PARAMS="--inspect=0.0.0.0:30360"
fi

exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/contacts/app.js >> /var/log/sharelatex/contacts 2>&1

server-ce/runit/docstore-sharelatex/run (new executable file, +10)
@@ -0,0 +1,10 @@
#!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee

NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
	echo "running debug - docstore"
	NODE_PARAMS="--inspect=0.0.0.0:30160"
fi

exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/docstore/app.js >> /var/log/sharelatex/docstore.log 2>&1

server-ce/runit/document-updater-sharelatex/run (new executable file, +10)
@@ -0,0 +1,10 @@
#!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee

NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
	echo "running debug - document updater"
	NODE_PARAMS="--inspect=0.0.0.0:30030"
fi

exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/document-updater/app.js >> /var/log/sharelatex/document-updater.log 2>&1

server-ce/runit/filestore-sharelatex/run (new executable file, +3)
@@ -0,0 +1,3 @@
#!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
exec /sbin/setuser www-data /usr/bin/node /var/www/sharelatex/filestore/app.js >> /var/log/sharelatex/filestore.log 2>&1

server-ce/runit/nginx/run (new executable file, +2)
@@ -0,0 +1,2 @@
#!/bin/bash
exec nginx

server-ce/runit/notifications-sharelatex/run (new executable file, +10)
@@ -0,0 +1,10 @@
#!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee

NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
	echo "running debug - notifications"
	NODE_PARAMS="--inspect=0.0.0.0:30420"
fi

exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/notifications/app.js >> /var/log/sharelatex/notifications.log 2>&1

server-ce/runit/real-time-sharelatex/run (new executable file, +3)
@@ -0,0 +1,3 @@
#!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
exec /sbin/setuser www-data /usr/bin/node /var/www/sharelatex/real-time/app.js >> /var/log/sharelatex/real-time.log 2>&1

|
10
server-ce/runit/spelling-sharelatex/run
Executable file
10
server-ce/runit/spelling-sharelatex/run
Executable file
|
@ -0,0 +1,10 @@
|
||||||
|
#!/bin/bash
|
||||||
|
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee
|
||||||
|
|
||||||
|
NODE_PARAMS=""
|
||||||
|
if [ "$DEBUG_NODE" == "true" ]; then
|
||||||
|
echo "running debug - spelling"
|
||||||
|
NODE_PARAMS="--inspect=0.0.0.0:30050"
|
||||||
|
fi
|
||||||
|
|
||||||
|
exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/spelling/app.js >> /var/log/sharelatex/spelling.log 2>&1
|
server-ce/runit/tags-sharelatex/run (new executable file, +10)
@@ -0,0 +1,10 @@
#!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee

NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
	echo "running debug - tags"
	NODE_PARAMS="--inspect=0.0.0.0:30120"
fi

exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/tags/app.js >> /var/log/sharelatex/tags.log 2>&1

server-ce/runit/track-changes-sharelatex/run (new executable file, +10)
@@ -0,0 +1,10 @@
#!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee

NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
	echo "running debug - track-changes"
	NODE_PARAMS="--inspect=0.0.0.0:30150"
fi

exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/track-changes/app.js >> /var/log/sharelatex/track-changes.log 2>&1

server-ce/runit/web-sharelatex/run (new executable file, +10)
@@ -0,0 +1,10 @@
#!/bin/bash
export SHARELATEX_CONFIG=/etc/sharelatex/settings.coffee

NODE_PARAMS=""
if [ "$DEBUG_NODE" == "true" ]; then
	echo "running debug - web"
	NODE_PARAMS="--inspect=0.0.0.0:40000"
fi

exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/web/app.js >> /var/log/sharelatex/web.log 2>&1

@@ -1,4 +1,4 @@
 module.exports =
 
 [{
   name: "web",
@@ -48,5 +48,4 @@ module.exports =
     name: "notifications",
     repo: "https://github.com/sharelatex/notifications-sharelatex.git",
     version: "master"
-  }
-]
+  }]

548
server-ce/settings.coffee
Normal file
548
server-ce/settings.coffee
Normal file
|
@ -0,0 +1,548 @@
|
||||||
|
Path = require('path')
|
||||||
|
|
||||||
|
# These credentials are used for authenticating api requests
|
||||||
|
# between services that may need to go over public channels
|
||||||
|
httpAuthUser = "sharelatex"
|
||||||
|
httpAuthPass = process.env["WEB_API_PASSWORD"]
|
||||||
|
httpAuthUsers = {}
|
||||||
|
httpAuthUsers[httpAuthUser] = httpAuthPass
|
||||||
|
|
||||||
|
parse = (option)->
|
||||||
|
if option?
|
||||||
|
try
|
||||||
|
opt = JSON.parse(option)
|
||||||
|
return opt
|
||||||
|
catch err
|
||||||
|
console.error "problem parsing #{option}, invalid JSON"
|
||||||
|
return undefined
|
||||||
|
|
||||||
|
|
||||||
|
DATA_DIR = '/var/lib/sharelatex/data'
|
||||||
|
TMP_DIR = '/var/lib/sharelatex/tmp'
|
||||||
|
|
||||||
|
settings =
|
||||||
|
|
||||||
|
brandPrefix: ""
|
||||||
|
|
||||||
|
allowAnonymousReadAndWriteSharing:
|
||||||
|
process.env['SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING'] == 'true'
|
||||||
|
|
||||||
|
# Databases
|
||||||
|
# ---------
|
||||||
|
|
||||||
|
# ShareLaTeX's main persistant data store is MongoDB (http://www.mongodb.org/)
|
||||||
|
# Documentation about the URL connection string format can be found at:
|
||||||
|
#
|
||||||
|
# http://docs.mongodb.org/manual/reference/connection-string/
|
||||||
|
#
|
||||||
|
# The following works out of the box with Mongo's default settings:
|
||||||
|
mongo:
|
||||||
|
url : process.env["SHARELATEX_MONGO_URL"] or 'mongodb://dockerhost/sharelatex'
|
||||||
|
|
||||||
|
# Redis is used in ShareLaTeX for high volume queries, like real-time
|
||||||
|
# editing, and session management.
|
||||||
|
#
|
||||||
|
# The following config will work with Redis's default settings:
|
||||||
|
redis:
|
||||||
|
web: redisConfig =
|
||||||
|
host: process.env["SHARELATEX_REDIS_HOST"] or "dockerhost"
|
||||||
|
port: process.env["SHARELATEX_REDIS_PORT"] or "6379"
|
||||||
|
password: process.env["SHARELATEX_REDIS_PASS"] or ""
|
||||||
|
key_schema:
|
||||||
|
# document-updater
|
||||||
|
blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
|
||||||
|
docLines: ({doc_id}) -> "doclines:#{doc_id}"
|
||||||
|
docOps: ({doc_id}) -> "DocOps:#{doc_id}"
|
||||||
|
docVersion: ({doc_id}) -> "DocVersion:#{doc_id}"
|
||||||
|
docHash: ({doc_id}) -> "DocHash:#{doc_id}"
|
||||||
|
projectKey: ({doc_id}) -> "ProjectId:#{doc_id}"
|
||||||
|
docsInProject: ({project_id}) -> "DocsIn:#{project_id}"
|
||||||
|
ranges: ({doc_id}) -> "Ranges:#{doc_id}"
|
||||||
|
# document-updater:realtime
|
||||||
|
pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}"
|
||||||
|
# document-updater:history
|
||||||
|
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
|
||||||
|
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
|
||||||
|
# document-updater:lock
|
||||||
|
blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
|
||||||
|
# track-changes:lock
|
||||||
|
historyLock: ({doc_id}) -> "HistoryLock:#{doc_id}"
|
||||||
|
historyIndexLock: ({project_id}) -> "HistoryIndexLock:#{project_id}"
|
||||||
|
# track-chanegs:history
|
||||||
|
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
|
||||||
|
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
|
||||||
|
# realtime
|
||||||
|
clientsInProject: ({project_id}) -> "clients_in_project:#{project_id}"
|
||||||
|
connectedUser: ({project_id, client_id})-> "connected_user:#{project_id}:#{client_id}"
|
||||||
|
fairy: redisConfig
|
||||||
|
# track-changes and document-updater
|
||||||
|
realtime: redisConfig
|
||||||
|
documentupdater: redisConfig
|
||||||
|
lock: redisConfig
|
||||||
|
history: redisConfig
|
||||||
|
websessions: redisConfig
|
||||||
|
api: redisConfig
|
||||||
|
pubsub: redisConfig
|
||||||
|
project_history: redisConfig
|
||||||
|
|
||||||
|
# The compile server (the clsi) uses a SQL database to cache files and
|
||||||
|
# meta-data. sqllite is the default, and the load is low enough that this will
|
||||||
|
# be fine in production (we use sqllite at sharelatex.com).
|
||||||
|
#
|
||||||
|
# If you want to configure a different database, see the Sequelize documentation
|
||||||
|
# for available options:
|
||||||
|
#
|
||||||
|
# https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage
|
||||||
|
#
|
||||||
|
mysql:
|
||||||
|
clsi:
|
||||||
|
database: "clsi"
|
||||||
|
username: "clsi"
|
||||||
|
password: ""
|
||||||
|
dialect: "sqlite"
|
||||||
|
storage: Path.join(DATA_DIR, "db.sqlite")
|
||||||
|
|
||||||
|
# File storage
|
||||||
|
# ------------
|
||||||
|
|
||||||
|
# ShareLaTeX can store binary files like images either locally or in Amazon
|
||||||
|
# S3. The default is locally:
|
||||||
|
filestore:
|
||||||
|
backend: "fs"
|
||||||
|
stores:
|
||||||
|
user_files: Path.join(DATA_DIR, "user_files")
|
||||||
|
template_files: Path.join(DATA_DIR, "template_files")
|
||||||
|
|
||||||
|
# To use Amazon S3 as a storage backend, comment out the above config, and
|
||||||
|
# uncomment the following, filling in your key, secret, and bucket name:
|
||||||
|
#
|
||||||
|
# filestore:
|
||||||
|
# backend: "s3"
|
||||||
|
# stores:
|
||||||
|
# user_files: "BUCKET_NAME"
|
||||||
|
# s3:
|
||||||
|
# key: "AWS_KEY"
|
||||||
|
# secret: "AWS_SECRET"
|
||||||
|
#
|
||||||
|
|
||||||
|
trackchanges:
|
||||||
|
continueOnError: true
|
||||||
|
|
||||||
|
# Local disk caching
|
||||||
|
# ------------------
|
||||||
|
path:
|
||||||
|
# If we ever need to write something to disk (e.g. incoming requests
|
||||||
|
# that need processing but may be too big for memory), then write
|
||||||
|
# them to disk here:
|
||||||
|
dumpFolder: Path.join(TMP_DIR, "dumpFolder")
|
||||||
|
# Where to write uploads before they are processed
|
||||||
|
uploadFolder: Path.join(TMP_DIR, "uploads")
|
||||||
|
# Where to write the project to disk before running LaTeX on it
|
||||||
|
compilesDir: Path.join(DATA_DIR, "compiles")
|
||||||
|
# Where to cache downloaded URLs for the CLSI
|
||||||
|
clsiCacheDir: Path.join(DATA_DIR, "cache")
|
||||||
|
|
||||||
|
# Server Config
|
||||||
|
# -------------
|
||||||
|
|
||||||
|
# Where your instance of ShareLaTeX can be found publicly. This is used
|
||||||
|
# when emails are sent out and in generated links:
|
||||||
|
siteUrl: siteUrl = process.env["SHARELATEX_SITE_URL"] or 'http://localhost'
|
||||||
|
|
||||||
|
# The name this is used to describe your ShareLaTeX Installation
|
||||||
|
appName: process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX (Community Edition)"
|
||||||
|
|
||||||
|
restrictInvitesToExistingAccounts: process.env["SHARELATEX_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS"] == 'true'
|
||||||
|
|
||||||
|
nav:
|
||||||
|
title: process.env["SHARELATEX_NAV_TITLE"] or process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX Community Edition"
|
||||||
|
|
||||||
|
|
||||||
|
# The email address which users will be directed to as the main point of
|
||||||
|
# contact for this installation of ShareLaTeX.
|
||||||
|
adminEmail: process.env["SHARELATEX_ADMIN_EMAIL"] or "placeholder@example.com"
|
||||||
|
|
||||||
|
# If provided, a sessionSecret is used to sign cookies so that they cannot be
|
||||||
|
# spoofed. This is recommended.
|
||||||
|
security:
|
||||||
|
sessionSecret: process.env["SHARELATEX_SESSION_SECRET"] or process.env["CRYPTO_RANDOM"]
|
||||||
|
|
||||||
|
# These credentials are used for authenticating api requests
|
||||||
|
# between services that may need to go over public channels
|
||||||
|
httpAuthUsers: httpAuthUsers
|
||||||
|
|
||||||
|
# Should javascript assets be served minified or not.
|
||||||
|
useMinifiedJs: true
|
||||||
|
|
||||||
|
# Should static assets be sent with a header to tell the browser to cache
|
||||||
|
# them. This should be false in development where changes are being made,
|
||||||
|
# but should be set to true in production.
|
||||||
|
cacheStaticAssets: true
|
||||||
|
|
||||||
|
# If you are running ShareLaTeX over https, set this to true to send the
|
||||||
|
# cookie with a secure flag (recommended).
|
||||||
|
secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]?
|
||||||
|
|
||||||
|
# If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
|
||||||
|
# then set this to true to allow it to correctly detect the forwarded IP
|
||||||
|
# address and http/https protocol information.
|
||||||
|
|
||||||
|
behindProxy: process.env["SHARELATEX_BEHIND_PROXY"] or false
|
||||||
|
|
||||||
|
i18n:
|
||||||
|
subdomainLang:
|
||||||
|
www: {lngCode:process.env["SHARELATEX_SITE_LANGUAGE"] or "en", url: siteUrl}
|
||||||
|
defaultLng: process.env["SHARELATEX_SITE_LANGUAGE"] or "en"
|
||||||
|
|
||||||
|
apis:
|
||||||
|
web:
|
||||||
|
url: "http://localhost:3000"
|
||||||
|
user: httpAuthUser
|
||||||
|
pass: httpAuthPass
|
||||||
|
project_history:
|
||||||
|
enabled: false
|
||||||
|
references:{}
|
||||||
|
notifications:undefined
|
||||||
|
|
||||||
|
defaultFeatures:
|
||||||
|
collaborators: -1
|
||||||
|
dropbox: true
|
||||||
|
versioning: true
|
||||||
|
compileTimeout: 180
|
||||||
|
compileGroup: "standard"
|
||||||
|
trackChanges: true
|
||||||
|
templates: true
|
||||||
|
references: true
|
||||||
|
|
||||||
|
## OPTIONAL CONFIGERABLE SETTINGS
|
||||||
|
|
||||||
|
if process.env["SHARELATEX_LEFT_FOOTER"]?
|
||||||
|
try
|
||||||
|
settings.nav.left_footer = JSON.parse(process.env["SHARELATEX_LEFT_FOOTER"])
|
||||||
|
catch e
|
||||||
|
console.error("could not parse SHARELATEX_LEFT_FOOTER, not valid JSON")
|
||||||
|
|
||||||
|
if process.env["SHARELATEX_RIGHT_FOOTER"]?
|
||||||
|
settings.nav.right_footer = process.env["SHARELATEX_RIGHT_FOOTER"]
|
||||||
|
try
|
||||||
|
settings.nav.right_footer = JSON.parse(process.env["SHARELATEX_RIGHT_FOOTER"])
|
||||||
|
catch e
|
||||||
|
console.error("could not parse SHARELATEX_RIGHT_FOOTER, not valid JSON")
|
||||||
|
|
||||||
|
if process.env["SHARELATEX_HEADER_IMAGE_URL"]?
|
||||||
|
settings.nav.custom_logo = process.env["SHARELATEX_HEADER_IMAGE_URL"]
|
||||||
|
|
||||||
|
if process.env["SHARELATEX_HEADER_NAV_LINKS"]?
|
||||||
|
console.error """
|
||||||
|
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||||
|
#
|
||||||
|
# WARNING: SHARELATEX_HEADER_NAV_LINKS is no longer supported
|
||||||
|
# See https://github.com/sharelatex/sharelatex/wiki/Configuring-Headers,-Footers-&-Logo
|
||||||
|
#
|
||||||
|
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||||
|
"""
|
||||||
|
|
||||||
|
if process.env["SHARELATEX_HEADER_EXTRAS"]?
|
||||||
|
try
|
||||||
|
settings.nav.header_extras = JSON.parse(process.env["SHARELATEX_HEADER_EXTRAS"])
|
||||||
|
catch e
|
||||||
|
console.error("could not parse SHARELATEX_HEADER_EXTRAS, not valid JSON")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Sending Email
|
||||||
|
# -------------
|
||||||
|
#
|
||||||
|
# You must configure a mail server to be able to send invite emails from
|
||||||
|
# ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer
|
||||||
|
# documentation for available options:
|
||||||
|
#
|
||||||
|
# http://www.nodemailer.com/docs/transports
|
||||||
|
|
||||||
|
|
||||||
|
if process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]?
|
||||||
|
|
||||||
|
settings.email =
|
||||||
|
fromAddress: process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]
|
||||||
|
replyTo: process.env["SHARELATEX_EMAIL_REPLY_TO"] or ""
|
||||||
|
driver: process.env["SHARELATEX_EMAIL_DRIVER"]
|
||||||
|
parameters:
|
||||||
|
#AWS Creds
|
||||||
|
AWSAccessKeyID: process.env["SHARELATEX_EMAIL_AWS_SES_ACCESS_KEY_ID"]
|
||||||
|
AWSSecretKey: process.env["SHARELATEX_EMAIL_AWS_SES_SECRET_KEY"]
|
||||||
|
|
||||||
|
#SMTP Creds
|
||||||
|
host: process.env["SHARELATEX_EMAIL_SMTP_HOST"]
|
||||||
|
port: process.env["SHARELATEX_EMAIL_SMTP_PORT"],
|
||||||
|
secure: parse(process.env["SHARELATEX_EMAIL_SMTP_SECURE"])
|
||||||
|
ignoreTLS: parse(process.env["SHARELATEX_EMAIL_SMTP_IGNORE_TLS"])
|
||||||
|
|
||||||
|
textEncoding: process.env["SHARELATEX_EMAIL_TEXT_ENCODING"]
|
||||||
|
templates:
|
||||||
|
customFooter: process.env["SHARELATEX_CUSTOM_EMAIL_FOOTER"]
|
||||||
|
|
||||||
|
if process.env["SHARELATEX_EMAIL_SMTP_USER"]? or process.env["SHARELATEX_EMAIL_SMTP_PASS"]?
|
||||||
|
settings.email.parameters.auth =
|
||||||
|
user: process.env["SHARELATEX_EMAIL_SMTP_USER"]
|
||||||
|
pass: process.env["SHARELATEX_EMAIL_SMTP_PASS"]
|
||||||
|
|
||||||
|
if process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"]?
|
||||||
|
settings.email.parameters.tls =
|
||||||
|
rejectUnauthorized: parse(process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"])
|
||||||
|
|
||||||
|
|
||||||
|
# i18n
|
||||||
|
if process.env["SHARELATEX_LANG_DOMAIN_MAPPING"]?
|
||||||
|
|
||||||
|
settings.i18n.subdomainLang = parse(process.env["SHARELATEX_LANG_DOMAIN_MAPPING"])
|
||||||
|
|
||||||
|
# Password Settings
|
||||||
|
# -----------
|
||||||
|
# These restrict the passwords users can use when registering
|
||||||
|
# opts are from http://antelle.github.io/passfield
|
||||||
|
if process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"]
|
||||||
|
|
||||||
|
settings.passwordStrengthOptions =
|
||||||
|
pattern: process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or "aA$3"
|
||||||
|
length: {min:process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or 8, max: process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"] or 150}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#######################
|
||||||
|
# ShareLaTeX Server Pro
|
||||||
|
#######################
|
||||||
|
|
||||||
|
if parse(process.env["SHARELATEX_IS_SERVER_PRO"]) == true
|
||||||
|
settings.bypassPercentageRollouts = true
|
||||||
|
settings.apis.references =
|
||||||
|
url: "http://localhost:3040"
|
||||||
|
|
||||||
|
|
||||||
|
# LDAP - SERVER PRO ONLY
# ----------

if process.env["SHARELATEX_LDAP_HOST"]
  console.error """
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    #
    # WARNING: The LDAP configuration format has changed in version 0.5.1
    # See https://github.com/sharelatex/sharelatex/wiki/Server-Pro:-LDAP-Config
    #
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
  """

if process.env["SHARELATEX_LDAP_URL"]
  settings.externalAuth = true
  settings.ldap =
    emailAtt: process.env["SHARELATEX_LDAP_EMAIL_ATT"]
    nameAtt: process.env["SHARELATEX_LDAP_NAME_ATT"]
    lastNameAtt: process.env["SHARELATEX_LDAP_LAST_NAME_ATT"]
    updateUserDetailsOnLogin: process.env["SHARELATEX_LDAP_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true'
    placeholder: process.env["SHARELATEX_LDAP_PLACEHOLDER"]
    server:
      url: process.env["SHARELATEX_LDAP_URL"]
      bindDn: process.env["SHARELATEX_LDAP_BIND_DN"]
      bindCredentials: process.env["SHARELATEX_LDAP_BIND_CREDENTIALS"]
      bindProperty: process.env["SHARELATEX_LDAP_BIND_PROPERTY"]
      searchBase: process.env["SHARELATEX_LDAP_SEARCH_BASE"]
      searchScope: process.env["SHARELATEX_LDAP_SEARCH_SCOPE"]
      searchFilter: process.env["SHARELATEX_LDAP_SEARCH_FILTER"]
      searchAttributes: (
        if _ldap_search_attribs = process.env["SHARELATEX_LDAP_SEARCH_ATTRIBUTES"]
          try
            JSON.parse(_ldap_search_attribs)
          catch e
            console.error "could not parse SHARELATEX_LDAP_SEARCH_ATTRIBUTES"
        else
          undefined
      )
      groupDnProperty: process.env["SHARELATEX_LDAP_GROUP_DN_PROPERTY"]
      groupSearchBase: process.env["SHARELATEX_LDAP_GROUP_SEARCH_BASE"]
      groupSearchScope: process.env["SHARELATEX_LDAP_GROUP_SEARCH_SCOPE"]
      groupSearchFilter: process.env["SHARELATEX_LDAP_GROUP_SEARCH_FILTER"]
      groupSearchAttributes: (
        if _ldap_group_search_attribs = process.env["SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"]
          try
            JSON.parse(_ldap_group_search_attribs)
          catch e
            console.error "could not parse SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"
        else
          undefined
      )
      cache: process.env["SHARELATEX_LDAP_CACHE"] == 'true'
      timeout: (
        if _ldap_timeout = process.env["SHARELATEX_LDAP_TIMEOUT"]
          try
            parseInt(_ldap_timeout)
          catch e
            console.error "Cannot parse SHARELATEX_LDAP_TIMEOUT"
        else
          undefined
      )
      connectTimeout: (
        if _ldap_connect_timeout = process.env["SHARELATEX_LDAP_CONNECT_TIMEOUT"]
          try
            parseInt(_ldap_connect_timeout)
          catch e
            console.error "Cannot parse SHARELATEX_LDAP_CONNECT_TIMEOUT"
        else
          undefined
      )

  if process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"]
    try
      ca = JSON.parse(process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"])
    catch e
      console.error "could not parse SHARELATEX_LDAP_TLS_OPTS_CA_PATH, invalid JSON"

    if typeof(ca) == 'string'
      ca_paths = [ca]
    else if typeof(ca) == 'object' && ca?.length?
      ca_paths = ca
    else
      console.error "problem parsing SHARELATEX_LDAP_TLS_OPTS_CA_PATH"

    settings.ldap.server.tlsOptions =
      rejectUnauthorized: process.env["SHARELATEX_LDAP_TLS_OPTS_REJECT_UNAUTH"] == "true"
      ca: ca_paths # e.g. '/etc/ldap/ca_certs.pem'

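# Illustrative only (all values hypothetical): a minimal LDAP setup driven by the
# variables read above. SHARELATEX_LDAP_SEARCH_ATTRIBUTES is JSON (an array of
# attribute names); the other variables shown here are used as plain strings.
#
#   SHARELATEX_LDAP_URL=ldaps://ldap.example.com:636
#   SHARELATEX_LDAP_SEARCH_BASE=ou=people,dc=example,dc=com
#   SHARELATEX_LDAP_SEARCH_FILTER=(uid={{username}})
#   SHARELATEX_LDAP_BIND_DN=cn=readonly,dc=example,dc=com
#   SHARELATEX_LDAP_BIND_CREDENTIALS=secret
#   SHARELATEX_LDAP_EMAIL_ATT=mail
#   SHARELATEX_LDAP_NAME_ATT=givenName
#   SHARELATEX_LDAP_LAST_NAME_ATT=sn
#   SHARELATEX_LDAP_SEARCH_ATTRIBUTES=["uid", "mail", "givenName", "sn"]
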
if process.env["SHARELATEX_SAML_ENTRYPOINT"]
  # NOTE: see https://github.com/bergie/passport-saml/blob/master/README.md for docs of `server` options
  settings.externalAuth = true
  settings.saml =
    updateUserDetailsOnLogin: process.env["SHARELATEX_SAML_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true'
    identityServiceName: process.env["SHARELATEX_SAML_IDENTITY_SERVICE_NAME"]
    emailField: process.env["SHARELATEX_SAML_EMAIL_FIELD"] || process.env["SHARELATEX_SAML_EMAIL_FIELD_NAME"]
    firstNameField: process.env["SHARELATEX_SAML_FIRST_NAME_FIELD"]
    lastNameField: process.env["SHARELATEX_SAML_LAST_NAME_FIELD"]
    server:
      # strings
      entryPoint: process.env["SHARELATEX_SAML_ENTRYPOINT"]
      callbackUrl: process.env["SHARELATEX_SAML_CALLBACK_URL"]
      issuer: process.env["SHARELATEX_SAML_ISSUER"]
      decryptionPvk: process.env["SHARELATEX_SAML_DECRYPTION_PVK"]
      signatureAlgorithm: process.env["SHARELATEX_SAML_SIGNATURE_ALGORITHM"]
      identifierFormat: process.env["SHARELATEX_SAML_IDENTIFIER_FORMAT"]
      attributeConsumingServiceIndex: process.env["SHARELATEX_SAML_ATTRIBUTE_CONSUMING_SERVICE_INDEX"]
      authnContext: process.env["SHARELATEX_SAML_AUTHN_CONTEXT"]
      authnRequestBinding: process.env["SHARELATEX_SAML_AUTHN_REQUEST_BINDING"]
      validateInResponseTo: process.env["SHARELATEX_SAML_VALIDATE_IN_RESPONSE_TO"]
      cacheProvider: process.env["SHARELATEX_SAML_CACHE_PROVIDER"]
      logoutUrl: process.env["SHARELATEX_SAML_LOGOUT_URL"]
      logoutCallbackUrl: process.env["SHARELATEX_SAML_LOGOUT_CALLBACK_URL"]
      disableRequestedAuthnContext: process.env["SHARELATEX_SAML_DISABLE_REQUESTED_AUTHN_CONTEXT"] == 'true'
      forceAuthn: process.env["SHARELATEX_SAML_FORCE_AUTHN"] == 'true'
      skipRequestCompression: process.env["SHARELATEX_SAML_SKIP_REQUEST_COMPRESSION"] == 'true'
      acceptedClockSkewMs: (
        if _saml_skew = process.env["SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"]
          try
            parseInt(_saml_skew)
          catch e
            console.error "Cannot parse SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"
        else
          undefined
      )
      requestIdExpirationPeriodMs: (
        if _saml_expiration = process.env["SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"]
          try
            parseInt(_saml_expiration)
          catch e
            console.error "Cannot parse SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"
        else
          undefined
      )
      additionalParams: (
        if _saml_additionalParams = process.env["SHARELATEX_SAML_ADDITIONAL_PARAMS"]
          try
            JSON.parse(_saml_additionalParams)
          catch e
            console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_PARAMS"
        else
          undefined
      )
      additionalAuthorizeParams: (
        if _saml_additionalAuthorizeParams = process.env["SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"]
          try
            JSON.parse(_saml_additionalAuthorizeParams)
          catch e
            console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"
        else
          undefined
      )
      additionalLogoutParams: (
        if _saml_additionalLogoutParams = process.env["SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"]
          try
            JSON.parse(_saml_additionalLogoutParams)
          catch e
            console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"
        else
          undefined
      )

  # SHARELATEX_SAML_CERT cannot be empty
  # https://github.com/bergie/passport-saml/commit/f6b1c885c0717f1083c664345556b535f217c102
  if process.env["SHARELATEX_SAML_CERT"]
    settings.saml.server.cert = process.env["SHARELATEX_SAML_CERT"]
    settings.saml.server.privateCert = process.env["SHARELATEX_SAML_PRIVATE_CERT"]

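# Illustrative only (all values hypothetical): the `server` keys above are handed
# to passport-saml, so these environment variables map onto its options. A minimal
# configuration might look like:
#
#   SHARELATEX_SAML_IDENTITY_SERVICE_NAME=Example IdP
#   SHARELATEX_SAML_ENTRYPOINT=https://idp.example.com/sso/saml
#   SHARELATEX_SAML_CALLBACK_URL=https://overleaf.example.com/saml/callback
#   SHARELATEX_SAML_ISSUER=overleaf-ce
#   SHARELATEX_SAML_CERT=<IdP signing certificate, in the format passport-saml expects>
#   SHARELATEX_SAML_EMAIL_FIELD=nameID
#   SHARELATEX_SAML_FIRST_NAME_FIELD=givenName
#   SHARELATEX_SAML_LAST_NAME_FIELD=sn
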
# Compiler
# --------
if process.env["SANDBOXED_COMPILES"] == "true"
  settings.clsi =
    dockerRunner: true
    docker:
      image: process.env["TEX_LIVE_DOCKER_IMAGE"]
      env:
        HOME: "/tmp"
        PATH: process.env["COMPILER_PATH"] or "/usr/local/texlive/2015/bin/x86_64-linux:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
      user: "www-data"

  if !settings.path?
    settings.path = {}
  settings.path.synctexBaseDir = () -> "/compile"
  if process.env['SANDBOXED_COMPILES_SIBLING_CONTAINERS'] == 'true'
    console.log("Using sibling containers for sandboxed compiles")
    if process.env['SANDBOXED_COMPILES_HOST_DIR']
      settings.path.sandboxedCompilesHostDir = process.env['SANDBOXED_COMPILES_HOST_DIR']
    else
      console.error('Sibling containers, but SANDBOXED_COMPILES_HOST_DIR not set')

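# Illustrative only (hypothetical values): enabling sandboxed compiles, where each
# project is compiled in a sibling Docker container started via the host's Docker
# daemon. SANDBOXED_COMPILES_HOST_DIR must then be the path on the Docker host
# that backs the container's compile directory.
#
#   SANDBOXED_COMPILES=true
#   SANDBOXED_COMPILES_SIBLING_CONTAINERS=true
#   SANDBOXED_COMPILES_HOST_DIR=/var/lib/sharelatex/data/compiles
#   TEX_LIVE_DOCKER_IMAGE=quay.io/sharelatex/texlive-full:2017.1
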
# Templates
# ---------
if process.env["SHARELATEX_TEMPLATES_USER_ID"]
  settings.templates =
    mountPointUrl: "/templates"
    user_id: process.env["SHARELATEX_TEMPLATES_USER_ID"]

  settings.templateLinks = parse(process.env["SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS"])

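# Illustrative only: SHARELATEX_TEMPLATES_USER_ID is stored as
# settings.templates.user_id, and SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS is
# JSON-parsed via `parse`. The exact link shape is not defined in this file;
# hypothetically:
#
#   SHARELATEX_TEMPLATES_USER_ID=<MongoDB ObjectId of the templates user>
#   SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS='[{"name": "Example Template", "url": "/templates/example"}]'
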
# /Learn
# -------
if process.env["SHARELATEX_PROXY_LEARN"]?
  settings.proxyLearn = parse(process.env["SHARELATEX_PROXY_LEARN"])

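# Illustrative only: SHARELATEX_PROXY_LEARN is JSON-parsed, so a boolean literal
# is the natural value, e.g. SHARELATEX_PROXY_LEARN=true to enable the /learn proxy.
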
# /References
# -----------
if process.env["SHARELATEX_ELASTICSEARCH_URL"]?
  settings.references.elasticsearch =
    host: process.env["SHARELATEX_ELASTICSEARCH_URL"]

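# Illustrative only (hypothetical value): reference search is backed by an
# Elasticsearch instance when its URL is provided, e.g.
#
#   SHARELATEX_ELASTICSEARCH_URL=http://elasticsearch:9200
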
# With lots of incoming and outgoing HTTP connections to different services,
# sometimes long running, it is a good idea to increase the default number
# of sockets that Node will hold open.
http = require('http')
http.globalAgent.maxSockets = 300
https = require('https')
https.globalAgent.maxSockets = 300

module.exports = settings