Mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-14 20:40:17 -05:00
merge multiple repositories into an existing monorepo
- merged using: 'monorepo_add.sh services-real-time:services/real-time' - see https://github.com/shopsys/monorepo-tools
Commit 2ab66edc1f
78 changed files with 20021 additions and 0 deletions
services/real-time/.dockerignore (new file, 7 lines)
@@ -0,0 +1,7 @@
node_modules/*
gitrev
.git
.gitignore
.npm
.nvmrc
nodemon.json
services/real-time/.eslintrc (new file, 86 lines)
@@ -0,0 +1,86 @@
// this file was auto-generated, do not edit it directly.
// instead run bin/update_build_scripts from
// https://github.com/sharelatex/sharelatex-dev-environment
{
  "extends": [
    "eslint:recommended",
    "standard",
    "prettier"
  ],
  "parserOptions": {
    "ecmaVersion": 2018
  },
  "plugins": [
    "mocha",
    "chai-expect",
    "chai-friendly"
  ],
  "env": {
    "node": true,
    "mocha": true
  },
  "rules": {
    // TODO(das7pad): remove overrides after fixing all the violations manually (https://github.com/overleaf/issues/issues/3882#issuecomment-878999671)
    // START of temporary overrides
    "array-callback-return": "off",
    "no-dupe-else-if": "off",
    "no-var": "off",
    "no-empty": "off",
    "node/handle-callback-err": "off",
    "no-loss-of-precision": "off",
    "node/no-callback-literal": "off",
    "node/no-path-concat": "off",
    "prefer-regex-literals": "off",
    // END of temporary overrides

    // Swap the no-unused-expressions rule with a more chai-friendly one
    "no-unused-expressions": 0,
    "chai-friendly/no-unused-expressions": "error",

    // Do not allow importing of implicit dependencies.
    "import/no-extraneous-dependencies": "error"
  },
  "overrides": [
    {
      // Test specific rules
      "files": ["test/**/*.js"],
      "globals": {
        "expect": true
      },
      "rules": {
        // mocha-specific rules
        "mocha/handle-done-callback": "error",
        "mocha/no-exclusive-tests": "error",
        "mocha/no-global-tests": "error",
        "mocha/no-identical-title": "error",
        "mocha/no-nested-tests": "error",
        "mocha/no-pending-tests": "error",
        "mocha/no-skipped-tests": "error",
        "mocha/no-mocha-arrows": "error",

        // chai-specific rules
        "chai-expect/missing-assertion": "error",
        "chai-expect/terminating-properties": "error",

        // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests.
        // we don't enforce this at the top-level - just in tests to manage `this` scope
        // based on mocha's context mechanism
        "mocha/prefer-arrow-callback": "error"
      }
    },
    {
      // Backend specific rules
      "files": ["app/**/*.js", "app.js", "index.js"],
      "rules": {
        // don't allow console.log in backend code
        "no-console": "error",

        // Do not allow importing of implicit dependencies.
        "import/no-extraneous-dependencies": ["error", {
          // Do not allow importing of devDependencies.
          "devDependencies": false
        }]
      }
    }
  ]
}
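Aside: the rule swap above exists because chai's property-style assertions are plain expressions that the core no-unused-expressions rule would report as dead code. A minimal illustrative test (hypothetical names, not part of this commit) showing why chai-friendly/no-unused-expressions is used instead:

const { expect } = require('chai')

describe('UserFlags', function () {
  it('allows chai property assertions without lint errors', function () {
    const user = { isAdmin: true }
    // The core no-unused-expressions rule flags this line as an unused expression;
    // chai-friendly/no-unused-expressions recognises expect(...) assertion chains.
    expect(user.isAdmin).to.be.true
  })
})
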
services/real-time/.github/ISSUE_TEMPLATE.md (vendored, new file, 38 lines)
@@ -0,0 +1,38 @@
<!-- BUG REPORT TEMPLATE -->

## Steps to Reproduce
<!-- Describe the steps leading up to when / where you found the bug. -->
<!-- Screenshots may be helpful here. -->

1.
2.
3.

## Expected Behaviour
<!-- What should have happened when you completed the steps above? -->

## Observed Behaviour
<!-- What actually happened when you completed the steps above? -->
<!-- Screenshots may be helpful here. -->

## Context
<!-- How has this issue affected you? What were you trying to accomplish? -->

## Technical Info
<!-- Provide any technical details that may be applicable (or N/A if not applicable). -->

* URL:
* Browser Name and version:
* Operating System and version (desktop or mobile):
* Signed in as:
* Project and/or file:

## Analysis
<!--- Optionally, document investigation of / suggest a fix for the bug, e.g. 'comes from this line / commit' -->

## Who Needs to Know?
<!-- If you want to bring this to the attention of particular people, @-mention them below. -->
<!-- If a user reported this bug and should be notified when it is fixed, provide the Front conversation link. -->

-
-
services/real-time/.github/PULL_REQUEST_TEMPLATE.md (vendored, new file, 48 lines)
@@ -0,0 +1,48 @@

<!-- ** This is an Overleaf public repository ** -->

<!-- Please review https://github.com/overleaf/overleaf/blob/master/CONTRIBUTING.md for guidance on what is expected of a contribution. -->

### Description

#### Screenshots

#### Related Issues / PRs

### Review

#### Potential Impact

#### Manual Testing Performed

- [ ]
- [ ]

#### Accessibility

### Deployment

#### Deployment Checklist

- [ ] Update documentation not included in the PR (if any)
- [ ]

#### Metrics and Monitoring

#### Who Needs to Know?
services/real-time/.github/dependabot.yml (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "daily"

    pull-request-branch-name:
      # Separate sections of the branch name with a hyphen
      # Docker images use the branch name and do not support slashes in tags
      # https://github.com/overleaf/google-ops/issues/822
      # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#pull-request-branch-nameseparator
      separator: "-"

    # Block informal upgrades -- security upgrades use a separate queue.
    # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit
    open-pull-requests-limit: 0

    # currently assign team-magma to all dependabot PRs - this may change in
    # future if we reorganise teams
    labels:
      - "dependencies"
      - "type:maintenance"
services/real-time/.gitignore (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
node_modules
forever

# managed by dev-environment$ bin/update_build_scripts
.npmrc
services/real-time/.mocharc.json (new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "require": "test/setup.js"
}
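The "require" entry tells mocha to load a shared setup module before any suite runs. That setup file is not shown in this diff; a minimal sketch of what such a file commonly contains in these services (the chai and sinon-chai wiring here is an assumption, not taken from this commit):

// test/setup.js (illustrative sketch only, not from this commit)
const chai = require('chai')
const sinonChai = require('sinon-chai')

chai.use(sinonChai)

// Expose `expect` globally, matching the "expect" global declared
// for test files in the .eslintrc above.
global.expect = chai.expect
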
services/real-time/.nvmrc (new file, 1 line)
@@ -0,0 +1 @@
12.22.3
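The pinned Node version matches the node:12.22.3 base image in the Dockerfile below. A small, hypothetical startup check (not part of the service) showing how such a pin could be enforced locally:

// check-node-version.js (illustrative only)
const fs = require('fs')

const pinned = fs.readFileSync('.nvmrc', 'utf8').trim() // e.g. "12.22.3"
const running = process.version.replace(/^v/, '')

if (running !== pinned) {
  console.error(`expected Node ${pinned} from .nvmrc, but running ${running}`)
  process.exit(1)
}
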
services/real-time/.prettierrc (new file, 11 lines)
@@ -0,0 +1,11 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
{
  "arrowParens": "avoid",
  "semi": false,
  "singleQuote": true,
  "trailingComma": "es5",
  "tabWidth": 2,
  "useTabs": false
}
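These options define the house style for the JavaScript in this service: two-space indentation, no semicolons, single quotes, ES5 trailing commas, and no parentheses around single arrow-function parameters. An illustrative before/after on a made-up snippet:

// Before formatting:
const greet = (name) => { return "hello, " + name; };

// After prettier with the settings above:
const greet = name => {
  return 'hello, ' + name
}
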
services/real-time/Dockerfile (new file, 23 lines)
@@ -0,0 +1,23 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

FROM node:12.22.3 as base

WORKDIR /app

FROM base as app

#wildcard as some files may not be in all repos
COPY package*.json npm-shrink*.json /app/

RUN npm ci --quiet

COPY . /app

FROM base

COPY --from=app /app /app
USER node

CMD ["node", "--expose-gc", "app.js"]
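The final stage runs the service as the unprivileged node user and starts it with --expose-gc, which makes global.gc available so the process can trigger garbage collection explicitly. A minimal sketch of the kind of guarded call this enables (hypothetical helper, not taken from this commit):

// gcHelper.js (illustrative only)
function tryForceGC(logger) {
  // global.gc is only defined when node was started with --expose-gc,
  // as in the CMD of the Dockerfile above.
  if (typeof global.gc === 'function') {
    global.gc()
    logger.debug('forced a garbage collection cycle')
  } else {
    logger.debug('global.gc unavailable; start node with --expose-gc')
  }
}

module.exports = { tryForceGC }
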
services/real-time/LICENSE (new file, 661 lines)
@@ -0,0 +1,661 @@
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007

Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

[The remainder of the file is the complete, unmodified text of the GNU Affero General Public License, version 3, as published by the Free Software Foundation; see <http://www.gnu.org/licenses/>.]
services/real-time/Makefile (new file, 90 lines)
@@ -0,0 +1,90 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = real-time
BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]')

DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
	BRANCH_NAME=$(BRANCH_NAME) \
	PROJECT_NAME=$(PROJECT_NAME) \
	MOCHA_GREP=${MOCHA_GREP} \
	docker-compose ${DOCKER_COMPOSE_FLAGS}

DOCKER_COMPOSE_TEST_ACCEPTANCE = \
	COMPOSE_PROJECT_NAME=test_acceptance_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)

DOCKER_COMPOSE_TEST_UNIT = \
	COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)

clean:
	-docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	-docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	-$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local
	-$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local

format:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format

format_fix:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format:fix

lint:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent lint

test: format lint test_unit test_acceptance

test_unit:
ifneq (,$(wildcard test/unit))
	$(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit
	$(MAKE) test_unit_clean
endif

test_clean: test_unit_clean
test_unit_clean:
ifneq (,$(wildcard test/unit))
	$(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0
endif

test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run
	$(MAKE) test_acceptance_clean

test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug
	$(MAKE) test_acceptance_clean

test_acceptance_run:
ifneq (,$(wildcard test/acceptance))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance
endif

test_acceptance_run_debug:
ifneq (,$(wildcard test/acceptance))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk
endif

test_clean: test_acceptance_clean
test_acceptance_clean:
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0

test_acceptance_pre_run:
ifneq (,$(wildcard test/acceptance/js/scripts/pre-run))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
endif

build:
	docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		.

tar:
	$(DOCKER_COMPOSE) up tar

publish:

	docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)

.PHONY: clean test test_unit test_acceptance test_clean build publish
services/real-time/app.js (new file, 288 lines)
@@ -0,0 +1,288 @@
const Metrics = require('@overleaf/metrics')
const Settings = require('@overleaf/settings')
Metrics.initialize(Settings.appName || 'real-time')
const async = require('async')

const logger = require('logger-sharelatex')
logger.initialize('real-time')
Metrics.event_loop.monitor(logger)

const express = require('express')
const session = require('express-session')
const redis = require('@overleaf/redis-wrapper')
if (Settings.sentry && Settings.sentry.dsn) {
  logger.initializeErrorReporting(Settings.sentry.dsn)
}

const sessionRedisClient = redis.createClient(Settings.redis.websessions)

const RedisStore = require('connect-redis')(session)
const SessionSockets = require('./app/js/SessionSockets')
const CookieParser = require('cookie-parser')

const DrainManager = require('./app/js/DrainManager')
const HealthCheckManager = require('./app/js/HealthCheckManager')
const DeploymentManager = require('./app/js/DeploymentManager')

// NOTE: debug is invoked for every blob that is put on the wire
const socketIoLogger = {
  error(...message) {
    logger.info({ fromSocketIo: true, originalLevel: 'error' }, ...message)
  },
  warn(...message) {
    logger.info({ fromSocketIo: true, originalLevel: 'warn' }, ...message)
  },
  info() {},
  debug() {},
  log() {},
}

// monitor status file to take dark deployments out of the load-balancer
DeploymentManager.initialise()

// Set up socket.io server
const app = express()

const server = require('http').createServer(app)
const io = require('socket.io').listen(server, {
  logger: socketIoLogger,
})

// Bind to sessions
const sessionStore = new RedisStore({ client: sessionRedisClient })
const cookieParser = CookieParser(Settings.security.sessionSecret)

const sessionSockets = new SessionSockets(
  io,
  sessionStore,
  cookieParser,
  Settings.cookieName
)

Metrics.injectMetricsRoute(app)
app.use(Metrics.http.monitor(logger))

io.configure(function () {
  io.enable('browser client minification')
  io.enable('browser client etag')

  // Fix for Safari 5 error of "Error during WebSocket handshake: location mismatch"
  // See http://answers.dotcloud.com/question/578/problem-with-websocket-over-ssl-in-safari-with
  io.set('match origin protocol', true)

  // gzip uses a Node 0.8.x method of calling the gzip program which
  // doesn't work with 0.6.x
  // io.enable('browser client gzip')
  io.set('transports', [
    'websocket',
    'flashsocket',
    'htmlfile',
    'xhr-polling',
    'jsonp-polling',
  ])
})

// a 200 response on '/' is required for load balancer health checks
// these operate separately from kubernetes readiness checks
app.get('/', function (req, res) {
  if (Settings.shutDownInProgress || DeploymentManager.deploymentIsClosed()) {
    res.sendStatus(503) // Service unavailable
  } else {
    res.send('real-time is open')
  }
})

app.get('/status', function (req, res) {
  if (Settings.shutDownInProgress) {
    res.sendStatus(503) // Service unavailable
  } else {
    res.send('real-time is alive')
  }
})

app.get('/debug/events', function (req, res) {
  Settings.debugEvents = parseInt(req.query.count, 10) || 20
  logger.log({ count: Settings.debugEvents }, 'starting debug mode')
  res.send(`debug mode will log next ${Settings.debugEvents} events`)
})

const rclient = require('@overleaf/redis-wrapper').createClient(
  Settings.redis.realtime
)

function healthCheck(req, res) {
  rclient.healthCheck(function (error) {
    if (error) {
      logger.err({ err: error }, 'failed redis health check')
      res.sendStatus(500)
    } else if (HealthCheckManager.isFailing()) {
      const status = HealthCheckManager.status()
      logger.err({ pubSubErrors: status }, 'failed pubsub health check')
      res.sendStatus(500)
    } else {
      res.sendStatus(200)
    }
  })
}
app.get(
  '/health_check',
  (req, res, next) => {
    if (Settings.shutDownComplete) {
      return res.sendStatus(503)
    }
    next()
  },
  healthCheck
)

app.get('/health_check/redis', healthCheck)

const Router = require('./app/js/Router')
Router.configure(app, io, sessionSockets)

const WebsocketLoadBalancer = require('./app/js/WebsocketLoadBalancer')
WebsocketLoadBalancer.listenForEditorEvents(io)

const DocumentUpdaterController = require('./app/js/DocumentUpdaterController')
DocumentUpdaterController.listenForUpdatesFromDocumentUpdater(io)

const { port } = Settings.internal.realTime
const { host } = Settings.internal.realTime

server.listen(port, host, function (error) {
  if (error) {
    throw error
  }
  logger.info(`realtime starting up, listening on ${host}:${port}`)
})

// Stop huge stack traces in logs from all the socket.io parsing steps.
Error.stackTraceLimit = 10

function shutdownCleanly(signal) {
  const connectedClients = io.sockets.clients().length
  if (connectedClients === 0) {
    logger.warn('no clients connected, exiting')
    process.exit()
  } else {
    logger.warn(
      { connectedClients },
      'clients still connected, not shutting down yet'
    )
    setTimeout(() => shutdownCleanly(signal), 30 * 1000)
  }
}

function drainAndShutdown(signal) {
  if (Settings.shutDownInProgress) {
    logger.warn({ signal }, 'shutdown already in progress, ignoring signal')
  } else {
    Settings.shutDownInProgress = true
    const { statusCheckInterval } = Settings
    if (statusCheckInterval) {
      logger.warn(
        { signal },
        `received interrupt, delay drain by ${statusCheckInterval}ms`
      )
    }
    setTimeout(function () {
      logger.warn(
        { signal },
        `received interrupt, starting drain over ${shutdownDrainTimeWindow} mins`
      )
      DrainManager.startDrainTimeWindow(io, shutdownDrainTimeWindow, () => {
        setTimeout(() => {
          const staleClients = io.sockets.clients()
          if (staleClients.length !== 0) {
            logger.warn(
              { staleClients: staleClients.map(client => client.id) },
              'forcefully disconnecting stale clients'
            )
            staleClients.forEach(client => {
              client.disconnect()
            })
          }
          // Mark the node as unhealthy.
          Settings.shutDownComplete = true
        }, Settings.gracefulReconnectTimeoutMs)
      })
      shutdownCleanly(signal)
    }, statusCheckInterval)
  }
}

Settings.shutDownInProgress = false
const shutdownDrainTimeWindow = parseInt(Settings.shutdownDrainTimeWindow, 10)
if (Settings.shutdownDrainTimeWindow) {
  logger.log({ shutdownDrainTimeWindow }, 'shutdownDrainTimeWindow enabled')
  for (const signal of [
    'SIGINT',
    'SIGHUP',
    'SIGQUIT',
    'SIGUSR1',
    'SIGUSR2',
    'SIGTERM',
    'SIGABRT',
  ]) {
    process.on(signal, drainAndShutdown)
  } // signal is passed as argument to event handler

  // global exception handler
  if (Settings.errors && Settings.errors.catchUncaughtErrors) {
    process.removeAllListeners('uncaughtException')
    process.on('uncaughtException', function (error) {
      if (
        [
          'ETIMEDOUT',
          'EHOSTUNREACH',
          'EPIPE',
          'ECONNRESET',
          'ERR_STREAM_WRITE_AFTER_END',
        ].includes(error.code)
      ) {
        Metrics.inc('disconnected_write', 1, { status: error.code })
        return logger.warn(
          { err: error },
          'attempted to write to disconnected client'
        )
      }
      logger.error({ err: error }, 'uncaught exception')
      if (Settings.errors && Settings.errors.shutdownOnUncaughtError) {
        drainAndShutdown('SIGABRT')
      }
    })
  }
}

if (Settings.continualPubsubTraffic) {
  logger.warn('continualPubsubTraffic enabled')

  const pubsubClient = redis.createClient(Settings.redis.pubsub)
  const clusterClient = redis.createClient(Settings.redis.websessions)

  const publishJob = function (channel, callback) {
    const checker = new HealthCheckManager(channel)
    logger.debug({ channel }, 'sending pub to keep connection alive')
    const json = JSON.stringify({
      health_check: true,
      key: checker.id,
      date: new Date().toString(),
    })
    Metrics.summary(`redis.publish.${channel}`, json.length)
    pubsubClient.publish(channel, json, function (err) {
      if (err) {
        logger.err({ err, channel }, 'error publishing pubsub traffic to redis')
      }
      const blob = JSON.stringify({ keep: 'alive' })
      Metrics.summary('redis.publish.cluster-continual-traffic', blob.length)
      clusterClient.publish('cluster-continual-traffic', blob, callback)
    })
  }

  const runPubSubTraffic = () =>
||||
async.map(['applied-ops', 'editor-events'], publishJob, () =>
|
||||
setTimeout(runPubSubTraffic, 1000 * 20)
|
||||
)
|
||||
|
||||
runPubSubTraffic()
|
||||
}
|
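Note: the three HTTP endpoints above behave differently during a drain. A minimal sketch of polling them from a script; the host and port are assumptions for illustration, not values taken from this diff:

const http = require('http')

function check(path) {
  return new Promise(resolve => {
    http.get({ host: 'localhost', port: 3026, path }, res => resolve(res.statusCode))
  })
}

async function report() {
  console.log('/             ->', await check('/')) // 503 when shutting down or deployment closed
  console.log('/status       ->', await check('/status')) // 503 only while shutDownInProgress
  console.log('/health_check ->', await check('/health_check')) // 503 after shutDownComplete, 500 on redis/pubsub failure
}

report().catch(console.error)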
67
services/real-time/app/js/AuthorizationManager.js
Normal file
@@ -0,0 +1,67 @@
/* eslint-disable
    camelcase,
*/
const { NotAuthorizedError } = require('./Errors')

let AuthorizationManager
module.exports = AuthorizationManager = {
  assertClientCanViewProject(client, callback) {
    AuthorizationManager._assertClientHasPrivilegeLevel(
      client,
      ['readOnly', 'readAndWrite', 'owner'],
      callback
    )
  },

  assertClientCanEditProject(client, callback) {
    AuthorizationManager._assertClientHasPrivilegeLevel(
      client,
      ['readAndWrite', 'owner'],
      callback
    )
  },

  _assertClientHasPrivilegeLevel(client, allowedLevels, callback) {
    if (allowedLevels.includes(client.ol_context.privilege_level)) {
      callback(null)
    } else {
      callback(new NotAuthorizedError())
    }
  },

  assertClientCanViewProjectAndDoc(client, doc_id, callback) {
    AuthorizationManager.assertClientCanViewProject(client, function (error) {
      if (error) {
        return callback(error)
      }
      AuthorizationManager._assertClientCanAccessDoc(client, doc_id, callback)
    })
  },

  assertClientCanEditProjectAndDoc(client, doc_id, callback) {
    AuthorizationManager.assertClientCanEditProject(client, function (error) {
      if (error) {
        return callback(error)
      }
      AuthorizationManager._assertClientCanAccessDoc(client, doc_id, callback)
    })
  },

  _assertClientCanAccessDoc(client, doc_id, callback) {
    if (client.ol_context[`doc:${doc_id}`] === 'allowed') {
      callback(null)
    } else {
      callback(new NotAuthorizedError())
    }
  },

  addAccessToDoc(client, doc_id, callback) {
    client.ol_context[`doc:${doc_id}`] = 'allowed'
    callback(null)
  },

  removeAccessToDoc(client, doc_id, callback) {
    delete client.ol_context[`doc:${doc_id}`]
    callback(null)
  },
}
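A minimal usage sketch of the module above. The client object is a hypothetical stand-in for a socket.io client; in the service, `ol_context` is populated when the client joins a project:

const AuthorizationManager = require('./app/js/AuthorizationManager')

// Hypothetical client; only ol_context matters for these checks.
const client = { ol_context: { privilege_level: 'readOnly' } }

AuthorizationManager.assertClientCanViewProject(client, err => {
  console.log('view allowed:', !err) // true for readOnly/readAndWrite/owner
})

AuthorizationManager.assertClientCanEditProject(client, err => {
  console.log('edit allowed:', !err) // false: readOnly is not in ['readAndWrite', 'owner']
})

// Per-doc access is an explicit grant stored on the client context.
AuthorizationManager.addAccessToDoc(client, 'doc-123', () => {
  AuthorizationManager.assertClientCanViewProjectAndDoc(client, 'doc-123', err => {
    console.log('doc view allowed:', !err) // true once 'doc:doc-123' is marked 'allowed'
  })
})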
101
services/real-time/app/js/ChannelManager.js
Normal file
@@ -0,0 +1,101 @@
const logger = require('logger-sharelatex')
const metrics = require('@overleaf/metrics')
const settings = require('@overleaf/settings')
const OError = require('@overleaf/o-error')

const ClientMap = new Map() // for each redis client, store a Map of subscribed channels (channelname -> subscribe promise)

// Manage redis pubsub subscriptions for individual projects and docs, ensuring
// that we never subscribe to a channel multiple times. The socket.io side is
// handled by RoomManager.

module.exports = {
  getClientMapEntry(rclient) {
    // return the per-client channel map if it exists, otherwise create and
    // return an empty map for the client.
    return (
      ClientMap.get(rclient) || ClientMap.set(rclient, new Map()).get(rclient)
    )
  },

  subscribe(rclient, baseChannel, id) {
    const clientChannelMap = this.getClientMapEntry(rclient)
    const channel = `${baseChannel}:${id}`
    const actualSubscribe = function () {
      // subscribe is happening in the foreground and it should reject
      return rclient
        .subscribe(channel)
        .finally(function () {
          if (clientChannelMap.get(channel) === subscribePromise) {
            clientChannelMap.delete(channel)
          }
        })
        .then(function () {
          logger.log({ channel }, 'subscribed to channel')
          metrics.inc(`subscribe.${baseChannel}`)
        })
        .catch(function (err) {
          logger.error({ channel, err }, 'failed to subscribe to channel')
          metrics.inc(`subscribe.failed.${baseChannel}`)
          // add context for the stack-trace at the call-site
          throw new OError('failed to subscribe to channel', {
            channel,
          }).withCause(err)
        })
    }

    const pendingActions = clientChannelMap.get(channel) || Promise.resolve()
    const subscribePromise = pendingActions.then(
      actualSubscribe,
      actualSubscribe
    )
    clientChannelMap.set(channel, subscribePromise)
    logger.log({ channel }, 'planned to subscribe to channel')
    return subscribePromise
  },

  unsubscribe(rclient, baseChannel, id) {
    const clientChannelMap = this.getClientMapEntry(rclient)
    const channel = `${baseChannel}:${id}`
    const actualUnsubscribe = function () {
      // unsubscribe is happening in the background, it should not reject
      return rclient
        .unsubscribe(channel)
        .finally(function () {
          if (clientChannelMap.get(channel) === unsubscribePromise) {
            clientChannelMap.delete(channel)
          }
        })
        .then(function () {
          logger.log({ channel }, 'unsubscribed from channel')
          metrics.inc(`unsubscribe.${baseChannel}`)
        })
        .catch(function (err) {
          logger.error({ channel, err }, 'unsubscribed from channel')
          metrics.inc(`unsubscribe.failed.${baseChannel}`)
        })
    }

    const pendingActions = clientChannelMap.get(channel) || Promise.resolve()
    const unsubscribePromise = pendingActions.then(
      actualUnsubscribe,
      actualUnsubscribe
    )
    clientChannelMap.set(channel, unsubscribePromise)
    logger.log({ channel }, 'planned to unsubscribe from channel')
    return unsubscribePromise
  },

  publish(rclient, baseChannel, id, data) {
    let channel
    metrics.summary(`redis.publish.${baseChannel}`, data.length)
    if (id === 'all' || !settings.publishOnIndividualChannels) {
      channel = baseChannel
    } else {
      channel = `${baseChannel}:${id}`
    }
    // we publish on a different client to the subscribe, so we can't
    // check for the channel existing here
    rclient.publish(channel, data)
  },
}
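The per-channel promise chain above serializes overlapping subscribe/unsubscribe calls for the same channel. A rough sketch of that behaviour, assuming `rclient` is a promise-based redis client like the one created by @overleaf/redis-wrapper:

const ChannelManager = require('./app/js/ChannelManager')

async function demo(rclient, docId) {
  // All three calls target the same 'applied-ops:<docId>' channel.
  const p1 = ChannelManager.subscribe(rclient, 'applied-ops', docId) // runs first
  const p2 = ChannelManager.unsubscribe(rclient, 'applied-ops', docId) // queued behind p1 via pendingActions
  const p3 = ChannelManager.subscribe(rclient, 'applied-ops', docId) // queued behind p2
  // Each step runs whether the previous one resolved or rejected, so the
  // channel map never ends up in a half-subscribed state.
  await Promise.all([p1, p2, p3])
}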
176
services/real-time/app/js/ConnectedUsersManager.js
Normal file
@@ -0,0 +1,176 @@
/* eslint-disable
    camelcase,
*/
const async = require('async')
const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const redis = require('@overleaf/redis-wrapper')
const OError = require('@overleaf/o-error')
const rclient = redis.createClient(Settings.redis.realtime)
const Keys = Settings.redis.realtime.key_schema

const ONE_HOUR_IN_S = 60 * 60
const ONE_DAY_IN_S = ONE_HOUR_IN_S * 24
const FOUR_DAYS_IN_S = ONE_DAY_IN_S * 4

const USER_TIMEOUT_IN_S = ONE_HOUR_IN_S / 4
const REFRESH_TIMEOUT_IN_S = 10 // only show clients which have responded to a refresh request in the last 10 seconds

module.exports = {
  // Use the same method for when a user connects, and when a user sends a cursor
  // update. This way we don't care if the connected_user key has expired when
  // we receive a cursor update.
  updateUserPosition(project_id, client_id, user, cursorData, callback) {
    logger.log({ project_id, client_id }, 'marking user as joined or connected')

    const multi = rclient.multi()

    multi.sadd(Keys.clientsInProject({ project_id }), client_id)
    multi.expire(Keys.clientsInProject({ project_id }), FOUR_DAYS_IN_S)

    multi.hset(
      Keys.connectedUser({ project_id, client_id }),
      'last_updated_at',
      Date.now()
    )
    multi.hset(
      Keys.connectedUser({ project_id, client_id }),
      'user_id',
      user._id
    )
    multi.hset(
      Keys.connectedUser({ project_id, client_id }),
      'first_name',
      user.first_name || ''
    )
    multi.hset(
      Keys.connectedUser({ project_id, client_id }),
      'last_name',
      user.last_name || ''
    )
    multi.hset(
      Keys.connectedUser({ project_id, client_id }),
      'email',
      user.email || ''
    )

    if (cursorData) {
      multi.hset(
        Keys.connectedUser({ project_id, client_id }),
        'cursorData',
        JSON.stringify(cursorData)
      )
    }
    multi.expire(
      Keys.connectedUser({ project_id, client_id }),
      USER_TIMEOUT_IN_S
    )

    multi.exec(function (err) {
      if (err) {
        err = new OError('problem marking user as connected').withCause(err)
      }
      callback(err)
    })
  },

  refreshClient(project_id, client_id) {
    logger.log({ project_id, client_id }, 'refreshing connected client')
    const multi = rclient.multi()
    multi.hset(
      Keys.connectedUser({ project_id, client_id }),
      'last_updated_at',
      Date.now()
    )
    multi.expire(
      Keys.connectedUser({ project_id, client_id }),
      USER_TIMEOUT_IN_S
    )
    multi.exec(function (err) {
      if (err) {
        logger.err(
          { err, project_id, client_id },
          'problem refreshing connected client'
        )
      }
    })
  },

  markUserAsDisconnected(project_id, client_id, callback) {
    logger.log({ project_id, client_id }, 'marking user as disconnected')
    const multi = rclient.multi()
    multi.srem(Keys.clientsInProject({ project_id }), client_id)
    multi.expire(Keys.clientsInProject({ project_id }), FOUR_DAYS_IN_S)
    multi.del(Keys.connectedUser({ project_id, client_id }))
    multi.exec(function (err) {
      if (err) {
        err = new OError('problem marking user as disconnected').withCause(err)
      }
      callback(err)
    })
  },

  _getConnectedUser(project_id, client_id, callback) {
    rclient.hgetall(
      Keys.connectedUser({ project_id, client_id }),
      function (err, result) {
        if (err) {
          err = new OError('problem fetching connected user details', {
            other_client_id: client_id,
          }).withCause(err)
          return callback(err)
        }
        if (!(result && result.user_id)) {
          result = {
            connected: false,
            client_id,
          }
        } else {
          result.connected = true
          result.client_id = client_id
          result.client_age =
            (Date.now() - parseInt(result.last_updated_at, 10)) / 1000
          if (result.cursorData) {
            try {
              result.cursorData = JSON.parse(result.cursorData)
            } catch (e) {
              OError.tag(e, 'error parsing cursorData JSON', {
                other_client_id: client_id,
                cursorData: result.cursorData,
              })
              return callback(e)
            }
          }
        }
        callback(err, result)
      }
    )
  },

  getConnectedUsers(project_id, callback) {
    const self = this
    rclient.smembers(
      Keys.clientsInProject({ project_id }),
      function (err, results) {
        if (err) {
          err = new OError('problem getting clients in project').withCause(err)
          return callback(err)
        }
        const jobs = results.map(
          client_id => cb => self._getConnectedUser(project_id, client_id, cb)
        )
        async.series(jobs, function (err, users) {
          if (err) {
            OError.tag(err, 'problem getting connected users')
            return callback(err)
          }
          users = users.filter(
            user =>
              user && user.connected && user.client_age < REFRESH_TIMEOUT_IN_S
          )
          callback(null, users)
        })
      }
    )
  },
}
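A small usage sketch of the module above. The ids, user object, and cursor payload are made-up example values; callbacks follow the Node error-first convention used throughout the file:

const ConnectedUsersManager = require('./app/js/ConnectedUsersManager')

const project_id = 'project-1'
const client_id = 'client-abc'
const user = { _id: 'user-1', first_name: 'Ada', last_name: 'Lovelace', email: 'ada@example.com' }

// Called both on connect and on every cursor update; refreshes the key TTLs each time.
ConnectedUsersManager.updateUserPosition(
  project_id,
  client_id,
  user,
  { row: 10, column: 2, doc_id: 'doc-123' }, // example cursorData shape
  err => {
    if (err) throw err
    // Only clients refreshed within REFRESH_TIMEOUT_IN_S (10s) are returned.
    ConnectedUsersManager.getConnectedUsers(project_id, (err, users) => {
      if (err) throw err
      console.log(users.map(u => `${u.first_name} @ row ${u.cursorData && u.cursorData.row}`))
    })
  }
)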
59
services/real-time/app/js/DeploymentManager.js
Normal file
@@ -0,0 +1,59 @@
const logger = require('logger-sharelatex')
const settings = require('@overleaf/settings')
const fs = require('fs')

// Monitor a status file (e.g. /etc/real_time_status) periodically and close the
// service if the file contents don't contain the matching deployment colour.

const FILE_CHECK_INTERVAL = 5000
const statusFile = settings.deploymentFile
const deploymentColour = settings.deploymentColour

let serviceCloseTime

function updateDeploymentStatus(fileContent) {
  const closed = fileContent && !fileContent.includes(deploymentColour)
  if (closed && !settings.serviceIsClosed) {
    settings.serviceIsClosed = true
    serviceCloseTime = Date.now() + 60 * 1000 // delay closing by 1 minute
    logger.warn({ fileContent }, 'closing service')
  } else if (!closed && settings.serviceIsClosed) {
    settings.serviceIsClosed = false
    logger.warn({ fileContent }, 'opening service')
  }
}

function pollStatusFile() {
  fs.readFile(statusFile, { encoding: 'utf8' }, (err, fileContent) => {
    if (err) {
      logger.error(
        { file: statusFile, fsErr: err },
        'error reading service status file'
      )
      return
    }
    updateDeploymentStatus(fileContent)
  })
}

function checkStatusFileSync() {
  // crash on start up if file does not exist
  const content = fs.readFileSync(statusFile, { encoding: 'utf8' })
  updateDeploymentStatus(content)
}

module.exports = {
  initialise() {
    if (statusFile && deploymentColour) {
      logger.log(
        { statusFile, deploymentColour, interval: FILE_CHECK_INTERVAL },
        'monitoring deployment status file'
      )
      checkStatusFileSync() // perform an initial synchronous check at start up
      setInterval(pollStatusFile, FILE_CHECK_INTERVAL) // continue checking periodically
    }
  },
  deploymentIsClosed() {
    return settings.serviceIsClosed && Date.now() > serviceCloseTime
  },
}
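To make the gating concrete, a sketch of the colour check performed by updateDeploymentStatus, with 'blue' as an example value for settings.deploymentColour:

const deploymentColour = 'blue' // example value

function wouldClose(fileContent) {
  // mirrors: const closed = fileContent && !fileContent.includes(deploymentColour)
  return Boolean(fileContent) && !fileContent.includes(deploymentColour)
}

console.log(wouldClose('blue\n'))  // false: service stays open
console.log(wouldClose('green\n')) // true: service closes ~60s later (serviceCloseTime delay)
console.log(wouldClose(''))        // false: an empty file never closes the service

Note that deploymentIsClosed() only reports true once the one-minute serviceCloseTime delay has elapsed, so '/' keeps returning 200 briefly after the colour changes, giving load balancers time to react.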
176
services/real-time/app/js/DocumentUpdaterController.js
Normal file
@@ -0,0 +1,176 @@
/* eslint-disable
    camelcase,
*/
const logger = require('logger-sharelatex')
const settings = require('@overleaf/settings')
const RedisClientManager = require('./RedisClientManager')
const SafeJsonParse = require('./SafeJsonParse')
const EventLogger = require('./EventLogger')
const HealthCheckManager = require('./HealthCheckManager')
const RoomManager = require('./RoomManager')
const ChannelManager = require('./ChannelManager')
const metrics = require('@overleaf/metrics')

let DocumentUpdaterController
module.exports = DocumentUpdaterController = {
  // DocumentUpdaterController is responsible for updates that come via Redis
  // Pub/Sub from the document updater.
  rclientList: RedisClientManager.createClientList(settings.redis.pubsub),

  listenForUpdatesFromDocumentUpdater(io) {
    logger.log(
      { rclients: this.rclientList.length },
      'listening for applied-ops events'
    )
    for (const rclient of this.rclientList) {
      rclient.subscribe('applied-ops')
      rclient.on('message', function (channel, message) {
        metrics.inc('rclient', 0.001) // global event rate metric
        if (settings.debugEvents > 0) {
          EventLogger.debugEvent(channel, message)
        }
        DocumentUpdaterController._processMessageFromDocumentUpdater(
          io,
          channel,
          message
        )
      })
    }
    // create metrics for each redis instance only when we have multiple redis clients
    if (this.rclientList.length > 1) {
      this.rclientList.forEach((rclient, i) => {
        // per client event rate metric
        const metricName = `rclient-${i}`
        rclient.on('message', () => metrics.inc(metricName, 0.001))
      })
    }
    this.handleRoomUpdates(this.rclientList)
  },

  handleRoomUpdates(rclientSubList) {
    const roomEvents = RoomManager.eventSource()
    roomEvents.on('doc-active', function (doc_id) {
      const subscribePromises = rclientSubList.map(rclient =>
        ChannelManager.subscribe(rclient, 'applied-ops', doc_id)
      )
      RoomManager.emitOnCompletion(
        subscribePromises,
        `doc-subscribed-${doc_id}`
      )
    })
    roomEvents.on('doc-empty', doc_id =>
      rclientSubList.map(rclient =>
        ChannelManager.unsubscribe(rclient, 'applied-ops', doc_id)
      )
    )
  },

  _processMessageFromDocumentUpdater(io, channel, message) {
    SafeJsonParse.parse(message, function (error, message) {
      if (error) {
        logger.error({ err: error, channel }, 'error parsing JSON')
        return
      }
      if (message.op) {
        if (message._id && settings.checkEventOrder) {
          const status = EventLogger.checkEventOrder(
            'applied-ops',
            message._id,
            message
          )
          if (status === 'duplicate') {
            return // skip duplicate events
          }
        }
        DocumentUpdaterController._applyUpdateFromDocumentUpdater(
          io,
          message.doc_id,
          message.op
        )
      } else if (message.error) {
        DocumentUpdaterController._processErrorFromDocumentUpdater(
          io,
          message.doc_id,
          message.error,
          message
        )
      } else if (message.health_check) {
        logger.debug(
          { message },
          'got health check message in applied ops channel'
        )
        HealthCheckManager.check(channel, message.key)
      }
    })
  },

  _applyUpdateFromDocumentUpdater(io, doc_id, update) {
    let client
    const clientList = io.sockets.clients(doc_id)
    // avoid unnecessary work if no clients are connected
    if (clientList.length === 0) {
      return
    }
    // send updates to clients
    logger.log(
      {
        doc_id,
        version: update.v,
        source: update.meta && update.meta.source,
        socketIoClients: clientList.map(client => client.id),
      },
      'distributing updates to clients'
    )
    const seen = {}
    // send messages only to unique clients (due to duplicate entries in io.sockets.clients)
    for (client of clientList) {
      if (!seen[client.id]) {
        seen[client.id] = true
        if (client.publicId === update.meta.source) {
          logger.log(
            {
              doc_id,
              version: update.v,
              source: update.meta.source,
            },
            'distributing update to sender'
          )
          client.emit('otUpdateApplied', { v: update.v, doc: update.doc })
        } else if (!update.dup) {
          // Duplicate ops should just be sent back to sending client for acknowledgement
          logger.log(
            {
              doc_id,
              version: update.v,
              source: update.meta.source,
              client_id: client.id,
            },
            'distributing update to collaborator'
          )
          client.emit('otUpdateApplied', update)
        }
      }
    }
    if (Object.keys(seen).length < clientList.length) {
      metrics.inc('socket-io.duplicate-clients', 0.1)
      logger.log(
        {
          doc_id,
          socketIoClients: clientList.map(client => client.id),
        },
        'discarded duplicate clients'
      )
    }
  },

  _processErrorFromDocumentUpdater(io, doc_id, error, message) {
    for (const client of io.sockets.clients(doc_id)) {
      logger.warn(
        { err: error, doc_id, client_id: client.id },
        'error from document updater, disconnecting client'
      )
      client.emit('otUpdateError', error, message)
      client.disconnect()
    }
  },
}
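For reference, a sketch of the three payload shapes `_processMessageFromDocumentUpdater` distinguishes on the `applied-ops` channel. The field values are illustrative; only the field names that the code above actually reads are meaningful:

// An op broadcast (message.op set): forwarded to clients in the doc's room.
const opMessage = {
  _id: 'host1-42', // optional ordering id, checked by EventLogger when settings.checkEventOrder is on
  doc_id: 'doc-123',
  op: { v: 17, doc: 'doc-123', op: [{ i: 'hello', p: 0 }], meta: { source: 'P.abc123' } },
}

// An error (message.error set): clients in the room get 'otUpdateError' and are disconnected.
const errorMessage = { doc_id: 'doc-123', error: 'illustrative error message' }

// A health check (message.health_check set): routed to HealthCheckManager.check.
const healthCheckMessage = { health_check: true, key: 'host=h:pid=1:count=0' }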
150
services/real-time/app/js/DocumentUpdaterManager.js
Normal file
@@ -0,0 +1,150 @@
/* eslint-disable
    camelcase,
*/
const request = require('request')
const _ = require('underscore')
const OError = require('@overleaf/o-error')
const logger = require('logger-sharelatex')
const settings = require('@overleaf/settings')
const metrics = require('@overleaf/metrics')
const {
  ClientRequestedMissingOpsError,
  DocumentUpdaterRequestFailedError,
  NullBytesInOpError,
  UpdateTooLargeError,
} = require('./Errors')

const rclient = require('@overleaf/redis-wrapper').createClient(
  settings.redis.documentupdater
)
const Keys = settings.redis.documentupdater.key_schema

const DocumentUpdaterManager = {
  getDocument(project_id, doc_id, fromVersion, callback) {
    const timer = new metrics.Timer('get-document')
    const url = `${settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}?fromVersion=${fromVersion}`
    logger.log(
      { project_id, doc_id, fromVersion },
      'getting doc from document updater'
    )
    request.get(url, function (err, res, body) {
      timer.done()
      if (err) {
        OError.tag(err, 'error getting doc from doc updater')
        return callback(err)
      }
      if (res.statusCode >= 200 && res.statusCode < 300) {
        logger.log(
          { project_id, doc_id },
          'got doc from document document updater'
        )
        try {
          body = JSON.parse(body)
        } catch (error) {
          OError.tag(error, 'error parsing doc updater response')
          return callback(error)
        }
        body = body || {}
        callback(null, body.lines, body.version, body.ranges, body.ops)
      } else if ([404, 422].includes(res.statusCode)) {
        callback(new ClientRequestedMissingOpsError(res.statusCode))
      } else {
        callback(
          new DocumentUpdaterRequestFailedError('getDocument', res.statusCode)
        )
      }
    })
  },

  checkDocument(project_id, doc_id, callback) {
    // in this call fromVersion = -1 means get document without docOps
    DocumentUpdaterManager.getDocument(project_id, doc_id, -1, callback)
  },

  flushProjectToMongoAndDelete(project_id, callback) {
    // this method is called when the last connected user leaves the project
    logger.log({ project_id }, 'deleting project from document updater')
    const timer = new metrics.Timer('delete.mongo.project')
    // flush the project in the background when all users have left
    const url =
      `${settings.apis.documentupdater.url}/project/${project_id}?background=true` +
      (settings.shutDownInProgress ? '&shutdown=true' : '')
    request.del(url, function (err, res) {
      timer.done()
      if (err) {
        OError.tag(err, 'error deleting project from document updater')
        callback(err)
      } else if (res.statusCode >= 200 && res.statusCode < 300) {
        logger.log({ project_id }, 'deleted project from document updater')
        callback(null)
      } else {
        callback(
          new DocumentUpdaterRequestFailedError(
            'flushProjectToMongoAndDelete',
            res.statusCode
          )
        )
      }
    })
  },

  _getPendingUpdateListKey() {
    const shard = _.random(0, settings.pendingUpdateListShardCount - 1)
    if (shard === 0) {
      return 'pending-updates-list'
    } else {
      return `pending-updates-list-${shard}`
    }
  },

  queueChange(project_id, doc_id, change, callback) {
    const allowedKeys = [
      'doc',
      'op',
      'v',
      'dupIfSource',
      'meta',
      'lastV',
      'hash',
    ]
    change = _.pick(change, allowedKeys)
    const jsonChange = JSON.stringify(change)
    if (jsonChange.indexOf('\u0000') !== -1) {
      // memory corruption check
      return callback(new NullBytesInOpError(jsonChange))
    }

    const updateSize = jsonChange.length
    if (updateSize > settings.maxUpdateSize) {
      return callback(new UpdateTooLargeError(updateSize))
    }

    // record metric for each update added to queue
    metrics.summary('redis.pendingUpdates', updateSize, { status: 'push' })

    const doc_key = `${project_id}:${doc_id}`
    // Push onto pendingUpdates for doc_id first, because once the doc updater
    // gets an entry on pending-updates-list, it starts processing.
    rclient.rpush(
      Keys.pendingUpdates({ doc_id }),
      jsonChange,
      function (error) {
        if (error) {
          error = new OError('error pushing update into redis').withCause(error)
          return callback(error)
        }
        const queueKey = DocumentUpdaterManager._getPendingUpdateListKey()
        rclient.rpush(queueKey, doc_key, function (error) {
          if (error) {
            error = new OError('error pushing doc_id into redis')
              .withInfo({ queueKey })
              .withCause(error)
          }
          callback(error)
        })
      }
    )
  },
}

module.exports = DocumentUpdaterManager
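A sketch of what `queueChange` ends up writing to Redis for a single change. The literal key strings come from settings.redis.documentupdater.key_schema, so they are shown schematically here; the ids and shard index are example values:

const change = { doc: 'd1', op: [{ i: 'x', p: 0 }], v: 42, meta: { source: 'P.abc' } }

DocumentUpdaterManager.queueChange('p1', 'd1', change, err => {
  if (err) throw err
  // Redis now holds (shard index 0-3 chosen at random, here 2):
  //   RPUSH <pendingUpdates key for d1>   JSON.stringify(change limited to allowedKeys)
  //   RPUSH pending-updates-list-2        'p1:d1'
  // The per-doc push happens first because the doc updater starts work as soon
  // as it pops 'p1:d1' off a pending-updates-list shard.
})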
59
services/real-time/app/js/DrainManager.js
Normal file
@@ -0,0 +1,59 @@
const logger = require('logger-sharelatex')

module.exports = {
  startDrainTimeWindow(io, minsToDrain, callback) {
    const drainPerMin = io.sockets.clients().length / minsToDrain
    // enforce minimum drain rate
    this.startDrain(io, Math.max(drainPerMin / 60, 4), callback)
  },

  startDrain(io, rate, callback) {
    // Clear out any old interval
    clearInterval(this.interval)
    logger.log({ rate }, 'starting drain')
    if (rate === 0) {
      return
    }
    let pollingInterval
    if (rate < 1) {
      // allow lower drain rates
      // e.g. rate=0.1 will drain one client every 10 seconds
      pollingInterval = 1000 / rate
      rate = 1
    } else {
      pollingInterval = 1000
    }
    this.interval = setInterval(() => {
      const requestedAllClientsToReconnect = this.reconnectNClients(io, rate)
      if (requestedAllClientsToReconnect && callback) {
        callback()
        callback = undefined
      }
    }, pollingInterval)
  },

  RECONNECTED_CLIENTS: {},
  reconnectNClients(io, N) {
    let drainedCount = 0
    for (const client of io.sockets.clients()) {
      if (!this.RECONNECTED_CLIENTS[client.id]) {
        this.RECONNECTED_CLIENTS[client.id] = true
        logger.log(
          { client_id: client.id },
          'Asking client to reconnect gracefully'
        )
        client.emit('reconnectGracefully')
        drainedCount++
      }
      const haveDrainedNClients = drainedCount === N
      if (haveDrainedNClients) {
        break
      }
    }
    if (drainedCount < N) {
      logger.log('All clients have been told to reconnectGracefully')
      return true
    }
    return false
  },
}
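Worked numbers for the two entry points above (pure arithmetic based on the code, with example client counts):

// startDrainTimeWindow(io, 10) with 6000 connected clients:
const connectedClients = 6000
const minsToDrain = 10
const drainPerMin = connectedClients / minsToDrain // 600 clients per minute
const rate = Math.max(drainPerMin / 60, 4) // 10 'reconnectGracefully' requests per second

// startDrain(io, 0.1), i.e. a sub-1 rate:
const slowRate = 0.1
const pollingInterval = 1000 / slowRate // 10000ms: one client asked to reconnect every 10 seconds
console.log({ rate, pollingInterval })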
103
services/real-time/app/js/Errors.js
Normal file
@@ -0,0 +1,103 @@
const OError = require('@overleaf/o-error')

class ClientRequestedMissingOpsError extends OError {
  constructor(statusCode) {
    super('doc updater could not load requested ops', {
      statusCode,
    })
  }
}

class CodedError extends OError {
  constructor(message, code) {
    super(message, { code })
  }
}

class CorruptedJoinProjectResponseError extends OError {
  constructor() {
    super('no data returned from joinProject request')
  }
}

class DataTooLargeToParseError extends OError {
  constructor(data) {
    super('data too large to parse', {
      head: data.slice(0, 1024),
      length: data.length,
    })
  }
}

class DocumentUpdaterRequestFailedError extends OError {
  constructor(action, statusCode) {
    super('doc updater returned a non-success status code', {
      action,
      statusCode,
    })
  }
}

class JoinLeaveEpochMismatchError extends OError {
  constructor() {
    super('joinLeaveEpoch mismatch')
  }
}

class MissingSessionError extends OError {
  constructor() {
    super('could not look up session by key')
  }
}

class NotAuthorizedError extends OError {
  constructor() {
    super('not authorized')
  }
}

class NotJoinedError extends OError {
  constructor() {
    super('no project_id found on client')
  }
}

class NullBytesInOpError extends OError {
  constructor(jsonChange) {
    super('null bytes found in op', { jsonChange })
  }
}

class UnexpectedArgumentsError extends OError {
  constructor() {
    super('unexpected arguments')
  }
}

class UpdateTooLargeError extends OError {
  constructor(updateSize) {
    super('update is too large', { updateSize })
  }
}

class WebApiRequestFailedError extends OError {
  constructor(statusCode) {
    super('non-success status code from web', { statusCode })
  }
}

module.exports = {
  CodedError,
  CorruptedJoinProjectResponseError,
  ClientRequestedMissingOpsError,
  DataTooLargeToParseError,
  DocumentUpdaterRequestFailedError,
  JoinLeaveEpochMismatchError,
  MissingSessionError,
  NotAuthorizedError,
  NotJoinedError,
  NullBytesInOpError,
  UnexpectedArgumentsError,
  UpdateTooLargeError,
  WebApiRequestFailedError,
}
84
services/real-time/app/js/EventLogger.js
Normal file
@@ -0,0 +1,84 @@
/* eslint-disable
    camelcase,
*/
let EventLogger
const logger = require('logger-sharelatex')
const metrics = require('@overleaf/metrics')
const settings = require('@overleaf/settings')

// keep track of message counters to detect duplicate and out of order events
// messsage ids have the format "UNIQUEHOSTKEY-COUNTER"

const EVENT_LOG_COUNTER = {}
const EVENT_LOG_TIMESTAMP = {}
let EVENT_LAST_CLEAN_TIMESTAMP = 0

// counter for debug logs
let COUNTER = 0

module.exports = EventLogger = {
  MAX_STALE_TIME_IN_MS: 3600 * 1000,

  debugEvent(channel, message) {
    if (settings.debugEvents > 0) {
      logger.log({ channel, message, counter: COUNTER++ }, 'logging event')
      settings.debugEvents--
    }
  },

  checkEventOrder(channel, message_id) {
    if (typeof message_id !== 'string') {
      return
    }
    let result
    if (!(result = message_id.match(/^(.*)-(\d+)$/))) {
      return
    }
    const key = result[1]
    const count = parseInt(result[2], 0)
    if (!(count >= 0)) {
      // ignore checks if counter is not present
      return
    }
    // store the last count in a hash for each host
    const previous = EventLogger._storeEventCount(key, count)
    if (!previous || count === previous + 1) {
      metrics.inc(`event.${channel}.valid`)
      return // order is ok
    }
    if (count === previous) {
      metrics.inc(`event.${channel}.duplicate`)
      logger.warn({ channel, message_id }, 'duplicate event')
      return 'duplicate'
    } else {
      metrics.inc(`event.${channel}.out-of-order`)
      logger.warn(
        { channel, message_id, key, previous, count },
        'out of order event'
      )
      return 'out-of-order'
    }
  },

  _storeEventCount(key, count) {
    const previous = EVENT_LOG_COUNTER[key]
    const now = Date.now()
    EVENT_LOG_COUNTER[key] = count
    EVENT_LOG_TIMESTAMP[key] = now
    // periodically remove old counts
    if (now - EVENT_LAST_CLEAN_TIMESTAMP > EventLogger.MAX_STALE_TIME_IN_MS) {
      EventLogger._cleanEventStream(now)
      EVENT_LAST_CLEAN_TIMESTAMP = now
    }
    return previous
  },

  _cleanEventStream(now) {
    Object.entries(EVENT_LOG_TIMESTAMP).forEach(([key, timestamp]) => {
      if (now - timestamp > EventLogger.MAX_STALE_TIME_IN_MS) {
        delete EVENT_LOG_COUNTER[key]
        delete EVENT_LOG_TIMESTAMP[key]
      }
    })
  },
}
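How the message-id check above plays out for a single publisher key; the ids are made-up values following the "UNIQUEHOSTKEY-COUNTER" format described in the comments:

const EventLogger = require('./app/js/EventLogger')

console.log(EventLogger.checkEventOrder('applied-ops', 'host1-1')) // undefined (first event, ok)
console.log(EventLogger.checkEventOrder('applied-ops', 'host1-2')) // undefined (sequential, ok)
console.log(EventLogger.checkEventOrder('applied-ops', 'host1-2')) // 'duplicate'
console.log(EventLogger.checkEventOrder('applied-ops', 'host1-5')) // 'out-of-order' (jumped from 2 to 5)
console.log(EventLogger.checkEventOrder('applied-ops', 'not-numbered')) // undefined (no trailing counter, ignored)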
77
services/real-time/app/js/HealthCheckManager.js
Normal file
@@ -0,0 +1,77 @@
const metrics = require('@overleaf/metrics')
const logger = require('logger-sharelatex')

const os = require('os')
const HOST = os.hostname()
const PID = process.pid
let COUNT = 0

const CHANNEL_MANAGER = {} // hash of event checkers by channel name
const CHANNEL_ERROR = {} // error status by channel name

module.exports = class HealthCheckManager {
  // create an instance of this class which checks that an event with a unique
  // id is received only once within a timeout
  constructor(channel, timeout) {
    // unique event string
    this.channel = channel
    this.id = `host=${HOST}:pid=${PID}:count=${COUNT++}`
    // count of number of times the event is received
    this.count = 0
    // after a timeout check the status of the count
    this.handler = setTimeout(() => {
      this.setStatus()
    }, timeout || 1000)
    // use a timer to record the latency of the channel
    this.timer = new metrics.Timer(`event.${this.channel}.latency`)
    // keep a record of these objects to dispatch on
    CHANNEL_MANAGER[this.channel] = this
  }

  processEvent(id) {
    // if this is our event record it
    if (id === this.id) {
      this.count++
      if (this.timer) {
        this.timer.done()
      }
      this.timer = undefined // only time the latency of the first event
    }
  }

  setStatus() {
    // if we saw the event anything other than a single time that is an error
    const isFailing = this.count !== 1
    if (isFailing) {
      logger.err(
        { channel: this.channel, count: this.count, id: this.id },
        'redis channel health check error'
      )
    }
    CHANNEL_ERROR[this.channel] = isFailing
  }

  // class methods
  static check(channel, id) {
    // dispatch event to manager for channel
    if (CHANNEL_MANAGER[channel]) {
      CHANNEL_MANAGER[channel].processEvent(id)
    }
  }

  static status() {
    // return status of all channels for logging
    return CHANNEL_ERROR
  }

  static isFailing() {
    // check if any channel status is bad
    for (const channel in CHANNEL_ERROR) {
      const error = CHANNEL_ERROR[channel]
      if (error === true) {
        return true
      }
    }
    return false
  }
}
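A sketch of the round trip this class expects, mirroring `publishJob` in app.js above but with the redis hop simulated locally; 'editor-events' is a channel the service really publishes on, the rest is illustrative:

const HealthCheckManager = require('./app/js/HealthCheckManager')

// Publisher side: create a checker; its id would normally travel over redis pubsub.
const checker = new HealthCheckManager('editor-events', 1000)

// Subscriber side: on receiving the message, hand the key back via the static method.
// Here we deliver it directly instead of going through redis.
HealthCheckManager.check('editor-events', checker.id)

setTimeout(() => {
  // Exactly one delivery before the timeout means the channel is healthy.
  console.log('any channel failing?', HealthCheckManager.isFailing()) // false
  console.log('status by channel:', HealthCheckManager.status()) // { 'editor-events': false }
}, 1500)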
52
services/real-time/app/js/HttpApiController.js
Normal file
@@ -0,0 +1,52 @@
/* eslint-disable
    camelcase,
*/
const WebsocketLoadBalancer = require('./WebsocketLoadBalancer')
const DrainManager = require('./DrainManager')
const logger = require('logger-sharelatex')

module.exports = {
  sendMessage(req, res) {
    logger.log({ message: req.params.message }, 'sending message')
    if (Array.isArray(req.body)) {
      for (const payload of req.body) {
        WebsocketLoadBalancer.emitToRoom(
          req.params.project_id,
          req.params.message,
          payload
        )
      }
    } else {
      WebsocketLoadBalancer.emitToRoom(
        req.params.project_id,
        req.params.message,
        req.body
      )
    }
    res.sendStatus(204)
  },

  startDrain(req, res) {
    const io = req.app.get('io')
    let rate = req.query.rate || '4'
    rate = parseFloat(rate) || 0
    logger.log({ rate }, 'setting client drain rate')
    DrainManager.startDrain(io, rate)
    res.sendStatus(204)
  },

  disconnectClient(req, res, next) {
    const io = req.app.get('io')
    const { client_id } = req.params
    const client = io.sockets.sockets[client_id]

    if (!client) {
      logger.info({ client_id }, 'api: client already disconnected')
      res.sendStatus(404)
      return
    }
    logger.warn({ client_id }, 'api: requesting client disconnect')
    client.on('disconnect', () => res.sendStatus(204))
    client.disconnect()
  },
}
57
services/real-time/app/js/HttpController.js
Normal file
@@ -0,0 +1,57 @@
/* eslint-disable
    camelcase,
*/

let HttpController
module.exports = HttpController = {
  // The code in this controller is hard to unit test because of a lot of
  // dependencies on internal socket.io methods. It is not critical to the running
  // of ShareLaTeX, and is only used for getting stats about connected clients,
  // and for checking internal state in acceptance tests. The acceptances tests
  // should provide appropriate coverage.
  _getConnectedClientView(ioClient) {
    const client_id = ioClient.id
    const {
      project_id,
      user_id,
      first_name,
      last_name,
      email,
      connected_time,
    } = ioClient.ol_context
    const client = {
      client_id,
      project_id,
      user_id,
      first_name,
      last_name,
      email,
      connected_time,
    }
    client.rooms = Object.keys(ioClient.manager.roomClients[client_id] || {})
      // drop the namespace
      .filter(room => room !== '')
      // room names are composed as '<NAMESPACE>/<ROOM>' and the default
      // namespace is empty (see comments in RoomManager), just drop the '/'
      .map(fullRoomPath => fullRoomPath.slice(1))
    return client
  },

  getConnectedClients(req, res) {
    const io = req.app.get('io')
    const ioClients = io.sockets.clients()

    res.json(ioClients.map(HttpController._getConnectedClientView))
  },

  getConnectedClient(req, res) {
    const { client_id } = req.params
    const io = req.app.get('io')
    const ioClient = io.sockets.sockets[client_id]
    if (!ioClient) {
      res.sendStatus(404)
      return
    }
    res.json(HttpController._getConnectedClientView(ioClient))
  },
}
19
services/real-time/app/js/RedisClientManager.js
Normal file
@@ -0,0 +1,19 @@
const redis = require('@overleaf/redis-wrapper')
const logger = require('logger-sharelatex')

module.exports = {
  createClientList(...configs) {
    // create a dynamic list of redis clients, excluding any configurations which are not defined
    return configs.filter(Boolean).map(x => {
      const redisType = x.cluster
        ? 'cluster'
        : x.sentinels
        ? 'sentinel'
        : x.host
        ? 'single'
        : 'unknown'
      logger.log({ redis: redisType }, 'creating redis client')
      return redis.createClient(x)
    })
  },
}
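Usage sketch for the helper above. The config objects are example shapes matching the type detection in the code, and undefined entries (e.g. an unset secondary config) are silently dropped by filter(Boolean):

const RedisClientManager = require('./app/js/RedisClientManager')

const primary = { host: 'localhost', port: 6379 } // logged as 'single'
const cluster = { cluster: [{ host: '10.0.0.1', port: 7000 }] } // logged as 'cluster'
const notConfigured = undefined // filtered out, no client created

const clients = RedisClientManager.createClientList(primary, cluster, notConfigured)
console.log(clients.length) // 2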
164
services/real-time/app/js/RoomManager.js
Normal file
@@ -0,0 +1,164 @@
/* eslint-disable
|
||||
camelcase,
|
||||
*/
|
||||
const logger = require('logger-sharelatex')
|
||||
const metrics = require('@overleaf/metrics')
|
||||
const { EventEmitter } = require('events')
|
||||
const OError = require('@overleaf/o-error')
|
||||
|
||||
const IdMap = new Map() // keep track of whether ids are from projects or docs
|
||||
const RoomEvents = new EventEmitter() // emits {project,doc}-active and {project,doc}-empty events
|
||||
|
||||
// Manage socket.io rooms for individual projects and docs
|
||||
//
|
||||
// The first time someone joins a project or doc we emit a 'project-active' or
|
||||
// 'doc-active' event.
|
||||
//
|
||||
// When the last person leaves a project or doc, we emit 'project-empty' or
|
||||
// 'doc-empty' event.
|
||||
//
|
||||
// The pubsub side is handled by ChannelManager
|
||||
|
||||
module.exports = {
|
||||
joinProject(client, project_id, callback) {
|
||||
this.joinEntity(client, 'project', project_id, callback)
|
||||
},
|
||||
|
||||
joinDoc(client, doc_id, callback) {
|
||||
this.joinEntity(client, 'doc', doc_id, callback)
|
||||
},
|
||||
|
||||
leaveDoc(client, doc_id) {
|
||||
this.leaveEntity(client, 'doc', doc_id)
|
||||
},
|
||||
|
||||
leaveProjectAndDocs(client) {
|
||||
// what rooms is this client in? we need to leave them all. socket.io
|
||||
// will cause us to leave the rooms, so we only need to manage our
|
||||
// channel subscriptions... but it will be safer if we leave them
|
||||
// explicitly, and then socket.io will just regard this as a client that
|
||||
// has not joined any rooms and do a final disconnection.
|
||||
const roomsToLeave = this._roomsClientIsIn(client)
|
||||
logger.log({ client: client.id, roomsToLeave }, 'client leaving project')
|
||||
for (const id of roomsToLeave) {
|
||||
const entity = IdMap.get(id)
|
||||
this.leaveEntity(client, entity, id)
|
||||
}
|
||||
},
|
||||
|
||||
emitOnCompletion(promiseList, eventName) {
|
||||
Promise.all(promiseList)
|
||||
.then(() => RoomEvents.emit(eventName))
|
||||
.catch(err => RoomEvents.emit(eventName, err))
|
||||
},
|
||||
|
||||
eventSource() {
|
||||
return RoomEvents
|
||||
},
|
||||
|
||||
joinEntity(client, entity, id, callback) {
|
||||
const beforeCount = this._clientsInRoom(client, id)
|
||||
// client joins room immediately but joinDoc request does not complete
|
||||
// until room is subscribed
|
||||
client.join(id)
|
||||
// is this a new room? if so, subscribe
|
||||
if (beforeCount === 0) {
|
||||
logger.log({ entity, id }, 'room is now active')
|
||||
RoomEvents.once(`${entity}-subscribed-${id}`, function (err) {
|
||||
// only allow the client to join when all the relevant channels have subscribed
|
||||
if (err) {
|
||||
OError.tag(err, 'error joining', { entity, id })
|
||||
return callback(err)
|
||||
}
|
||||
logger.log(
|
||||
{ client: client.id, entity, id, beforeCount },
|
||||
'client joined new room and subscribed to channel'
|
||||
)
|
||||
callback(err)
|
||||
})
|
||||
RoomEvents.emit(`${entity}-active`, id)
|
||||
IdMap.set(id, entity)
|
||||
// keep track of the number of listeners
|
||||
metrics.gauge('room-listeners', RoomEvents.eventNames().length)
|
||||
} else {
|
||||
logger.log(
|
||||
{ client: client.id, entity, id, beforeCount },
|
||||
'client joined existing room'
|
||||
)
|
||||
callback()
|
||||
}
|
||||
},
|
||||
|
||||
leaveEntity(client, entity, id) {
|
||||
// Ignore any requests to leave when the client is not actually in the
|
||||
// room. This can happen if the client sends spurious leaveDoc requests
|
||||
// for old docs after a reconnection.
|
||||
// This can now happen all the time, as we skip the join for clients that
|
||||
// disconnect before joinProject/joinDoc completed.
|
||||
if (!this._clientAlreadyInRoom(client, id)) {
|
||||
logger.log(
|
||||
{ client: client.id, entity, id },
|
||||
'ignoring request from client to leave room it is not in'
|
||||
)
|
||||
return
|
||||
}
|
||||
client.leave(id)
|
||||
const afterCount = this._clientsInRoom(client, id)
|
||||
logger.log(
|
||||
{ client: client.id, entity, id, afterCount },
|
||||
'client left room'
|
||||
)
|
||||
// is the room now empty? if so, unsubscribe
|
||||
if (!entity) {
|
||||
logger.error({ entity: id }, 'unknown entity when leaving with id')
|
||||
return
|
||||
}
|
||||
if (afterCount === 0) {
|
||||
logger.log({ entity, id }, 'room is now empty')
|
||||
RoomEvents.emit(`${entity}-empty`, id)
|
||||
IdMap.delete(id)
|
||||
metrics.gauge('room-listeners', RoomEvents.eventNames().length)
|
||||
}
|
||||
},
|
||||
|
||||
// internal functions below, these access socket.io rooms data directly and
|
||||
// will need updating for socket.io v2
|
||||
|
||||
// The below code makes some assumptions that are always true for v0
|
||||
// - we are using the base namespace '', so room names are '/<ENTITY>'
|
||||
// https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/manager.js#L62
|
||||
// https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/manager.js#L1018
|
||||
// - client.namespace is a Namespace
|
||||
// https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/namespace.js#L204
|
||||
// https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/socket.js#L40
|
||||
// - client.manager is a Manager
|
||||
// https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/namespace.js#L204
|
||||
// https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/socket.js#L41
|
||||
// - a Manager has
|
||||
// - `.rooms={'NAMESPACE/ENTITY': []}` and
|
||||
// - `.roomClients={'CLIENT_ID': {'...': true}}`
|
||||
// https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/manager.js#L287-L288
|
||||
// https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/manager.js#L444-L455
|
||||
|
||||
_clientsInRoom(client, room) {
|
||||
const clients = client.manager.rooms['/' + room] || []
|
||||
return clients.length
|
||||
},
|
||||
|
||||
_roomsClientIsIn(client) {
|
||||
const rooms = client.manager.roomClients[client.id] || {}
|
||||
return (
|
||||
Object.keys(rooms)
|
||||
// drop the namespace
|
||||
.filter(room => room !== '')
|
||||
// room names are composed as '<NAMESPACE>/<ROOM>' and the default
|
||||
// namespace is empty (see comments above), just drop the '/'
|
||||
.map(fullRoomPath => fullRoomPath.slice(1))
|
||||
)
|
||||
},
|
||||
|
||||
_clientAlreadyInRoom(client, room) {
|
||||
const rooms = client.manager.roomClients[client.id] || {}
|
||||
return !!rooms['/' + room]
|
||||
},
|
||||
}
|
374
services/real-time/app/js/Router.js
Normal file
@@ -0,0 +1,374 @@
/* eslint-disable
|
||||
camelcase,
|
||||
*/
|
||||
const metrics = require('@overleaf/metrics')
|
||||
const logger = require('logger-sharelatex')
|
||||
const settings = require('@overleaf/settings')
|
||||
const WebsocketController = require('./WebsocketController')
|
||||
const HttpController = require('./HttpController')
|
||||
const HttpApiController = require('./HttpApiController')
|
||||
const bodyParser = require('body-parser')
|
||||
const base64id = require('base64id')
|
||||
const { UnexpectedArgumentsError } = require('./Errors')
|
||||
|
||||
const basicAuth = require('basic-auth-connect')
|
||||
const httpAuth = basicAuth(function (user, pass) {
|
||||
const isValid =
|
||||
user === settings.internal.realTime.user &&
|
||||
pass === settings.internal.realTime.pass
|
||||
if (!isValid) {
|
||||
logger.err({ user, pass }, 'invalid login details')
|
||||
}
|
||||
return isValid
|
||||
})
|
||||
|
||||
const HOSTNAME = require('os').hostname()
|
||||
|
||||
let Router
|
||||
module.exports = Router = {
|
||||
_handleError(callback, error, client, method, attrs) {
|
||||
attrs = attrs || {}
|
||||
for (const key of ['project_id', 'user_id']) {
|
||||
attrs[key] = attrs[key] || client.ol_context[key]
|
||||
}
|
||||
attrs.client_id = client.id
|
||||
attrs.err = error
|
||||
attrs.method = method
|
||||
if (error.name === 'CodedError') {
|
||||
logger.warn(attrs, error.message)
|
||||
const serializedError = { message: error.message, code: error.info.code }
|
||||
callback(serializedError)
|
||||
} else if (error.message === 'unexpected arguments') {
|
||||
// the payload might be very large, put it on level info
|
||||
logger.log(attrs, 'unexpected arguments')
|
||||
metrics.inc('unexpected-arguments', 1, { status: method })
|
||||
const serializedError = { message: error.message }
|
||||
callback(serializedError)
|
||||
} else if (error.message === 'no project_id found on client') {
|
||||
logger.debug(attrs, error.message)
|
||||
const serializedError = { message: error.message }
|
||||
callback(serializedError)
|
||||
} else if (
|
||||
[
|
||||
'not authorized',
|
||||
'joinLeaveEpoch mismatch',
|
||||
'doc updater could not load requested ops',
|
||||
].includes(error.message)
|
||||
) {
|
||||
logger.warn(attrs, error.message)
|
||||
const serializedError = { message: error.message }
|
||||
callback(serializedError)
|
||||
} else {
|
||||
logger.error(attrs, `server side error in ${method}`)
|
||||
// Don't return raw error to prevent leaking server side info
|
||||
const serializedError = {
|
||||
message: 'Something went wrong in real-time service',
|
||||
}
|
||||
callback(serializedError)
|
||||
}
|
||||
},
|
||||
|
||||
_handleInvalidArguments(client, method, args) {
|
||||
const error = new UnexpectedArgumentsError()
|
||||
let callback = args[args.length - 1]
|
||||
if (typeof callback !== 'function') {
|
||||
callback = function () {}
|
||||
}
|
||||
const attrs = { arguments: args }
|
||||
Router._handleError(callback, error, client, method, attrs)
|
||||
},
|
||||
|
||||
configure(app, io, session) {
|
||||
app.set('io', io)
|
||||
app.get('/clients', HttpController.getConnectedClients)
|
||||
app.get('/clients/:client_id', HttpController.getConnectedClient)
|
||||
|
||||
app.post(
|
||||
'/project/:project_id/message/:message',
|
||||
httpAuth,
|
||||
bodyParser.json({ limit: '5mb' }),
|
||||
HttpApiController.sendMessage
|
||||
)
|
||||
|
||||
app.post('/drain', httpAuth, HttpApiController.startDrain)
|
||||
app.post(
|
||||
'/client/:client_id/disconnect',
|
||||
httpAuth,
|
||||
HttpApiController.disconnectClient
|
||||
)
|
||||
|
||||
session.on('connection', function (error, client, session) {
|
||||
// init client context, we may access it in Router._handleError before
|
||||
// setting any values
|
||||
client.ol_context = {}
|
||||
// bail out from joinDoc when a parallel joinDoc or leaveDoc is running
|
||||
client.joinLeaveEpoch = 0
|
||||
|
||||
if (client) {
|
||||
client.on('error', function (err) {
|
||||
logger.err({ clientErr: err }, 'socket.io client error')
|
||||
if (client.connected) {
|
||||
client.emit('reconnectGracefully')
|
||||
client.disconnect()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if (settings.shutDownInProgress) {
|
||||
client.emit('connectionRejected', { message: 'retry' })
|
||||
client.disconnect()
|
||||
return
|
||||
}
|
||||
|
||||
if (
|
||||
client &&
|
||||
error &&
|
||||
error.message.match(/could not look up session by key/)
|
||||
) {
|
||||
logger.warn(
|
||||
{ err: error, client: !!client, session: !!session },
|
||||
'invalid session'
|
||||
)
|
||||
// tell the client to reauthenticate if it has an invalid session key
|
||||
client.emit('connectionRejected', { message: 'invalid session' })
|
||||
client.disconnect()
|
||||
return
|
||||
}
|
||||
|
||||
if (error) {
|
||||
logger.err(
|
||||
{ err: error, client: !!client, session: !!session },
|
||||
'error when client connected'
|
||||
)
|
||||
if (client) {
|
||||
client.emit('connectionRejected', { message: 'error' })
|
||||
}
|
||||
if (client) {
|
||||
client.disconnect()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// send positive confirmation that the client has a valid connection
|
||||
client.publicId = 'P.' + base64id.generateId()
|
||||
client.emit('connectionAccepted', null, client.publicId)
|
||||
|
||||
metrics.inc('socket-io.connection', 1, { status: client.transport })
|
||||
metrics.gauge('socket-io.clients', io.sockets.clients().length)
|
||||
|
||||
logger.log({ session, client_id: client.id }, 'client connected')
|
||||
|
||||
let user
|
||||
if (session && session.passport && session.passport.user) {
|
||||
;({ user } = session.passport)
|
||||
} else if (session && session.user) {
|
||||
;({ user } = session)
|
||||
} else {
|
||||
user = { _id: 'anonymous-user' }
|
||||
}
|
||||
|
||||
if (settings.exposeHostname) {
|
||||
client.on('debug.getHostname', function (callback) {
|
||||
if (typeof callback !== 'function') {
|
||||
return Router._handleInvalidArguments(
|
||||
client,
|
||||
'debug.getHostname',
|
||||
arguments
|
||||
)
|
||||
}
|
||||
callback(HOSTNAME)
|
||||
})
|
||||
}
|
||||
|
||||
client.on('joinProject', function (data, callback) {
|
||||
data = data || {}
|
||||
if (typeof callback !== 'function') {
|
||||
return Router._handleInvalidArguments(
|
||||
client,
|
||||
'joinProject',
|
||||
arguments
|
||||
)
|
||||
}
|
||||
|
||||
if (data.anonymousAccessToken) {
|
||||
user.anonymousAccessToken = data.anonymousAccessToken
|
||||
}
|
||||
WebsocketController.joinProject(
|
||||
client,
|
||||
user,
|
||||
data.project_id,
|
||||
function (err, ...args) {
|
||||
if (err) {
|
||||
Router._handleError(callback, err, client, 'joinProject', {
|
||||
project_id: data.project_id,
|
||||
user_id: user._id,
|
||||
})
|
||||
} else {
|
||||
callback(null, ...args)
|
||||
}
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
client.on('disconnect', function () {
|
||||
metrics.inc('socket-io.disconnect', 1, { status: client.transport })
|
||||
metrics.gauge('socket-io.clients', io.sockets.clients().length)
|
||||
|
||||
WebsocketController.leaveProject(io, client, function (err) {
|
||||
if (err) {
|
||||
Router._handleError(function () {}, err, client, 'leaveProject')
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
// Variadic. The possible arguments:
|
||||
// doc_id, callback
|
||||
// doc_id, fromVersion, callback
|
||||
// doc_id, options, callback
|
||||
// doc_id, fromVersion, options, callback
|
||||
client.on('joinDoc', function (doc_id, fromVersion, options, callback) {
|
||||
if (typeof fromVersion === 'function' && !options) {
|
||||
callback = fromVersion
|
||||
fromVersion = -1
|
||||
options = {}
|
||||
} else if (
|
||||
typeof fromVersion === 'number' &&
|
||||
typeof options === 'function'
|
||||
) {
|
||||
callback = options
|
||||
options = {}
|
||||
} else if (
|
||||
typeof fromVersion === 'object' &&
|
||||
typeof options === 'function'
|
||||
) {
|
||||
callback = options
|
||||
options = fromVersion
|
||||
fromVersion = -1
|
||||
} else if (
|
||||
typeof fromVersion === 'number' &&
|
||||
typeof options === 'object' &&
|
||||
typeof callback === 'function'
|
||||
) {
|
||||
// Called with 4 args, things are as expected
|
||||
} else {
|
||||
return Router._handleInvalidArguments(client, 'joinDoc', arguments)
|
||||
}
|
||||
|
||||
WebsocketController.joinDoc(
|
||||
client,
|
||||
doc_id,
|
||||
fromVersion,
|
||||
options,
|
||||
function (err, ...args) {
|
||||
if (err) {
|
||||
Router._handleError(callback, err, client, 'joinDoc', {
|
||||
doc_id,
|
||||
fromVersion,
|
||||
})
|
||||
} else {
|
||||
callback(null, ...args)
|
||||
}
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
client.on('leaveDoc', function (doc_id, callback) {
|
||||
if (typeof callback !== 'function') {
|
||||
return Router._handleInvalidArguments(client, 'leaveDoc', arguments)
|
||||
}
|
||||
|
||||
WebsocketController.leaveDoc(client, doc_id, function (err, ...args) {
|
||||
if (err) {
|
||||
Router._handleError(callback, err, client, 'leaveDoc', {
|
||||
doc_id,
|
||||
})
|
||||
} else {
|
||||
callback(null, ...args)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
client.on('clientTracking.getConnectedUsers', function (callback) {
|
||||
if (typeof callback !== 'function') {
|
||||
return Router._handleInvalidArguments(
|
||||
client,
|
||||
'clientTracking.getConnectedUsers',
|
||||
arguments
|
||||
)
|
||||
}
|
||||
|
||||
WebsocketController.getConnectedUsers(client, function (err, users) {
|
||||
if (err) {
|
||||
Router._handleError(
|
||||
callback,
|
||||
err,
|
||||
client,
|
||||
'clientTracking.getConnectedUsers'
|
||||
)
|
||||
} else {
|
||||
callback(null, users)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
client.on(
|
||||
'clientTracking.updatePosition',
|
||||
function (cursorData, callback) {
|
||||
if (!callback) {
|
||||
callback = function () {}
|
||||
}
|
||||
if (typeof callback !== 'function') {
|
||||
return Router._handleInvalidArguments(
|
||||
client,
|
||||
'clientTracking.updatePosition',
|
||||
arguments
|
||||
)
|
||||
}
|
||||
|
||||
WebsocketController.updateClientPosition(
|
||||
client,
|
||||
cursorData,
|
||||
function (err) {
|
||||
if (err) {
|
||||
Router._handleError(
|
||||
callback,
|
||||
err,
|
||||
client,
|
||||
'clientTracking.updatePosition'
|
||||
)
|
||||
} else {
|
||||
callback()
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
client.on('applyOtUpdate', function (doc_id, update, callback) {
|
||||
if (typeof callback !== 'function') {
|
||||
return Router._handleInvalidArguments(
|
||||
client,
|
||||
'applyOtUpdate',
|
||||
arguments
|
||||
)
|
||||
}
|
||||
|
||||
WebsocketController.applyOtUpdate(
|
||||
client,
|
||||
doc_id,
|
||||
update,
|
||||
function (err) {
|
||||
if (err) {
|
||||
Router._handleError(callback, err, client, 'applyOtUpdate', {
|
||||
doc_id,
|
||||
update,
|
||||
})
|
||||
} else {
|
||||
callback()
|
||||
}
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
},
|
||||
}
|
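The joinDoc handler above accepts several call shapes (doc_id+callback, doc_id+fromVersion+callback, doc_id+options+callback, or all four arguments) and normalizes them before calling WebsocketController.joinDoc. A minimal standalone sketch of that normalization logic, mirroring the branches in the handler; the function name normalizeJoinDocArgs is illustrative and not part of the codebase.

// Illustrative sketch only: normalize the variadic joinDoc arguments into a
// fixed { fromVersion, options, callback } shape, mirroring the handler above.
function normalizeJoinDocArgs(fromVersion, options, callback) {
  if (typeof fromVersion === 'function' && !options) {
    // joinDoc(doc_id, callback)
    return { fromVersion: -1, options: {}, callback: fromVersion }
  }
  if (typeof fromVersion === 'number' && typeof options === 'function') {
    // joinDoc(doc_id, fromVersion, callback)
    return { fromVersion, options: {}, callback: options }
  }
  if (typeof fromVersion === 'object' && typeof options === 'function') {
    // joinDoc(doc_id, options, callback)
    return { fromVersion: -1, options: fromVersion, callback: options }
  }
  if (
    typeof fromVersion === 'number' &&
    typeof options === 'object' &&
    typeof callback === 'function'
  ) {
    // joinDoc(doc_id, fromVersion, options, callback): already in the expected shape
    return { fromVersion, options, callback }
  }
  // invalid arguments; the router reports these via Router._handleInvalidArguments
  return null
}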
17
services/real-time/app/js/SafeJsonParse.js
Normal file
@@ -0,0 +1,17 @@
const Settings = require('@overleaf/settings')
const { DataTooLargeToParseError } = require('./Errors')

module.exports = {
  parse(data, callback) {
    if (data.length > Settings.maxUpdateSize) {
      return callback(new DataTooLargeToParseError(data))
    }
    let parsed
    try {
      parsed = JSON.parse(data)
    } catch (e) {
      return callback(e)
    }
    callback(null, parsed)
  },
}
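A minimal usage sketch of SafeJsonParse; it assumes Settings.maxUpdateSize is configured (see config/settings.defaults.js later in this diff) and that callers follow the usual (error, result) callback convention.

// Illustrative usage only.
const SafeJsonParse = require('./SafeJsonParse')

SafeJsonParse.parse('{"doc_id": "abc", "op": []}', function (error, parsed) {
  if (error) {
    // either a JSON syntax error or a DataTooLargeToParseError
    return console.error(error)
  }
  console.log(parsed.doc_id) // => 'abc'
})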
36
services/real-time/app/js/SessionSockets.js
Normal file
@@ -0,0 +1,36 @@
const OError = require('@overleaf/o-error')
const { EventEmitter } = require('events')
const { MissingSessionError } = require('./Errors')

module.exports = function (io, sessionStore, cookieParser, cookieName) {
  const missingSessionError = new MissingSessionError()

  const sessionSockets = new EventEmitter()
  function next(error, socket, session) {
    sessionSockets.emit('connection', error, socket, session)
  }

  io.on('connection', function (socket) {
    const req = socket.handshake
    cookieParser(req, {}, function () {
      const sessionId = req.signedCookies && req.signedCookies[cookieName]
      if (!sessionId) {
        return next(missingSessionError, socket)
      }
      sessionStore.get(sessionId, function (error, session) {
        if (error) {
          OError.tag(error, 'error getting session from sessionStore', {
            sessionId,
          })
          return next(error, socket)
        }
        if (!session) {
          return next(missingSessionError, socket)
        }
        next(null, socket, session)
      })
    })
  })

  return sessionSockets
}
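A sketch of how this factory might be wired up. The concrete store and cookie-parser setup here are assumptions for illustration only; the real wiring lives in app.js, which is not shown in this section.

// Illustrative wiring only; the real setup lives in app.js.
const Settings = require('@overleaf/settings')
const cookieParser = require('cookie-parser')(Settings.security.sessionSecret)
const SessionSockets = require('./SessionSockets')

// `io` (the socket.io server) and `sessionStore` (a connect-redis compatible
// session store) are assumed to be created elsewhere in the application.
const sessionSockets = SessionSockets(
  io,
  sessionStore,
  cookieParser,
  Settings.cookieName
)
sessionSockets.on('connection', function (error, socket, session) {
  if (error) {
    // e.g. a MissingSessionError when no valid signed session cookie was found
    return socket.disconnect()
  }
  // `session` is the user's web session, looked up from the store by cookie id
})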
69
services/real-time/app/js/WebApiManager.js
Normal file
@@ -0,0 +1,69 @@
/* eslint-disable
    camelcase,
*/
const request = require('request')
const OError = require('@overleaf/o-error')
const settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const {
  CodedError,
  CorruptedJoinProjectResponseError,
  NotAuthorizedError,
  WebApiRequestFailedError,
} = require('./Errors')

module.exports = {
  joinProject(project_id, user, callback) {
    const user_id = user._id
    logger.log({ project_id, user_id }, 'sending join project request to web')
    const url = `${settings.apis.web.url}/project/${project_id}/join`
    const headers = {}
    if (user.anonymousAccessToken) {
      headers['x-sl-anonymous-access-token'] = user.anonymousAccessToken
    }
    request.post(
      {
        url,
        qs: { user_id },
        auth: {
          user: settings.apis.web.user,
          pass: settings.apis.web.pass,
          sendImmediately: true,
        },
        json: true,
        jar: false,
        headers,
      },
      function (error, response, data) {
        if (error) {
          OError.tag(error, 'join project request failed')
          return callback(error)
        }
        if (response.statusCode >= 200 && response.statusCode < 300) {
          if (!(data && data.project)) {
            return callback(new CorruptedJoinProjectResponseError())
          }
          callback(
            null,
            data.project,
            data.privilegeLevel,
            data.isRestrictedUser
          )
        } else if (response.statusCode === 429) {
          callback(
            new CodedError(
              'rate-limit hit when joining project',
              'TooManyRequests'
            )
          )
        } else if (response.statusCode === 403) {
          callback(new NotAuthorizedError())
        } else if (response.statusCode === 404) {
          callback(new CodedError('project not found', 'ProjectNotFound'))
        } else {
          callback(new WebApiRequestFailedError(response.statusCode))
        }
      }
    )
  },
}
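A usage sketch for WebApiManager.joinProject; the project and user ids are placeholders, not values from the codebase.

// Illustrative call only.
const WebApiManager = require('./WebApiManager')

WebApiManager.joinProject(
  'project-id-placeholder',
  { _id: 'user-id-placeholder' },
  function (error, project, privilegeLevel, isRestrictedUser) {
    if (error) {
      // one of: CodedError ('TooManyRequests' / 'ProjectNotFound'),
      // NotAuthorizedError, CorruptedJoinProjectResponseError,
      // WebApiRequestFailedError, or a tagged network error
      return console.error(error)
    }
    // privilegeLevel is e.g. 'owner', 'readAndWrite' or 'readOnly'
    console.log(project.name, privilegeLevel, isRestrictedUser)
  }
)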
573
services/real-time/app/js/WebsocketController.js
Normal file
@@ -0,0 +1,573 @@
/* eslint-disable
|
||||
camelcase,
|
||||
*/
|
||||
const OError = require('@overleaf/o-error')
|
||||
const logger = require('logger-sharelatex')
|
||||
const metrics = require('@overleaf/metrics')
|
||||
const WebApiManager = require('./WebApiManager')
|
||||
const AuthorizationManager = require('./AuthorizationManager')
|
||||
const DocumentUpdaterManager = require('./DocumentUpdaterManager')
|
||||
const ConnectedUsersManager = require('./ConnectedUsersManager')
|
||||
const WebsocketLoadBalancer = require('./WebsocketLoadBalancer')
|
||||
const RoomManager = require('./RoomManager')
|
||||
const {
|
||||
JoinLeaveEpochMismatchError,
|
||||
NotAuthorizedError,
|
||||
NotJoinedError,
|
||||
} = require('./Errors')
|
||||
|
||||
let WebsocketController
|
||||
module.exports = WebsocketController = {
|
||||
// If the protocol version changes when the client reconnects,
|
||||
// it will force a full refresh of the page. Useful for non-backwards
|
||||
// compatible protocol changes. Use only in extreme need.
|
||||
PROTOCOL_VERSION: 2,
|
||||
|
||||
joinProject(client, user, project_id, callback) {
|
||||
if (client.disconnected) {
|
||||
metrics.inc('editor.join-project.disconnected', 1, {
|
||||
status: 'immediately',
|
||||
})
|
||||
return callback()
|
||||
}
|
||||
|
||||
const user_id = user._id
|
||||
logger.log(
|
||||
{ user_id, project_id, client_id: client.id },
|
||||
'user joining project'
|
||||
)
|
||||
metrics.inc('editor.join-project', 1, { status: client.transport })
|
||||
WebApiManager.joinProject(
|
||||
project_id,
|
||||
user,
|
||||
function (error, project, privilegeLevel, isRestrictedUser) {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
if (client.disconnected) {
|
||||
metrics.inc('editor.join-project.disconnected', 1, {
|
||||
status: 'after-web-api-call',
|
||||
})
|
||||
return callback()
|
||||
}
|
||||
|
||||
if (!privilegeLevel) {
|
||||
return callback(new NotAuthorizedError())
|
||||
}
|
||||
|
||||
client.ol_context = {}
|
||||
client.ol_context.privilege_level = privilegeLevel
|
||||
client.ol_context.user_id = user_id
|
||||
client.ol_context.project_id = project_id
|
||||
client.ol_context.owner_id = project.owner && project.owner._id
|
||||
client.ol_context.first_name = user.first_name
|
||||
client.ol_context.last_name = user.last_name
|
||||
client.ol_context.email = user.email
|
||||
client.ol_context.connected_time = new Date()
|
||||
client.ol_context.signup_date = user.signUpDate
|
||||
client.ol_context.login_count = user.loginCount
|
||||
client.ol_context.is_restricted_user = !!isRestrictedUser
|
||||
|
||||
RoomManager.joinProject(client, project_id, function (err) {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
logger.log(
|
||||
{ user_id, project_id, client_id: client.id },
|
||||
'user joined project'
|
||||
)
|
||||
callback(
|
||||
null,
|
||||
project,
|
||||
privilegeLevel,
|
||||
WebsocketController.PROTOCOL_VERSION
|
||||
)
|
||||
})
|
||||
|
||||
// No need to block for setting the user as connected in the cursor tracking
|
||||
ConnectedUsersManager.updateUserPosition(
|
||||
project_id,
|
||||
client.publicId,
|
||||
user,
|
||||
null,
|
||||
function (err) {
|
||||
if (err) {
|
||||
logger.warn(
|
||||
{ err, project_id, user_id, client_id: client.id },
|
||||
'background cursor update failed'
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
// We want to flush a project if there are no more (local) connected clients
|
||||
// but we need to wait for the triggering client to disconnect. How long we wait
|
||||
// is determined by FLUSH_IF_EMPTY_DELAY.
|
||||
FLUSH_IF_EMPTY_DELAY: 500, // ms
|
||||
leaveProject(io, client, callback) {
|
||||
const { project_id, user_id } = client.ol_context
|
||||
if (!project_id) {
|
||||
return callback()
|
||||
} // client did not join project
|
||||
|
||||
metrics.inc('editor.leave-project', 1, { status: client.transport })
|
||||
logger.log(
|
||||
{ project_id, user_id, client_id: client.id },
|
||||
'client leaving project'
|
||||
)
|
||||
WebsocketLoadBalancer.emitToRoom(
|
||||
project_id,
|
||||
'clientTracking.clientDisconnected',
|
||||
client.publicId
|
||||
)
|
||||
|
||||
// We can do this in the background
|
||||
ConnectedUsersManager.markUserAsDisconnected(
|
||||
project_id,
|
||||
client.publicId,
|
||||
function (err) {
|
||||
if (err) {
|
||||
logger.error(
|
||||
{ err, project_id, user_id, client_id: client.id },
|
||||
'error marking client as disconnected'
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
RoomManager.leaveProjectAndDocs(client)
|
||||
setTimeout(function () {
|
||||
const remainingClients = io.sockets.clients(project_id)
|
||||
if (remainingClients.length === 0) {
|
||||
// Flush project in the background
|
||||
DocumentUpdaterManager.flushProjectToMongoAndDelete(
|
||||
project_id,
|
||||
function (err) {
|
||||
if (err) {
|
||||
logger.error(
|
||||
{ err, project_id, user_id, client_id: client.id },
|
||||
'error flushing to doc updater after leaving project'
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
callback()
|
||||
}, WebsocketController.FLUSH_IF_EMPTY_DELAY)
|
||||
},
|
||||
|
||||
joinDoc(client, doc_id, fromVersion, options, callback) {
|
||||
if (client.disconnected) {
|
||||
metrics.inc('editor.join-doc.disconnected', 1, { status: 'immediately' })
|
||||
return callback()
|
||||
}
|
||||
|
||||
const joinLeaveEpoch = ++client.joinLeaveEpoch
|
||||
metrics.inc('editor.join-doc', 1, { status: client.transport })
|
||||
const { project_id, user_id, is_restricted_user } = client.ol_context
|
||||
if (!project_id) {
|
||||
return callback(new NotJoinedError())
|
||||
}
|
||||
logger.log(
|
||||
{ user_id, project_id, doc_id, fromVersion, client_id: client.id },
|
||||
'client joining doc'
|
||||
)
|
||||
|
||||
WebsocketController._assertClientAuthorization(
|
||||
client,
|
||||
doc_id,
|
||||
function (error) {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
if (client.disconnected) {
|
||||
metrics.inc('editor.join-doc.disconnected', 1, {
|
||||
status: 'after-client-auth-check',
|
||||
})
|
||||
// the client will not read the response anyways
|
||||
return callback()
|
||||
}
|
||||
if (joinLeaveEpoch !== client.joinLeaveEpoch) {
|
||||
// another joinDoc or leaveDoc rpc overtook us
|
||||
return callback(new JoinLeaveEpochMismatchError())
|
||||
}
|
||||
// ensure the per-doc applied-ops channel is subscribed before sending the
|
||||
// doc to the client, so that no events are missed.
|
||||
RoomManager.joinDoc(client, doc_id, function (error) {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
if (client.disconnected) {
|
||||
metrics.inc('editor.join-doc.disconnected', 1, {
|
||||
status: 'after-joining-room',
|
||||
})
|
||||
// the client will not read the response anyways
|
||||
return callback()
|
||||
}
|
||||
|
||||
DocumentUpdaterManager.getDocument(
|
||||
project_id,
|
||||
doc_id,
|
||||
fromVersion,
|
||||
function (error, lines, version, ranges, ops) {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
if (client.disconnected) {
|
||||
metrics.inc('editor.join-doc.disconnected', 1, {
|
||||
status: 'after-doc-updater-call',
|
||||
})
|
||||
// the client will not read the response anyways
|
||||
return callback()
|
||||
}
|
||||
|
||||
if (is_restricted_user && ranges && ranges.comments) {
|
||||
ranges.comments = []
|
||||
}
|
||||
|
||||
// Encode any binary bits of data so it can go via WebSockets
|
||||
// See http://ecmanaut.blogspot.co.uk/2006/07/encoding-decoding-utf8-in-javascript.html
|
||||
const encodeForWebsockets = text =>
|
||||
unescape(encodeURIComponent(text))
|
||||
const escapedLines = []
|
||||
for (let line of lines) {
|
||||
try {
|
||||
line = encodeForWebsockets(line)
|
||||
} catch (err) {
|
||||
OError.tag(err, 'error encoding line uri component', { line })
|
||||
return callback(err)
|
||||
}
|
||||
escapedLines.push(line)
|
||||
}
|
||||
if (options.encodeRanges) {
|
||||
try {
|
||||
for (const comment of (ranges && ranges.comments) || []) {
|
||||
if (comment.op.c) {
|
||||
comment.op.c = encodeForWebsockets(comment.op.c)
|
||||
}
|
||||
}
|
||||
for (const change of (ranges && ranges.changes) || []) {
|
||||
if (change.op.i) {
|
||||
change.op.i = encodeForWebsockets(change.op.i)
|
||||
}
|
||||
if (change.op.d) {
|
||||
change.op.d = encodeForWebsockets(change.op.d)
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
OError.tag(err, 'error encoding range uri component', {
|
||||
ranges,
|
||||
})
|
||||
return callback(err)
|
||||
}
|
||||
}
|
||||
|
||||
AuthorizationManager.addAccessToDoc(client, doc_id, () => {})
|
||||
logger.log(
|
||||
{
|
||||
user_id,
|
||||
project_id,
|
||||
doc_id,
|
||||
fromVersion,
|
||||
client_id: client.id,
|
||||
},
|
||||
'client joined doc'
|
||||
)
|
||||
callback(null, escapedLines, version, ops, ranges)
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
_assertClientAuthorization(client, doc_id, callback) {
|
||||
// Check for project-level access first
|
||||
AuthorizationManager.assertClientCanViewProject(client, function (error) {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
// Check for doc-level access next
|
||||
AuthorizationManager.assertClientCanViewProjectAndDoc(
|
||||
client,
|
||||
doc_id,
|
||||
function (error) {
|
||||
if (error) {
|
||||
// No cached access, check docupdater
|
||||
const { project_id } = client.ol_context
|
||||
DocumentUpdaterManager.checkDocument(
|
||||
project_id,
|
||||
doc_id,
|
||||
function (error) {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
} else {
|
||||
// Success
|
||||
AuthorizationManager.addAccessToDoc(client, doc_id, callback)
|
||||
}
|
||||
}
|
||||
)
|
||||
} else {
|
||||
// Access already cached
|
||||
callback()
|
||||
}
|
||||
}
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
leaveDoc(client, doc_id, callback) {
|
||||
// client may have disconnected, but we have to cleanup internal state.
|
||||
client.joinLeaveEpoch++
|
||||
metrics.inc('editor.leave-doc', 1, { status: client.transport })
|
||||
const { project_id, user_id } = client.ol_context
|
||||
logger.log(
|
||||
{ user_id, project_id, doc_id, client_id: client.id },
|
||||
'client leaving doc'
|
||||
)
|
||||
RoomManager.leaveDoc(client, doc_id)
|
||||
// we could remove permission when user leaves a doc, but because
|
||||
// the connection is per-project, we continue to allow access
|
||||
// after the initial joinDoc since we know they are already authorised.
|
||||
// # AuthorizationManager.removeAccessToDoc client, doc_id
|
||||
callback()
|
||||
},
|
||||
updateClientPosition(client, cursorData, callback) {
|
||||
if (client.disconnected) {
|
||||
// do not create a ghost entry in redis
|
||||
return callback()
|
||||
}
|
||||
|
||||
metrics.inc('editor.update-client-position', 0.1, {
|
||||
status: client.transport,
|
||||
})
|
||||
const { project_id, first_name, last_name, email, user_id } =
|
||||
client.ol_context
|
||||
logger.log(
|
||||
{ user_id, project_id, client_id: client.id, cursorData },
|
||||
'updating client position'
|
||||
)
|
||||
|
||||
AuthorizationManager.assertClientCanViewProjectAndDoc(
|
||||
client,
|
||||
cursorData.doc_id,
|
||||
function (error) {
|
||||
if (error) {
|
||||
logger.info(
|
||||
{ err: error, client_id: client.id, project_id, user_id },
|
||||
"silently ignoring unauthorized updateClientPosition. Client likely hasn't called joinProject yet."
|
||||
)
|
||||
return callback()
|
||||
}
|
||||
cursorData.id = client.publicId
|
||||
if (user_id) {
|
||||
cursorData.user_id = user_id
|
||||
}
|
||||
if (email) {
|
||||
cursorData.email = email
|
||||
}
|
||||
// Don't store anonymous users in redis to avoid influx
|
||||
if (!user_id || user_id === 'anonymous-user') {
|
||||
cursorData.name = ''
|
||||
// consistent async behaviour
|
||||
setTimeout(callback)
|
||||
} else {
|
||||
cursorData.name =
|
||||
first_name && last_name
|
||||
? `${first_name} ${last_name}`
|
||||
: first_name || last_name || ''
|
||||
ConnectedUsersManager.updateUserPosition(
|
||||
project_id,
|
||||
client.publicId,
|
||||
{
|
||||
first_name,
|
||||
last_name,
|
||||
email,
|
||||
_id: user_id,
|
||||
},
|
||||
{
|
||||
row: cursorData.row,
|
||||
column: cursorData.column,
|
||||
doc_id: cursorData.doc_id,
|
||||
},
|
||||
callback
|
||||
)
|
||||
}
|
||||
WebsocketLoadBalancer.emitToRoom(
|
||||
project_id,
|
||||
'clientTracking.clientUpdated',
|
||||
cursorData
|
||||
)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
CLIENT_REFRESH_DELAY: 1000,
|
||||
getConnectedUsers(client, callback) {
|
||||
if (client.disconnected) {
|
||||
// they are not interested anymore, skip the redis lookups
|
||||
return callback()
|
||||
}
|
||||
|
||||
metrics.inc('editor.get-connected-users', { status: client.transport })
|
||||
const { project_id, user_id, is_restricted_user } = client.ol_context
|
||||
if (is_restricted_user) {
|
||||
return callback(null, [])
|
||||
}
|
||||
if (!project_id) {
|
||||
return callback(new NotJoinedError())
|
||||
}
|
||||
logger.log(
|
||||
{ user_id, project_id, client_id: client.id },
|
||||
'getting connected users'
|
||||
)
|
||||
AuthorizationManager.assertClientCanViewProject(client, function (error) {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
WebsocketLoadBalancer.emitToRoom(project_id, 'clientTracking.refresh')
|
||||
setTimeout(
|
||||
() =>
|
||||
ConnectedUsersManager.getConnectedUsers(
|
||||
project_id,
|
||||
function (error, users) {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
logger.log(
|
||||
{ user_id, project_id, client_id: client.id },
|
||||
'got connected users'
|
||||
)
|
||||
callback(null, users)
|
||||
}
|
||||
),
|
||||
WebsocketController.CLIENT_REFRESH_DELAY
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
applyOtUpdate(client, doc_id, update, callback) {
|
||||
// client may have disconnected, but we can submit their update to doc-updater anyways.
|
||||
const { user_id, project_id } = client.ol_context
|
||||
if (!project_id) {
|
||||
return callback(new NotJoinedError())
|
||||
}
|
||||
|
||||
WebsocketController._assertClientCanApplyUpdate(
|
||||
client,
|
||||
doc_id,
|
||||
update,
|
||||
function (error) {
|
||||
if (error) {
|
||||
setTimeout(
|
||||
() =>
|
||||
// Disconnect, but give the client the chance to receive the error
|
||||
client.disconnect(),
|
||||
100
|
||||
)
|
||||
return callback(error)
|
||||
}
|
||||
if (!update.meta) {
|
||||
update.meta = {}
|
||||
}
|
||||
update.meta.source = client.publicId
|
||||
update.meta.user_id = user_id
|
||||
metrics.inc('editor.doc-update', 0.3, { status: client.transport })
|
||||
|
||||
logger.log(
|
||||
{
|
||||
user_id,
|
||||
doc_id,
|
||||
project_id,
|
||||
client_id: client.id,
|
||||
version: update.v,
|
||||
},
|
||||
'sending update to doc updater'
|
||||
)
|
||||
|
||||
DocumentUpdaterManager.queueChange(
|
||||
project_id,
|
||||
doc_id,
|
||||
update,
|
||||
function (error) {
|
||||
if ((error && error.message) === 'update is too large') {
|
||||
metrics.inc('update_too_large')
|
||||
const { updateSize } = error.info
|
||||
logger.warn(
|
||||
{ user_id, project_id, doc_id, updateSize },
|
||||
'update is too large'
|
||||
)
|
||||
|
||||
// mark the update as received -- the client should not send it again!
|
||||
callback()
|
||||
|
||||
// trigger an out-of-sync error
|
||||
const message = {
|
||||
project_id,
|
||||
doc_id,
|
||||
error: 'update is too large',
|
||||
}
|
||||
setTimeout(function () {
|
||||
if (client.disconnected) {
|
||||
// skip the message broadcast, the client has moved on
|
||||
return metrics.inc('editor.doc-update.disconnected', 1, {
|
||||
status: 'at-otUpdateError',
|
||||
})
|
||||
}
|
||||
client.emit('otUpdateError', message.error, message)
|
||||
client.disconnect()
|
||||
}, 100)
|
||||
return
|
||||
}
|
||||
|
||||
if (error) {
|
||||
OError.tag(error, 'document was not available for update', {
|
||||
version: update.v,
|
||||
})
|
||||
client.disconnect()
|
||||
}
|
||||
callback(error)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
_assertClientCanApplyUpdate(client, doc_id, update, callback) {
|
||||
AuthorizationManager.assertClientCanEditProjectAndDoc(
|
||||
client,
|
||||
doc_id,
|
||||
function (error) {
|
||||
if (
|
||||
error &&
|
||||
error.message === 'not authorized' &&
|
||||
WebsocketController._isCommentUpdate(update)
|
||||
) {
|
||||
// This might be a comment op, which we only need read-only priveleges for
|
||||
AuthorizationManager.assertClientCanViewProjectAndDoc(
|
||||
client,
|
||||
doc_id,
|
||||
callback
|
||||
)
|
||||
return
|
||||
}
|
||||
callback(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
_isCommentUpdate(update) {
|
||||
if (!(update && update.op instanceof Array)) {
|
||||
return false
|
||||
}
|
||||
for (const op of update.op) {
|
||||
if (!op.c) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
},
|
||||
}
|
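joinDoc above encodes each document line with unescape(encodeURIComponent(text)) before sending it over the socket, so that non-ASCII text survives the transport. A small round-trip sketch; the decode direction shown here is an assumption based on the referenced blog post, not code contained in this diff.

// Illustrative round trip only.
const encodeForWebsockets = text => unescape(encodeURIComponent(text))
const decodeFromWebsockets = text => decodeURIComponent(escape(text)) // assumed client-side inverse

const original = 'naïve café ✓'
const wireSafe = encodeForWebsockets(original) // each UTF-8 byte becomes one character
console.log(decodeFromWebsockets(wireSafe) === original) // => true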
171
services/real-time/app/js/WebsocketLoadBalancer.js
Normal file
@@ -0,0 +1,171 @@
/* eslint-disable
|
||||
camelcase,
|
||||
*/
|
||||
const Settings = require('@overleaf/settings')
|
||||
const logger = require('logger-sharelatex')
|
||||
const RedisClientManager = require('./RedisClientManager')
|
||||
const SafeJsonParse = require('./SafeJsonParse')
|
||||
const EventLogger = require('./EventLogger')
|
||||
const HealthCheckManager = require('./HealthCheckManager')
|
||||
const RoomManager = require('./RoomManager')
|
||||
const ChannelManager = require('./ChannelManager')
|
||||
const ConnectedUsersManager = require('./ConnectedUsersManager')
|
||||
|
||||
const RESTRICTED_USER_MESSAGE_TYPE_PASS_LIST = [
|
||||
'connectionAccepted',
|
||||
'otUpdateApplied',
|
||||
'otUpdateError',
|
||||
'joinDoc',
|
||||
'reciveNewDoc',
|
||||
'reciveNewFile',
|
||||
'reciveNewFolder',
|
||||
'removeEntity',
|
||||
]
|
||||
|
||||
let WebsocketLoadBalancer
|
||||
module.exports = WebsocketLoadBalancer = {
|
||||
rclientPubList: RedisClientManager.createClientList(Settings.redis.pubsub),
|
||||
rclientSubList: RedisClientManager.createClientList(Settings.redis.pubsub),
|
||||
|
||||
emitToRoom(room_id, message, ...payload) {
|
||||
if (!room_id) {
|
||||
logger.warn(
|
||||
{ message, payload },
|
||||
'no room_id provided, ignoring emitToRoom'
|
||||
)
|
||||
return
|
||||
}
|
||||
const data = JSON.stringify({
|
||||
room_id,
|
||||
message,
|
||||
payload,
|
||||
})
|
||||
logger.log(
|
||||
{ room_id, message, payload, length: data.length },
|
||||
'emitting to room'
|
||||
)
|
||||
|
||||
this.rclientPubList.map(rclientPub =>
|
||||
ChannelManager.publish(rclientPub, 'editor-events', room_id, data)
|
||||
)
|
||||
},
|
||||
|
||||
emitToAll(message, ...payload) {
|
||||
this.emitToRoom('all', message, ...payload)
|
||||
},
|
||||
|
||||
listenForEditorEvents(io) {
|
||||
logger.log(
|
||||
{ rclients: this.rclientSubList.length },
|
||||
'listening for editor events'
|
||||
)
|
||||
for (const rclientSub of this.rclientSubList) {
|
||||
rclientSub.subscribe('editor-events')
|
||||
rclientSub.on('message', function (channel, message) {
|
||||
if (Settings.debugEvents > 0) {
|
||||
EventLogger.debugEvent(channel, message)
|
||||
}
|
||||
WebsocketLoadBalancer._processEditorEvent(io, channel, message)
|
||||
})
|
||||
}
|
||||
this.handleRoomUpdates(this.rclientSubList)
|
||||
},
|
||||
|
||||
handleRoomUpdates(rclientSubList) {
|
||||
const roomEvents = RoomManager.eventSource()
|
||||
roomEvents.on('project-active', function (project_id) {
|
||||
const subscribePromises = rclientSubList.map(rclient =>
|
||||
ChannelManager.subscribe(rclient, 'editor-events', project_id)
|
||||
)
|
||||
RoomManager.emitOnCompletion(
|
||||
subscribePromises,
|
||||
`project-subscribed-${project_id}`
|
||||
)
|
||||
})
|
||||
roomEvents.on('project-empty', project_id =>
|
||||
rclientSubList.map(rclient =>
|
||||
ChannelManager.unsubscribe(rclient, 'editor-events', project_id)
|
||||
)
|
||||
)
|
||||
},
|
||||
|
||||
_processEditorEvent(io, channel, message) {
|
||||
SafeJsonParse.parse(message, function (error, message) {
|
||||
if (error) {
|
||||
logger.error({ err: error, channel }, 'error parsing JSON')
|
||||
return
|
||||
}
|
||||
if (message.room_id === 'all') {
|
||||
io.sockets.emit(message.message, ...message.payload)
|
||||
} else if (
|
||||
message.message === 'clientTracking.refresh' &&
|
||||
message.room_id
|
||||
) {
|
||||
const clientList = io.sockets.clients(message.room_id)
|
||||
logger.log(
|
||||
{
|
||||
channel,
|
||||
message: message.message,
|
||||
room_id: message.room_id,
|
||||
message_id: message._id,
|
||||
socketIoClients: clientList.map(client => client.id),
|
||||
},
|
||||
'refreshing client list'
|
||||
)
|
||||
for (const client of clientList) {
|
||||
ConnectedUsersManager.refreshClient(message.room_id, client.publicId)
|
||||
}
|
||||
} else if (message.room_id) {
|
||||
if (message._id && Settings.checkEventOrder) {
|
||||
const status = EventLogger.checkEventOrder(
|
||||
'editor-events',
|
||||
message._id,
|
||||
message
|
||||
)
|
||||
if (status === 'duplicate') {
|
||||
return // skip duplicate events
|
||||
}
|
||||
}
|
||||
|
||||
const is_restricted_message =
|
||||
!RESTRICTED_USER_MESSAGE_TYPE_PASS_LIST.includes(message.message)
|
||||
|
||||
// send messages only to unique clients (due to duplicate entries in io.sockets.clients)
|
||||
const clientList = io.sockets
|
||||
.clients(message.room_id)
|
||||
.filter(
|
||||
client =>
|
||||
!(is_restricted_message && client.ol_context.is_restricted_user)
|
||||
)
|
||||
|
||||
// avoid unnecessary work if no clients are connected
|
||||
if (clientList.length === 0) {
|
||||
return
|
||||
}
|
||||
logger.log(
|
||||
{
|
||||
channel,
|
||||
message: message.message,
|
||||
room_id: message.room_id,
|
||||
message_id: message._id,
|
||||
socketIoClients: clientList.map(client => client.id),
|
||||
},
|
||||
'distributing event to clients'
|
||||
)
|
||||
const seen = new Map()
|
||||
for (const client of clientList) {
|
||||
if (!seen.has(client.id)) {
|
||||
seen.set(client.id, true)
|
||||
client.emit(message.message, ...message.payload)
|
||||
}
|
||||
}
|
||||
} else if (message.health_check) {
|
||||
logger.debug(
|
||||
{ message },
|
||||
'got health check message in editor events channel'
|
||||
)
|
||||
HealthCheckManager.check(channel, message.key)
|
||||
}
|
||||
})
|
||||
},
|
||||
}
|
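emitToRoom above serializes every event into a single JSON envelope published on the editor-events channel; _processEditorEvent parses it back and fans it out to the sockets in the room. A sketch of what one envelope looks like on the wire, with the ids being placeholder values:

// Illustrative payload only; field names mirror emitToRoom above.
const envelope = JSON.stringify({
  room_id: 'project-id-123', // the project room the event is scoped to
  message: 'clientTracking.clientUpdated', // the socket.io event name to re-emit
  payload: [{ row: 42, column: 36, doc_id: 'doc-id-456' }], // spread into client.emit
})
// published via ChannelManager.publish(rclientPub, 'editor-events', 'project-id-123', envelope)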
8
services/real-time/buildscript.txt
Normal file
@@ -0,0 +1,8 @@
real-time
--dependencies=redis
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--node-version=12.22.3
--public-repo=True
--script-version=3.11.0
168
services/real-time/config/settings.defaults.js
Normal file
@@ -0,0 +1,168 @@
/* eslint-disable camelcase */
|
||||
|
||||
const settings = {
|
||||
redis: {
|
||||
pubsub: {
|
||||
host:
|
||||
process.env.PUBSUB_REDIS_HOST || process.env.REDIS_HOST || 'localhost',
|
||||
port: process.env.PUBSUB_REDIS_PORT || process.env.REDIS_PORT || '6379',
|
||||
password:
|
||||
process.env.PUBSUB_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
|
||||
maxRetriesPerRequest: parseInt(
|
||||
process.env.PUBSUB_REDIS_MAX_RETRIES_PER_REQUEST ||
|
||||
process.env.REDIS_MAX_RETRIES_PER_REQUEST ||
|
||||
'20'
|
||||
),
|
||||
},
|
||||
|
||||
realtime: {
|
||||
host:
|
||||
process.env.REAL_TIME_REDIS_HOST ||
|
||||
process.env.REDIS_HOST ||
|
||||
'localhost',
|
||||
port:
|
||||
process.env.REAL_TIME_REDIS_PORT || process.env.REDIS_PORT || '6379',
|
||||
password:
|
||||
process.env.REAL_TIME_REDIS_PASSWORD ||
|
||||
process.env.REDIS_PASSWORD ||
|
||||
'',
|
||||
key_schema: {
|
||||
clientsInProject({ project_id }) {
|
||||
return `clients_in_project:{${project_id}}`
|
||||
},
|
||||
connectedUser({ project_id, client_id }) {
|
||||
return `connected_user:{${project_id}}:${client_id}`
|
||||
},
|
||||
},
|
||||
maxRetriesPerRequest: parseInt(
|
||||
process.env.REAL_TIME_REDIS_MAX_RETRIES_PER_REQUEST ||
|
||||
process.env.REDIS_MAX_RETRIES_PER_REQUEST ||
|
||||
'20'
|
||||
),
|
||||
},
|
||||
|
||||
documentupdater: {
|
||||
host:
|
||||
process.env.DOC_UPDATER_REDIS_HOST ||
|
||||
process.env.REDIS_HOST ||
|
||||
'localhost',
|
||||
port:
|
||||
process.env.DOC_UPDATER_REDIS_PORT || process.env.REDIS_PORT || '6379',
|
||||
password:
|
||||
process.env.DOC_UPDATER_REDIS_PASSWORD ||
|
||||
process.env.REDIS_PASSWORD ||
|
||||
'',
|
||||
key_schema: {
|
||||
pendingUpdates({ doc_id }) {
|
||||
return `PendingUpdates:{${doc_id}}`
|
||||
},
|
||||
},
|
||||
maxRetriesPerRequest: parseInt(
|
||||
process.env.DOC_UPDATER_REDIS_MAX_RETRIES_PER_REQUEST ||
|
||||
process.env.REDIS_MAX_RETRIES_PER_REQUEST ||
|
||||
'20'
|
||||
),
|
||||
},
|
||||
|
||||
websessions: {
|
||||
host: process.env.WEB_REDIS_HOST || process.env.REDIS_HOST || 'localhost',
|
||||
port: process.env.WEB_REDIS_PORT || process.env.REDIS_PORT || '6379',
|
||||
password:
|
||||
process.env.WEB_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
|
||||
maxRetriesPerRequest: parseInt(
|
||||
process.env.WEB_REDIS_MAX_RETRIES_PER_REQUEST ||
|
||||
process.env.REDIS_MAX_RETRIES_PER_REQUEST ||
|
||||
'20'
|
||||
),
|
||||
},
|
||||
},
|
||||
|
||||
internal: {
|
||||
realTime: {
|
||||
port: 3026,
|
||||
host: process.env.LISTEN_ADDRESS || 'localhost',
|
||||
user: 'sharelatex',
|
||||
pass: 'password',
|
||||
},
|
||||
},
|
||||
|
||||
apis: {
|
||||
web: {
|
||||
url: `http://${
|
||||
process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
|
||||
}:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
|
||||
user: process.env.WEB_API_USER || 'sharelatex',
|
||||
pass: process.env.WEB_API_PASSWORD || 'password',
|
||||
},
|
||||
documentupdater: {
|
||||
url: `http://${
|
||||
process.env.DOCUMENT_UPDATER_HOST ||
|
||||
process.env.DOCUPDATER_HOST ||
|
||||
'localhost'
|
||||
}:3003`,
|
||||
},
|
||||
},
|
||||
|
||||
security: {
|
||||
sessionSecret: process.env.SESSION_SECRET || 'secret-please-change',
|
||||
},
|
||||
|
||||
cookieName: process.env.COOKIE_NAME || 'sharelatex.sid',
|
||||
|
||||
// Expose the hostname in the `debug.getHostname` rpc
|
||||
exposeHostname: process.env.EXPOSE_HOSTNAME === 'true',
|
||||
|
||||
max_doc_length: 2 * 1024 * 1024, // 2mb
|
||||
|
||||
// should be set to the same same as dispatcherCount in document updater
|
||||
pendingUpdateListShardCount: parseInt(
|
||||
process.env.PENDING_UPDATE_LIST_SHARD_COUNT || 10,
|
||||
10
|
||||
),
|
||||
|
||||
// combine
|
||||
// max_doc_length (2mb see above) * 2 (delete + insert)
|
||||
// max_ranges_size (3mb see MAX_RANGES_SIZE in document-updater)
|
||||
// overhead for JSON serialization
|
||||
maxUpdateSize:
|
||||
parseInt(process.env.MAX_UPDATE_SIZE) || 7 * 1024 * 1024 + 64 * 1024,
|
||||
|
||||
shutdownDrainTimeWindow: process.env.SHUTDOWN_DRAIN_TIME_WINDOW || 9,
|
||||
|
||||
// The shutdown procedure asks clients to reconnect gracefully.
|
||||
// 3rd-party/buggy clients may not act upon receiving the message and keep
|
||||
// stale connections alive. We forcefully disconnect them after X ms:
|
||||
gracefulReconnectTimeoutMs:
|
||||
parseInt(process.env.GRACEFUL_RECONNECT_TIMEOUT_MS, 10) ||
|
||||
// The frontend allows actively editing users to keep the connection open
|
||||
// for up-to ConnectionManager.MAX_RECONNECT_GRACEFULLY_INTERVAL=45s
|
||||
// Permit an extra delay to account for slow/flaky connections.
|
||||
(45 + 30) * 1000,
|
||||
|
||||
continualPubsubTraffic: process.env.CONTINUAL_PUBSUB_TRAFFIC || false,
|
||||
|
||||
checkEventOrder: process.env.CHECK_EVENT_ORDER || false,
|
||||
|
||||
publishOnIndividualChannels:
|
||||
process.env.PUBLISH_ON_INDIVIDUAL_CHANNELS || false,
|
||||
|
||||
statusCheckInterval: parseInt(process.env.STATUS_CHECK_INTERVAL || '0'),
|
||||
|
||||
// The deployment colour for this app (if any). Used for blue green deploys.
|
||||
deploymentColour: process.env.DEPLOYMENT_COLOUR,
|
||||
// Load balancer health checks will return 200 only when this file contains
|
||||
// the deployment colour for this app.
|
||||
deploymentFile: process.env.DEPLOYMENT_FILE,
|
||||
|
||||
sentry: {
|
||||
dsn: process.env.SENTRY_DSN,
|
||||
},
|
||||
|
||||
errors: {
|
||||
catchUncaughtErrors: true,
|
||||
shutdownOnUncaughtError: true,
|
||||
},
|
||||
}
|
||||
|
||||
// console.log settings.redis
|
||||
module.exports = settings
|
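The default maxUpdateSize above combines the 2 MB document length limit (counted twice, for a delete plus an insert), the 3 MB ranges limit from document-updater, and an allowance for JSON serialization overhead. The arithmetic, as a quick check:

// Quick check of the default maxUpdateSize arithmetic above.
const maxDocLength = 2 * 1024 * 1024 // 2 MB, see max_doc_length
const maxRangesSize = 3 * 1024 * 1024 // 3 MB, MAX_RANGES_SIZE in document-updater
const jsonOverhead = 64 * 1024 // 64 KB allowance for JSON serialization

const maxUpdateSize = maxDocLength * 2 + maxRangesSize + jsonOverhead
console.log(maxUpdateSize === 7 * 1024 * 1024 + 64 * 1024) // => true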
5
services/real-time/config/settings.test.js
Normal file
@@ -0,0 +1,5 @@
module.exports = {
  errors: {
    catchUncaughtErrors: false,
  },
}
49
services/real-time/docker-compose.ci.yml
Normal file
@@ -0,0 +1,49 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

version: "2.3"

services:
  test_unit:
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    user: node
    command: npm run test:unit:_run
    environment:
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"


  test_acceptance:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      REDIS_HOST: redis
      QUEUES_REDIS_HOST: redis
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      MOCHA_GREP: ${MOCHA_GREP}
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
    depends_on:
      redis:
        condition: service_healthy
    user: node
    command: npm run test:acceptance:_run


  tar:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    volumes:
      - ./:/tmp/build/
    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
    user: root
  redis:
    image: redis
    healthcheck:
      test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ]
      interval: 1s
      retries: 20
47
services/real-time/docker-compose.yml
Normal file
@@ -0,0 +1,47 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

version: "2.3"

services:
  test_unit:
    image: node:12.22.3
    volumes:
      - .:/app
    working_dir: /app
    environment:
      MOCHA_GREP: ${MOCHA_GREP}
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
    command: npm run --silent test:unit
    user: node

  test_acceptance:
    image: node:12.22.3
    volumes:
      - .:/app
    working_dir: /app
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      REDIS_HOST: redis
      QUEUES_REDIS_HOST: redis
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      MOCHA_GREP: ${MOCHA_GREP}
      LOG_LEVEL: ERROR
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
    user: node
    depends_on:
      redis:
        condition: service_healthy
    command: npm run --silent test:acceptance

  redis:
    image: redis
    healthcheck:
      test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ]
      interval: 1s
      retries: 20
17
services/real-time/nodemon.json
Normal file
@@ -0,0 +1,17 @@
{
  "ignore": [
    ".git",
    "node_modules/"
  ],
  "verbose": true,
  "legacyWatch": true,
  "execMap": {
    "js": "npm run start"
  },
  "watch": [
    "app/js/",
    "app.js",
    "config/"
  ],
  "ext": "js"
}
5587
services/real-time/package-lock.json
generated
Normal file
File diff suppressed because it is too large
65
services/real-time/package.json
Normal file
@@ -0,0 +1,65 @@
{
  "name": "real-time-sharelatex",
  "version": "0.1.4",
  "description": "The socket.io layer of ShareLaTeX for real-time editor interactions",
  "author": "ShareLaTeX <team@sharelatex.com>",
  "license": "AGPL-3.0-only",
  "private": true,
  "repository": {
    "type": "git",
    "url": "https://github.com/sharelatex/real-time-sharelatex.git"
  },
  "scripts": {
    "start": "node $NODE_APP_OPTIONS app.js",
    "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
    "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
    "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
    "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
    "nodemon": "nodemon --config nodemon.json",
    "lint": "eslint --max-warnings 0 --format unix .",
    "format": "prettier --list-different $PWD/'**/*.js'",
    "format:fix": "prettier --write $PWD/'**/*.js'",
    "lint:fix": "eslint --fix ."
  },
  "dependencies": {
    "@overleaf/metrics": "^3.5.1",
    "@overleaf/o-error": "^3.1.0",
    "@overleaf/redis-wrapper": "^2.0.0",
    "@overleaf/settings": "^2.1.1",
    "async": "^0.9.0",
    "base64id": "0.1.0",
    "basic-auth-connect": "^1.0.0",
    "body-parser": "^1.19.0",
    "bunyan": "^1.8.15",
    "connect-redis": "^2.1.0",
    "cookie-parser": "^1.4.5",
    "express": "^4.17.1",
    "express-session": "^1.17.1",
    "logger-sharelatex": "^2.2.0",
    "request": "^2.88.2",
    "socket.io": "https://github.com/overleaf/socket.io/archive/0.9.19-overleaf-5.tar.gz",
    "socket.io-client": "https://github.com/overleaf/socket.io-client/archive/0.9.17-overleaf-3.tar.gz",
    "underscore": "1.13.1"
  },
  "devDependencies": {
    "chai": "^4.2.0",
    "chai-as-promised": "^7.1.1",
    "cookie-signature": "^1.1.0",
    "eslint": "^7.21.0",
    "eslint-config-prettier": "^8.1.0",
    "eslint-config-standard": "^16.0.2",
    "eslint-plugin-chai-expect": "^2.2.0",
    "eslint-plugin-chai-friendly": "^0.6.0",
    "eslint-plugin-import": "^2.22.1",
    "eslint-plugin-mocha": "^8.0.0",
    "eslint-plugin-node": "^11.1.0",
    "eslint-plugin-prettier": "^3.1.2",
    "eslint-plugin-promise": "^4.2.1",
    "mocha": "^8.3.2",
    "prettier": "^2.2.1",
    "sandboxed-module": "~0.3.0",
    "sinon": "^9.2.4",
    "timekeeper": "0.0.4",
    "uid-safe": "^2.1.5"
  }
}
475
services/real-time/test/acceptance/js/ApplyUpdateTests.js
Normal file
@@ -0,0 +1,475 @@
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS201: Simplify complex destructure assignments
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const async = require('async')
|
||||
const { expect } = require('chai')
|
||||
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
|
||||
const settings = require('@overleaf/settings')
|
||||
const redis = require('@overleaf/redis-wrapper')
|
||||
const rclient = redis.createClient(settings.redis.documentupdater)
|
||||
|
||||
const redisSettings = settings.redis
|
||||
|
||||
const PENDING_UPDATES_LIST_KEYS = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9].map(n => {
|
||||
let key = 'pending-updates-list'
|
||||
if (n !== 0) {
|
||||
key += `-${n}`
|
||||
}
|
||||
return key
|
||||
})
|
||||
|
||||
function getPendingUpdatesList(cb) {
|
||||
Promise.all(PENDING_UPDATES_LIST_KEYS.map(key => rclient.lrange(key, 0, -1)))
|
||||
.then(results => {
|
||||
cb(
|
||||
null,
|
||||
results.reduce((acc, more) => {
|
||||
if (more.length) {
|
||||
acc.push(...more)
|
||||
}
|
||||
return acc
|
||||
}, [])
|
||||
)
|
||||
})
|
||||
.catch(cb)
|
||||
}
|
||||
|
||||
function clearPendingUpdatesList(cb) {
|
||||
Promise.all(PENDING_UPDATES_LIST_KEYS.map(key => rclient.del(key)))
|
||||
.then(() => cb(null))
|
||||
.catch(cb)
|
||||
}
|
||||
|
||||
describe('applyOtUpdate', function () {
|
||||
before(function () {
|
||||
return (this.update = {
|
||||
op: [{ i: 'foo', p: 42 }],
|
||||
})
|
||||
})
|
||||
describe('when authorized', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'readAndWrite',
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'applyOtUpdate',
|
||||
this.doc_id,
|
||||
this.update,
|
||||
cb
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should push the doc into the pending updates list', function (done) {
|
||||
getPendingUpdatesList((error, ...rest) => {
|
||||
const [doc_id] = Array.from(rest[0])
|
||||
doc_id.should.equal(`${this.project_id}:${this.doc_id}`)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
|
||||
it('should push the update into redis', function (done) {
|
||||
rclient.lrange(
|
||||
redisSettings.documentupdater.key_schema.pendingUpdates({
|
||||
doc_id: this.doc_id,
|
||||
}),
|
||||
0,
|
||||
-1,
|
||||
(error, ...rest) => {
|
||||
let [update] = Array.from(rest[0])
|
||||
update = JSON.parse(update)
|
||||
update.op.should.deep.equal(this.update.op)
|
||||
update.meta.should.deep.equal({
|
||||
source: this.client.publicId,
|
||||
user_id: this.user_id,
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
|
||||
return after(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => clearPendingUpdatesList(cb),
|
||||
cb =>
|
||||
rclient.del(
|
||||
'DocsWithPendingUpdates',
|
||||
`${this.project_id}:${this.doc_id}`,
|
||||
cb
|
||||
),
|
||||
cb =>
|
||||
rclient.del(
|
||||
redisSettings.documentupdater.key_schema.pendingUpdates(
|
||||
this.doc_id
|
||||
),
|
||||
cb
|
||||
),
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when authorized with a huge edit update', function () {
|
||||
before(function (done) {
|
||||
this.update = {
|
||||
op: {
|
||||
p: 12,
|
||||
t: 'update is too large'.repeat(1024 * 400), // >7MB
|
||||
},
|
||||
}
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'readAndWrite',
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
this.client.on('connectionAccepted', cb)
|
||||
return this.client.on('otUpdateError', otUpdateError => {
|
||||
this.otUpdateError = otUpdateError
|
||||
})
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'applyOtUpdate',
|
||||
this.doc_id,
|
||||
this.update,
|
||||
error => {
|
||||
this.error = error
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should not return an error', function () {
|
||||
return expect(this.error).to.not.exist
|
||||
})
|
||||
|
||||
it('should send an otUpdateError to the client', function (done) {
|
||||
return setTimeout(() => {
|
||||
expect(this.otUpdateError).to.exist
|
||||
return done()
|
||||
}, 300)
|
||||
})
|
||||
|
||||
it('should disconnect the client', function (done) {
|
||||
return setTimeout(() => {
|
||||
this.client.socket.connected.should.equal(false)
|
||||
return done()
|
||||
}, 300)
|
||||
})
|
||||
|
||||
return it('should not put the update in redis', function (done) {
|
||||
rclient.llen(
|
||||
redisSettings.documentupdater.key_schema.pendingUpdates({
|
||||
doc_id: this.doc_id,
|
||||
}),
|
||||
(error, len) => {
|
||||
len.should.equal(0)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
})
|
||||
|
||||
describe('when authorized to read-only with an edit update', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'readOnly',
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'applyOtUpdate',
|
||||
this.doc_id,
|
||||
this.update,
|
||||
error => {
|
||||
this.error = error
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should return an error', function () {
|
||||
return expect(this.error).to.exist
|
||||
})
|
||||
|
||||
it('should disconnect the client', function (done) {
|
||||
return setTimeout(() => {
|
||||
this.client.socket.connected.should.equal(false)
|
||||
return done()
|
||||
}, 300)
|
||||
})
|
||||
|
||||
return it('should not put the update in redis', function (done) {
|
||||
rclient.llen(
|
||||
redisSettings.documentupdater.key_schema.pendingUpdates({
|
||||
doc_id: this.doc_id,
|
||||
}),
|
||||
(error, len) => {
|
||||
len.should.equal(0)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when authorized to read-only with a comment update', function () {
|
||||
before(function (done) {
|
||||
this.comment_update = {
|
||||
op: [{ c: 'foo', p: 42 }],
|
||||
}
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'readOnly',
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'applyOtUpdate',
|
||||
this.doc_id,
|
||||
this.comment_update,
|
||||
cb
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should push the doc into the pending updates list', function (done) {
|
||||
getPendingUpdatesList((error, ...rest) => {
|
||||
const [doc_id] = Array.from(rest[0])
|
||||
doc_id.should.equal(`${this.project_id}:${this.doc_id}`)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
|
||||
it('should push the update into redis', function (done) {
|
||||
rclient.lrange(
|
||||
redisSettings.documentupdater.key_schema.pendingUpdates({
|
||||
doc_id: this.doc_id,
|
||||
}),
|
||||
0,
|
||||
-1,
|
||||
(error, ...rest) => {
|
||||
let [update] = Array.from(rest[0])
|
||||
update = JSON.parse(update)
|
||||
update.op.should.deep.equal(this.comment_update.op)
|
||||
update.meta.should.deep.equal({
|
||||
source: this.client.publicId,
|
||||
user_id: this.user_id,
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
|
||||
return after(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => clearPendingUpdatesList(cb),
|
||||
cb =>
|
||||
rclient.del(
|
||||
'DocsWithPendingUpdates',
|
||||
`${this.project_id}:${this.doc_id}`,
|
||||
cb
|
||||
),
|
||||
cb =>
|
||||
rclient.del(
|
||||
redisSettings.documentupdater.key_schema.pendingUpdates({
|
||||
doc_id: this.doc_id,
|
||||
}),
|
||||
cb
|
||||
),
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
253
services/real-time/test/acceptance/js/ClientTrackingTests.js
Normal file
@@ -0,0 +1,253 @@
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const { expect } = require('chai')
|
||||
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const MockWebServer = require('./helpers/MockWebServer')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
|
||||
const async = require('async')
|
||||
|
||||
describe('clientTracking', function () {
|
||||
describe('when a client updates its cursor location', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: { name: 'Test Project' },
|
||||
},
|
||||
(error, { user_id, project_id }) => {
|
||||
this.user_id = user_id
|
||||
this.project_id = project_id
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientB = RealTimeClient.connect()
|
||||
return this.clientB.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit(
|
||||
'joinProject',
|
||||
{
|
||||
project_id: this.project_id,
|
||||
},
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientB.emit(
|
||||
'joinProject',
|
||||
{
|
||||
project_id: this.project_id,
|
||||
},
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.updates = []
|
||||
this.clientB.on('clientTracking.clientUpdated', data => {
|
||||
return this.updates.push(data)
|
||||
})
|
||||
|
||||
return this.clientA.emit(
|
||||
'clientTracking.updatePosition',
|
||||
{
|
||||
row: (this.row = 42),
|
||||
column: (this.column = 36),
|
||||
doc_id: this.doc_id,
|
||||
},
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return setTimeout(cb, 300)
|
||||
}
|
||||
)
|
||||
}, // Give the message a chance to reach client B.
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should tell other clients about the update', function () {
|
||||
return this.updates.should.deep.equal([
|
||||
{
|
||||
row: this.row,
|
||||
column: this.column,
|
||||
doc_id: this.doc_id,
|
||||
id: this.clientA.publicId,
|
||||
user_id: this.user_id,
|
||||
name: 'Joe Bloggs',
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
return it('should record the update in getConnectedUsers', function (done) {
|
||||
return this.clientB.emit(
|
||||
'clientTracking.getConnectedUsers',
|
||||
(error, users) => {
|
||||
for (const user of Array.from(users)) {
|
||||
if (user.client_id === this.clientA.publicId) {
|
||||
expect(user.cursorData).to.deep.equal({
|
||||
row: this.row,
|
||||
column: this.column,
|
||||
doc_id: this.doc_id,
|
||||
})
|
||||
return done()
|
||||
}
|
||||
}
|
||||
throw new Error('user was never found')
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when an anonymous client updates its cursor location', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: { name: 'Test Project' },
|
||||
publicAccess: 'readAndWrite',
|
||||
},
|
||||
(error, { user_id, project_id }) => {
|
||||
this.user_id = user_id
|
||||
this.project_id = project_id
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit(
|
||||
'joinProject',
|
||||
{
|
||||
project_id: this.project_id,
|
||||
},
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return RealTimeClient.setSession({}, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.anonymous = RealTimeClient.connect()
|
||||
return this.anonymous.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.anonymous.emit(
|
||||
'joinProject',
|
||||
{
|
||||
project_id: this.project_id,
|
||||
},
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.anonymous.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.updates = []
|
||||
this.clientA.on('clientTracking.clientUpdated', data => {
|
||||
return this.updates.push(data)
|
||||
})
|
||||
|
||||
return this.anonymous.emit(
|
||||
'clientTracking.updatePosition',
|
||||
{
|
||||
row: (this.row = 42),
|
||||
column: (this.column = 36),
|
||||
doc_id: this.doc_id,
|
||||
},
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return setTimeout(cb, 300)
|
||||
}
|
||||
)
|
||||
}, // Give the message a chance to reach client B.
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return it('should tell other clients about the update', function () {
|
||||
return this.updates.should.deep.equal([
|
||||
{
|
||||
row: this.row,
|
||||
column: this.column,
|
||||
doc_id: this.doc_id,
|
||||
id: this.anonymous.publicId,
|
||||
user_id: 'anonymous-user',
|
||||
name: '',
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
})
|
135
services/real-time/test/acceptance/js/DrainManagerTests.js
Normal file
135
services/real-time/test/acceptance/js/DrainManagerTests.js
Normal file
|
@ -0,0 +1,135 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
|
||||
const { expect } = require('chai')
|
||||
|
||||
const async = require('async')
|
||||
const request = require('request')
|
||||
|
||||
const Settings = require('@overleaf/settings')
|
||||
|
||||
const drain = function (rate, callback) {
|
||||
request.post(
|
||||
{
|
||||
url: `http://localhost:3026/drain?rate=${rate}`,
|
||||
auth: {
|
||||
user: Settings.internal.realTime.user,
|
||||
pass: Settings.internal.realTime.pass,
|
||||
},
|
||||
},
|
||||
(error, response, data) => callback(error, data)
|
||||
)
|
||||
return null
|
||||
}
|
||||
|
||||
describe('DrainManagerTests', function () {
|
||||
before(function (done) {
|
||||
FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return done()
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
|
||||
before(function (done) {
|
||||
// cleanup to speedup reconnecting
|
||||
this.timeout(10000)
|
||||
return RealTimeClient.disconnectAllClients(done)
|
||||
})
|
||||
|
||||
// trigger and check cleanup
|
||||
it('should have disconnected all previous clients', function (done) {
|
||||
return RealTimeClient.getConnectedClients((error, data) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(data.length).to.equal(0)
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return describe('with two clients in the project', function () {
|
||||
beforeEach(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientB = RealTimeClient.connect()
|
||||
return this.clientB.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientB.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return describe('starting to drain', function () {
|
||||
beforeEach(function (done) {
|
||||
return async.parallel(
|
||||
[
|
||||
cb => {
|
||||
return this.clientA.on('reconnectGracefully', cb)
|
||||
},
|
||||
cb => {
|
||||
return this.clientB.on('reconnectGracefully', cb)
|
||||
},
|
||||
|
||||
cb => drain(2, cb),
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
afterEach(function (done) {
|
||||
return drain(0, done)
|
||||
}) // reset drain
|
||||
|
||||
it('should not timeout', function () {
|
||||
return expect(true).to.equal(true)
|
||||
})
|
||||
|
||||
return it('should not have disconnected', function () {
|
||||
expect(this.clientA.socket.connected).to.equal(true)
|
||||
return expect(this.clientB.socket.connected).to.equal(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
288
services/real-time/test/acceptance/js/EarlyDisconnect.js
Normal file
288
services/real-time/test/acceptance/js/EarlyDisconnect.js
Normal file
|
@ -0,0 +1,288 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const async = require('async')
|
||||
const { expect } = require('chai')
|
||||
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const MockDocUpdaterServer = require('./helpers/MockDocUpdaterServer')
|
||||
const MockWebServer = require('./helpers/MockWebServer')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
|
||||
const settings = require('@overleaf/settings')
|
||||
const redis = require('@overleaf/redis-wrapper')
|
||||
const rclient = redis.createClient(settings.redis.pubsub)
|
||||
const rclientRT = redis.createClient(settings.redis.realtime)
|
||||
const KeysRT = settings.redis.realtime.key_schema
|
||||
|
||||
describe('EarlyDisconnect', function () {
|
||||
before(function (done) {
|
||||
return MockDocUpdaterServer.run(done)
|
||||
})
|
||||
|
||||
describe('when the client disconnects before joinProject completes', function () {
|
||||
before(function () {
|
||||
// slow down web-api requests to force the race condition
|
||||
let joinProject
|
||||
this.actualWebAPIjoinProject = joinProject = MockWebServer.joinProject
|
||||
return (MockWebServer.joinProject = (project_id, user_id, cb) =>
|
||||
setTimeout(() => joinProject(project_id, user_id, cb), 300))
|
||||
})
|
||||
|
||||
after(function () {
|
||||
return (MockWebServer.joinProject = this.actualWebAPIjoinProject)
|
||||
})
|
||||
|
||||
beforeEach(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
() => {}
|
||||
)
|
||||
// disconnect before joinProject completes
|
||||
this.clientA.on('disconnect', () => cb())
|
||||
return this.clientA.disconnect()
|
||||
},
|
||||
|
||||
cb => {
|
||||
// wait for joinDoc and subscribe
|
||||
return setTimeout(cb, 500)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
// we can force the race condition, there is no need to repeat too often
|
||||
return Array.from(Array.from({ length: 5 }).map((_, i) => i + 1)).map(
|
||||
attempt =>
|
||||
it(`should not subscribe to the pub/sub channel anymore (race ${attempt})`, function (done) {
|
||||
rclient.pubsub('CHANNELS', (err, resp) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
expect(resp).to.not.include(`editor-events:${this.project_id}`)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
describe('when the client disconnects before joinDoc completes', function () {
|
||||
beforeEach(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA.emit('joinDoc', this.doc_id, () => {})
|
||||
// disconnect before joinDoc completes
|
||||
this.clientA.on('disconnect', () => cb())
|
||||
return this.clientA.disconnect()
|
||||
},
|
||||
|
||||
cb => {
|
||||
// wait for subscribe and unsubscribe
|
||||
return setTimeout(cb, 100)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
// we can not force the race condition, so we have to try many times
|
||||
return Array.from(Array.from({ length: 20 }).map((_, i) => i + 1)).map(
|
||||
attempt =>
|
||||
it(`should not subscribe to the pub/sub channels anymore (race ${attempt})`, function (done) {
|
||||
rclient.pubsub('CHANNELS', (err, resp) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
expect(resp).to.not.include(`editor-events:${this.project_id}`)
|
||||
|
||||
return rclient.pubsub('CHANNELS', (err, resp) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
expect(resp).to.not.include(`applied-ops:${this.doc_id}`)
|
||||
return done()
|
||||
})
|
||||
})
|
||||
return null
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
return describe('when the client disconnects before clientTracking.updatePosition starts', function () {
|
||||
beforeEach(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA.emit(
|
||||
'clientTracking.updatePosition',
|
||||
{
|
||||
row: 42,
|
||||
column: 36,
|
||||
doc_id: this.doc_id,
|
||||
},
|
||||
() => {}
|
||||
)
|
||||
// disconnect before updateClientPosition completes
|
||||
this.clientA.on('disconnect', () => cb())
|
||||
return this.clientA.disconnect()
|
||||
},
|
||||
|
||||
cb => {
|
||||
// wait for updateClientPosition
|
||||
return setTimeout(cb, 100)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
// we can not force the race condition, so we have to try many times
|
||||
return Array.from(Array.from({ length: 20 }).map((_, i) => i + 1)).map(
|
||||
attempt =>
|
||||
it(`should not show the client as connected (race ${attempt})`, function (done) {
|
||||
rclientRT.smembers(
|
||||
KeysRT.clientsInProject({ project_id: this.project_id }),
|
||||
(err, results) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
expect(results).to.deep.equal([])
|
||||
return done()
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
117
services/real-time/test/acceptance/js/HttpControllerTests.js
Normal file
117
services/real-time/test/acceptance/js/HttpControllerTests.js
Normal file
|
@ -0,0 +1,117 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const async = require('async')
|
||||
const { expect } = require('chai')
|
||||
const request = require('request').defaults({
|
||||
baseUrl: 'http://localhost:3026',
|
||||
})
|
||||
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
|
||||
describe('HttpControllerTests', function () {
|
||||
describe('without a user', function () {
|
||||
return it('should return 404 for the client view', function (done) {
|
||||
const client_id = 'not-existing'
|
||||
return request.get(
|
||||
{
|
||||
url: `/clients/${client_id}`,
|
||||
json: true,
|
||||
},
|
||||
(error, response, data) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(response.statusCode).to.equal(404)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('with a user and after joining a project', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
},
|
||||
(error, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{},
|
||||
(error, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return it('should send a client view', function (done) {
|
||||
return request.get(
|
||||
{
|
||||
url: `/clients/${this.client.socket.sessionid}`,
|
||||
json: true,
|
||||
},
|
||||
(error, response, data) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(response.statusCode).to.equal(200)
|
||||
expect(data.connected_time).to.exist
|
||||
delete data.connected_time
|
||||
// .email is not set in the session
|
||||
delete data.email
|
||||
expect(data).to.deep.equal({
|
||||
client_id: this.client.socket.sessionid,
|
||||
first_name: 'Joe',
|
||||
last_name: 'Bloggs',
|
||||
project_id: this.project_id,
|
||||
user_id: this.user_id,
|
||||
rooms: [this.project_id, this.doc_id],
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
563
services/real-time/test/acceptance/js/JoinDocTests.js
Normal file
563
services/real-time/test/acceptance/js/JoinDocTests.js
Normal file
|
@ -0,0 +1,563 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const { expect } = require('chai')
|
||||
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const MockDocUpdaterServer = require('./helpers/MockDocUpdaterServer')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
|
||||
const async = require('async')
|
||||
|
||||
describe('joinDoc', function () {
|
||||
before(function () {
|
||||
this.lines = ['test', 'doc', 'lines']
|
||||
this.version = 42
|
||||
this.ops = ['mock', 'doc', 'ops']
|
||||
return (this.ranges = { mock: 'ranges' })
|
||||
})
|
||||
|
||||
describe('when authorised readAndWrite', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'readAndWrite',
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{
|
||||
lines: this.lines,
|
||||
version: this.version,
|
||||
ops: this.ops,
|
||||
ranges: this.ranges,
|
||||
},
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinDoc',
|
||||
this.doc_id,
|
||||
(error, ...rest) => {
|
||||
;[...this.returnedArgs] = Array.from(rest)
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should get the doc from the doc updater', function () {
|
||||
return MockDocUpdaterServer.getDocument
|
||||
.calledWith(this.project_id, this.doc_id, -1)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the doc lines, version, ranges and ops', function () {
|
||||
return this.returnedArgs.should.deep.equal([
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ops,
|
||||
this.ranges,
|
||||
])
|
||||
})
|
||||
|
||||
return it('should have joined the doc room', function (done) {
|
||||
return RealTimeClient.getConnectedClient(
|
||||
this.client.socket.sessionid,
|
||||
(error, client) => {
|
||||
expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when authorised readOnly', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'readOnly',
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{
|
||||
lines: this.lines,
|
||||
version: this.version,
|
||||
ops: this.ops,
|
||||
ranges: this.ranges,
|
||||
},
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinDoc',
|
||||
this.doc_id,
|
||||
(error, ...rest) => {
|
||||
;[...this.returnedArgs] = Array.from(rest)
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should get the doc from the doc updater', function () {
|
||||
return MockDocUpdaterServer.getDocument
|
||||
.calledWith(this.project_id, this.doc_id, -1)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the doc lines, version, ranges and ops', function () {
|
||||
return this.returnedArgs.should.deep.equal([
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ops,
|
||||
this.ranges,
|
||||
])
|
||||
})
|
||||
|
||||
return it('should have joined the doc room', function (done) {
|
||||
return RealTimeClient.getConnectedClient(
|
||||
this.client.socket.sessionid,
|
||||
(error, client) => {
|
||||
expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when authorised as owner', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{
|
||||
lines: this.lines,
|
||||
version: this.version,
|
||||
ops: this.ops,
|
||||
ranges: this.ranges,
|
||||
},
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinDoc',
|
||||
this.doc_id,
|
||||
(error, ...rest) => {
|
||||
;[...this.returnedArgs] = Array.from(rest)
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should get the doc from the doc updater', function () {
|
||||
return MockDocUpdaterServer.getDocument
|
||||
.calledWith(this.project_id, this.doc_id, -1)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the doc lines, version, ranges and ops', function () {
|
||||
return this.returnedArgs.should.deep.equal([
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ops,
|
||||
this.ranges,
|
||||
])
|
||||
})
|
||||
|
||||
return it('should have joined the doc room', function (done) {
|
||||
return RealTimeClient.getConnectedClient(
|
||||
this.client.socket.sessionid,
|
||||
(error, client) => {
|
||||
expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
// It is impossible to write an acceptance test to test joining an unauthorized
|
||||
// project, since joinProject already catches that. If you can join a project,
|
||||
// then you can join a doc in that project.
|
||||
|
||||
describe('with a fromVersion', function () {
|
||||
before(function (done) {
|
||||
this.fromVersion = 36
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'readAndWrite',
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{
|
||||
lines: this.lines,
|
||||
version: this.version,
|
||||
ops: this.ops,
|
||||
ranges: this.ranges,
|
||||
},
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinDoc',
|
||||
this.doc_id,
|
||||
this.fromVersion,
|
||||
(error, ...rest) => {
|
||||
;[...this.returnedArgs] = Array.from(rest)
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should get the doc from the doc updater with the fromVersion', function () {
|
||||
return MockDocUpdaterServer.getDocument
|
||||
.calledWith(this.project_id, this.doc_id, this.fromVersion)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the doc lines, version, ranges and ops', function () {
|
||||
return this.returnedArgs.should.deep.equal([
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ops,
|
||||
this.ranges,
|
||||
])
|
||||
})
|
||||
|
||||
return it('should have joined the doc room', function (done) {
|
||||
return RealTimeClient.getConnectedClient(
|
||||
this.client.socket.sessionid,
|
||||
(error, client) => {
|
||||
expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('with options', function () {
|
||||
before(function (done) {
|
||||
this.options = { encodeRanges: true }
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'readAndWrite',
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{
|
||||
lines: this.lines,
|
||||
version: this.version,
|
||||
ops: this.ops,
|
||||
ranges: this.ranges,
|
||||
},
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinDoc',
|
||||
this.doc_id,
|
||||
this.options,
|
||||
(error, ...rest) => {
|
||||
;[...this.returnedArgs] = Array.from(rest)
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should get the doc from the doc updater with the default fromVersion', function () {
|
||||
return MockDocUpdaterServer.getDocument
|
||||
.calledWith(this.project_id, this.doc_id, -1)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the doc lines, version, ranges and ops', function () {
|
||||
return this.returnedArgs.should.deep.equal([
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ops,
|
||||
this.ranges,
|
||||
])
|
||||
})
|
||||
|
||||
return it('should have joined the doc room', function (done) {
|
||||
return RealTimeClient.getConnectedClient(
|
||||
this.client.socket.sessionid,
|
||||
(error, client) => {
|
||||
expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('with fromVersion and options', function () {
|
||||
before(function (done) {
|
||||
this.fromVersion = 36
|
||||
this.options = { encodeRanges: true }
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'readAndWrite',
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{
|
||||
lines: this.lines,
|
||||
version: this.version,
|
||||
ops: this.ops,
|
||||
ranges: this.ranges,
|
||||
},
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinDoc',
|
||||
this.doc_id,
|
||||
this.fromVersion,
|
||||
this.options,
|
||||
(error, ...rest) => {
|
||||
;[...this.returnedArgs] = Array.from(rest)
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should get the doc from the doc updater with the fromVersion', function () {
|
||||
return MockDocUpdaterServer.getDocument
|
||||
.calledWith(this.project_id, this.doc_id, this.fromVersion)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the doc lines, version, ranges and ops', function () {
|
||||
return this.returnedArgs.should.deep.equal([
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ops,
|
||||
this.ranges,
|
||||
])
|
||||
})
|
||||
|
||||
return it('should have joined the doc room', function (done) {
|
||||
return RealTimeClient.getConnectedClient(
|
||||
this.client.socket.sessionid,
|
||||
(error, client) => {
|
||||
expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
328
services/real-time/test/acceptance/js/JoinProjectTests.js
Normal file
328
services/real-time/test/acceptance/js/JoinProjectTests.js
Normal file
|
@ -0,0 +1,328 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const { expect } = require('chai')
|
||||
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const MockWebServer = require('./helpers/MockWebServer')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
|
||||
const async = require('async')
|
||||
|
||||
describe('joinProject', function () {
|
||||
describe('when authorized', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should get the project from web', function () {
|
||||
return MockWebServer.joinProject
|
||||
.calledWith(this.project_id, this.user_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the project', function () {
|
||||
return this.project.should.deep.equal({
|
||||
name: 'Test Project',
|
||||
})
|
||||
})
|
||||
|
||||
it('should return the privilege level', function () {
|
||||
return this.privilegeLevel.should.equal('owner')
|
||||
})
|
||||
|
||||
it('should return the protocolVersion', function () {
|
||||
return this.protocolVersion.should.equal(2)
|
||||
})
|
||||
|
||||
it('should have joined the project room', function (done) {
|
||||
return RealTimeClient.getConnectedClient(
|
||||
this.client.socket.sessionid,
|
||||
(error, client) => {
|
||||
expect(Array.from(client.rooms).includes(this.project_id)).to.equal(
|
||||
true
|
||||
)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should have marked the user as connected', function (done) {
|
||||
return this.client.emit(
|
||||
'clientTracking.getConnectedUsers',
|
||||
(error, users) => {
|
||||
let connected = false
|
||||
for (const user of Array.from(users)) {
|
||||
if (
|
||||
user.client_id === this.client.publicId &&
|
||||
user.user_id === this.user_id
|
||||
) {
|
||||
connected = true
|
||||
break
|
||||
}
|
||||
}
|
||||
expect(connected).to.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when not authorized', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: null,
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.error = error
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should return an error', function () {
|
||||
return this.error.message.should.equal('not authorized')
|
||||
})
|
||||
|
||||
return it('should not have joined the project room', function (done) {
|
||||
return RealTimeClient.getConnectedClient(
|
||||
this.client.socket.sessionid,
|
||||
(error, client) => {
|
||||
expect(Array.from(client.rooms).includes(this.project_id)).to.equal(
|
||||
false
|
||||
)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when not authorized and web replies with a 403', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
project_id: 'forbidden',
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.error = error
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should return an error', function () {
|
||||
this.error.message.should.equal('not authorized')
|
||||
})
|
||||
|
||||
it('should not have joined the project room', function (done) {
|
||||
RealTimeClient.getConnectedClient(
|
||||
this.client.socket.sessionid,
|
||||
(error, client) => {
|
||||
expect(Array.from(client.rooms).includes(this.project_id)).to.equal(
|
||||
false
|
||||
)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when deleted and web replies with a 404', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
project_id: 'not-found',
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.error = error
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should return an error', function () {
|
||||
this.error.code.should.equal('ProjectNotFound')
|
||||
})
|
||||
|
||||
it('should not have joined the project room', function (done) {
|
||||
RealTimeClient.getConnectedClient(
|
||||
this.client.socket.sessionid,
|
||||
(error, client) => {
|
||||
expect(Array.from(client.rooms).includes(this.project_id)).to.equal(
|
||||
false
|
||||
)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when over rate limit', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: 'rate-limited' },
|
||||
error => {
|
||||
this.error = error
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return a TooManyRequests error code', function () {
|
||||
this.error.message.should.equal('rate-limit hit when joining project')
|
||||
return this.error.code.should.equal('TooManyRequests')
|
||||
})
|
||||
})
|
||||
})
|
174
services/real-time/test/acceptance/js/LeaveDocTests.js
Normal file
174
services/real-time/test/acceptance/js/LeaveDocTests.js
Normal file
|
@ -0,0 +1,174 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-return-assign,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const { expect } = require('chai')
|
||||
const sinon = require('sinon')
|
||||
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const MockDocUpdaterServer = require('./helpers/MockDocUpdaterServer')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
const logger = require('logger-sharelatex')
|
||||
|
||||
const async = require('async')
|
||||
|
||||
describe('leaveDoc', function () {
|
||||
before(function () {
|
||||
this.lines = ['test', 'doc', 'lines']
|
||||
this.version = 42
|
||||
this.ops = ['mock', 'doc', 'ops']
|
||||
sinon.spy(logger, 'error')
|
||||
sinon.spy(logger, 'warn')
|
||||
sinon.spy(logger, 'log')
|
||||
return (this.other_doc_id = FixturesManager.getRandomId())
|
||||
})
|
||||
|
||||
after(function () {
|
||||
logger.error.restore() // remove the spy
|
||||
logger.warn.restore()
|
||||
return logger.log.restore()
|
||||
})
|
||||
|
||||
return describe('when joined to a doc', function () {
|
||||
beforeEach(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'readAndWrite',
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit(
|
||||
'joinDoc',
|
||||
this.doc_id,
|
||||
(error, ...rest) => {
|
||||
;[...this.returnedArgs] = Array.from(rest)
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
describe('then leaving the doc', function () {
|
||||
beforeEach(function (done) {
|
||||
return this.client.emit('leaveDoc', this.doc_id, error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return it('should have left the doc room', function (done) {
|
||||
return RealTimeClient.getConnectedClient(
|
||||
this.client.socket.sessionid,
|
||||
(error, client) => {
|
||||
expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(
|
||||
false
|
||||
)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when sending a leaveDoc request before the previous joinDoc request has completed', function () {
|
||||
beforeEach(function (done) {
|
||||
this.client.emit('leaveDoc', this.doc_id, () => {})
|
||||
this.client.emit('joinDoc', this.doc_id, () => {})
|
||||
return this.client.emit('leaveDoc', this.doc_id, error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should not trigger an error', function () {
|
||||
return sinon.assert.neverCalledWith(
|
||||
logger.error,
|
||||
sinon.match.any,
|
||||
"not subscribed - shouldn't happen"
|
||||
)
|
||||
})
|
||||
|
||||
return it('should have left the doc room', function (done) {
|
||||
return RealTimeClient.getConnectedClient(
|
||||
this.client.socket.sessionid,
|
||||
(error, client) => {
|
||||
expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(
|
||||
false
|
||||
)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when sending a leaveDoc for a room the client has not joined ', function () {
|
||||
beforeEach(function (done) {
|
||||
return this.client.emit('leaveDoc', this.other_doc_id, error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return it('should trigger a low level message only', function () {
|
||||
return sinon.assert.calledWith(
|
||||
logger.log,
|
||||
sinon.match.any,
|
||||
'ignoring request from client to leave room it is not in'
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
270
services/real-time/test/acceptance/js/LeaveProjectTests.js
Normal file
270
services/real-time/test/acceptance/js/LeaveProjectTests.js
Normal file
|
@ -0,0 +1,270 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-throw-literal,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const MockDocUpdaterServer = require('./helpers/MockDocUpdaterServer')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
|
||||
const async = require('async')
|
||||
|
||||
const settings = require('@overleaf/settings')
|
||||
const redis = require('@overleaf/redis-wrapper')
|
||||
const rclient = redis.createClient(settings.redis.pubsub)
|
||||
|
||||
describe('leaveProject', function () {
|
||||
before(function (done) {
|
||||
return MockDocUpdaterServer.run(done)
|
||||
})
|
||||
|
||||
describe('with other clients in the project', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientB = RealTimeClient.connect()
|
||||
this.clientB.on('connectionAccepted', cb)
|
||||
|
||||
this.clientBDisconnectMessages = []
|
||||
return this.clientB.on(
|
||||
'clientTracking.clientDisconnected',
|
||||
data => {
|
||||
return this.clientBDisconnectMessages.push(data)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientB.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
cb => {
|
||||
return this.clientB.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
// leaveProject is called when the client disconnects
|
||||
this.clientA.on('disconnect', () => cb())
|
||||
return this.clientA.disconnect()
|
||||
},
|
||||
|
||||
cb => {
|
||||
// The API waits a little while before flushing changes
|
||||
return setTimeout(done, 1000)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should emit a disconnect message to the room', function () {
|
||||
return this.clientBDisconnectMessages.should.deep.equal([
|
||||
this.clientA.publicId,
|
||||
])
|
||||
})
|
||||
|
||||
it('should no longer list the client in connected users', function (done) {
|
||||
return this.clientB.emit(
|
||||
'clientTracking.getConnectedUsers',
|
||||
(error, users) => {
|
||||
for (const user of Array.from(users)) {
|
||||
if (user.client_id === this.clientA.publicId) {
|
||||
throw 'Expected clientA to not be listed in connected users'
|
||||
}
|
||||
}
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should not flush the project to the document updater', function () {
|
||||
return MockDocUpdaterServer.deleteProject
|
||||
.calledWith(this.project_id)
|
||||
.should.equal(false)
|
||||
})
|
||||
|
||||
it('should remain subscribed to the editor-events channels', function (done) {
|
||||
rclient.pubsub('CHANNELS', (err, resp) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
resp.should.include(`editor-events:${this.project_id}`)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
|
||||
return it('should remain subscribed to the applied-ops channels', function (done) {
|
||||
rclient.pubsub('CHANNELS', (err, resp) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
resp.should.include(`applied-ops:${this.doc_id}`)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
})
|
||||
|
||||
return describe('with no other clients in the project', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connect', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
cb => {
|
||||
return this.clientA.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
// leaveProject is called when the client disconnects
|
||||
this.clientA.on('disconnect', () => cb())
|
||||
return this.clientA.disconnect()
|
||||
},
|
||||
|
||||
cb => {
|
||||
// The API waits a little while before flushing changes
|
||||
return setTimeout(done, 1000)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should flush the project to the document updater', function () {
|
||||
return MockDocUpdaterServer.deleteProject
|
||||
.calledWith(this.project_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should not subscribe to the editor-events channels anymore', function (done) {
|
||||
rclient.pubsub('CHANNELS', (err, resp) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
resp.should.not.include(`editor-events:${this.project_id}`)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
|
||||
return it('should not subscribe to the applied-ops channels anymore', function (done) {
|
||||
rclient.pubsub('CHANNELS', (err, resp) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
resp.should.not.include(`applied-ops:${this.doc_id}`)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
})
|
||||
})
|
477
services/real-time/test/acceptance/js/MatrixTests.js
Normal file
477
services/real-time/test/acceptance/js/MatrixTests.js
Normal file
|
@ -0,0 +1,477 @@
|
|||
/*
  This test suite is a multi-level matrix which allows us to test many cases
  with all kinds of setups.

  Users/Actors are defined in USERS and are a low-level entity that connects
  to a real-time pod. A typical UserItem is:

    someDescriptiveNameForTheTestSuite: {
      setup(cb) {
        // <setup session here>
        const options = { client: RealTimeClient.connect(), foo: 'bar' }
        cb(null, options)
      }
    }

  Sessions are a set of actions that a User performs in the life-cycle of a
  real-time session, before they try something weird. A typical SessionItem is:

    someOtherDescriptiveNameForTheTestSuite: {
      getActions(cb) {
        cb(null, [
          { rpc: 'RPC_ENDPOINT', args: [...] }
        ])
      }
    }

  Finally there are InvalidRequests, which are the weird actions hinted at in
  the Sessions section. The defined actions may be marked with 'fails' to denote
  that real-time rejects them with an error that the test expects.
  A typical InvalidRequestItem is:

    joinOwnProject: {
      getActions(cb) {
        cb(null, [
          { rpc: 'RPC_ENDPOINT', args: [...], fails: true }
        ])
      }
    }

  There is additional meta-data that UserItems and SessionItems may use to skip
  certain areas of the matrix. These are:

  - Does the User have their own project that they join as part of the Session?
    UserItem: { hasOwnProject: true, setup(cb) { cb(null, { project_id, ... }) }}
    SessionItem: { needsOwnProject: true }
*/
/* eslint-disable
|
||||
camelcase,
|
||||
*/
|
||||
const { expect } = require('chai')
|
||||
const async = require('async')
|
||||
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
|
||||
const settings = require('@overleaf/settings')
|
||||
const Keys = settings.redis.documentupdater.key_schema
|
||||
const redis = require('@overleaf/redis-wrapper')
|
||||
const rclient = redis.createClient(settings.redis.pubsub)
|
||||
|
||||
function getPendingUpdates(doc_id, cb) {
|
||||
rclient.lrange(Keys.pendingUpdates({ doc_id }), 0, 10, cb)
|
||||
}
|
||||
function cleanupPreviousUpdates(doc_id, cb) {
|
||||
rclient.del(Keys.pendingUpdates({ doc_id }), cb)
|
||||
}
|
||||
|
||||
describe('MatrixTests', function () {
|
||||
let privateProjectId, privateDocId, readWriteProjectId, readWriteDocId
|
||||
|
||||
let privateClient
|
||||
before(function setupPrivateProject(done) {
|
||||
FixturesManager.setUpEditorSession(
|
||||
{ privilegeLevel: 'owner' },
|
||||
(err, { project_id, doc_id }) => {
|
||||
if (err) return done(err)
|
||||
privateProjectId = project_id
|
||||
privateDocId = doc_id
|
||||
privateClient = RealTimeClient.connect()
|
||||
privateClient.on('connectionAccepted', () => {
|
||||
privateClient.emit(
|
||||
'joinProject',
|
||||
{ project_id: privateProjectId },
|
||||
err => {
|
||||
if (err) return done(err)
|
||||
privateClient.emit('joinDoc', privateDocId, done)
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
before(function setupReadWriteProject(done) {
|
||||
FixturesManager.setUpEditorSession(
|
||||
{
|
||||
publicAccess: 'readAndWrite',
|
||||
},
|
||||
(err, { project_id, doc_id }) => {
|
||||
readWriteProjectId = project_id
|
||||
readWriteDocId = doc_id
|
||||
done(err)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
const USER_SETUP = {
|
||||
anonymous: {
|
||||
setup(cb) {
|
||||
RealTimeClient.setSession({}, err => {
|
||||
if (err) return cb(err)
|
||||
cb(null, {
|
||||
client: RealTimeClient.connect(),
|
||||
})
|
||||
})
|
||||
},
|
||||
},
|
||||
|
||||
registered: {
|
||||
setup(cb) {
|
||||
const user_id = FixturesManager.getRandomId()
|
||||
RealTimeClient.setSession(
|
||||
{
|
||||
user: {
|
||||
_id: user_id,
|
||||
first_name: 'Joe',
|
||||
last_name: 'Bloggs',
|
||||
},
|
||||
},
|
||||
err => {
|
||||
if (err) return cb(err)
|
||||
cb(null, {
|
||||
user_id,
|
||||
client: RealTimeClient.connect(),
|
||||
})
|
||||
}
|
||||
)
|
||||
},
|
||||
},
|
||||
|
||||
registeredWithOwnedProject: {
|
||||
setup(cb) {
|
||||
FixturesManager.setUpEditorSession(
|
||||
{ privilegeLevel: 'owner' },
|
||||
(err, { project_id, user_id, doc_id }) => {
|
||||
if (err) return cb(err)
|
||||
cb(null, {
|
||||
user_id,
|
||||
project_id,
|
||||
doc_id,
|
||||
client: RealTimeClient.connect(),
|
||||
})
|
||||
}
|
||||
)
|
||||
},
|
||||
hasOwnProject: true,
|
||||
},
|
||||
}
|
||||
|
||||
Object.entries(USER_SETUP).forEach(level0 => {
|
||||
const [userDescription, userItem] = level0
|
||||
let options, client
|
||||
|
||||
const SESSION_SETUP = {
|
||||
noop: {
|
||||
getActions(cb) {
|
||||
cb(null, [])
|
||||
},
|
||||
needsOwnProject: false,
|
||||
},
|
||||
|
||||
joinReadWriteProject: {
|
||||
getActions(cb) {
|
||||
cb(null, [
|
||||
{ rpc: 'joinProject', args: [{ project_id: readWriteProjectId }] },
|
||||
])
|
||||
},
|
||||
needsOwnProject: false,
|
||||
},
|
||||
|
||||
joinReadWriteProjectAndDoc: {
|
||||
getActions(cb) {
|
||||
cb(null, [
|
||||
{ rpc: 'joinProject', args: [{ project_id: readWriteProjectId }] },
|
||||
{ rpc: 'joinDoc', args: [readWriteDocId] },
|
||||
])
|
||||
},
|
||||
needsOwnProject: false,
|
||||
},
|
||||
|
||||
joinOwnProject: {
|
||||
getActions(cb) {
|
||||
cb(null, [
|
||||
{ rpc: 'joinProject', args: [{ project_id: options.project_id }] },
|
||||
])
|
||||
},
|
||||
needsOwnProject: true,
|
||||
},
|
||||
|
||||
joinOwnProjectAndDoc: {
|
||||
getActions(cb) {
|
||||
cb(null, [
|
||||
{ rpc: 'joinProject', args: [{ project_id: options.project_id }] },
|
||||
{ rpc: 'joinDoc', args: [options.doc_id] },
|
||||
])
|
||||
},
|
||||
needsOwnProject: true,
|
||||
},
|
||||
}
|
||||
|
||||
function performActions(getActions, done) {
|
||||
getActions((err, actions) => {
|
||||
if (err) return done(err)
|
||||
|
||||
async.eachSeries(
|
||||
actions,
|
||||
(action, cb) => {
|
||||
if (action.rpc) {
|
||||
client.emit(action.rpc, ...action.args, (...returnedArgs) => {
|
||||
const error = returnedArgs.shift()
|
||||
if (action.fails) {
|
||||
expect(error).to.exist
|
||||
expect(returnedArgs).to.have.length(0)
|
||||
return cb()
|
||||
}
|
||||
cb(error)
|
||||
})
|
||||
} else {
|
||||
cb(new Error('unexpected action'))
|
||||
}
|
||||
},
|
||||
done
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
describe(userDescription, function () {
|
||||
beforeEach(function userSetup(done) {
|
||||
userItem.setup((err, _options) => {
|
||||
if (err) return done(err)
|
||||
|
||||
options = _options
|
||||
client = options.client
|
||||
client.on('connectionAccepted', done)
|
||||
})
|
||||
})
|
||||
|
||||
Object.entries(SESSION_SETUP).forEach(level1 => {
|
||||
const [sessionSetupDescription, sessionSetupItem] = level1
|
||||
const INVALID_REQUESTS = {
|
||||
noop: {
|
||||
getActions(cb) {
|
||||
cb(null, [])
|
||||
},
|
||||
},
|
||||
|
||||
joinProjectWithDocId: {
|
||||
getActions(cb) {
|
||||
cb(null, [
|
||||
{
|
||||
rpc: 'joinProject',
|
||||
args: [{ project_id: privateDocId }],
|
||||
fails: 1,
|
||||
},
|
||||
])
|
||||
},
|
||||
},
|
||||
|
||||
joinDocWithDocId: {
|
||||
getActions(cb) {
|
||||
cb(null, [{ rpc: 'joinDoc', args: [privateDocId], fails: 1 }])
|
||||
},
|
||||
},
|
||||
|
||||
joinProjectWithProjectId: {
|
||||
getActions(cb) {
|
||||
cb(null, [
|
||||
{
|
||||
rpc: 'joinProject',
|
||||
args: [{ project_id: privateProjectId }],
|
||||
fails: 1,
|
||||
},
|
||||
])
|
||||
},
|
||||
},
|
||||
|
||||
joinDocWithProjectId: {
|
||||
getActions(cb) {
|
||||
cb(null, [{ rpc: 'joinDoc', args: [privateProjectId], fails: 1 }])
|
||||
},
|
||||
},
|
||||
|
||||
joinProjectWithProjectIdThenJoinDocWithDocId: {
|
||||
getActions(cb) {
|
||||
cb(null, [
|
||||
{
|
||||
rpc: 'joinProject',
|
||||
args: [{ project_id: privateProjectId }],
|
||||
fails: 1,
|
||||
},
|
||||
{ rpc: 'joinDoc', args: [privateDocId], fails: 1 },
|
||||
])
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// skip some areas of the matrix
|
||||
// - some Users do not have an own project
|
||||
const skip = sessionSetupItem.needsOwnProject && !userItem.hasOwnProject
|
||||
|
||||
describe(sessionSetupDescription, function () {
|
||||
beforeEach(function performSessionActions(done) {
|
||||
if (skip) return this.skip()
|
||||
performActions(sessionSetupItem.getActions, done)
|
||||
})
|
||||
|
||||
Object.entries(INVALID_REQUESTS).forEach(level2 => {
|
||||
const [InvalidRequestDescription, InvalidRequestItem] = level2
|
||||
describe(InvalidRequestDescription, function () {
|
||||
beforeEach(function performInvalidRequests(done) {
|
||||
performActions(InvalidRequestItem.getActions, done)
|
||||
})
|
||||
|
||||
describe('rooms', function () {
|
||||
it('should not add the user into the privateProject room', function (done) {
|
||||
RealTimeClient.getConnectedClient(
|
||||
client.socket.sessionid,
|
||||
(error, client) => {
|
||||
if (error) return done(error)
|
||||
expect(client.rooms).to.not.include(privateProjectId)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should not add the user into the privateDoc room', function (done) {
|
||||
RealTimeClient.getConnectedClient(
|
||||
client.socket.sessionid,
|
||||
(error, client) => {
|
||||
if (error) return done(error)
|
||||
expect(client.rooms).to.not.include(privateDocId)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('receive updates', function () {
|
||||
const receivedMessages = []
|
||||
beforeEach(function publishAnUpdateInRedis(done) {
|
||||
const update = {
|
||||
doc_id: privateDocId,
|
||||
op: {
|
||||
meta: { source: privateClient.publicId },
|
||||
v: 42,
|
||||
doc: privateDocId,
|
||||
op: [{ i: 'foo', p: 50 }],
|
||||
},
|
||||
}
|
||||
client.on('otUpdateApplied', update => {
|
||||
receivedMessages.push(update)
|
||||
})
|
||||
privateClient.once('otUpdateApplied', () => {
|
||||
setTimeout(done, 10)
|
||||
})
|
||||
rclient.publish('applied-ops', JSON.stringify(update))
|
||||
})
|
||||
|
||||
it('should send nothing to client', function () {
|
||||
expect(receivedMessages).to.have.length(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('receive messages from web', function () {
|
||||
const receivedMessages = []
|
||||
beforeEach(function publishAMessageInRedis(done) {
|
||||
const event = {
|
||||
room_id: privateProjectId,
|
||||
message: 'removeEntity',
|
||||
payload: ['foo', 'convertDocToFile'],
|
||||
_id: 'web:123',
|
||||
}
|
||||
client.on('removeEntity', (...args) => {
|
||||
receivedMessages.push(args)
|
||||
})
|
||||
privateClient.once('removeEntity', () => {
|
||||
setTimeout(done, 10)
|
||||
})
|
||||
rclient.publish('editor-events', JSON.stringify(event))
|
||||
})
|
||||
|
||||
it('should send nothing to client', function () {
|
||||
expect(receivedMessages).to.have.length(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('send updates', function () {
|
||||
let receivedArgs, submittedUpdates, update
|
||||
|
||||
beforeEach(function cleanup(done) {
|
||||
cleanupPreviousUpdates(privateDocId, done)
|
||||
})
|
||||
|
||||
beforeEach(function setupUpdateFields() {
|
||||
update = {
|
||||
doc_id: privateDocId,
|
||||
op: {
|
||||
v: 43,
|
||||
lastV: 42,
|
||||
doc: privateDocId,
|
||||
op: [{ i: 'foo', p: 50 }],
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
beforeEach(function sendAsUser(done) {
|
||||
const userUpdate = Object.assign({}, update, {
|
||||
hash: 'user',
|
||||
})
|
||||
|
||||
client.emit(
|
||||
'applyOtUpdate',
|
||||
privateDocId,
|
||||
userUpdate,
|
||||
(...args) => {
|
||||
receivedArgs = args
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
beforeEach(function sendAsPrivateUserForReferenceOp(done) {
|
||||
const privateUpdate = Object.assign({}, update, {
|
||||
hash: 'private',
|
||||
})
|
||||
|
||||
privateClient.emit(
|
||||
'applyOtUpdate',
|
||||
privateDocId,
|
||||
privateUpdate,
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
beforeEach(function fetchPendingOps(done) {
|
||||
getPendingUpdates(privateDocId, (err, updates) => {
|
||||
submittedUpdates = updates
|
||||
done(err)
|
||||
})
|
||||
})
|
||||
|
||||
it('should error out trying to send', function () {
|
||||
expect(receivedArgs).to.have.length(1)
|
||||
expect(receivedArgs[0]).to.have.property('message')
|
||||
// we are using an old version of chai: 1.9.2
|
||||
// TypeError: expect(...).to.be.oneOf is not a function
|
||||
expect(
|
||||
[
|
||||
'no project_id found on client',
|
||||
'not authorized',
|
||||
].includes(receivedArgs[0].message)
|
||||
).to.equal(true)
|
||||
})
|
||||
|
||||
it('should submit the private users message only', function () {
|
||||
expect(submittedUpdates).to.have.length(1)
|
||||
const update = JSON.parse(submittedUpdates[0])
|
||||
expect(update.hash).to.equal('private')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
373
services/real-time/test/acceptance/js/PubSubRace.js
Normal file
@ -0,0 +1,373 @@
/* eslint-disable
|
||||
camelcase,
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const MockDocUpdaterServer = require('./helpers/MockDocUpdaterServer')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
|
||||
const async = require('async')
|
||||
|
||||
const settings = require('@overleaf/settings')
|
||||
const redis = require('@overleaf/redis-wrapper')
|
||||
const rclient = redis.createClient(settings.redis.pubsub)
|
||||
|
||||
describe('PubSubRace', function () {
|
||||
before(function (done) {
|
||||
return MockDocUpdaterServer.run(done)
|
||||
})
|
||||
|
||||
describe('when the client leaves a doc before joinDoc completes', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connect', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA.emit('joinDoc', this.doc_id, () => {})
|
||||
// leave before joinDoc completes
|
||||
return this.clientA.emit('leaveDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
// wait for subscribe and unsubscribe
|
||||
return setTimeout(cb, 100)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return it('should not subscribe to the applied-ops channels anymore', function (done) {
|
||||
rclient.pubsub('CHANNELS', (err, resp) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
resp.should.not.include(`applied-ops:${this.doc_id}`)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the client emits joinDoc and leaveDoc requests frequently and leaves eventually', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connect', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA.emit('joinDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('leaveDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('joinDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('leaveDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('joinDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('leaveDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('joinDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('leaveDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('joinDoc', this.doc_id, () => {})
|
||||
return this.clientA.emit('leaveDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
// wait for subscribe and unsubscribe
|
||||
return setTimeout(cb, 100)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return it('should not subscribe to the applied-ops channels anymore', function (done) {
|
||||
rclient.pubsub('CHANNELS', (err, resp) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
resp.should.not.include(`applied-ops:${this.doc_id}`)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the client emits joinDoc and leaveDoc requests frequently and remains in the doc', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connect', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA.emit('joinDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('leaveDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('joinDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('leaveDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('joinDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('leaveDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('joinDoc', this.doc_id, () => {})
|
||||
this.clientA.emit('leaveDoc', this.doc_id, () => {})
|
||||
return this.clientA.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
// wait for subscribe and unsubscribe
|
||||
return setTimeout(cb, 100)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return it('should subscribe to the applied-ops channels', function (done) {
|
||||
rclient.pubsub('CHANNELS', (err, resp) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
resp.should.include(`applied-ops:${this.doc_id}`)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when the client disconnects before joinDoc completes', function () {
|
||||
before(function (done) {
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connect', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit(
|
||||
'joinProject',
|
||||
{ project_id: this.project_id },
|
||||
(error, project, privilegeLevel, protocolVersion) => {
|
||||
this.project = project
|
||||
this.privilegeLevel = privilegeLevel
|
||||
this.protocolVersion = protocolVersion
|
||||
return cb(error)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
  let joinDocCompleted = false
  this.clientA.emit(
    'joinDoc',
    this.doc_id,
    () => (joinDocCompleted = true)
  )
  // leave before joinDoc completes
  return setTimeout(
    () => {
      if (joinDocCompleted) {
        return cb(new Error('joinDocCompleted -- lower timeout'))
      }
      this.clientA.on('disconnect', () => cb())
      return this.clientA.disconnect()
    },
    // socket.io processes joinDoc and disconnect with different delays:
    // - joinDoc goes through two process.nextTick
    // - disconnect goes through one process.nextTick
    // We have to inject the disconnect event into a different event loop
    // cycle.
    3
  )
},
|
||||
cb => {
|
||||
// wait for subscribe and unsubscribe
|
||||
return setTimeout(cb, 100)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should not subscribe to the editor-events channels anymore', function (done) {
|
||||
rclient.pubsub('CHANNELS', (err, resp) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
resp.should.not.include(`editor-events:${this.project_id}`)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
|
||||
return it('should not subscribe to the applied-ops channels anymore', function (done) {
|
||||
rclient.pubsub('CHANNELS', (err, resp) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
resp.should.not.include(`applied-ops:${this.doc_id}`)
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
})
|
||||
})
|
347
services/real-time/test/acceptance/js/ReceiveUpdateTests.js
Normal file
@ -0,0 +1,347 @@
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const { expect } = require('chai')
|
||||
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const MockWebServer = require('./helpers/MockWebServer')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
|
||||
const async = require('async')
|
||||
|
||||
const settings = require('@overleaf/settings')
|
||||
const redis = require('@overleaf/redis-wrapper')
|
||||
const rclient = redis.createClient(settings.redis.pubsub)
|
||||
|
||||
describe('receiveUpdate', function () {
|
||||
beforeEach(function (done) {
|
||||
this.lines = ['test', 'doc', 'lines']
|
||||
this.version = 42
|
||||
this.ops = ['mock', 'doc', 'ops']
|
||||
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: { name: 'Test Project' },
|
||||
},
|
||||
(error, { user_id, project_id }) => {
|
||||
this.user_id = user_id
|
||||
this.project_id = project_id
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id }) => {
|
||||
this.doc_id = doc_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientA = RealTimeClient.connect()
|
||||
return this.clientA.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientB = RealTimeClient.connect()
|
||||
return this.clientB.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit(
|
||||
'joinProject',
|
||||
{
|
||||
project_id: this.project_id,
|
||||
},
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientA.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientB.emit(
|
||||
'joinProject',
|
||||
{
|
||||
project_id: this.project_id,
|
||||
},
|
||||
cb
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientB.emit('joinDoc', this.doc_id, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: { name: 'Test Project' },
|
||||
},
|
||||
(
|
||||
error,
|
||||
{ user_id: user_id_second, project_id: project_id_second }
|
||||
) => {
|
||||
this.user_id_second = user_id_second
|
||||
this.project_id_second = project_id_second
|
||||
return cb()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return FixturesManager.setUpDoc(
|
||||
this.project_id_second,
|
||||
{ lines: this.lines, version: this.version, ops: this.ops },
|
||||
(e, { doc_id: doc_id_second }) => {
|
||||
this.doc_id_second = doc_id_second
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientC = RealTimeClient.connect()
|
||||
return this.clientC.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.clientC.emit(
|
||||
'joinProject',
|
||||
{
|
||||
project_id: this.project_id_second,
|
||||
},
|
||||
cb
|
||||
)
|
||||
},
|
||||
cb => {
|
||||
return this.clientC.emit('joinDoc', this.doc_id_second, cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.clientAUpdates = []
|
||||
this.clientA.on('otUpdateApplied', update =>
|
||||
this.clientAUpdates.push(update)
|
||||
)
|
||||
this.clientBUpdates = []
|
||||
this.clientB.on('otUpdateApplied', update =>
|
||||
this.clientBUpdates.push(update)
|
||||
)
|
||||
this.clientCUpdates = []
|
||||
this.clientC.on('otUpdateApplied', update =>
|
||||
this.clientCUpdates.push(update)
|
||||
)
|
||||
|
||||
this.clientAErrors = []
|
||||
this.clientA.on('otUpdateError', error =>
|
||||
this.clientAErrors.push(error)
|
||||
)
|
||||
this.clientBErrors = []
|
||||
this.clientB.on('otUpdateError', error =>
|
||||
this.clientBErrors.push(error)
|
||||
)
|
||||
this.clientCErrors = []
|
||||
this.clientC.on('otUpdateError', error =>
|
||||
this.clientCErrors.push(error)
|
||||
)
|
||||
return cb()
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
if (this.clientA != null) {
|
||||
this.clientA.disconnect()
|
||||
}
|
||||
if (this.clientB != null) {
|
||||
this.clientB.disconnect()
|
||||
}
|
||||
return this.clientC != null ? this.clientC.disconnect() : undefined
|
||||
})
|
||||
|
||||
describe('with an update from clientA', function () {
|
||||
beforeEach(function (done) {
|
||||
this.update = {
|
||||
doc_id: this.doc_id,
|
||||
op: {
|
||||
meta: {
|
||||
source: this.clientA.publicId,
|
||||
},
|
||||
v: this.version,
|
||||
doc: this.doc_id,
|
||||
op: [{ i: 'foo', p: 50 }],
|
||||
},
|
||||
}
|
||||
rclient.publish('applied-ops', JSON.stringify(this.update))
|
||||
return setTimeout(done, 200)
|
||||
}) // Give clients time to get message
|
||||
|
||||
it('should send the full op to clientB', function () {
|
||||
return this.clientBUpdates.should.deep.equal([this.update.op])
|
||||
})
|
||||
|
||||
it('should send an ack to clientA', function () {
|
||||
return this.clientAUpdates.should.deep.equal([
|
||||
{
|
||||
v: this.version,
|
||||
doc: this.doc_id,
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
return it('should send nothing to clientC', function () {
|
||||
return this.clientCUpdates.should.deep.equal([])
|
||||
})
|
||||
})
|
||||
|
||||
describe('with an update from clientC', function () {
|
||||
beforeEach(function (done) {
|
||||
this.update = {
|
||||
doc_id: this.doc_id_second,
|
||||
op: {
|
||||
meta: {
|
||||
source: this.clientC.publicId,
|
||||
},
|
||||
v: this.version,
|
||||
doc: this.doc_id_second,
|
||||
op: [{ i: 'update from clientC', p: 50 }],
|
||||
},
|
||||
}
|
||||
rclient.publish('applied-ops', JSON.stringify(this.update))
|
||||
return setTimeout(done, 200)
|
||||
}) // Give clients time to get message
|
||||
|
||||
it('should send nothing to clientA', function () {
|
||||
return this.clientAUpdates.should.deep.equal([])
|
||||
})
|
||||
|
||||
it('should send nothing to clientB', function () {
|
||||
return this.clientBUpdates.should.deep.equal([])
|
||||
})
|
||||
|
||||
return it('should send an ack to clientC', function () {
|
||||
return this.clientCUpdates.should.deep.equal([
|
||||
{
|
||||
v: this.version,
|
||||
doc: this.doc_id_second,
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe('with an update from a remote client for project 1', function () {
|
||||
beforeEach(function (done) {
|
||||
this.update = {
|
||||
doc_id: this.doc_id,
|
||||
op: {
|
||||
meta: {
|
||||
source: 'this-is-a-remote-client-id',
|
||||
},
|
||||
v: this.version,
|
||||
doc: this.doc_id,
|
||||
op: [{ i: 'foo', p: 50 }],
|
||||
},
|
||||
}
|
||||
rclient.publish('applied-ops', JSON.stringify(this.update))
|
||||
return setTimeout(done, 200)
|
||||
}) // Give clients time to get message
|
||||
|
||||
it('should send the full op to clientA', function () {
|
||||
return this.clientAUpdates.should.deep.equal([this.update.op])
|
||||
})
|
||||
|
||||
it('should send the full op to clientB', function () {
|
||||
return this.clientBUpdates.should.deep.equal([this.update.op])
|
||||
})
|
||||
|
||||
return it('should send nothing to clientC', function () {
|
||||
return this.clientCUpdates.should.deep.equal([])
|
||||
})
|
||||
})
|
||||
|
||||
describe('with an error for the first project', function () {
|
||||
beforeEach(function (done) {
|
||||
rclient.publish(
|
||||
'applied-ops',
|
||||
JSON.stringify({
|
||||
doc_id: this.doc_id,
|
||||
error: (this.error = 'something went wrong'),
|
||||
})
|
||||
)
|
||||
return setTimeout(done, 200)
|
||||
}) // Give clients time to get message
|
||||
|
||||
it('should send the error to the clients in the first project', function () {
|
||||
this.clientAErrors.should.deep.equal([this.error])
|
||||
return this.clientBErrors.should.deep.equal([this.error])
|
||||
})
|
||||
|
||||
it('should not send any errors to the client in the second project', function () {
|
||||
return this.clientCErrors.should.deep.equal([])
|
||||
})
|
||||
|
||||
it('should disconnect the clients of the first project', function () {
|
||||
this.clientA.socket.connected.should.equal(false)
|
||||
return this.clientB.socket.connected.should.equal(false)
|
||||
})
|
||||
|
||||
return it('should not disconnect the client in the second project', function () {
|
||||
return this.clientC.socket.connected.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('with an error for the second project', function () {
|
||||
beforeEach(function (done) {
|
||||
rclient.publish(
|
||||
'applied-ops',
|
||||
JSON.stringify({
|
||||
doc_id: this.doc_id_second,
|
||||
error: (this.error = 'something went wrong'),
|
||||
})
|
||||
)
|
||||
return setTimeout(done, 200)
|
||||
}) // Give clients time to get message
|
||||
|
||||
it('should not send any errors to the clients in the first project', function () {
|
||||
this.clientAErrors.should.deep.equal([])
|
||||
return this.clientBErrors.should.deep.equal([])
|
||||
})
|
||||
|
||||
it('should send the error to the client in the second project', function () {
|
||||
return this.clientCErrors.should.deep.equal([this.error])
|
||||
})
|
||||
|
||||
it('should not disconnect the clients of the first project', function () {
|
||||
this.clientA.socket.connected.should.equal(true)
|
||||
return this.clientB.socket.connected.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should disconnect the client in the second project', function () {
|
||||
return this.clientC.socket.connected.should.equal(false)
|
||||
})
|
||||
})
|
||||
})
|
121
services/real-time/test/acceptance/js/RouterTests.js
Normal file
@ -0,0 +1,121 @@
/* eslint-disable
|
||||
camelcase,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const async = require('async')
|
||||
const { expect } = require('chai')
|
||||
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const FixturesManager = require('./helpers/FixturesManager')
|
||||
|
||||
describe('Router', function () {
|
||||
return describe('joinProject', function () {
|
||||
describe('when there is no callback provided', function () {
|
||||
after(function () {
|
||||
return process.removeListener('unhandledRejection', this.onUnhandled)
|
||||
})
|
||||
|
||||
before(function (done) {
|
||||
this.onUnhandled = error => done(error)
|
||||
process.on('unhandledRejection', this.onUnhandled)
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client.emit('joinProject', { project_id: this.project_id })
|
||||
return setTimeout(cb, 100)
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return it('should keep on going', function () {
|
||||
return expect('still running').to.exist
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when there are too many arguments', function () {
|
||||
after(function () {
|
||||
return process.removeListener('unhandledRejection', this.onUnhandled)
|
||||
})
|
||||
|
||||
before(function (done) {
|
||||
this.onUnhandled = error => done(error)
|
||||
process.on('unhandledRejection', this.onUnhandled)
|
||||
return async.series(
|
||||
[
|
||||
cb => {
|
||||
return FixturesManager.setUpProject(
|
||||
{
|
||||
privilegeLevel: 'owner',
|
||||
project: {
|
||||
name: 'Test Project',
|
||||
},
|
||||
},
|
||||
(e, { project_id, user_id }) => {
|
||||
this.project_id = project_id
|
||||
this.user_id = user_id
|
||||
return cb(e)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
this.client = RealTimeClient.connect()
|
||||
return this.client.on('connectionAccepted', cb)
|
||||
},
|
||||
|
||||
cb => {
|
||||
return this.client.emit('joinProject', 1, 2, 3, 4, 5, error => {
|
||||
this.error = error
|
||||
return cb()
|
||||
})
|
||||
},
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return an error message', function () {
|
||||
return expect(this.error.message).to.equal('unexpected arguments')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
103
services/real-time/test/acceptance/js/SessionSocketsTests.js
Normal file
@ -0,0 +1,103 @@
/* eslint-disable
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
const Settings = require('@overleaf/settings')
|
||||
const { expect } = require('chai')
|
||||
|
||||
describe('SessionSockets', function () {
|
||||
before(function () {
|
||||
return (this.checkSocket = function (fn) {
|
||||
const client = RealTimeClient.connect()
|
||||
client.on('connectionAccepted', fn)
|
||||
client.on('connectionRejected', fn)
|
||||
return null
|
||||
})
|
||||
})
|
||||
|
||||
describe('without cookies', function () {
|
||||
before(function () {
|
||||
return (RealTimeClient.cookie = null)
|
||||
})
|
||||
|
||||
return it('should return a lookup error', function (done) {
|
||||
return this.checkSocket(error => {
|
||||
expect(error).to.exist
|
||||
expect(error.message).to.equal('invalid session')
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('with a different cookie', function () {
|
||||
before(function () {
|
||||
return (RealTimeClient.cookie = 'some.key=someValue')
|
||||
})
|
||||
|
||||
return it('should return a lookup error', function (done) {
|
||||
return this.checkSocket(error => {
|
||||
expect(error).to.exist
|
||||
expect(error.message).to.equal('invalid session')
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('with an invalid cookie', function () {
|
||||
before(function (done) {
|
||||
RealTimeClient.setSession({}, error => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
RealTimeClient.cookie = `${
|
||||
Settings.cookieName
|
||||
}=${RealTimeClient.cookie.slice(17, 49)}`
|
||||
return done()
|
||||
})
|
||||
return null
|
||||
})
|
||||
|
||||
return it('should return a lookup error', function (done) {
|
||||
return this.checkSocket(error => {
|
||||
expect(error).to.exist
|
||||
expect(error.message).to.equal('invalid session')
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('with a valid cookie and no matching session', function () {
|
||||
before(function () {
|
||||
return (RealTimeClient.cookie = `${Settings.cookieName}=unknownId`)
|
||||
})
|
||||
|
||||
return it('should return a lookup error', function (done) {
|
||||
return this.checkSocket(error => {
|
||||
expect(error).to.exist
|
||||
expect(error.message).to.equal('invalid session')
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
return describe('with a valid cookie and a matching session', function () {
|
||||
before(function (done) {
|
||||
RealTimeClient.setSession({}, done)
|
||||
return null
|
||||
})
|
||||
|
||||
return it('should not return an error', function (done) {
|
||||
return this.checkSocket(error => {
|
||||
expect(error).to.not.exist
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
60
services/real-time/test/acceptance/js/SessionTests.js
Normal file
@ -0,0 +1,60 @@
/* eslint-disable
|
||||
handle-callback-err,
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const { expect } = require('chai')
|
||||
|
||||
const RealTimeClient = require('./helpers/RealTimeClient')
|
||||
|
||||
describe('Session', function () {
|
||||
return describe('with an established session', function () {
|
||||
before(function (done) {
|
||||
this.user_id = 'mock-user-id'
|
||||
RealTimeClient.setSession(
|
||||
{
|
||||
user: { _id: this.user_id },
|
||||
},
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
this.client = RealTimeClient.connect()
|
||||
return done()
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
|
||||
it('should not get disconnected', function (done) {
|
||||
let disconnected = false
|
||||
this.client.on('disconnect', () => (disconnected = true))
|
||||
return setTimeout(() => {
|
||||
expect(disconnected).to.equal(false)
|
||||
return done()
|
||||
}, 500)
|
||||
})
|
||||
|
||||
return it('should appear in the list of connected clients', function (done) {
|
||||
return RealTimeClient.getConnectedClients((error, clients) => {
|
||||
let included = false
|
||||
for (const client of Array.from(clients)) {
|
||||
if (client.client_id === this.client.socket.sessionid) {
|
||||
included = true
|
||||
break
|
||||
}
|
||||
}
|
||||
expect(included).to.equal(true)
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
130
services/real-time/test/acceptance/js/helpers/FixturesManager.js
Normal file
@ -0,0 +1,130 @@
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let FixturesManager
|
||||
const RealTimeClient = require('./RealTimeClient')
|
||||
const MockWebServer = require('./MockWebServer')
|
||||
const MockDocUpdaterServer = require('./MockDocUpdaterServer')
|
||||
|
||||
module.exports = FixturesManager = {
|
||||
setUpProject(options, callback) {
|
||||
if (options == null) {
|
||||
options = {}
|
||||
}
|
||||
if (callback == null) {
|
||||
callback = function (error, data) {}
|
||||
}
|
||||
if (!options.user_id) {
|
||||
options.user_id = FixturesManager.getRandomId()
|
||||
}
|
||||
if (!options.project_id) {
|
||||
options.project_id = FixturesManager.getRandomId()
|
||||
}
|
||||
if (!options.project) {
|
||||
options.project = { name: 'Test Project' }
|
||||
}
|
||||
const { project_id, user_id, privilegeLevel, project, publicAccess } =
|
||||
options
|
||||
|
||||
const privileges = {}
|
||||
privileges[user_id] = privilegeLevel
|
||||
if (publicAccess) {
|
||||
privileges['anonymous-user'] = publicAccess
|
||||
}
|
||||
|
||||
MockWebServer.createMockProject(project_id, privileges, project)
|
||||
return MockWebServer.run(error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return RealTimeClient.setSession(
|
||||
{
|
||||
user: {
|
||||
_id: user_id,
|
||||
first_name: 'Joe',
|
||||
last_name: 'Bloggs',
|
||||
},
|
||||
},
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return callback(null, {
|
||||
project_id,
|
||||
user_id,
|
||||
privilegeLevel,
|
||||
project,
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
setUpDoc(project_id, options, callback) {
|
||||
if (options == null) {
|
||||
options = {}
|
||||
}
|
||||
if (callback == null) {
|
||||
callback = function (error, data) {}
|
||||
}
|
||||
if (!options.doc_id) {
|
||||
options.doc_id = FixturesManager.getRandomId()
|
||||
}
|
||||
if (!options.lines) {
|
||||
options.lines = ['doc', 'lines']
|
||||
}
|
||||
if (!options.version) {
|
||||
options.version = 42
|
||||
}
|
||||
if (!options.ops) {
|
||||
options.ops = ['mock', 'ops']
|
||||
}
|
||||
const { doc_id, lines, version, ops, ranges } = options
|
||||
|
||||
MockDocUpdaterServer.createMockDoc(project_id, doc_id, {
|
||||
lines,
|
||||
version,
|
||||
ops,
|
||||
ranges,
|
||||
})
|
||||
return MockDocUpdaterServer.run(error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return callback(null, { project_id, doc_id, lines, version, ops })
|
||||
})
|
||||
},
|
||||
|
||||
setUpEditorSession(options, callback) {
|
||||
FixturesManager.setUpProject(options, (err, detailsProject) => {
|
||||
if (err) return callback(err)
|
||||
|
||||
FixturesManager.setUpDoc(
|
||||
detailsProject.project_id,
|
||||
options,
|
||||
(err, detailsDoc) => {
|
||||
if (err) return callback(err)
|
||||
|
||||
callback(null, Object.assign({}, detailsProject, detailsDoc))
|
||||
}
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
  getRandomId() {
    return require('crypto')
      .createHash('sha1')
      .update(Math.random().toString())
      .digest('hex')
      .slice(0, 24)
  },
}
@ -0,0 +1,93 @@
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let MockDocUpdaterServer
|
||||
const sinon = require('sinon')
|
||||
const express = require('express')
|
||||
|
||||
module.exports = MockDocUpdaterServer = {
|
||||
docs: {},
|
||||
|
||||
createMockDoc(project_id, doc_id, data) {
|
||||
return (MockDocUpdaterServer.docs[`${project_id}:${doc_id}`] = data)
|
||||
},
|
||||
|
||||
getDocument(project_id, doc_id, fromVersion, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, data) {}
|
||||
}
|
||||
return callback(null, MockDocUpdaterServer.docs[`${project_id}:${doc_id}`])
|
||||
},
|
||||
|
||||
deleteProject: sinon.stub().callsArg(1),
|
||||
|
||||
getDocumentRequest(req, res, next) {
|
||||
const { project_id, doc_id } = req.params
|
||||
let { fromVersion } = req.query
|
||||
fromVersion = parseInt(fromVersion, 10)
|
||||
return MockDocUpdaterServer.getDocument(
|
||||
project_id,
|
||||
doc_id,
|
||||
fromVersion,
|
||||
(error, data) => {
|
||||
if (error != null) {
|
||||
return next(error)
|
||||
}
|
||||
if (!data) {
|
||||
return res.sendStatus(404)
|
||||
}
|
||||
return res.json(data)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
deleteProjectRequest(req, res, next) {
|
||||
const { project_id } = req.params
|
||||
return MockDocUpdaterServer.deleteProject(project_id, error => {
|
||||
if (error != null) {
|
||||
return next(error)
|
||||
}
|
||||
return res.sendStatus(204)
|
||||
})
|
||||
},
|
||||
|
||||
running: false,
|
||||
run(callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
if (MockDocUpdaterServer.running) {
|
||||
return callback()
|
||||
}
|
||||
const app = express()
|
||||
app.get(
|
||||
'/project/:project_id/doc/:doc_id',
|
||||
MockDocUpdaterServer.getDocumentRequest
|
||||
)
|
||||
app.delete(
|
||||
'/project/:project_id',
|
||||
MockDocUpdaterServer.deleteProjectRequest
|
||||
)
|
||||
return app
|
||||
.listen(3003, error => {
|
||||
MockDocUpdaterServer.running = true
|
||||
return callback(error)
|
||||
})
|
||||
.on('error', error => {
|
||||
console.error('error starting MockDocUpdaterServer:', error.message)
|
||||
return process.exit(1)
|
||||
})
|
||||
},
|
||||
}
|
||||
|
||||
sinon.spy(MockDocUpdaterServer, 'getDocument')
@ -0,0 +1,89 @@
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let MockWebServer
|
||||
const sinon = require('sinon')
|
||||
const express = require('express')
|
||||
|
||||
module.exports = MockWebServer = {
|
||||
projects: {},
|
||||
privileges: {},
|
||||
|
||||
createMockProject(project_id, privileges, project) {
|
||||
MockWebServer.privileges[project_id] = privileges
|
||||
return (MockWebServer.projects[project_id] = project)
|
||||
},
|
||||
|
||||
joinProject(project_id, user_id, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, project, privilegeLevel) {}
|
||||
}
|
||||
return callback(
|
||||
null,
|
||||
MockWebServer.projects[project_id],
|
||||
MockWebServer.privileges[project_id][user_id] ||
|
||||
MockWebServer.privileges[project_id]['anonymous-user']
|
||||
)
|
||||
},
|
||||
|
||||
joinProjectRequest(req, res, next) {
|
||||
const { project_id } = req.params
|
||||
const { user_id } = req.query
|
||||
if (project_id === 'not-found') {
|
||||
return res.status(404).send()
|
||||
}
|
||||
if (project_id === 'forbidden') {
|
||||
return res.status(403).send()
|
||||
}
|
||||
if (project_id === 'rate-limited') {
|
||||
return res.status(429).send()
|
||||
} else {
|
||||
return MockWebServer.joinProject(
|
||||
project_id,
|
||||
user_id,
|
||||
(error, project, privilegeLevel) => {
|
||||
if (error != null) {
|
||||
return next(error)
|
||||
}
|
||||
return res.json({
|
||||
project,
|
||||
privilegeLevel,
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
},
|
||||
|
||||
running: false,
|
||||
run(callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
if (MockWebServer.running) {
|
||||
return callback()
|
||||
}
|
||||
const app = express()
|
||||
app.post('/project/:project_id/join', MockWebServer.joinProjectRequest)
|
||||
return app
|
||||
.listen(3000, error => {
|
||||
MockWebServer.running = true
|
||||
return callback(error)
|
||||
})
|
||||
.on('error', error => {
|
||||
console.error('error starting MockWebServer:', error.message)
|
||||
return process.exit(1)
|
||||
})
|
||||
},
|
||||
}
|
||||
|
||||
sinon.spy(MockWebServer, 'joinProject')
|
127
services/real-time/test/acceptance/js/helpers/RealTimeClient.js
Normal file
@ -0,0 +1,127 @@
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let Client
|
||||
const { XMLHttpRequest } = require('../../libs/XMLHttpRequest')
|
||||
const io = require('socket.io-client')
|
||||
const async = require('async')
|
||||
|
||||
const request = require('request')
|
||||
const Settings = require('@overleaf/settings')
|
||||
const redis = require('@overleaf/redis-wrapper')
|
||||
const rclient = redis.createClient(Settings.redis.websessions)
|
||||
|
||||
const uid = require('uid-safe').sync
|
||||
const signature = require('cookie-signature')
|
||||
|
||||
io.util.request = function () {
|
||||
const xhr = new XMLHttpRequest()
|
||||
const _open = xhr.open
|
||||
xhr.open = function () {
|
||||
_open.apply(xhr, arguments)
|
||||
if (Client.cookie != null) {
|
||||
return xhr.setRequestHeader('Cookie', Client.cookie)
|
||||
}
|
||||
}
|
||||
return xhr
|
||||
}
|
||||
|
||||
module.exports = Client = {
|
||||
cookie: null,
|
||||
|
||||
setSession(session, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
const sessionId = uid(24)
|
||||
session.cookie = {}
|
||||
return rclient.set('sess:' + sessionId, JSON.stringify(session), error => {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
const secret = Settings.security.sessionSecret
|
||||
const cookieKey = 's:' + signature.sign(sessionId, secret)
|
||||
Client.cookie = `${Settings.cookieName}=${cookieKey}`
|
||||
return callback()
|
||||
})
|
||||
},
|
||||
|
||||
unsetSession(callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
Client.cookie = null
|
||||
return callback()
|
||||
},
|
||||
|
||||
connect(cookie) {
|
||||
const client = io.connect('http://localhost:3026', {
|
||||
'force new connection': true,
|
||||
})
|
||||
client.on(
|
||||
'connectionAccepted',
|
||||
(_, publicId) => (client.publicId = publicId)
|
||||
)
|
||||
return client
|
||||
},
|
||||
|
||||
getConnectedClients(callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, clients) {}
|
||||
}
|
||||
return request.get(
|
||||
{
|
||||
url: 'http://localhost:3026/clients',
|
||||
json: true,
|
||||
},
|
||||
(error, response, data) => callback(error, data)
|
||||
)
|
||||
},
|
||||
|
||||
getConnectedClient(client_id, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, clients) {}
|
||||
}
|
||||
return request.get(
|
||||
{
|
||||
url: `http://localhost:3026/clients/${client_id}`,
|
||||
json: true,
|
||||
},
|
||||
(error, response, data) => callback(error, data)
|
||||
)
|
||||
},
|
||||
|
||||
disconnectClient(client_id, callback) {
|
||||
request.post(
|
||||
{
|
||||
url: `http://localhost:3026/client/${client_id}/disconnect`,
|
||||
auth: {
|
||||
user: Settings.internal.realTime.user,
|
||||
pass: Settings.internal.realTime.pass,
|
||||
},
|
||||
},
|
||||
(error, response, data) => callback(error, data)
|
||||
)
|
||||
return null
|
||||
},
|
||||
|
||||
disconnectAllClients(callback) {
|
||||
return Client.getConnectedClients((error, clients) =>
|
||||
async.each(
|
||||
clients,
|
||||
(clientView, cb) => Client.disconnectClient(clientView.client_id, cb),
|
||||
callback
|
||||
)
|
||||
)
|
||||
},
|
||||
}
@ -0,0 +1,64 @@
/* eslint-disable
|
||||
handle-callback-err,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS103: Rewrite code to no longer use __guard__
|
||||
* DS205: Consider reworking code to avoid use of IIFEs
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const app = require('../../../../app')
|
||||
const logger = require('logger-sharelatex')
|
||||
const Settings = require('@overleaf/settings')
|
||||
|
||||
module.exports = {
|
||||
running: false,
|
||||
initing: false,
|
||||
callbacks: [],
|
||||
ensureRunning(callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
if (this.running) {
|
||||
return callback()
|
||||
} else if (this.initing) {
|
||||
return this.callbacks.push(callback)
|
||||
} else {
|
||||
this.initing = true
|
||||
this.callbacks.push(callback)
|
||||
return app.listen(
|
||||
__guard__(
|
||||
Settings.internal != null ? Settings.internal.realtime : undefined,
|
||||
x => x.port
|
||||
),
|
||||
'localhost',
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
this.running = true
|
||||
logger.log('real-time running in dev mode')
|
||||
|
||||
return (() => {
|
||||
const result = []
|
||||
for (callback of Array.from(this.callbacks)) {
|
||||
result.push(callback())
|
||||
}
|
||||
return result
|
||||
})()
|
||||
}
|
||||
)
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
|
579
services/real-time/test/acceptance/libs/XMLHttpRequest.js
Normal file
@ -0,0 +1,579 @@
/**
|
||||
* Wrapper for built-in http.js to emulate the browser XMLHttpRequest object.
|
||||
*
|
||||
* This can be used with JS designed for browsers to improve reuse of code and
|
||||
* allow the use of existing libraries.
|
||||
*
|
||||
* Usage: include("XMLHttpRequest.js") and use XMLHttpRequest per W3C specs.
|
||||
*
|
||||
* @author Dan DeFelippi <dan@driverdan.com>
|
||||
* @contributor David Ellis <d.f.ellis@ieee.org>
|
||||
* @license MIT
|
||||
*/
|
||||
|
||||
const { URL } = require('url')
|
||||
const spawn = require('child_process').spawn
|
||||
const fs = require('fs')
|
||||
|
||||
exports.XMLHttpRequest = function () {
|
||||
/**
|
||||
* Private variables
|
||||
*/
|
||||
const self = this
|
||||
const http = require('http')
|
||||
const https = require('https')
|
||||
|
||||
// Holds http.js objects
|
||||
let request
|
||||
let response
|
||||
|
||||
// Request settings
|
||||
let settings = {}
|
||||
|
||||
// Set some default headers
|
||||
const defaultHeaders = {
|
||||
'User-Agent': 'node-XMLHttpRequest',
|
||||
Accept: '*/*',
|
||||
}
|
||||
|
||||
let headers = defaultHeaders
|
||||
|
||||
// These headers are not user setable.
|
||||
// The following are allowed but banned in the spec:
|
||||
// * user-agent
|
||||
const forbiddenRequestHeaders = [
|
||||
'accept-charset',
|
||||
'accept-encoding',
|
||||
'access-control-request-headers',
|
||||
'access-control-request-method',
|
||||
'connection',
|
||||
'content-length',
|
||||
'content-transfer-encoding',
|
||||
// "cookie",
|
||||
'cookie2',
|
||||
'date',
|
||||
'expect',
|
||||
'host',
|
||||
'keep-alive',
|
||||
'origin',
|
||||
'referer',
|
||||
'te',
|
||||
'trailer',
|
||||
'transfer-encoding',
|
||||
'upgrade',
|
||||
'via',
|
||||
]
|
||||
|
||||
// These request methods are not allowed
|
||||
const forbiddenRequestMethods = ['TRACE', 'TRACK', 'CONNECT']
|
||||
|
||||
// Send flag
|
||||
let sendFlag = false
|
||||
// Error flag, used when errors occur or abort is called
|
||||
let errorFlag = false
|
||||
|
||||
// Event listeners
|
||||
const listeners = {}
|
||||
|
||||
/**
|
||||
* Constants
|
||||
*/
|
||||
|
||||
this.UNSENT = 0
|
||||
this.OPENED = 1
|
||||
this.HEADERS_RECEIVED = 2
|
||||
this.LOADING = 3
|
||||
this.DONE = 4
|
||||
|
||||
/**
|
||||
* Public vars
|
||||
*/
|
||||
|
||||
// Current state
|
||||
this.readyState = this.UNSENT
|
||||
|
||||
// default ready state change handler in case one is not set or is set late
|
||||
this.onreadystatechange = null
|
||||
|
||||
// Result & response
|
||||
this.responseText = ''
|
||||
this.responseXML = ''
|
||||
this.status = null
|
||||
this.statusText = null
|
||||
|
||||
/**
|
||||
* Private methods
|
||||
*/
|
||||
|
||||
/**
|
||||
* Check if the specified header is allowed.
|
||||
*
|
||||
* @param string header Header to validate
|
||||
* @return boolean False if not allowed, otherwise true
|
||||
*/
|
||||
const isAllowedHttpHeader = function (header) {
|
||||
return (
|
||||
header && forbiddenRequestHeaders.indexOf(header.toLowerCase()) === -1
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the specified method is allowed.
|
||||
*
|
||||
* @param string method Request method to validate
|
||||
* @return boolean False if not allowed, otherwise true
|
||||
*/
|
||||
const isAllowedHttpMethod = function (method) {
|
||||
return method && forbiddenRequestMethods.indexOf(method) === -1
|
||||
}
|
||||
|
||||
/**
|
||||
* Public methods
|
||||
*/
|
||||
|
||||
/**
|
||||
* Open the connection. Currently supports local server requests.
|
||||
*
|
||||
* @param string method Connection method (eg GET, POST)
|
||||
* @param string url URL for the connection.
|
||||
* @param boolean async Asynchronous connection. Default is true.
|
||||
* @param string user Username for basic authentication (optional)
|
||||
* @param string password Password for basic authentication (optional)
|
||||
*/
|
||||
this.open = function (method, url, async, user, password) {
|
||||
this.abort()
|
||||
errorFlag = false
|
||||
|
||||
// Check for valid request method
|
||||
if (!isAllowedHttpMethod(method)) {
|
||||
throw new Error('SecurityError: Request method not allowed')
|
||||
}
|
||||
|
||||
settings = {
|
||||
method: method,
|
||||
url: url.toString(),
|
||||
async: typeof async !== 'boolean' ? true : async,
|
||||
user: user || null,
|
||||
password: password || null,
|
||||
}
|
||||
|
||||
setState(this.OPENED)
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets a header for the request.
|
||||
*
|
||||
* @param string header Header name
|
||||
* @param string value Header value
|
||||
*/
|
||||
this.setRequestHeader = function (header, value) {
|
||||
if (this.readyState !== this.OPENED) {
|
||||
throw new Error(
|
||||
'INVALID_STATE_ERR: setRequestHeader can only be called when state is OPEN'
|
||||
)
|
||||
}
|
||||
if (!isAllowedHttpHeader(header)) {
|
||||
console.warn('Refused to set unsafe header "' + header + '"')
|
||||
return
|
||||
}
|
||||
if (sendFlag) {
|
||||
throw new Error('INVALID_STATE_ERR: send flag is true')
|
||||
}
|
||||
headers[header] = value
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a header from the server response.
|
||||
*
|
||||
* @param string header Name of header to get.
|
||||
* @return string Text of the header or null if it doesn't exist.
|
||||
*/
|
||||
this.getResponseHeader = function (header) {
|
||||
if (
|
||||
typeof header === 'string' &&
|
||||
this.readyState > this.OPENED &&
|
||||
response.headers[header.toLowerCase()] &&
|
||||
!errorFlag
|
||||
) {
|
||||
return response.headers[header.toLowerCase()]
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets all the response headers.
|
||||
*
|
||||
* @return string A string with all response headers separated by CR+LF
|
||||
*/
|
||||
this.getAllResponseHeaders = function () {
|
||||
if (this.readyState < this.HEADERS_RECEIVED || errorFlag) {
|
||||
return ''
|
||||
}
|
||||
let result = ''
|
||||
|
||||
for (const i in response.headers) {
|
||||
// Cookie headers are excluded
|
||||
if (i !== 'set-cookie' && i !== 'set-cookie2') {
|
||||
result += i + ': ' + response.headers[i] + '\r\n'
|
||||
}
|
||||
}
|
||||
return result.substr(0, result.length - 2)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a request header
|
||||
*
|
||||
* @param string name Name of header to get
|
||||
* @return string Returns the request header or empty string if not set
|
||||
*/
|
||||
this.getRequestHeader = function (name) {
|
||||
// @TODO Make this case insensitive
|
||||
if (typeof name === 'string' && headers[name]) {
|
||||
return headers[name]
|
||||
}
|
||||
|
||||
return ''
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends the request to the server.
|
||||
*
|
||||
* @param string data Optional data to send as request body.
|
||||
*/
|
||||
this.send = function (data) {
|
||||
if (this.readyState !== this.OPENED) {
|
||||
throw new Error(
|
||||
'INVALID_STATE_ERR: connection must be opened before send() is called'
|
||||
)
|
||||
}
|
||||
|
||||
if (sendFlag) {
|
||||
throw new Error('INVALID_STATE_ERR: send has already been called')
|
||||
}
|
||||
|
||||
let host
|
||||
let ssl = false
|
||||
let local = false
|
||||
const url = new URL(settings.url)
|
||||
|
||||
// Determine the server
|
||||
switch (url.protocol) {
|
||||
case 'https:':
|
||||
ssl = true
|
||||
host = url.hostname
|
||||
break
|
||||
case 'http:':
|
||||
host = url.hostname
|
||||
break
|
||||
|
||||
case 'file:':
|
||||
local = true
|
||||
break
|
||||
|
||||
case undefined:
|
||||
case '':
|
||||
host = 'localhost'
|
||||
break
|
||||
|
||||
default:
|
||||
throw new Error('Protocol not supported.')
|
||||
}
|
||||
|
||||
// Load files off the local filesystem (file://)
|
||||
if (local) {
|
||||
if (settings.method !== 'GET') {
|
||||
throw new Error('XMLHttpRequest: Only GET method is supported')
|
||||
}
|
||||
|
||||
if (settings.async) {
|
||||
fs.readFile(url.pathname, 'utf8', (error, data) => {
|
||||
if (error) {
|
||||
self.handleError(error)
|
||||
} else {
|
||||
self.status = 200
|
||||
self.responseText = data
|
||||
setState(self.DONE)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
try {
|
||||
this.responseText = fs.readFileSync(url.pathname, 'utf8')
|
||||
this.status = 200
|
||||
setState(self.DONE)
|
||||
} catch (e) {
|
||||
this.handleError(e)
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Default to port 80. If accessing localhost on another port be sure
|
||||
// to use http://localhost:port/path
|
||||
const port = url.port || (ssl ? 443 : 80)
|
||||
// Add query string if one is used
|
||||
const uri = url.pathname + (url.search ? url.search : '')
|
||||
|
||||
// Set the Host header or the server may reject the request
|
||||
headers.Host = host
|
||||
if (!((ssl && port === 443) || port === 80)) {
|
||||
headers.Host += ':' + url.port
|
||||
}
|
||||
|
||||
// Set Basic Auth if necessary
|
||||
if (settings.user) {
|
||||
if (typeof settings.password === 'undefined') {
|
||||
settings.password = ''
|
||||
}
|
||||
const authBuf = Buffer.from(settings.user + ':' + settings.password)
|
||||
headers.Authorization = 'Basic ' + authBuf.toString('base64')
|
||||
}
|
||||
|
||||
// Set content length header
|
||||
if (settings.method === 'GET' || settings.method === 'HEAD') {
|
||||
data = null
|
||||
} else if (data) {
|
||||
headers['Content-Length'] = Buffer.byteLength(data)
|
||||
|
||||
if (!headers['Content-Type']) {
|
||||
headers['Content-Type'] = 'text/plain;charset=UTF-8'
|
||||
}
|
||||
} else if (settings.method === 'POST') {
|
||||
// For a post with no data set Content-Length: 0.
|
||||
// This is required by buggy servers that don't meet the specs.
|
||||
headers['Content-Length'] = 0
|
||||
}
|
||||
|
||||
const options = {
|
||||
host: host,
|
||||
port: port,
|
||||
path: uri,
|
||||
method: settings.method,
|
||||
headers: headers,
|
||||
}
|
||||
|
||||
// Reset error flag
|
||||
errorFlag = false
|
||||
|
||||
// Handle async requests
|
||||
if (settings.async) {
|
||||
// Use the proper protocol
|
||||
const doRequest = ssl ? https.request : http.request
|
||||
|
||||
// Request is being sent, set send flag
|
||||
sendFlag = true
|
||||
|
||||
// As per spec, this is called here for historical reasons.
|
||||
self.dispatchEvent('readystatechange')
|
||||
|
||||
// Create the request
|
||||
request = doRequest(options, resp => {
|
||||
response = resp
|
||||
response.setEncoding('utf8')
|
||||
|
||||
setState(self.HEADERS_RECEIVED)
|
||||
self.status = response.statusCode
|
||||
|
||||
response.on('data', chunk => {
|
||||
// Make sure there's some data
|
||||
if (chunk) {
|
||||
self.responseText += chunk
|
||||
}
|
||||
// Don't emit state changes if the connection has been aborted.
|
||||
if (sendFlag) {
|
||||
setState(self.LOADING)
|
||||
}
|
||||
})
|
||||
|
||||
response.on('end', () => {
|
||||
if (sendFlag) {
|
||||
// Discard the 'end' event if the connection has been aborted
|
||||
setState(self.DONE)
|
||||
sendFlag = false
|
||||
}
|
||||
})
|
||||
|
||||
response.on('error', error => {
|
||||
self.handleError(error)
|
||||
})
|
||||
}).on('error', error => {
|
||||
self.handleError(error)
|
||||
})
|
||||
|
||||
// Node 0.4 and later won't accept empty data. Make sure it's needed.
|
||||
if (data) {
|
||||
request.write(data)
|
||||
}
|
||||
|
||||
request.end()
|
||||
|
||||
self.dispatchEvent('loadstart')
|
||||
} else {
|
||||
// Synchronous
|
||||
// Create a temporary file for communication with the other Node process
|
||||
const syncFile = '.node-xmlhttprequest-sync-' + process.pid
|
||||
fs.writeFileSync(syncFile, '', 'utf8')
|
||||
// The async request the other Node process executes
|
||||
const execString =
|
||||
"var http = require('http'), https = require('https'), fs = require('fs');" +
|
||||
'var doRequest = http' +
|
||||
(ssl ? 's' : '') +
|
||||
'.request;' +
|
||||
'var options = ' +
|
||||
JSON.stringify(options) +
|
||||
';' +
|
||||
"var responseText = '';" +
|
||||
'var req = doRequest(options, function(response) {' +
|
||||
"response.setEncoding('utf8');" +
|
||||
"response.on('data', function(chunk) {" +
|
||||
'responseText += chunk;' +
|
||||
'});' +
|
||||
"response.on('end', function() {" +
|
||||
"fs.writeFileSync('" +
|
||||
syncFile +
|
||||
"', 'NODE-XMLHTTPREQUEST-STATUS:' + response.statusCode + ',' + responseText, 'utf8');" +
|
||||
'});' +
|
||||
"response.on('error', function(error) {" +
|
||||
"fs.writeFileSync('" +
|
||||
syncFile +
|
||||
"', 'NODE-XMLHTTPREQUEST-ERROR:' + JSON.stringify(error), 'utf8');" +
|
||||
'});' +
|
||||
"}).on('error', function(error) {" +
|
||||
"fs.writeFileSync('" +
|
||||
syncFile +
|
||||
"', 'NODE-XMLHTTPREQUEST-ERROR:' + JSON.stringify(error), 'utf8');" +
|
||||
'});' +
|
||||
(data ? "req.write('" + data.replace(/'/g, "\\'") + "');" : '') +
|
||||
'req.end();'
|
||||
// Start the other Node Process, executing this string
|
||||
const syncProc = spawn(process.argv[0], ['-e', execString])
|
||||
while ((self.responseText = fs.readFileSync(syncFile, 'utf8')) === '') {
|
||||
// Wait while the file is empty
|
||||
}
|
||||
// Kill the child process once the file has data
|
||||
syncProc.stdin.end()
|
||||
// Remove the temporary file
|
||||
fs.unlinkSync(syncFile)
|
||||
if (self.responseText.match(/^NODE-XMLHTTPREQUEST-ERROR:/)) {
|
||||
// If the file returned an error, handle it
|
||||
const errorObj = self.responseText.replace(
|
||||
/^NODE-XMLHTTPREQUEST-ERROR:/,
|
||||
''
|
||||
)
|
||||
self.handleError(errorObj)
|
||||
} else {
|
||||
// If the file returned okay, parse its data and move to the DONE state
|
||||
self.status = self.responseText.replace(
|
||||
/^NODE-XMLHTTPREQUEST-STATUS:([0-9]*),.*/,
|
||||
'$1'
|
||||
)
|
||||
self.responseText = self.responseText.replace(
|
||||
/^NODE-XMLHTTPREQUEST-STATUS:[0-9]*,(.*)/,
|
||||
'$1'
|
||||
)
|
||||
setState(self.DONE)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Called when an error is encountered to deal with it.
|
||||
*/
|
||||
this.handleError = function (error) {
|
||||
this.status = 503
|
||||
this.statusText = error
|
||||
this.responseText = error.stack
|
||||
errorFlag = true
|
||||
setState(this.DONE)
|
||||
}
|
||||
|
||||
/**
|
||||
* Aborts a request.
|
||||
*/
|
||||
this.abort = function () {
|
||||
if (request) {
|
||||
request.abort()
|
||||
request = null
|
||||
}
|
||||
|
||||
headers = defaultHeaders
|
||||
this.responseText = ''
|
||||
this.responseXML = ''
|
||||
|
||||
errorFlag = true
|
||||
|
||||
if (
|
||||
this.readyState !== this.UNSENT &&
|
||||
(this.readyState !== this.OPENED || sendFlag) &&
|
||||
this.readyState !== this.DONE
|
||||
) {
|
||||
sendFlag = false
|
||||
setState(this.DONE)
|
||||
}
|
||||
this.readyState = this.UNSENT
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds an event listener. Preferred method of binding to events.
|
||||
*/
|
||||
this.addEventListener = function (event, callback) {
|
||||
if (!(event in listeners)) {
|
||||
listeners[event] = []
|
||||
}
|
||||
// Currently allows duplicate callbacks. Should it?
|
||||
listeners[event].push(callback)
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove an event callback that has already been bound.
|
||||
* Only works on the matching funciton, cannot be a copy.
|
||||
*/
|
||||
this.removeEventListener = function (event, callback) {
|
||||
if (event in listeners) {
|
||||
// Filter will return a new array with the callback removed
|
||||
listeners[event] = listeners[event].filter(ev => {
|
||||
return ev !== callback
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Dispatch any events, including both "on" methods and events attached using addEventListener.
|
||||
*/
|
||||
this.dispatchEvent = function (event) {
|
||||
if (typeof self['on' + event] === 'function') {
|
||||
self['on' + event]()
|
||||
}
|
||||
if (event in listeners) {
|
||||
for (let i = 0, len = listeners[event].length; i < len; i++) {
|
||||
listeners[event][i].call(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Changes readyState and calls onreadystatechange.
|
||||
*
|
||||
* @param int state New state
|
||||
*/
|
||||
var setState = function (state) {
|
||||
if (self.readyState !== state) {
|
||||
self.readyState = state
|
||||
|
||||
if (
|
||||
settings.async ||
|
||||
self.readyState < self.OPENED ||
|
||||
self.readyState === self.DONE
|
||||
) {
|
||||
self.dispatchEvent('readystatechange')
|
||||
}
|
||||
|
||||
if (self.readyState === self.DONE && !errorFlag) {
|
||||
self.dispatchEvent('load')
|
||||
// @TODO figure out InspectorInstrumentation::didLoadXHR(cookie)
|
||||
self.dispatchEvent('loadend')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
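A minimal sketch of driving the wrapper above the way browser code would; the endpoint is illustrative:

// Illustrative only; the URL and port are assumptions.
const { XMLHttpRequest } = require('./XMLHttpRequest')

const xhr = new XMLHttpRequest()
xhr.onreadystatechange = function () {
  if (xhr.readyState === xhr.DONE) {
    console.log(xhr.status, xhr.responseText)
  }
}
xhr.open('GET', 'http://localhost:3026/clients', true)
xhr.setRequestHeader('Accept', 'application/json')
xhr.send()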
23 services/real-time/test/acceptance/scripts/full-test.sh Executable file
@@ -0,0 +1,23 @@
#! /usr/bin/env bash

# npm rebuild

echo ">> Starting server..."

grunt --no-color forever:app:start

echo ">> Server started"

sleep 5

echo ">> Running acceptance tests..."
grunt --no-color mochaTest:acceptance
_test_exit_code=$?

echo ">> Killing server"

grunt --no-color forever:app:stop

echo ">> Done"

exit $_test_exit_code
38 services/real-time/test/setup.js Normal file
@@ -0,0 +1,38 @@
const chai = require('chai')
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')

// Chai configuration
chai.should()

// Global stubs
const sandbox = sinon.createSandbox()
const stubs = {
  logger: {
    debug: sandbox.stub(),
    log: sandbox.stub(),
    info: sandbox.stub(),
    warn: sandbox.stub(),
    err: sandbox.stub(),
    error: sandbox.stub(),
  },
}

// SandboxedModule configuration
SandboxedModule.configure({
  requires: {
    'logger-sharelatex': stubs.logger,
  },
  globals: { Buffer, JSON, console, process },
})

// Mocha hooks
exports.mochaHooks = {
  beforeEach() {
    this.logger = stubs.logger
  },

  afterEach() {
    sandbox.reset()
  },
}
318 services/real-time/test/unit/js/AuthorizationManagerTests.js Normal file
@@ -0,0 +1,318 @@
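The tests that follow exercise the AuthorizationManager module; for orientation, a hedged sketch of that calling pattern, with an illustrative client and doc id:

// Illustrative only; the client shape and doc id mirror the fixtures in the tests below.
const AuthorizationManager = require('../../../app/js/AuthorizationManager')

const client = { ol_context: { privilege_level: 'readAndWrite' } }
const docId = '53c3b8c85fee64000023dc6e'

AuthorizationManager.addAccessToDoc(client, docId, () => {
  AuthorizationManager.assertClientCanEditProjectAndDoc(client, docId, error => {
    if (error) {
      // 'not authorized' unless both the project-level and doc-level checks pass
      return console.error(error.message)
    }
    // safe to apply the edit
  })
})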
/* eslint-disable
|
||||
no-return-assign,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const { expect } = require('chai')
|
||||
const sinon = require('sinon')
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const path = require('path')
|
||||
const modulePath = '../../../app/js/AuthorizationManager'
|
||||
|
||||
describe('AuthorizationManager', function () {
|
||||
beforeEach(function () {
|
||||
this.client = { ol_context: {} }
|
||||
|
||||
return (this.AuthorizationManager = SandboxedModule.require(modulePath, {
|
||||
requires: {},
|
||||
}))
|
||||
})
|
||||
|
||||
describe('assertClientCanViewProject', function () {
|
||||
it('should allow the readOnly privilegeLevel', function (done) {
|
||||
this.client.ol_context.privilege_level = 'readOnly'
|
||||
return this.AuthorizationManager.assertClientCanViewProject(
|
||||
this.client,
|
||||
error => {
|
||||
expect(error).to.be.null
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should allow the readAndWrite privilegeLevel', function (done) {
|
||||
this.client.ol_context.privilege_level = 'readAndWrite'
|
||||
return this.AuthorizationManager.assertClientCanViewProject(
|
||||
this.client,
|
||||
error => {
|
||||
expect(error).to.be.null
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should allow the owner privilegeLevel', function (done) {
|
||||
this.client.ol_context.privilege_level = 'owner'
|
||||
return this.AuthorizationManager.assertClientCanViewProject(
|
||||
this.client,
|
||||
error => {
|
||||
expect(error).to.be.null
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return an error with any other privilegeLevel', function (done) {
|
||||
this.client.ol_context.privilege_level = 'unknown'
|
||||
return this.AuthorizationManager.assertClientCanViewProject(
|
||||
this.client,
|
||||
error => {
|
||||
error.message.should.equal('not authorized')
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('assertClientCanEditProject', function () {
|
||||
it('should not allow the readOnly privilegeLevel', function (done) {
|
||||
this.client.ol_context.privilege_level = 'readOnly'
|
||||
return this.AuthorizationManager.assertClientCanEditProject(
|
||||
this.client,
|
||||
error => {
|
||||
error.message.should.equal('not authorized')
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should allow the readAndWrite privilegeLevel', function (done) {
|
||||
this.client.ol_context.privilege_level = 'readAndWrite'
|
||||
return this.AuthorizationManager.assertClientCanEditProject(
|
||||
this.client,
|
||||
error => {
|
||||
expect(error).to.be.null
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should allow the owner privilegeLevel', function (done) {
|
||||
this.client.ol_context.privilege_level = 'owner'
|
||||
return this.AuthorizationManager.assertClientCanEditProject(
|
||||
this.client,
|
||||
error => {
|
||||
expect(error).to.be.null
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return an error with any other privilegeLevel', function (done) {
|
||||
this.client.ol_context.privilege_level = 'unknown'
|
||||
return this.AuthorizationManager.assertClientCanEditProject(
|
||||
this.client,
|
||||
error => {
|
||||
error.message.should.equal('not authorized')
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
// check doc access for project
|
||||
|
||||
describe('assertClientCanViewProjectAndDoc', function () {
|
||||
beforeEach(function () {
|
||||
this.doc_id = '12345'
|
||||
this.callback = sinon.stub()
|
||||
return (this.client.ol_context = {})
|
||||
})
|
||||
|
||||
describe('when not authorised at the project level', function () {
|
||||
beforeEach(function () {
|
||||
return (this.client.ol_context.privilege_level = 'unknown')
|
||||
})
|
||||
|
||||
it('should not allow access', function () {
|
||||
return this.AuthorizationManager.assertClientCanViewProjectAndDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
err => err.message.should.equal('not authorized')
|
||||
)
|
||||
})
|
||||
|
||||
return describe('even when authorised at the doc level', function () {
|
||||
beforeEach(function (done) {
|
||||
return this.AuthorizationManager.addAccessToDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return it('should not allow access', function () {
|
||||
return this.AuthorizationManager.assertClientCanViewProjectAndDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
err => err.message.should.equal('not authorized')
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when authorised at the project level', function () {
|
||||
beforeEach(function () {
|
||||
return (this.client.ol_context.privilege_level = 'readOnly')
|
||||
})
|
||||
|
||||
describe('and not authorised at the document level', function () {
|
||||
return it('should not allow access', function () {
|
||||
return this.AuthorizationManager.assertClientCanViewProjectAndDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
err => err.message.should.equal('not authorized')
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('and authorised at the document level', function () {
|
||||
beforeEach(function (done) {
|
||||
return this.AuthorizationManager.addAccessToDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return it('should allow access', function () {
|
||||
this.AuthorizationManager.assertClientCanViewProjectAndDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
this.callback
|
||||
)
|
||||
return this.callback.calledWith(null).should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when document authorisation is added and then removed', function () {
|
||||
beforeEach(function (done) {
|
||||
return this.AuthorizationManager.addAccessToDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
() => {
|
||||
return this.AuthorizationManager.removeAccessToDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
done
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should deny access', function () {
|
||||
return this.AuthorizationManager.assertClientCanViewProjectAndDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
err => err.message.should.equal('not authorized')
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
return describe('assertClientCanEditProjectAndDoc', function () {
|
||||
beforeEach(function () {
|
||||
this.doc_id = '12345'
|
||||
this.callback = sinon.stub()
|
||||
return (this.client.ol_context = {})
|
||||
})
|
||||
|
||||
describe('when not authorised at the project level', function () {
|
||||
beforeEach(function () {
|
||||
return (this.client.ol_context.privilege_level = 'readOnly')
|
||||
})
|
||||
|
||||
it('should not allow access', function () {
|
||||
return this.AuthorizationManager.assertClientCanEditProjectAndDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
err => err.message.should.equal('not authorized')
|
||||
)
|
||||
})
|
||||
|
||||
return describe('even when authorised at the doc level', function () {
|
||||
beforeEach(function (done) {
|
||||
return this.AuthorizationManager.addAccessToDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return it('should not allow access', function () {
|
||||
return this.AuthorizationManager.assertClientCanEditProjectAndDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
err => err.message.should.equal('not authorized')
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when authorised at the project level', function () {
|
||||
beforeEach(function () {
|
||||
return (this.client.ol_context.privilege_level = 'readAndWrite')
|
||||
})
|
||||
|
||||
describe('and not authorised at the document level', function () {
|
||||
return it('should not allow access', function () {
|
||||
return this.AuthorizationManager.assertClientCanEditProjectAndDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
err => err.message.should.equal('not authorized')
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('and authorised at the document level', function () {
|
||||
beforeEach(function (done) {
|
||||
return this.AuthorizationManager.addAccessToDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
return it('should allow access', function () {
|
||||
this.AuthorizationManager.assertClientCanEditProjectAndDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
this.callback
|
||||
)
|
||||
return this.callback.calledWith(null).should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when document authorisation is added and then removed', function () {
|
||||
beforeEach(function (done) {
|
||||
return this.AuthorizationManager.addAccessToDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
() => {
|
||||
return this.AuthorizationManager.removeAccessToDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
done
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should deny access', function () {
|
||||
return this.AuthorizationManager.assertClientCanEditProjectAndDoc(
|
||||
this.client,
|
||||
this.doc_id,
|
||||
err => err.message.should.equal('not authorized')
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
432 services/real-time/test/unit/js/ChannelManagerTests.js Normal file
@@ -0,0 +1,432 @@
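The tests that follow exercise ChannelManager's subscribe, unsubscribe and publish entry points; for orientation, a hedged sketch of that calling pattern, with rclient standing in for a connected redis pub/sub client:

// Illustrative only; rclient is an assumption, not part of this commit.
const ChannelManager = require('../../../app/js/ChannelManager.js')

async function followDoc(rclient, docId) {
  // subscribe() returns a promise; failures are counted and the pending entry cleaned up
  await ChannelManager.subscribe(rclient, 'applied-ops', docId)
}

function unfollowDoc(rclient, docId) {
  ChannelManager.unsubscribe(rclient, 'applied-ops', docId)
}

function broadcast(rclient, docId, message) {
  // with settings.publishOnIndividualChannels the message goes to `applied-ops:${docId}`
  ChannelManager.publish(rclient, 'applied-ops', docId, message)
}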
/* eslint-disable
|
||||
no-return-assign,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const { expect } = require('chai')
|
||||
const sinon = require('sinon')
|
||||
const modulePath = '../../../app/js/ChannelManager.js'
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
|
||||
describe('ChannelManager', function () {
|
||||
beforeEach(function () {
|
||||
this.rclient = {}
|
||||
this.other_rclient = {}
|
||||
return (this.ChannelManager = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'@overleaf/settings': (this.settings = {}),
|
||||
'@overleaf/metrics': (this.metrics = {
|
||||
inc: sinon.stub(),
|
||||
summary: sinon.stub(),
|
||||
}),
|
||||
},
|
||||
}))
|
||||
})
|
||||
|
||||
describe('subscribe', function () {
|
||||
describe('when there is no existing subscription for this redis client', function () {
|
||||
beforeEach(function (done) {
|
||||
this.rclient.subscribe = sinon.stub().resolves()
|
||||
this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
return setTimeout(done)
|
||||
})
|
||||
|
||||
return it('should subscribe to the redis channel', function () {
|
||||
return this.rclient.subscribe
|
||||
.calledWithExactly('applied-ops:1234567890abcdef')
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when there is an existing subscription for this redis client', function () {
|
||||
beforeEach(function (done) {
|
||||
this.rclient.subscribe = sinon.stub().resolves()
|
||||
this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
return setTimeout(done)
|
||||
})
|
||||
|
||||
return it('should subscribe to the redis channel again', function () {
|
||||
return this.rclient.subscribe.callCount.should.equal(2)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when subscribe errors', function () {
|
||||
beforeEach(function (done) {
|
||||
this.rclient.subscribe = sinon
|
||||
.stub()
|
||||
.onFirstCall()
|
||||
.rejects(new Error('some redis error'))
|
||||
.onSecondCall()
|
||||
.resolves()
|
||||
const p = this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
p.then(() => done(new Error('should not subscribe but fail'))).catch(
|
||||
err => {
|
||||
err.message.should.equal('failed to subscribe to channel')
|
||||
err.cause.message.should.equal('some redis error')
|
||||
this.ChannelManager.getClientMapEntry(this.rclient)
|
||||
.has('applied-ops:1234567890abcdef')
|
||||
.should.equal(false)
|
||||
this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
// subscribe is wrapped in Promise, delay other assertions
|
||||
return setTimeout(done)
|
||||
}
|
||||
)
|
||||
return null
|
||||
})
|
||||
|
||||
it('should have recorded the error', function () {
|
||||
return expect(
|
||||
this.metrics.inc.calledWithExactly('subscribe.failed.applied-ops')
|
||||
).to.equal(true)
|
||||
})
|
||||
|
||||
it('should subscribe again', function () {
|
||||
return this.rclient.subscribe.callCount.should.equal(2)
|
||||
})
|
||||
|
||||
return it('should cleanup', function () {
|
||||
return this.ChannelManager.getClientMapEntry(this.rclient)
|
||||
.has('applied-ops:1234567890abcdef')
|
||||
.should.equal(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when subscribe errors and the clientChannelMap entry was replaced', function () {
|
||||
beforeEach(function (done) {
|
||||
this.rclient.subscribe = sinon
|
||||
.stub()
|
||||
.onFirstCall()
|
||||
.rejects(new Error('some redis error'))
|
||||
.onSecondCall()
|
||||
.resolves()
|
||||
this.first = this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
// ignore error
|
||||
this.first.catch(() => {})
|
||||
expect(
|
||||
this.ChannelManager.getClientMapEntry(this.rclient).get(
|
||||
'applied-ops:1234567890abcdef'
|
||||
)
|
||||
).to.equal(this.first)
|
||||
|
||||
this.rclient.unsubscribe = sinon.stub().resolves()
|
||||
this.ChannelManager.unsubscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
this.second = this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
// should get replaced immediately
|
||||
expect(
|
||||
this.ChannelManager.getClientMapEntry(this.rclient).get(
|
||||
'applied-ops:1234567890abcdef'
|
||||
)
|
||||
).to.equal(this.second)
|
||||
|
||||
// let the first subscribe error -> unsubscribe -> subscribe
|
||||
return setTimeout(done)
|
||||
})
|
||||
|
||||
return it('should cleanup the second subscribePromise', function () {
|
||||
return expect(
|
||||
this.ChannelManager.getClientMapEntry(this.rclient).has(
|
||||
'applied-ops:1234567890abcdef'
|
||||
)
|
||||
).to.equal(false)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when there is an existing subscription for another redis client but not this one', function () {
|
||||
beforeEach(function (done) {
|
||||
this.other_rclient.subscribe = sinon.stub().resolves()
|
||||
this.ChannelManager.subscribe(
|
||||
this.other_rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
this.rclient.subscribe = sinon.stub().resolves() // discard the original stub
|
||||
this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
return setTimeout(done)
|
||||
})
|
||||
|
||||
return it('should subscribe to the redis channel on this redis client', function () {
|
||||
return this.rclient.subscribe
|
||||
.calledWithExactly('applied-ops:1234567890abcdef')
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('unsubscribe', function () {
|
||||
describe('when there is no existing subscription for this redis client', function () {
|
||||
beforeEach(function (done) {
|
||||
this.rclient.unsubscribe = sinon.stub().resolves()
|
||||
this.ChannelManager.unsubscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
return setTimeout(done)
|
||||
})
|
||||
|
||||
return it('should unsubscribe from the redis channel', function () {
|
||||
return this.rclient.unsubscribe.called.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when there is an existing subscription for this another redis client but not this one', function () {
|
||||
beforeEach(function (done) {
|
||||
this.other_rclient.subscribe = sinon.stub().resolves()
|
||||
this.rclient.unsubscribe = sinon.stub().resolves()
|
||||
this.ChannelManager.subscribe(
|
||||
this.other_rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
this.ChannelManager.unsubscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
return setTimeout(done)
|
||||
})
|
||||
|
||||
return it('should still unsubscribe from the redis channel on this client', function () {
|
||||
return this.rclient.unsubscribe.called.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when unsubscribe errors and completes', function () {
|
||||
beforeEach(function (done) {
|
||||
this.rclient.subscribe = sinon.stub().resolves()
|
||||
this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
this.rclient.unsubscribe = sinon
|
||||
.stub()
|
||||
.rejects(new Error('some redis error'))
|
||||
this.ChannelManager.unsubscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
setTimeout(done)
|
||||
return null
|
||||
})
|
||||
|
||||
it('should have cleaned up', function () {
|
||||
return this.ChannelManager.getClientMapEntry(this.rclient)
|
||||
.has('applied-ops:1234567890abcdef')
|
||||
.should.equal(false)
|
||||
})
|
||||
|
||||
return it('should not error out when subscribing again', function (done) {
|
||||
const p = this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
p.then(() => done()).catch(done)
|
||||
return null
|
||||
})
|
||||
})
|
||||
|
||||
describe('when unsubscribe errors and another client subscribes at the same time', function () {
|
||||
beforeEach(function (done) {
|
||||
this.rclient.subscribe = sinon.stub().resolves()
|
||||
this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
let rejectSubscribe
|
||||
this.rclient.unsubscribe = () =>
|
||||
new Promise((resolve, reject) => (rejectSubscribe = reject))
|
||||
this.ChannelManager.unsubscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
|
||||
setTimeout(() => {
|
||||
// delay, actualUnsubscribe should not see the new subscribe request
|
||||
this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
.then(() => setTimeout(done))
|
||||
.catch(done)
|
||||
return setTimeout(() =>
|
||||
// delay, rejectSubscribe is not defined immediately
|
||||
rejectSubscribe(new Error('redis error'))
|
||||
)
|
||||
})
|
||||
return null
|
||||
})
|
||||
|
||||
it('should have recorded the error', function () {
|
||||
return expect(
|
||||
this.metrics.inc.calledWithExactly('unsubscribe.failed.applied-ops')
|
||||
).to.equal(true)
|
||||
})
|
||||
|
||||
it('should have subscribed', function () {
|
||||
return this.rclient.subscribe.called.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should have discarded the finished Promise', function () {
|
||||
return this.ChannelManager.getClientMapEntry(this.rclient)
|
||||
.has('applied-ops:1234567890abcdef')
|
||||
.should.equal(false)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when there is an existing subscription for this redis client', function () {
|
||||
beforeEach(function (done) {
|
||||
this.rclient.subscribe = sinon.stub().resolves()
|
||||
this.rclient.unsubscribe = sinon.stub().resolves()
|
||||
this.ChannelManager.subscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
this.ChannelManager.unsubscribe(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef'
|
||||
)
|
||||
return setTimeout(done)
|
||||
})
|
||||
|
||||
return it('should unsubscribe from the redis channel', function () {
|
||||
return this.rclient.unsubscribe
|
||||
.calledWithExactly('applied-ops:1234567890abcdef')
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
return describe('publish', function () {
|
||||
describe("when the channel is 'all'", function () {
|
||||
beforeEach(function () {
|
||||
this.rclient.publish = sinon.stub()
|
||||
return this.ChannelManager.publish(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'all',
|
||||
'random-message'
|
||||
)
|
||||
})
|
||||
|
||||
return it('should publish on the base channel', function () {
|
||||
return this.rclient.publish
|
||||
.calledWithExactly('applied-ops', 'random-message')
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the channel has an specific id', function () {
|
||||
describe('when the individual channel setting is false', function () {
|
||||
beforeEach(function () {
|
||||
this.rclient.publish = sinon.stub()
|
||||
this.settings.publishOnIndividualChannels = false
|
||||
return this.ChannelManager.publish(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef',
|
||||
'random-message'
|
||||
)
|
||||
})
|
||||
|
||||
return it('should publish on the per-id channel', function () {
|
||||
this.rclient.publish
|
||||
.calledWithExactly('applied-ops', 'random-message')
|
||||
.should.equal(true)
|
||||
return this.rclient.publish.calledOnce.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when the individual channel setting is true', function () {
|
||||
beforeEach(function () {
|
||||
this.rclient.publish = sinon.stub()
|
||||
this.settings.publishOnIndividualChannels = true
|
||||
return this.ChannelManager.publish(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'1234567890abcdef',
|
||||
'random-message'
|
||||
)
|
||||
})
|
||||
|
||||
return it('should publish on the per-id channel', function () {
|
||||
this.rclient.publish
|
||||
.calledWithExactly('applied-ops:1234567890abcdef', 'random-message')
|
||||
.should.equal(true)
|
||||
return this.rclient.publish.calledOnce.should.equal(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
return describe('metrics', function () {
|
||||
beforeEach(function () {
|
||||
this.rclient.publish = sinon.stub()
|
||||
return this.ChannelManager.publish(
|
||||
this.rclient,
|
||||
'applied-ops',
|
||||
'all',
|
||||
'random-message'
|
||||
)
|
||||
})
|
||||
|
||||
return it('should track the payload size', function () {
|
||||
return this.metrics.summary
|
||||
.calledWithExactly(
|
||||
'redis.publish.applied-ops',
|
||||
'random-message'.length
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
407 services/real-time/test/unit/js/ConnectedUsersManagerTests.js Normal file
@@ -0,0 +1,407 @@
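The tests that follow cover ConnectedUsersManager; for orientation, a hedged sketch of the callback signatures they exercise, reusing the fixture values from the tests:

// Illustrative only; ids, user and cursor mirror the fixtures in the tests below.
const ConnectedUsersManager = require('../../../app/js/ConnectedUsersManager')

const projectId = 'dskjh2u21321'
const clientId = '32132132'
const user = { _id: 'user-id-123', first_name: 'Joe', last_name: 'Bloggs' }
const cursor = { row: 12, column: 9, doc_id: '53c3b8c85fee64000023dc6e' }

ConnectedUsersManager.updateUserPosition(projectId, clientId, user, cursor, error => {
  if (error) throw error
  ConnectedUsersManager.getConnectedUsers(projectId, (error, users) => {
    if (error) throw error
    // only clients updated recently are returned, as the tests assert
    console.log(users)
  })
})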
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-return-assign,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const assert = require('assert')
|
||||
const path = require('path')
|
||||
const sinon = require('sinon')
|
||||
const modulePath = path.join(__dirname, '../../../app/js/ConnectedUsersManager')
|
||||
const { expect } = require('chai')
|
||||
const tk = require('timekeeper')
|
||||
|
||||
describe('ConnectedUsersManager', function () {
|
||||
beforeEach(function () {
|
||||
this.settings = {
|
||||
redis: {
|
||||
realtime: {
|
||||
key_schema: {
|
||||
clientsInProject({ project_id }) {
|
||||
return `clients_in_project:${project_id}`
|
||||
},
|
||||
connectedUser({ project_id, client_id }) {
|
||||
return `connected_user:${project_id}:${client_id}`
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
this.rClient = {
|
||||
auth() {},
|
||||
setex: sinon.stub(),
|
||||
sadd: sinon.stub(),
|
||||
get: sinon.stub(),
|
||||
srem: sinon.stub(),
|
||||
del: sinon.stub(),
|
||||
smembers: sinon.stub(),
|
||||
expire: sinon.stub(),
|
||||
hset: sinon.stub(),
|
||||
hgetall: sinon.stub(),
|
||||
exec: sinon.stub(),
|
||||
multi: () => {
|
||||
return this.rClient
|
||||
},
|
||||
}
|
||||
tk.freeze(new Date())
|
||||
|
||||
this.ConnectedUsersManager = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'@overleaf/settings': this.settings,
|
||||
'@overleaf/redis-wrapper': {
|
||||
createClient: () => {
|
||||
return this.rClient
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
this.client_id = '32132132'
|
||||
this.project_id = 'dskjh2u21321'
|
||||
this.user = {
|
||||
_id: 'user-id-123',
|
||||
first_name: 'Joe',
|
||||
last_name: 'Bloggs',
|
||||
email: 'joe@example.com',
|
||||
}
|
||||
return (this.cursorData = {
|
||||
row: 12,
|
||||
column: 9,
|
||||
doc_id: '53c3b8c85fee64000023dc6e',
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
return tk.reset()
|
||||
})
|
||||
|
||||
describe('updateUserPosition', function () {
|
||||
beforeEach(function () {
|
||||
return this.rClient.exec.callsArgWith(0)
|
||||
})
|
||||
|
||||
it('should set a key with the date and give it a ttl', function (done) {
|
||||
return this.ConnectedUsersManager.updateUserPosition(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
this.user,
|
||||
null,
|
||||
err => {
|
||||
this.rClient.hset
|
||||
.calledWith(
|
||||
`connected_user:${this.project_id}:${this.client_id}`,
|
||||
'last_updated_at',
|
||||
Date.now()
|
||||
)
|
||||
.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should set a key with the user_id', function (done) {
|
||||
return this.ConnectedUsersManager.updateUserPosition(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
this.user,
|
||||
null,
|
||||
err => {
|
||||
this.rClient.hset
|
||||
.calledWith(
|
||||
`connected_user:${this.project_id}:${this.client_id}`,
|
||||
'user_id',
|
||||
this.user._id
|
||||
)
|
||||
.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should set a key with the first_name', function (done) {
|
||||
return this.ConnectedUsersManager.updateUserPosition(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
this.user,
|
||||
null,
|
||||
err => {
|
||||
this.rClient.hset
|
||||
.calledWith(
|
||||
`connected_user:${this.project_id}:${this.client_id}`,
|
||||
'first_name',
|
||||
this.user.first_name
|
||||
)
|
||||
.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should set a key with the last_name', function (done) {
|
||||
return this.ConnectedUsersManager.updateUserPosition(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
this.user,
|
||||
null,
|
||||
err => {
|
||||
this.rClient.hset
|
||||
.calledWith(
|
||||
`connected_user:${this.project_id}:${this.client_id}`,
|
||||
'last_name',
|
||||
this.user.last_name
|
||||
)
|
||||
.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should set a key with the email', function (done) {
|
||||
return this.ConnectedUsersManager.updateUserPosition(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
this.user,
|
||||
null,
|
||||
err => {
|
||||
this.rClient.hset
|
||||
.calledWith(
|
||||
`connected_user:${this.project_id}:${this.client_id}`,
|
||||
'email',
|
||||
this.user.email
|
||||
)
|
||||
.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should push the client_id on to the project list', function (done) {
|
||||
return this.ConnectedUsersManager.updateUserPosition(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
this.user,
|
||||
null,
|
||||
err => {
|
||||
this.rClient.sadd
|
||||
.calledWith(`clients_in_project:${this.project_id}`, this.client_id)
|
||||
.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should add a ttl to the project set so it stays clean', function (done) {
|
||||
return this.ConnectedUsersManager.updateUserPosition(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
this.user,
|
||||
null,
|
||||
err => {
|
||||
this.rClient.expire
|
||||
.calledWith(
|
||||
`clients_in_project:${this.project_id}`,
|
||||
24 * 4 * 60 * 60
|
||||
)
|
||||
.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should add a ttl to the connected user so it stays clean', function (done) {
|
||||
return this.ConnectedUsersManager.updateUserPosition(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
this.user,
|
||||
null,
|
||||
err => {
|
||||
this.rClient.expire
|
||||
.calledWith(
|
||||
`connected_user:${this.project_id}:${this.client_id}`,
|
||||
60 * 15
|
||||
)
|
||||
.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should set the cursor position when provided', function (done) {
|
||||
return this.ConnectedUsersManager.updateUserPosition(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
this.user,
|
||||
this.cursorData,
|
||||
err => {
|
||||
this.rClient.hset
|
||||
.calledWith(
|
||||
`connected_user:${this.project_id}:${this.client_id}`,
|
||||
'cursorData',
|
||||
JSON.stringify(this.cursorData)
|
||||
)
|
||||
.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('markUserAsDisconnected', function () {
|
||||
beforeEach(function () {
|
||||
return this.rClient.exec.callsArgWith(0)
|
||||
})
|
||||
|
||||
it('should remove the user from the set', function (done) {
|
||||
return this.ConnectedUsersManager.markUserAsDisconnected(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
err => {
|
||||
this.rClient.srem
|
||||
.calledWith(`clients_in_project:${this.project_id}`, this.client_id)
|
||||
.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should delete the connected_user string', function (done) {
|
||||
return this.ConnectedUsersManager.markUserAsDisconnected(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
err => {
|
||||
this.rClient.del
|
||||
.calledWith(`connected_user:${this.project_id}:${this.client_id}`)
|
||||
.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should add a ttl to the connected user set so it stays clean', function (done) {
|
||||
return this.ConnectedUsersManager.markUserAsDisconnected(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
err => {
|
||||
this.rClient.expire
|
||||
.calledWith(
|
||||
`clients_in_project:${this.project_id}`,
|
||||
24 * 4 * 60 * 60
|
||||
)
|
||||
.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('_getConnectedUser', function () {
|
||||
it('should return a connected user if there is a user object', function (done) {
|
||||
const cursorData = JSON.stringify({ cursorData: { row: 1 } })
|
||||
this.rClient.hgetall.callsArgWith(1, null, {
|
||||
connected_at: new Date(),
|
||||
user_id: this.user._id,
|
||||
last_updated_at: `${Date.now()}`,
|
||||
cursorData,
|
||||
})
|
||||
return this.ConnectedUsersManager._getConnectedUser(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
(err, result) => {
|
||||
result.connected.should.equal(true)
|
||||
result.client_id.should.equal(this.client_id)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return a not connected user if there is no object', function (done) {
|
||||
this.rClient.hgetall.callsArgWith(1, null, null)
|
||||
return this.ConnectedUsersManager._getConnectedUser(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
(err, result) => {
|
||||
result.connected.should.equal(false)
|
||||
result.client_id.should.equal(this.client_id)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return a not connected user if there is an empty object', function (done) {
|
||||
this.rClient.hgetall.callsArgWith(1, null, {})
|
||||
return this.ConnectedUsersManager._getConnectedUser(
|
||||
this.project_id,
|
||||
this.client_id,
|
||||
(err, result) => {
|
||||
result.connected.should.equal(false)
|
||||
result.client_id.should.equal(this.client_id)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('getConnectedUsers', function () {
|
||||
beforeEach(function () {
|
||||
this.users = ['1234', '5678', '9123', '8234']
|
||||
this.rClient.smembers.callsArgWith(1, null, this.users)
|
||||
this.ConnectedUsersManager._getConnectedUser = sinon.stub()
|
||||
this.ConnectedUsersManager._getConnectedUser
|
||||
.withArgs(this.project_id, this.users[0])
|
||||
.callsArgWith(2, null, {
|
||||
connected: true,
|
||||
client_age: 2,
|
||||
client_id: this.users[0],
|
||||
})
|
||||
this.ConnectedUsersManager._getConnectedUser
|
||||
.withArgs(this.project_id, this.users[1])
|
||||
.callsArgWith(2, null, {
|
||||
connected: false,
|
||||
client_age: 1,
|
||||
client_id: this.users[1],
|
||||
})
|
||||
this.ConnectedUsersManager._getConnectedUser
|
||||
.withArgs(this.project_id, this.users[2])
|
||||
.callsArgWith(2, null, {
|
||||
connected: true,
|
||||
client_age: 3,
|
||||
client_id: this.users[2],
|
||||
})
|
||||
return this.ConnectedUsersManager._getConnectedUser
|
||||
.withArgs(this.project_id, this.users[3])
|
||||
.callsArgWith(2, null, {
|
||||
connected: true,
|
||||
client_age: 11,
|
||||
client_id: this.users[3],
|
||||
})
|
||||
}) // connected but old
|
||||
|
||||
return it('should only return the users in the list which are still in redis and recently updated', function (done) {
|
||||
return this.ConnectedUsersManager.getConnectedUsers(
|
||||
this.project_id,
|
||||
(err, users) => {
|
||||
users.length.should.equal(2)
|
||||
users[0].should.deep.equal({
|
||||
client_id: this.users[0],
|
||||
client_age: 2,
|
||||
connected: true,
|
||||
})
|
||||
users[1].should.deep.equal({
|
||||
client_id: this.users[2],
|
||||
client_age: 3,
|
||||
connected: true,
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
|
@@ -0,0 +1,257 @@
/* eslint-disable
|
||||
camelcase,
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const sinon = require('sinon')
|
||||
const modulePath = require('path').join(
|
||||
__dirname,
|
||||
'../../../app/js/DocumentUpdaterController'
|
||||
)
|
||||
const MockClient = require('./helpers/MockClient')
|
||||
|
||||
describe('DocumentUpdaterController', function () {
|
||||
beforeEach(function () {
|
||||
this.project_id = 'project-id-123'
|
||||
this.doc_id = 'doc-id-123'
|
||||
this.callback = sinon.stub()
|
||||
this.io = { mock: 'socket.io' }
|
||||
this.rclient = []
|
||||
this.RoomEvents = { on: sinon.stub() }
|
||||
this.EditorUpdatesController = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'@overleaf/settings': (this.settings = {
|
||||
redis: {
|
||||
documentupdater: {
|
||||
key_schema: {
|
||||
pendingUpdates({ doc_id }) {
|
||||
return `PendingUpdates:${doc_id}`
|
||||
},
|
||||
},
|
||||
},
|
||||
pubsub: null,
|
||||
},
|
||||
}),
|
||||
'./RedisClientManager': {
|
||||
createClientList: () => {
|
||||
this.redis = {
|
||||
createClient: name => {
|
||||
let rclientStub
|
||||
this.rclient.push((rclientStub = { name }))
|
||||
return rclientStub
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
'./SafeJsonParse': (this.SafeJsonParse = {
|
||||
parse: (data, cb) => cb(null, JSON.parse(data)),
|
||||
}),
|
||||
'./EventLogger': (this.EventLogger = { checkEventOrder: sinon.stub() }),
|
||||
'./HealthCheckManager': { check: sinon.stub() },
|
||||
'@overleaf/metrics': (this.metrics = { inc: sinon.stub() }),
|
||||
'./RoomManager': (this.RoomManager = {
|
||||
eventSource: sinon.stub().returns(this.RoomEvents),
|
||||
}),
|
||||
'./ChannelManager': (this.ChannelManager = {}),
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
describe('listenForUpdatesFromDocumentUpdater', function () {
|
||||
beforeEach(function () {
|
||||
this.rclient.length = 0 // clear any existing clients
|
||||
this.EditorUpdatesController.rclientList = [
|
||||
this.redis.createClient('first'),
|
||||
this.redis.createClient('second'),
|
||||
]
|
||||
this.rclient[0].subscribe = sinon.stub()
|
||||
this.rclient[0].on = sinon.stub()
|
||||
this.rclient[1].subscribe = sinon.stub()
|
||||
this.rclient[1].on = sinon.stub()
|
||||
this.EditorUpdatesController.listenForUpdatesFromDocumentUpdater()
|
||||
})
|
||||
|
||||
it('should subscribe to the doc-updater stream', function () {
|
||||
this.rclient[0].subscribe.calledWith('applied-ops').should.equal(true)
|
||||
})
|
||||
|
||||
it('should register a callback to handle updates', function () {
|
||||
this.rclient[0].on.calledWith('message').should.equal(true)
|
||||
})
|
||||
|
||||
it('should subscribe to any additional doc-updater stream', function () {
|
||||
this.rclient[1].subscribe.calledWith('applied-ops').should.equal(true)
|
||||
this.rclient[1].on.calledWith('message').should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('_processMessageFromDocumentUpdater', function () {
|
||||
describe('with bad JSON', function () {
|
||||
beforeEach(function () {
|
||||
this.SafeJsonParse.parse = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, new Error('oops'))
|
||||
return this.EditorUpdatesController._processMessageFromDocumentUpdater(
|
||||
this.io,
|
||||
'applied-ops',
|
||||
'blah'
|
||||
)
|
||||
})
|
||||
|
||||
it('should log an error', function () {
|
||||
return this.logger.error.called.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('with update', function () {
|
||||
beforeEach(function () {
|
||||
this.message = {
|
||||
doc_id: this.doc_id,
|
||||
op: { t: 'foo', p: 12 },
|
||||
}
|
||||
this.EditorUpdatesController._applyUpdateFromDocumentUpdater =
|
||||
sinon.stub()
|
||||
return this.EditorUpdatesController._processMessageFromDocumentUpdater(
|
||||
this.io,
|
||||
'applied-ops',
|
||||
JSON.stringify(this.message)
|
||||
)
|
||||
})
|
||||
|
||||
it('should apply the update', function () {
|
||||
return this.EditorUpdatesController._applyUpdateFromDocumentUpdater
|
||||
.calledWith(this.io, this.doc_id, this.message.op)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('with error', function () {
|
||||
beforeEach(function () {
|
||||
this.message = {
|
||||
doc_id: this.doc_id,
|
||||
error: 'Something went wrong',
|
||||
}
|
||||
this.EditorUpdatesController._processErrorFromDocumentUpdater =
|
||||
sinon.stub()
|
||||
return this.EditorUpdatesController._processMessageFromDocumentUpdater(
|
||||
this.io,
|
||||
'applied-ops',
|
||||
JSON.stringify(this.message)
|
||||
)
|
||||
})
|
||||
|
||||
return it('should process the error', function () {
|
||||
return this.EditorUpdatesController._processErrorFromDocumentUpdater
|
||||
.calledWith(this.io, this.doc_id, this.message.error)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('_applyUpdateFromDocumentUpdater', function () {
|
||||
beforeEach(function () {
|
||||
this.sourceClient = new MockClient()
|
||||
this.otherClients = [new MockClient(), new MockClient()]
|
||||
this.update = {
|
||||
op: [{ t: 'foo', p: 12 }],
|
||||
meta: { source: this.sourceClient.publicId },
|
||||
v: (this.version = 42),
|
||||
doc: this.doc_id,
|
||||
}
|
||||
return (this.io.sockets = {
|
||||
clients: sinon
|
||||
.stub()
|
||||
.returns([
|
||||
this.sourceClient,
|
||||
...Array.from(this.otherClients),
|
||||
this.sourceClient,
|
||||
]),
|
||||
})
|
||||
}) // include a duplicate client
|
||||
|
||||
describe('normally', function () {
|
||||
beforeEach(function () {
|
||||
return this.EditorUpdatesController._applyUpdateFromDocumentUpdater(
|
||||
this.io,
|
||||
this.doc_id,
|
||||
this.update
|
||||
)
|
||||
})
|
||||
|
||||
it('should send a version bump to the source client', function () {
|
||||
this.sourceClient.emit
|
||||
.calledWith('otUpdateApplied', { v: this.version, doc: this.doc_id })
|
||||
.should.equal(true)
|
||||
return this.sourceClient.emit.calledOnce.should.equal(true)
|
||||
})
|
||||
|
||||
it('should get the clients connected to the document', function () {
|
||||
return this.io.sockets.clients
|
||||
.calledWith(this.doc_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should send the full update to the other clients', function () {
|
||||
return Array.from(this.otherClients).map(client =>
|
||||
client.emit
|
||||
.calledWith('otUpdateApplied', this.update)
|
||||
.should.equal(true)
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('with a duplicate op', function () {
|
||||
beforeEach(function () {
|
||||
this.update.dup = true
|
||||
return this.EditorUpdatesController._applyUpdateFromDocumentUpdater(
|
||||
this.io,
|
||||
this.doc_id,
|
||||
this.update
|
||||
)
|
||||
})
|
||||
|
||||
it('should send a version bump to the source client as usual', function () {
|
||||
return this.sourceClient.emit
|
||||
.calledWith('otUpdateApplied', { v: this.version, doc: this.doc_id })
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
return it("should not send anything to the other clients (they've already had the op)", function () {
|
||||
return Array.from(this.otherClients).map(client =>
|
||||
client.emit.calledWith('otUpdateApplied').should.equal(false)
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
return describe('_processErrorFromDocumentUpdater', function () {
|
||||
beforeEach(function () {
|
||||
this.clients = [new MockClient(), new MockClient()]
|
||||
this.io.sockets = { clients: sinon.stub().returns(this.clients) }
|
||||
return this.EditorUpdatesController._processErrorFromDocumentUpdater(
|
||||
this.io,
|
||||
this.doc_id,
|
||||
'Something went wrong'
|
||||
)
|
||||
})
|
||||
|
||||
it('should log a warning', function () {
|
||||
return this.logger.warn.called.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should disconnect all clients in that document', function () {
|
||||
this.io.sockets.clients.calledWith(this.doc_id).should.equal(true)
|
||||
return Array.from(this.clients).map(client =>
|
||||
client.disconnect.called.should.equal(true)
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
424
services/real-time/test/unit/js/DocumentUpdaterManagerTests.js
Normal file
424
services/real-time/test/unit/js/DocumentUpdaterManagerTests.js
Normal file
|
@ -0,0 +1,424 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
no-return-assign,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const sinon = require('sinon')
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const path = require('path')
|
||||
const modulePath = '../../../app/js/DocumentUpdaterManager'
|
||||
const _ = require('underscore')
|
||||
|
||||
describe('DocumentUpdaterManager', function () {
|
||||
beforeEach(function () {
|
||||
let Timer
|
||||
this.project_id = 'project-id-923'
|
||||
this.doc_id = 'doc-id-394'
|
||||
this.lines = ['one', 'two', 'three']
|
||||
this.version = 42
|
||||
this.settings = {
|
||||
apis: { documentupdater: { url: 'http://doc-updater.example.com' } },
|
||||
redis: {
|
||||
documentupdater: {
|
||||
key_schema: {
|
||||
pendingUpdates({ doc_id }) {
|
||||
return `PendingUpdates:${doc_id}`
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
maxUpdateSize: 7 * 1024 * 1024,
|
||||
pendingUpdateListShardCount: 10,
|
||||
}
|
||||
this.rclient = { auth() {} }
|
||||
|
||||
return (this.DocumentUpdaterManager = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'@overleaf/settings': this.settings,
|
||||
request: (this.request = {}),
|
||||
'@overleaf/redis-wrapper': { createClient: () => this.rclient },
|
||||
'@overleaf/metrics': (this.Metrics = {
|
||||
summary: sinon.stub(),
|
||||
Timer: (Timer = class Timer {
|
||||
done() {}
|
||||
}),
|
||||
}),
|
||||
},
|
||||
}))
|
||||
}) // avoid modifying JSON object directly
|
||||
|
||||
describe('getDocument', function () {
|
||||
beforeEach(function () {
|
||||
return (this.callback = sinon.stub())
|
||||
})
|
||||
|
||||
describe('successfully', function () {
|
||||
beforeEach(function () {
|
||||
this.body = JSON.stringify({
|
||||
lines: this.lines,
|
||||
version: this.version,
|
||||
ops: (this.ops = ['mock-op-1', 'mock-op-2']),
|
||||
ranges: (this.ranges = { mock: 'ranges' }),
|
||||
})
|
||||
this.fromVersion = 2
|
||||
this.request.get = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, null, { statusCode: 200 }, this.body)
|
||||
return this.DocumentUpdaterManager.getDocument(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.fromVersion,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should get the document from the document updater', function () {
|
||||
const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}/doc/${this.doc_id}?fromVersion=${this.fromVersion}`
|
||||
return this.request.get.calledWith(url).should.equal(true)
|
||||
})
|
||||
|
||||
return it('should call the callback with the lines, version, ranges and ops', function () {
|
||||
return this.callback
|
||||
.calledWith(null, this.lines, this.version, this.ranges, this.ops)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the document updater API returns an error', function () {
|
||||
beforeEach(function () {
|
||||
this.request.get = sinon
|
||||
.stub()
|
||||
.callsArgWith(
|
||||
1,
|
||||
(this.error = new Error('something went wrong')),
|
||||
null,
|
||||
null
|
||||
)
|
||||
return this.DocumentUpdaterManager.getDocument(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.fromVersion,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return an error to the callback', function () {
|
||||
return this.callback.calledWith(this.error).should.equal(true)
|
||||
})
|
||||
})
|
||||
;[404, 422].forEach(statusCode =>
|
||||
describe(`when the document updater returns a ${statusCode} status code`, function () {
|
||||
beforeEach(function () {
|
||||
this.request.get = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, null, { statusCode }, '')
|
||||
return this.DocumentUpdaterManager.getDocument(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.fromVersion,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return the callback with an error', function () {
|
||||
this.callback.called.should.equal(true)
|
||||
this.callback
|
||||
.calledWith(
|
||||
sinon.match({
|
||||
message: 'doc updater could not load requested ops',
|
||||
info: { statusCode },
|
||||
})
|
||||
)
|
||||
.should.equal(true)
|
||||
this.logger.error.called.should.equal(false)
|
||||
this.logger.warn.called.should.equal(false)
|
||||
})
|
||||
})
|
||||
)
|
||||
|
||||
return describe('when the document updater returns a failure error code', function () {
|
||||
beforeEach(function () {
|
||||
this.request.get = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, null, { statusCode: 500 }, '')
|
||||
return this.DocumentUpdaterManager.getDocument(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.fromVersion,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return the callback with an error', function () {
|
||||
this.callback.called.should.equal(true)
|
||||
this.callback
|
||||
.calledWith(
|
||||
sinon.match({
|
||||
message: 'doc updater returned a non-success status code',
|
||||
info: {
|
||||
action: 'getDocument',
|
||||
statusCode: 500,
|
||||
},
|
||||
})
|
||||
)
|
||||
.should.equal(true)
|
||||
this.logger.error.called.should.equal(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('flushProjectToMongoAndDelete', function () {
|
||||
beforeEach(function () {
|
||||
return (this.callback = sinon.stub())
|
||||
})
|
||||
|
||||
describe('successfully', function () {
|
||||
beforeEach(function () {
|
||||
this.request.del = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, null, { statusCode: 204 }, '')
|
||||
return this.DocumentUpdaterManager.flushProjectToMongoAndDelete(
|
||||
this.project_id,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should delete the project from the document updater', function () {
|
||||
const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}?background=true`
|
||||
return this.request.del.calledWith(url).should.equal(true)
|
||||
})
|
||||
|
||||
return it('should call the callback with no error', function () {
|
||||
return this.callback.calledWith(null).should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the document updater API returns an error', function () {
|
||||
beforeEach(function () {
|
||||
this.request.del = sinon
|
||||
.stub()
|
||||
.callsArgWith(
|
||||
1,
|
||||
(this.error = new Error('something went wrong')),
|
||||
null,
|
||||
null
|
||||
)
|
||||
return this.DocumentUpdaterManager.flushProjectToMongoAndDelete(
|
||||
this.project_id,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return an error to the callback', function () {
|
||||
return this.callback.calledWith(this.error).should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when the document updater returns a failure error code', function () {
|
||||
beforeEach(function () {
|
||||
this.request.del = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, null, { statusCode: 500 }, '')
|
||||
return this.DocumentUpdaterManager.flushProjectToMongoAndDelete(
|
||||
this.project_id,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return the callback with an error', function () {
|
||||
this.callback.called.should.equal(true)
|
||||
this.callback
|
||||
.calledWith(
|
||||
sinon.match({
|
||||
message: 'doc updater returned a non-success status code',
|
||||
info: {
|
||||
action: 'flushProjectToMongoAndDelete',
|
||||
statusCode: 500,
|
||||
},
|
||||
})
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('queueChange', function () {
|
||||
beforeEach(function () {
|
||||
this.change = {
|
||||
doc: '1234567890',
|
||||
op: [{ d: 'test', p: 345 }],
|
||||
v: 789,
|
||||
}
|
||||
this.rclient.rpush = sinon.stub().yields()
|
||||
return (this.callback = sinon.stub())
|
||||
})
|
||||
|
||||
describe('successfully', function () {
|
||||
beforeEach(function () {
|
||||
this.pendingUpdateListKey = `pending-updates-list-key-${Math.random()}`
|
||||
|
||||
this.DocumentUpdaterManager._getPendingUpdateListKey = sinon
|
||||
.stub()
|
||||
.returns(this.pendingUpdateListKey)
|
||||
this.DocumentUpdaterManager.queueChange(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.change,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should push the change', function () {
|
||||
this.rclient.rpush
|
||||
.calledWith(
|
||||
`PendingUpdates:${this.doc_id}`,
|
||||
JSON.stringify(this.change)
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should notify the doc updater of the change via the pending-updates-list queue', function () {
|
||||
this.rclient.rpush
|
||||
.calledWith(
|
||||
this.pendingUpdateListKey,
|
||||
`${this.project_id}:${this.doc_id}`
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('with error talking to redis during rpush', function () {
|
||||
beforeEach(function () {
|
||||
this.rclient.rpush = sinon
|
||||
.stub()
|
||||
.yields(new Error('something went wrong'))
|
||||
return this.DocumentUpdaterManager.queueChange(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.change,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return an error', function () {
|
||||
return this.callback
|
||||
.calledWithExactly(sinon.match(Error))
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('with null byte corruption', function () {
|
||||
beforeEach(function () {
|
||||
this.stringifyStub = sinon
|
||||
.stub(JSON, 'stringify')
|
||||
.callsFake(() => '["bad bytes! \u0000 <- here"]')
|
||||
return this.DocumentUpdaterManager.queueChange(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.change,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
this.stringifyStub.restore()
|
||||
})
|
||||
|
||||
it('should return an error', function () {
|
||||
return this.callback
|
||||
.calledWithExactly(sinon.match(Error))
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should not push the change onto the pending-updates-list queue', function () {
|
||||
return this.rclient.rpush.called.should.equal(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the update is too large', function () {
|
||||
beforeEach(function () {
|
||||
this.change = {
|
||||
op: { p: 12, t: 'update is too large'.repeat(1024 * 400) },
|
||||
}
|
||||
return this.DocumentUpdaterManager.queueChange(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.change,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should return an error', function () {
|
||||
return this.callback
|
||||
.calledWithExactly(sinon.match(Error))
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should add the size to the error', function () {
|
||||
return this.callback.args[0][0].info.updateSize.should.equal(7782422)
|
||||
})
|
||||
|
||||
return it('should not push the change onto the pending-updates-list queue', function () {
|
||||
return this.rclient.rpush.called.should.equal(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('with invalid keys', function () {
|
||||
beforeEach(function () {
|
||||
this.change = {
|
||||
op: [{ d: 'test', p: 345 }],
|
||||
version: 789, // not a valid key
|
||||
}
|
||||
return this.DocumentUpdaterManager.queueChange(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.change,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should remove the invalid keys from the change', function () {
|
||||
return this.rclient.rpush
|
||||
.calledWith(
|
||||
`PendingUpdates:${this.doc_id}`,
|
||||
JSON.stringify({ op: this.change.op })
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('_getPendingUpdateListKey', function () {
|
||||
beforeEach(function () {
|
||||
const keys = _.times(
|
||||
10000,
|
||||
this.DocumentUpdaterManager._getPendingUpdateListKey
|
||||
)
|
||||
this.keys = _.unique(keys)
|
||||
})
|
||||
it('should return normal pending updates key', function () {
|
||||
_.contains(this.keys, 'pending-updates-list').should.equal(true)
|
||||
})
|
||||
|
||||
it('should return pending-updates-list-n keys', function () {
|
||||
_.contains(this.keys, 'pending-updates-list-1').should.equal(true)
|
||||
_.contains(this.keys, 'pending-updates-list-3').should.equal(true)
|
||||
_.contains(this.keys, 'pending-updates-list-9').should.equal(true)
|
||||
})
|
||||
|
||||
it('should not include pending-updates-list-0 key', function () {
|
||||
_.contains(this.keys, 'pending-updates-list-0').should.equal(false)
|
||||
})
|
||||
|
||||
it('should not include maximum as pendingUpdateListShardCount value', function () {
|
||||
_.contains(this.keys, 'pending-updates-list-10').should.equal(false)
|
||||
})
|
||||
})
|
||||
})
|
127
services/real-time/test/unit/js/DrainManagerTests.js
Normal file
127
services/real-time/test/unit/js/DrainManagerTests.js
Normal file
|
@ -0,0 +1,127 @@
|
|||
/* eslint-disable
|
||||
no-return-assign,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const sinon = require('sinon')
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const path = require('path')
|
||||
const modulePath = path.join(__dirname, '../../../app/js/DrainManager')
|
||||
|
||||
describe('DrainManager', function () {
|
||||
beforeEach(function () {
|
||||
this.DrainManager = SandboxedModule.require(modulePath, {})
|
||||
return (this.io = {
|
||||
sockets: {
|
||||
clients: sinon.stub(),
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
describe('startDrainTimeWindow', function () {
|
||||
beforeEach(function () {
|
||||
this.clients = []
|
||||
for (let i = 0; i <= 5399; i++) {
|
||||
this.clients[i] = {
|
||||
id: i,
|
||||
emit: sinon.stub(),
|
||||
}
|
||||
}
|
||||
this.io.sockets.clients.returns(this.clients)
|
||||
return (this.DrainManager.startDrain = sinon.stub())
|
||||
})
|
||||
|
||||
return it('should set a drain rate fast enough', function (done) {
|
||||
this.DrainManager.startDrainTimeWindow(this.io, 9)
|
||||
this.DrainManager.startDrain.calledWith(this.io, 10).should.equal(true)
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return describe('reconnectNClients', function () {
|
||||
beforeEach(function () {
|
||||
this.clients = []
|
||||
for (let i = 0; i <= 9; i++) {
|
||||
this.clients[i] = {
|
||||
id: i,
|
||||
emit: sinon.stub(),
|
||||
}
|
||||
}
|
||||
return this.io.sockets.clients.returns(this.clients)
|
||||
})
|
||||
|
||||
return describe('after first pass', function () {
|
||||
beforeEach(function () {
|
||||
return this.DrainManager.reconnectNClients(this.io, 3)
|
||||
})
|
||||
|
||||
it('should reconnect the first 3 clients', function () {
|
||||
return [0, 1, 2].map(i =>
|
||||
this.clients[i].emit
|
||||
.calledWith('reconnectGracefully')
|
||||
.should.equal(true)
|
||||
)
|
||||
})
|
||||
|
||||
it('should not reconnect any more clients', function () {
|
||||
return [3, 4, 5, 6, 7, 8, 9].map(i =>
|
||||
this.clients[i].emit
|
||||
.calledWith('reconnectGracefully')
|
||||
.should.equal(false)
|
||||
)
|
||||
})
|
||||
|
||||
return describe('after second pass', function () {
|
||||
beforeEach(function () {
|
||||
return this.DrainManager.reconnectNClients(this.io, 3)
|
||||
})
|
||||
|
||||
it('should reconnect the next 3 clients', function () {
|
||||
return [3, 4, 5].map(i =>
|
||||
this.clients[i].emit
|
||||
.calledWith('reconnectGracefully')
|
||||
.should.equal(true)
|
||||
)
|
||||
})
|
||||
|
||||
it('should not reconnect any more clients', function () {
|
||||
return [6, 7, 8, 9].map(i =>
|
||||
this.clients[i].emit
|
||||
.calledWith('reconnectGracefully')
|
||||
.should.equal(false)
|
||||
)
|
||||
})
|
||||
|
||||
it('should not reconnect the first 3 clients again', function () {
|
||||
return [0, 1, 2].map(i =>
|
||||
this.clients[i].emit.calledOnce.should.equal(true)
|
||||
)
|
||||
})
|
||||
|
||||
return describe('after final pass', function () {
|
||||
beforeEach(function () {
|
||||
return this.DrainManager.reconnectNClients(this.io, 100)
|
||||
})
|
||||
|
||||
it('should not reconnect the first 6 clients again', function () {
|
||||
return [0, 1, 2, 3, 4, 5].map(i =>
|
||||
this.clients[i].emit.calledOnce.should.equal(true)
|
||||
)
|
||||
})
|
||||
|
||||
return it('should log out that it reached the end', function () {
|
||||
return this.logger.log
|
||||
.calledWith('All clients have been told to reconnectGracefully')
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
153
services/real-time/test/unit/js/EventLoggerTests.js
Normal file
153
services/real-time/test/unit/js/EventLoggerTests.js
Normal file
|
@ -0,0 +1,153 @@
|
|||
/* eslint-disable
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const { expect } = require('chai')
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const modulePath = '../../../app/js/EventLogger'
|
||||
const sinon = require('sinon')
|
||||
const tk = require('timekeeper')
|
||||
|
||||
describe('EventLogger', function () {
|
||||
beforeEach(function () {
|
||||
this.start = Date.now()
|
||||
tk.freeze(new Date(this.start))
|
||||
this.EventLogger = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'@overleaf/metrics': (this.metrics = { inc: sinon.stub() }),
|
||||
},
|
||||
})
|
||||
this.channel = 'applied-ops'
|
||||
this.id_1 = 'random-hostname:abc-1'
|
||||
this.message_1 = 'message-1'
|
||||
this.id_2 = 'random-hostname:abc-2'
|
||||
return (this.message_2 = 'message-2')
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
return tk.reset()
|
||||
})
|
||||
|
||||
return describe('checkEventOrder', function () {
|
||||
describe('when the events are in order', function () {
|
||||
beforeEach(function () {
|
||||
this.EventLogger.checkEventOrder(
|
||||
this.channel,
|
||||
this.id_1,
|
||||
this.message_1
|
||||
)
|
||||
return (this.status = this.EventLogger.checkEventOrder(
|
||||
this.channel,
|
||||
this.id_2,
|
||||
this.message_2
|
||||
))
|
||||
})
|
||||
|
||||
it('should accept events in order', function () {
|
||||
return expect(this.status).to.be.undefined
|
||||
})
|
||||
|
||||
return it('should increment the valid event metric', function () {
|
||||
return this.metrics.inc
|
||||
.calledWith(`event.${this.channel}.valid`)
|
||||
.should.equals(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when there is a duplicate events', function () {
|
||||
beforeEach(function () {
|
||||
this.EventLogger.checkEventOrder(
|
||||
this.channel,
|
||||
this.id_1,
|
||||
this.message_1
|
||||
)
|
||||
return (this.status = this.EventLogger.checkEventOrder(
|
||||
this.channel,
|
||||
this.id_1,
|
||||
this.message_1
|
||||
))
|
||||
})
|
||||
|
||||
it('should return "duplicate" for the same event', function () {
|
||||
return expect(this.status).to.equal('duplicate')
|
||||
})
|
||||
|
||||
return it('should increment the duplicate event metric', function () {
|
||||
return this.metrics.inc
|
||||
.calledWith(`event.${this.channel}.duplicate`)
|
||||
.should.equals(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when there are out of order events', function () {
|
||||
beforeEach(function () {
|
||||
this.EventLogger.checkEventOrder(
|
||||
this.channel,
|
||||
this.id_1,
|
||||
this.message_1
|
||||
)
|
||||
this.EventLogger.checkEventOrder(
|
||||
this.channel,
|
||||
this.id_2,
|
||||
this.message_2
|
||||
)
|
||||
return (this.status = this.EventLogger.checkEventOrder(
|
||||
this.channel,
|
||||
this.id_1,
|
||||
this.message_1
|
||||
))
|
||||
})
|
||||
|
||||
it('should return "out-of-order" for the event', function () {
|
||||
return expect(this.status).to.equal('out-of-order')
|
||||
})
|
||||
|
||||
return it('should increment the out-of-order event metric', function () {
|
||||
return this.metrics.inc
|
||||
.calledWith(`event.${this.channel}.out-of-order`)
|
||||
.should.equals(true)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('after MAX_STALE_TIME_IN_MS', function () {
|
||||
return it('should flush old entries', function () {
|
||||
let status
|
||||
this.EventLogger.MAX_EVENTS_BEFORE_CLEAN = 10
|
||||
this.EventLogger.checkEventOrder(
|
||||
this.channel,
|
||||
this.id_1,
|
||||
this.message_1
|
||||
)
|
||||
for (let i = 1; i <= 8; i++) {
|
||||
status = this.EventLogger.checkEventOrder(
|
||||
this.channel,
|
||||
this.id_1,
|
||||
this.message_1
|
||||
)
|
||||
expect(status).to.equal('duplicate')
|
||||
}
|
||||
// the next event should flush the old entries aboce
|
||||
this.EventLogger.MAX_STALE_TIME_IN_MS = 1000
|
||||
tk.freeze(new Date(this.start + 5 * 1000))
|
||||
// because we flushed the entries this should not be a duplicate
|
||||
this.EventLogger.checkEventOrder(
|
||||
this.channel,
|
||||
'other-1',
|
||||
this.message_2
|
||||
)
|
||||
status = this.EventLogger.checkEventOrder(
|
||||
this.channel,
|
||||
this.id_1,
|
||||
this.message_1
|
||||
)
|
||||
return expect(status).to.be.undefined
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
412
services/real-time/test/unit/js/RoomManagerTests.js
Normal file
412
services/real-time/test/unit/js/RoomManagerTests.js
Normal file
|
@ -0,0 +1,412 @@
|
|||
/* eslint-disable
|
||||
no-return-assign,
|
||||
no-unused-vars,
|
||||
promise/param-names,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const { expect } = require('chai')
|
||||
const sinon = require('sinon')
|
||||
const modulePath = '../../../app/js/RoomManager.js'
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
|
||||
describe('RoomManager', function () {
|
||||
beforeEach(function () {
|
||||
this.project_id = 'project-id-123'
|
||||
this.doc_id = 'doc-id-456'
|
||||
this.other_doc_id = 'doc-id-789'
|
||||
this.client = { namespace: { name: '' }, id: 'first-client' }
|
||||
this.RoomManager = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'@overleaf/settings': (this.settings = {}),
|
||||
'@overleaf/metrics': (this.metrics = { gauge: sinon.stub() }),
|
||||
},
|
||||
})
|
||||
this.RoomManager._clientsInRoom = sinon.stub()
|
||||
this.RoomManager._clientAlreadyInRoom = sinon.stub()
|
||||
this.RoomEvents = this.RoomManager.eventSource()
|
||||
sinon.spy(this.RoomEvents, 'emit')
|
||||
return sinon.spy(this.RoomEvents, 'once')
|
||||
})
|
||||
|
||||
describe('emitOnCompletion', function () {
|
||||
return describe('when a subscribe errors', function () {
|
||||
afterEach(function () {
|
||||
return process.removeListener('unhandledRejection', this.onUnhandled)
|
||||
})
|
||||
|
||||
beforeEach(function (done) {
|
||||
this.onUnhandled = error => {
|
||||
this.unhandledError = error
|
||||
return done(new Error(`unhandledRejection: ${error.message}`))
|
||||
}
|
||||
process.on('unhandledRejection', this.onUnhandled)
|
||||
|
||||
let reject
|
||||
const subscribePromise = new Promise((_, r) => (reject = r))
|
||||
const promises = [subscribePromise]
|
||||
const eventName = 'project-subscribed-123'
|
||||
this.RoomEvents.once(eventName, () => setTimeout(done, 100))
|
||||
this.RoomManager.emitOnCompletion(promises, eventName)
|
||||
return setTimeout(() => reject(new Error('subscribe failed')))
|
||||
})
|
||||
|
||||
return it('should keep going', function () {
|
||||
return expect(this.unhandledError).to.not.exist
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('joinProject', function () {
|
||||
describe('when the project room is empty', function () {
|
||||
beforeEach(function (done) {
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.project_id)
|
||||
.onFirstCall()
|
||||
.returns(0)
|
||||
this.client.join = sinon.stub()
|
||||
this.callback = sinon.stub()
|
||||
this.RoomEvents.on('project-active', id => {
|
||||
return setTimeout(() => {
|
||||
return this.RoomEvents.emit(`project-subscribed-${id}`)
|
||||
}, 100)
|
||||
})
|
||||
return this.RoomManager.joinProject(
|
||||
this.client,
|
||||
this.project_id,
|
||||
err => {
|
||||
this.callback(err)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it("should emit a 'project-active' event with the id", function () {
|
||||
return this.RoomEvents.emit
|
||||
.calledWithExactly('project-active', this.project_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it("should listen for the 'project-subscribed-id' event", function () {
|
||||
return this.RoomEvents.once
|
||||
.calledWith(`project-subscribed-${this.project_id}`)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should join the room using the id', function () {
|
||||
return this.client.join
|
||||
.calledWithExactly(this.project_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when there are other clients in the project room', function () {
|
||||
beforeEach(function (done) {
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.project_id)
|
||||
.onFirstCall()
|
||||
.returns(123)
|
||||
.onSecondCall()
|
||||
.returns(124)
|
||||
this.client.join = sinon.stub()
|
||||
this.RoomManager.joinProject(this.client, this.project_id, done)
|
||||
})
|
||||
|
||||
it('should join the room using the id', function () {
|
||||
return this.client.join.called.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should not emit any events', function () {
|
||||
return this.RoomEvents.emit.called.should.equal(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('joinDoc', function () {
|
||||
describe('when the doc room is empty', function () {
|
||||
beforeEach(function (done) {
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.doc_id)
|
||||
.onFirstCall()
|
||||
.returns(0)
|
||||
this.client.join = sinon.stub()
|
||||
this.callback = sinon.stub()
|
||||
this.RoomEvents.on('doc-active', id => {
|
||||
return setTimeout(() => {
|
||||
return this.RoomEvents.emit(`doc-subscribed-${id}`)
|
||||
}, 100)
|
||||
})
|
||||
return this.RoomManager.joinDoc(this.client, this.doc_id, err => {
|
||||
this.callback(err)
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
it("should emit a 'doc-active' event with the id", function () {
|
||||
return this.RoomEvents.emit
|
||||
.calledWithExactly('doc-active', this.doc_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it("should listen for the 'doc-subscribed-id' event", function () {
|
||||
return this.RoomEvents.once
|
||||
.calledWith(`doc-subscribed-${this.doc_id}`)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should join the room using the id', function () {
|
||||
return this.client.join
|
||||
.calledWithExactly(this.doc_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when there are other clients in the doc room', function () {
|
||||
beforeEach(function (done) {
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.doc_id)
|
||||
.onFirstCall()
|
||||
.returns(123)
|
||||
.onSecondCall()
|
||||
.returns(124)
|
||||
this.client.join = sinon.stub()
|
||||
this.RoomManager.joinDoc(this.client, this.doc_id, done)
|
||||
})
|
||||
|
||||
it('should join the room using the id', function () {
|
||||
return this.client.join.called.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should not emit any events', function () {
|
||||
return this.RoomEvents.emit.called.should.equal(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('leaveDoc', function () {
|
||||
describe('when doc room will be empty after this client has left', function () {
|
||||
beforeEach(function () {
|
||||
this.RoomManager._clientAlreadyInRoom
|
||||
.withArgs(this.client, this.doc_id)
|
||||
.returns(true)
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.doc_id)
|
||||
.onCall(0)
|
||||
.returns(0)
|
||||
this.client.leave = sinon.stub()
|
||||
return this.RoomManager.leaveDoc(this.client, this.doc_id)
|
||||
})
|
||||
|
||||
it('should leave the room using the id', function () {
|
||||
return this.client.leave
|
||||
.calledWithExactly(this.doc_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
return it("should emit a 'doc-empty' event with the id", function () {
|
||||
return this.RoomEvents.emit
|
||||
.calledWithExactly('doc-empty', this.doc_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when there are other clients in the doc room', function () {
|
||||
beforeEach(function () {
|
||||
this.RoomManager._clientAlreadyInRoom
|
||||
.withArgs(this.client, this.doc_id)
|
||||
.returns(true)
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.doc_id)
|
||||
.onCall(0)
|
||||
.returns(123)
|
||||
this.client.leave = sinon.stub()
|
||||
return this.RoomManager.leaveDoc(this.client, this.doc_id)
|
||||
})
|
||||
|
||||
it('should leave the room using the id', function () {
|
||||
return this.client.leave
|
||||
.calledWithExactly(this.doc_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should not emit any events', function () {
|
||||
return this.RoomEvents.emit.called.should.equal(false)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when the client is not in the doc room', function () {
|
||||
beforeEach(function () {
|
||||
this.RoomManager._clientAlreadyInRoom
|
||||
.withArgs(this.client, this.doc_id)
|
||||
.returns(false)
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.doc_id)
|
||||
.onCall(0)
|
||||
.returns(0)
|
||||
this.client.leave = sinon.stub()
|
||||
return this.RoomManager.leaveDoc(this.client, this.doc_id)
|
||||
})
|
||||
|
||||
it('should not leave the room', function () {
|
||||
return this.client.leave.called.should.equal(false)
|
||||
})
|
||||
|
||||
return it('should not emit any events', function () {
|
||||
return this.RoomEvents.emit.called.should.equal(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
return describe('leaveProjectAndDocs', function () {
|
||||
return describe('when the client is connected to the project and multiple docs', function () {
|
||||
beforeEach(function () {
|
||||
this.RoomManager._roomsClientIsIn = sinon
|
||||
.stub()
|
||||
.returns([this.project_id, this.doc_id, this.other_doc_id])
|
||||
this.client.join = sinon.stub()
|
||||
return (this.client.leave = sinon.stub())
|
||||
})
|
||||
|
||||
describe('when this is the only client connected', function () {
|
||||
beforeEach(function (done) {
|
||||
// first call is for the join,
|
||||
// second for the leave
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.doc_id)
|
||||
.onCall(0)
|
||||
.returns(0)
|
||||
.onCall(1)
|
||||
.returns(0)
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.other_doc_id)
|
||||
.onCall(0)
|
||||
.returns(0)
|
||||
.onCall(1)
|
||||
.returns(0)
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.project_id)
|
||||
.onCall(0)
|
||||
.returns(0)
|
||||
.onCall(1)
|
||||
.returns(0)
|
||||
this.RoomManager._clientAlreadyInRoom
|
||||
.withArgs(this.client, this.doc_id)
|
||||
.returns(true)
|
||||
.withArgs(this.client, this.other_doc_id)
|
||||
.returns(true)
|
||||
.withArgs(this.client, this.project_id)
|
||||
.returns(true)
|
||||
this.RoomEvents.on('project-active', id => {
|
||||
return setTimeout(() => {
|
||||
return this.RoomEvents.emit(`project-subscribed-${id}`)
|
||||
}, 100)
|
||||
})
|
||||
this.RoomEvents.on('doc-active', id => {
|
||||
return setTimeout(() => {
|
||||
return this.RoomEvents.emit(`doc-subscribed-${id}`)
|
||||
}, 100)
|
||||
})
|
||||
// put the client in the rooms
|
||||
return this.RoomManager.joinProject(
|
||||
this.client,
|
||||
this.project_id,
|
||||
() => {
|
||||
return this.RoomManager.joinDoc(this.client, this.doc_id, () => {
|
||||
return this.RoomManager.joinDoc(
|
||||
this.client,
|
||||
this.other_doc_id,
|
||||
() => {
|
||||
// now leave the project
|
||||
this.RoomManager.leaveProjectAndDocs(this.client)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should leave all the docs', function () {
|
||||
this.client.leave.calledWithExactly(this.doc_id).should.equal(true)
|
||||
return this.client.leave
|
||||
.calledWithExactly(this.other_doc_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should leave the project', function () {
|
||||
return this.client.leave
|
||||
.calledWithExactly(this.project_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it("should emit a 'doc-empty' event with the id for each doc", function () {
|
||||
this.RoomEvents.emit
|
||||
.calledWithExactly('doc-empty', this.doc_id)
|
||||
.should.equal(true)
|
||||
return this.RoomEvents.emit
|
||||
.calledWithExactly('doc-empty', this.other_doc_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
return it("should emit a 'project-empty' event with the id for the project", function () {
|
||||
return this.RoomEvents.emit
|
||||
.calledWithExactly('project-empty', this.project_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when other clients are still connected', function () {
|
||||
beforeEach(function () {
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.doc_id)
|
||||
.onFirstCall()
|
||||
.returns(123)
|
||||
.onSecondCall()
|
||||
.returns(122)
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.other_doc_id)
|
||||
.onFirstCall()
|
||||
.returns(123)
|
||||
.onSecondCall()
|
||||
.returns(122)
|
||||
this.RoomManager._clientsInRoom
|
||||
.withArgs(this.client, this.project_id)
|
||||
.onFirstCall()
|
||||
.returns(123)
|
||||
.onSecondCall()
|
||||
.returns(122)
|
||||
this.RoomManager._clientAlreadyInRoom
|
||||
.withArgs(this.client, this.doc_id)
|
||||
.returns(true)
|
||||
.withArgs(this.client, this.other_doc_id)
|
||||
.returns(true)
|
||||
.withArgs(this.client, this.project_id)
|
||||
.returns(true)
|
||||
return this.RoomManager.leaveProjectAndDocs(this.client)
|
||||
})
|
||||
|
||||
it('should leave all the docs', function () {
|
||||
this.client.leave.calledWithExactly(this.doc_id).should.equal(true)
|
||||
return this.client.leave
|
||||
.calledWithExactly(this.other_doc_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should leave the project', function () {
|
||||
return this.client.leave
|
||||
.calledWithExactly(this.project_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should not emit any events', function () {
|
||||
return this.RoomEvents.emit.called.should.equal(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
56
services/real-time/test/unit/js/SafeJsonParseTest.js
Normal file
56
services/real-time/test/unit/js/SafeJsonParseTest.js
Normal file
|
@ -0,0 +1,56 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-return-assign,
|
||||
no-useless-escape,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const { expect } = require('chai')
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const modulePath = '../../../app/js/SafeJsonParse'
|
||||
|
||||
describe('SafeJsonParse', function () {
|
||||
beforeEach(function () {
|
||||
return (this.SafeJsonParse = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'@overleaf/settings': (this.Settings = {
|
||||
maxUpdateSize: 16 * 1024,
|
||||
}),
|
||||
},
|
||||
}))
|
||||
})
|
||||
|
||||
return describe('parse', function () {
|
||||
it('should parse documents correctly', function (done) {
|
||||
return this.SafeJsonParse.parse('{"foo": "bar"}', (error, parsed) => {
|
||||
expect(parsed).to.deep.equal({ foo: 'bar' })
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should return an error on bad data', function (done) {
|
||||
return this.SafeJsonParse.parse('blah', (error, parsed) => {
|
||||
expect(error).to.exist
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return it('should return an error on oversized data', function (done) {
|
||||
// we have a 2k overhead on top of max size
|
||||
const big_blob = Array(16 * 1024).join('A')
|
||||
const data = `{\"foo\": \"${big_blob}\"}`
|
||||
this.Settings.maxUpdateSize = 2 * 1024
|
||||
return this.SafeJsonParse.parse(data, (error, parsed) => {
|
||||
this.logger.error.called.should.equal(false)
|
||||
expect(error).to.exist
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
197
services/real-time/test/unit/js/SessionSocketsTests.js
Normal file
197
services/real-time/test/unit/js/SessionSocketsTests.js
Normal file
|
@ -0,0 +1,197 @@
|
|||
/* eslint-disable
|
||||
handle-callback-err,
|
||||
no-return-assign,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const { EventEmitter } = require('events')
|
||||
const { expect } = require('chai')
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const modulePath = '../../../app/js/SessionSockets'
|
||||
const sinon = require('sinon')
|
||||
|
||||
describe('SessionSockets', function () {
|
||||
before(function () {
|
||||
this.SessionSocketsModule = SandboxedModule.require(modulePath)
|
||||
this.io = new EventEmitter()
|
||||
this.id1 = Math.random().toString()
|
||||
this.id2 = Math.random().toString()
|
||||
const redisResponses = {
|
||||
error: [new Error('Redis: something went wrong'), null],
|
||||
unknownId: [null, null],
|
||||
}
|
||||
redisResponses[this.id1] = [null, { user: { _id: '123' } }]
|
||||
redisResponses[this.id2] = [null, { user: { _id: 'abc' } }]
|
||||
|
||||
this.sessionStore = {
|
||||
get: sinon
|
||||
.stub()
|
||||
.callsFake((id, fn) => fn.apply(null, redisResponses[id])),
|
||||
}
|
||||
this.cookieParser = function (req, res, next) {
|
||||
req.signedCookies = req._signedCookies
|
||||
return next()
|
||||
}
|
||||
this.SessionSockets = this.SessionSocketsModule(
|
||||
this.io,
|
||||
this.sessionStore,
|
||||
this.cookieParser,
|
||||
'ol.sid'
|
||||
)
|
||||
return (this.checkSocket = (socket, fn) => {
|
||||
this.SessionSockets.once('connection', fn)
|
||||
return this.io.emit('connection', socket)
|
||||
})
|
||||
})
|
||||
|
||||
describe('without cookies', function () {
|
||||
before(function () {
|
||||
return (this.socket = { handshake: {} })
|
||||
})
|
||||
|
||||
it('should return a lookup error', function (done) {
|
||||
return this.checkSocket(this.socket, error => {
|
||||
expect(error).to.exist
|
||||
expect(error.message).to.equal('could not look up session by key')
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return it('should not query redis', function (done) {
|
||||
return this.checkSocket(this.socket, () => {
|
||||
expect(this.sessionStore.get.called).to.equal(false)
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('with a different cookie', function () {
|
||||
before(function () {
|
||||
return (this.socket = { handshake: { _signedCookies: { other: 1 } } })
|
||||
})
|
||||
|
||||
it('should return a lookup error', function (done) {
|
||||
return this.checkSocket(this.socket, error => {
|
||||
expect(error).to.exist
|
||||
expect(error.message).to.equal('could not look up session by key')
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return it('should not query redis', function (done) {
|
||||
return this.checkSocket(this.socket, () => {
|
||||
expect(this.sessionStore.get.called).to.equal(false)
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('with a valid cookie and a failing session lookup', function () {
|
||||
before(function () {
|
||||
return (this.socket = {
|
||||
handshake: { _signedCookies: { 'ol.sid': 'error' } },
|
||||
})
|
||||
})
|
||||
|
||||
it('should query redis', function (done) {
|
||||
return this.checkSocket(this.socket, () => {
|
||||
expect(this.sessionStore.get.called).to.equal(true)
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return it('should return a redis error', function (done) {
|
||||
return this.checkSocket(this.socket, error => {
|
||||
expect(error).to.exist
|
||||
expect(error.message).to.equal('Redis: something went wrong')
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('with a valid cookie and no matching session', function () {
|
||||
before(function () {
|
||||
return (this.socket = {
|
||||
handshake: { _signedCookies: { 'ol.sid': 'unknownId' } },
|
||||
})
|
||||
})
|
||||
|
||||
it('should query redis', function (done) {
|
||||
return this.checkSocket(this.socket, () => {
|
||||
expect(this.sessionStore.get.called).to.equal(true)
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return it('should return a lookup error', function (done) {
|
||||
return this.checkSocket(this.socket, error => {
|
||||
expect(error).to.exist
|
||||
expect(error.message).to.equal('could not look up session by key')
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('with a valid cookie and a matching session', function () {
|
||||
before(function () {
|
||||
return (this.socket = {
|
||||
handshake: { _signedCookies: { 'ol.sid': this.id1 } },
|
||||
})
|
||||
})
|
||||
|
||||
it('should query redis', function (done) {
|
||||
return this.checkSocket(this.socket, () => {
|
||||
expect(this.sessionStore.get.called).to.equal(true)
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should not return an error', function (done) {
|
||||
return this.checkSocket(this.socket, error => {
|
||||
expect(error).to.not.exist
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return it('should return the session', function (done) {
|
||||
return this.checkSocket(this.socket, (error, s, session) => {
|
||||
expect(session).to.deep.equal({ user: { _id: '123' } })
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
return describe('with a different valid cookie and matching session', function () {
|
||||
before(function () {
|
||||
return (this.socket = {
|
||||
handshake: { _signedCookies: { 'ol.sid': this.id2 } },
|
||||
})
|
||||
})
|
||||
|
||||
it('should query redis', function (done) {
|
||||
return this.checkSocket(this.socket, () => {
|
||||
expect(this.sessionStore.get.called).to.equal(true)
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should not return an error', function (done) {
|
||||
return this.checkSocket(this.socket, error => {
|
||||
expect(error).to.not.exist
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return it('should return the other session', function (done) {
|
||||
return this.checkSocket(this.socket, (error, s, session) => {
|
||||
expect(session).to.deep.equal({ user: { _id: 'abc' } })
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
208
services/real-time/test/unit/js/WebApiManagerTests.js
Normal file
208
services/real-time/test/unit/js/WebApiManagerTests.js
Normal file
|
@ -0,0 +1,208 @@
|
|||
/* eslint-disable
|
||||
no-return-assign,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const sinon = require('sinon')
|
||||
const modulePath = '../../../app/js/WebApiManager.js'
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const { CodedError } = require('../../../app/js/Errors')
|
||||
|
||||
describe('WebApiManager', function () {
|
||||
beforeEach(function () {
|
||||
this.project_id = 'project-id-123'
|
||||
this.user_id = 'user-id-123'
|
||||
this.user = { _id: this.user_id }
|
||||
this.callback = sinon.stub()
|
||||
return (this.WebApiManager = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
request: (this.request = {}),
|
||||
'@overleaf/settings': (this.settings = {
|
||||
apis: {
|
||||
web: {
|
||||
url: 'http://web.example.com',
|
||||
user: 'username',
|
||||
pass: 'password',
|
||||
},
|
||||
},
|
||||
}),
|
||||
},
|
||||
}))
|
||||
})
|
||||
|
||||
return describe('joinProject', function () {
|
||||
describe('successfully', function () {
|
||||
beforeEach(function () {
|
||||
this.response = {
|
||||
project: { name: 'Test project' },
|
||||
privilegeLevel: 'owner',
|
||||
isRestrictedUser: true,
|
||||
}
|
||||
this.request.post = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, null, { statusCode: 200 }, this.response)
|
||||
return this.WebApiManager.joinProject(
|
||||
this.project_id,
|
||||
this.user,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should send a request to web to join the project', function () {
|
||||
return this.request.post
|
||||
.calledWith({
|
||||
url: `${this.settings.apis.web.url}/project/${this.project_id}/join`,
|
||||
qs: {
|
||||
user_id: this.user_id,
|
||||
},
|
||||
auth: {
|
||||
user: this.settings.apis.web.user,
|
||||
pass: this.settings.apis.web.pass,
|
||||
sendImmediately: true,
|
||||
},
|
||||
json: true,
|
||||
jar: false,
|
||||
headers: {},
|
||||
})
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should return the project, privilegeLevel, and restricted flag', function () {
|
||||
return this.callback
|
||||
.calledWith(
|
||||
null,
|
||||
this.response.project,
|
||||
this.response.privilegeLevel,
|
||||
this.response.isRestrictedUser
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when web replies with a 403', function () {
|
||||
beforeEach(function () {
|
||||
this.request.post = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, null, { statusCode: 403 }, null)
|
||||
this.WebApiManager.joinProject(
|
||||
this.project_id,
|
||||
this.user_id,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should call the callback with an error', function () {
|
||||
this.callback
|
||||
.calledWith(
|
||||
sinon.match({
|
||||
message: 'not authorized',
|
||||
})
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when web replies with a 404', function () {
|
||||
beforeEach(function () {
|
||||
this.request.post = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, null, { statusCode: 404 }, null)
|
||||
this.WebApiManager.joinProject(
|
||||
this.project_id,
|
||||
this.user_id,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should call the callback with an error', function () {
|
||||
this.callback
|
||||
.calledWith(
|
||||
sinon.match({
|
||||
message: 'project not found',
|
||||
info: { code: 'ProjectNotFound' },
|
||||
})
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('with an error from web', function () {
|
||||
beforeEach(function () {
|
||||
this.request.post = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, null, { statusCode: 500 }, null)
|
||||
return this.WebApiManager.joinProject(
|
||||
this.project_id,
|
||||
this.user_id,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
return it('should call the callback with an error', function () {
|
||||
return this.callback
|
||||
.calledWith(
|
||||
sinon.match({
|
||||
message: 'non-success status code from web',
|
||||
info: { statusCode: 500 },
|
||||
})
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('with no data from web', function () {
|
||||
beforeEach(function () {
|
||||
this.request.post = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, null, { statusCode: 200 }, null)
|
||||
return this.WebApiManager.joinProject(
|
||||
this.project_id,
|
||||
this.user_id,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
return it('should call the callback with an error', function () {
|
||||
return this.callback
|
||||
.calledWith(
|
||||
sinon.match({
|
||||
message: 'no data returned from joinProject request',
|
||||
})
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when the project is over its rate limit', function () {
|
||||
beforeEach(function () {
|
||||
this.request.post = sinon
|
||||
.stub()
|
||||
.callsArgWith(1, null, { statusCode: 429 }, null)
|
||||
return this.WebApiManager.joinProject(
|
||||
this.project_id,
|
||||
this.user_id,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
return it('should call the callback with a TooManyRequests error code', function () {
|
||||
return this.callback
|
||||
.calledWith(
|
||||
sinon.match({
|
||||
message: 'rate-limit hit when joining project',
|
||||
info: {
|
||||
code: 'TooManyRequests',
|
||||
},
|
||||
})
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
1662
services/real-time/test/unit/js/WebsocketControllerTests.js
Normal file
1662
services/real-time/test/unit/js/WebsocketControllerTests.js
Normal file
File diff suppressed because it is too large
Load diff
304
services/real-time/test/unit/js/WebsocketLoadBalancerTests.js
Normal file
304
services/real-time/test/unit/js/WebsocketLoadBalancerTests.js
Normal file
|
@ -0,0 +1,304 @@
|
|||
/* eslint-disable
    no-return-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const modulePath = require('path').join(
  __dirname,
  '../../../app/js/WebsocketLoadBalancer'
)

describe('WebsocketLoadBalancer', function () {
  beforeEach(function () {
    this.rclient = {}
    this.RoomEvents = { on: sinon.stub() }
    this.WebsocketLoadBalancer = SandboxedModule.require(modulePath, {
      requires: {
        './RedisClientManager': {
          createClientList: () => [],
        },
        './SafeJsonParse': (this.SafeJsonParse = {
          parse: (data, cb) => cb(null, JSON.parse(data)),
        }),
        './EventLogger': { checkEventOrder: sinon.stub() },
        './HealthCheckManager': { check: sinon.stub() },
        './RoomManager': (this.RoomManager = {
          eventSource: sinon.stub().returns(this.RoomEvents),
        }),
        './ChannelManager': (this.ChannelManager = { publish: sinon.stub() }),
        './ConnectedUsersManager': (this.ConnectedUsersManager = {
          refreshClient: sinon.stub(),
        }),
      },
    })
    this.io = {}
    this.WebsocketLoadBalancer.rclientPubList = [{ publish: sinon.stub() }]
    this.WebsocketLoadBalancer.rclientSubList = [
      {
        subscribe: sinon.stub(),
        on: sinon.stub(),
      },
    ]

    this.room_id = 'room-id'
    this.message = 'otUpdateApplied'
    return (this.payload = ['argument one', 42])
  })

  describe('emitToRoom', function () {
    beforeEach(function () {
      return this.WebsocketLoadBalancer.emitToRoom(
        this.room_id,
        this.message,
        ...Array.from(this.payload)
      )
    })

    return it('should publish the message to redis', function () {
      return this.ChannelManager.publish
        .calledWith(
          this.WebsocketLoadBalancer.rclientPubList[0],
          'editor-events',
          this.room_id,
          JSON.stringify({
            room_id: this.room_id,
            message: this.message,
            payload: this.payload,
          })
        )
        .should.equal(true)
    })
  })

  describe('emitToAll', function () {
    beforeEach(function () {
      this.WebsocketLoadBalancer.emitToRoom = sinon.stub()
      return this.WebsocketLoadBalancer.emitToAll(
        this.message,
        ...Array.from(this.payload)
      )
    })

    return it("should emit to the room 'all'", function () {
      return this.WebsocketLoadBalancer.emitToRoom
        .calledWith('all', this.message, ...Array.from(this.payload))
        .should.equal(true)
    })
  })

  describe('listenForEditorEvents', function () {
    beforeEach(function () {
      this.WebsocketLoadBalancer._processEditorEvent = sinon.stub()
      return this.WebsocketLoadBalancer.listenForEditorEvents()
    })

    it('should subscribe to the editor-events channel', function () {
      return this.WebsocketLoadBalancer.rclientSubList[0].subscribe
        .calledWith('editor-events')
        .should.equal(true)
    })

    return it('should process the events with _processEditorEvent', function () {
      return this.WebsocketLoadBalancer.rclientSubList[0].on
        .calledWith('message', sinon.match.func)
        .should.equal(true)
    })
  })

  return describe('_processEditorEvent', function () {
    describe('with bad JSON', function () {
      beforeEach(function () {
        this.isRestrictedUser = false
        this.SafeJsonParse.parse = sinon
          .stub()
          .callsArgWith(1, new Error('oops'))
        return this.WebsocketLoadBalancer._processEditorEvent(
          this.io,
          'editor-events',
          'blah'
        )
      })

      return it('should log an error', function () {
        return this.logger.error.called.should.equal(true)
      })
    })

    describe('with a designated room', function () {
      beforeEach(function () {
        this.io.sockets = {
          clients: sinon.stub().returns([
            {
              id: 'client-id-1',
              emit: (this.emit1 = sinon.stub()),
              ol_context: {},
            },
            {
              id: 'client-id-2',
              emit: (this.emit2 = sinon.stub()),
              ol_context: {},
            },
            {
              id: 'client-id-1',
              emit: (this.emit3 = sinon.stub()),
              ol_context: {},
            }, // duplicate client
          ]),
        }
        const data = JSON.stringify({
          room_id: this.room_id,
          message: this.message,
          payload: this.payload,
        })
        return this.WebsocketLoadBalancer._processEditorEvent(
          this.io,
          'editor-events',
          data
        )
      })

      return it('should send the message to all (unique) clients in the room', function () {
        this.io.sockets.clients.calledWith(this.room_id).should.equal(true)
        this.emit1
          .calledWith(this.message, ...Array.from(this.payload))
          .should.equal(true)
        this.emit2
          .calledWith(this.message, ...Array.from(this.payload))
          .should.equal(true)
        return this.emit3.called.should.equal(false) // duplicate client should be ignored
      })
    })

    describe('with a designated room, and restricted clients, not restricted message', function () {
      beforeEach(function () {
        this.io.sockets = {
          clients: sinon.stub().returns([
            {
              id: 'client-id-1',
              emit: (this.emit1 = sinon.stub()),
              ol_context: {},
            },
            {
              id: 'client-id-2',
              emit: (this.emit2 = sinon.stub()),
              ol_context: {},
            },
            {
              id: 'client-id-1',
              emit: (this.emit3 = sinon.stub()),
              ol_context: {},
            }, // duplicate client
            {
              id: 'client-id-4',
              emit: (this.emit4 = sinon.stub()),
              ol_context: { is_restricted_user: true },
            },
          ]),
        }
        const data = JSON.stringify({
          room_id: this.room_id,
          message: this.message,
          payload: this.payload,
        })
        return this.WebsocketLoadBalancer._processEditorEvent(
          this.io,
          'editor-events',
          data
        )
      })

      return it('should send the message to all (unique) clients in the room', function () {
        this.io.sockets.clients.calledWith(this.room_id).should.equal(true)
        this.emit1
          .calledWith(this.message, ...Array.from(this.payload))
          .should.equal(true)
        this.emit2
          .calledWith(this.message, ...Array.from(this.payload))
          .should.equal(true)
        this.emit3.called.should.equal(false) // duplicate client should be ignored
        return this.emit4.called.should.equal(true) // restricted client, but should be called
      })
    })

    describe('with a designated room, and restricted clients, restricted message', function () {
      beforeEach(function () {
        this.io.sockets = {
          clients: sinon.stub().returns([
            {
              id: 'client-id-1',
              emit: (this.emit1 = sinon.stub()),
              ol_context: {},
            },
            {
              id: 'client-id-2',
              emit: (this.emit2 = sinon.stub()),
              ol_context: {},
            },
            {
              id: 'client-id-1',
              emit: (this.emit3 = sinon.stub()),
              ol_context: {},
            }, // duplicate client
            {
              id: 'client-id-4',
              emit: (this.emit4 = sinon.stub()),
              ol_context: { is_restricted_user: true },
            },
          ]),
        }
        const data = JSON.stringify({
          room_id: this.room_id,
          message: (this.restrictedMessage = 'new-comment'),
          payload: this.payload,
        })
        return this.WebsocketLoadBalancer._processEditorEvent(
          this.io,
          'editor-events',
          data
        )
      })

      return it('should send the message to all (unique) clients in the room, who are not restricted', function () {
        this.io.sockets.clients.calledWith(this.room_id).should.equal(true)
        this.emit1
          .calledWith(this.restrictedMessage, ...Array.from(this.payload))
          .should.equal(true)
        this.emit2
          .calledWith(this.restrictedMessage, ...Array.from(this.payload))
          .should.equal(true)
        this.emit3.called.should.equal(false) // duplicate client should be ignored
        return this.emit4.called.should.equal(false) // restricted client, should not be called
      })
    })

    return describe('when emitting to all', function () {
      beforeEach(function () {
        this.io.sockets = { emit: (this.emit = sinon.stub()) }
        const data = JSON.stringify({
          room_id: 'all',
          message: this.message,
          payload: this.payload,
        })
        return this.WebsocketLoadBalancer._processEditorEvent(
          this.io,
          'editor-events',
          data
        )
      })

      return it('should send the message to all clients', function () {
        return this.emit
          .calledWith(this.message, ...Array.from(this.payload))
          .should.equal(true)
      })
    })
  })
})
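Taken together, the _processEditorEvent cases above describe a small routing policy: parse the Redis message safely, broadcast when room_id is 'all', otherwise emit to each distinct client in the room, and skip restricted users when the message type is restricted. A rough sketch of that policy follows, written only to summarise what the tests assert; the names RESTRICTED_MESSAGES, safeParse and processEditorEvent are chosen for illustration, and only 'new-comment' is known from these tests to be restricted.

// Illustrative sketch of the routing behaviour asserted above (not the real module).
const RESTRICTED_MESSAGES = ['new-comment'] // assumption: the only restricted type shown in the tests

function processEditorEvent(io, channel, data, safeParse, logger) {
  safeParse(data, (error, message) => {
    if (error) {
      // bad JSON: log and drop the message
      logger.error({ err: error, channel }, 'error parsing event')
      return
    }
    if (message.room_id === 'all') {
      // broadcast to every connected client
      return io.sockets.emit(message.message, ...message.payload)
    }
    const seen = new Set()
    for (const client of io.sockets.clients(message.room_id)) {
      if (seen.has(client.id)) continue // duplicate clients are ignored
      seen.add(client.id)
      const restricted =
        client.ol_context.is_restricted_user &&
        RESTRICTED_MESSAGES.includes(message.message)
      if (restricted) continue // restricted users never receive restricted messages
      client.emit(message.message, ...message.payload)
    }
  })
}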
23
services/real-time/test/unit/js/helpers/MockClient.js
Normal file

@ -0,0 +1,23 @@
/* eslint-disable
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let MockClient
const sinon = require('sinon')

let idCounter = 0

module.exports = MockClient = class MockClient {
  constructor() {
    this.ol_context = {}
    this.join = sinon.stub()
    this.emit = sinon.stub()
    this.disconnect = sinon.stub()
    this.id = idCounter++
    this.publicId = idCounter++
    this.joinLeaveEpoch = 0
  }

  disconnect() {}
}
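MockClient replaces every socket method a test might touch with a sinon stub and hands out incrementing ids, so tests can assert on calls without a real socket.io connection. A hypothetical usage, for illustration only:

// Illustrative usage of the helper above (not part of the commit).
const MockClient = require('./helpers/MockClient')

const client = new MockClient()
client.emit('connectionAccepted')
// emit is a sinon stub, so calls can be asserted directly:
client.emit.calledWith('connectionAccepted').should.equal(true)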