diff --git a/services/document-updater/.dockerignore b/services/document-updater/.dockerignore
new file mode 100644
index 0000000000..ba1c3442de
--- /dev/null
+++ b/services/document-updater/.dockerignore
@@ -0,0 +1,7 @@
+node_modules/*
+gitrev
+.git
+.gitignore
+.npm
+.nvmrc
+nodemon.json
diff --git a/services/document-updater/.eslintrc b/services/document-updater/.eslintrc
new file mode 100644
index 0000000000..a97661b15f
--- /dev/null
+++ b/services/document-updater/.eslintrc
@@ -0,0 +1,86 @@
+// this file was auto-generated, do not edit it directly.
+// instead run bin/update_build_scripts from
+// https://github.com/sharelatex/sharelatex-dev-environment
+{
+ "extends": [
+ "eslint:recommended",
+ "standard",
+ "prettier"
+ ],
+ "parserOptions": {
+ "ecmaVersion": 2018
+ },
+ "plugins": [
+ "mocha",
+ "chai-expect",
+ "chai-friendly"
+ ],
+ "env": {
+ "node": true,
+ "mocha": true
+ },
+ "rules": {
+ // TODO(das7pad): remove overrides after fixing all the violations manually (https://github.com/overleaf/issues/issues/3882#issuecomment-878999671)
+ // START of temporary overrides
+ "array-callback-return": "off",
+ "no-dupe-else-if": "off",
+ "no-var": "off",
+ "no-empty": "off",
+ "node/handle-callback-err": "off",
+ "no-loss-of-precision": "off",
+ "node/no-callback-literal": "off",
+ "node/no-path-concat": "off",
+ "prefer-regex-literals": "off",
+ // END of temporary overrides
+
+ // Swap the no-unused-expressions rule with a more chai-friendly one
+ "no-unused-expressions": 0,
+ "chai-friendly/no-unused-expressions": "error",
+
+ // Do not allow importing of implicit dependencies.
+ "import/no-extraneous-dependencies": "error"
+ },
+ "overrides": [
+ {
+ // Test specific rules
+ "files": ["test/**/*.js"],
+ "globals": {
+ "expect": true
+ },
+ "rules": {
+ // mocha-specific rules
+ "mocha/handle-done-callback": "error",
+ "mocha/no-exclusive-tests": "error",
+ "mocha/no-global-tests": "error",
+ "mocha/no-identical-title": "error",
+ "mocha/no-nested-tests": "error",
+ "mocha/no-pending-tests": "error",
+ "mocha/no-skipped-tests": "error",
+ "mocha/no-mocha-arrows": "error",
+
+ // chai-specific rules
+ "chai-expect/missing-assertion": "error",
+ "chai-expect/terminating-properties": "error",
+
+ // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests.
+ // we don't enforce this at the top-level - just in tests to manage `this` scope
+ // based on mocha's context mechanism
+ "mocha/prefer-arrow-callback": "error"
+ }
+ },
+ {
+ // Backend specific rules
+ "files": ["app/**/*.js", "app.js", "index.js"],
+ "rules": {
+ // don't allow console.log in backend code
+ "no-console": "error",
+
+ // Do not allow importing of implicit dependencies.
+ "import/no-extraneous-dependencies": ["error", {
+ // Do not allow importing of devDependencies.
+ "devDependencies": false
+ }]
+ }
+ }
+ ]
+}
diff --git a/services/document-updater/.github/ISSUE_TEMPLATE.md b/services/document-updater/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000000..e0093aa90c
--- /dev/null
+++ b/services/document-updater/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,38 @@
+
+
+## Steps to Reproduce
+
+
+
+1.
+2.
+3.
+
+## Expected Behaviour
+
+
+## Observed Behaviour
+
+
+
+## Context
+
+
+## Technical Info
+
+
+* URL:
+* Browser Name and version:
+* Operating System and version (desktop or mobile):
+* Signed in as:
+* Project and/or file:
+
+## Analysis
+
+
+## Who Needs to Know?
+
+
+
+-
+-
diff --git a/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md b/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..12bb2eeb3f
--- /dev/null
+++ b/services/document-updater/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,48 @@
+
+
+
+
+
+### Description
+
+
+
+#### Screenshots
+
+
+
+#### Related Issues / PRs
+
+
+
+### Review
+
+
+
+#### Potential Impact
+
+
+
+#### Manual Testing Performed
+
+- [ ]
+- [ ]
+
+#### Accessibility
+
+
+
+### Deployment
+
+
+
+#### Deployment Checklist
+
+- [ ] Update documentation not included in the PR (if any)
+- [ ]
+
+#### Metrics and Monitoring
+
+
+
+#### Who Needs to Know?
diff --git a/services/document-updater/.github/dependabot.yml b/services/document-updater/.github/dependabot.yml
new file mode 100644
index 0000000000..c856753655
--- /dev/null
+++ b/services/document-updater/.github/dependabot.yml
@@ -0,0 +1,23 @@
+version: 2
+updates:
+ - package-ecosystem: "npm"
+ directory: "/"
+ schedule:
+ interval: "daily"
+
+ pull-request-branch-name:
+ # Separate sections of the branch name with a hyphen
+ # Docker images use the branch name and do not support slashes in tags
+ # https://github.com/overleaf/google-ops/issues/822
+ # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#pull-request-branch-nameseparator
+ separator: "-"
+
+ # Block informal upgrades -- security upgrades use a separate queue.
+ # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit
+ open-pull-requests-limit: 0
+
+ # currently assign team-magma to all dependabot PRs - this may change in
+ # future if we reorganise teams
+ labels:
+ - "dependencies"
+ - "type:maintenance"
diff --git a/services/document-updater/.gitignore b/services/document-updater/.gitignore
new file mode 100644
index 0000000000..624e78f096
--- /dev/null
+++ b/services/document-updater/.gitignore
@@ -0,0 +1,52 @@
+compileFolder
+
+# Compiled source #
+###################
+*.com
+*.class
+*.dll
+*.exe
+*.o
+*.so
+
+# Packages #
+############
+# it's better to unpack these files and commit the raw source
+# git has its own built in compression methods
+*.7z
+*.dmg
+*.gz
+*.iso
+*.jar
+*.rar
+*.tar
+*.zip
+
+# Logs and databases #
+######################
+*.log
+*.sql
+*.sqlite
+
+# OS generated files #
+######################
+.DS_Store?
+ehthumbs.db
+Icon?
+Thumbs.db
+
+/node_modules/*
+
+
+
+forever/
+
+**.swp
+
+# Redis cluster
+**/appendonly.aof
+**/dump.rdb
+**/nodes.conf
+
+# managed by dev-environment$ bin/update_build_scripts
+.npmrc
diff --git a/services/document-updater/.mocharc.json b/services/document-updater/.mocharc.json
new file mode 100644
index 0000000000..dc3280aa96
--- /dev/null
+++ b/services/document-updater/.mocharc.json
@@ -0,0 +1,3 @@
+{
+ "require": "test/setup.js"
+}
diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc
new file mode 100644
index 0000000000..5a80a7e912
--- /dev/null
+++ b/services/document-updater/.nvmrc
@@ -0,0 +1 @@
+12.22.3
diff --git a/services/document-updater/.prettierrc b/services/document-updater/.prettierrc
new file mode 100644
index 0000000000..c92c3526e7
--- /dev/null
+++ b/services/document-updater/.prettierrc
@@ -0,0 +1,11 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/sharelatex/sharelatex-dev-environment
+{
+ "arrowParens": "avoid",
+ "semi": false,
+ "singleQuote": true,
+ "trailingComma": "es5",
+ "tabWidth": 2,
+ "useTabs": false
+}
diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile
new file mode 100644
index 0000000000..6b286376dc
--- /dev/null
+++ b/services/document-updater/Dockerfile
@@ -0,0 +1,23 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/sharelatex/sharelatex-dev-environment
+
+FROM node:12.22.3 as base
+
+WORKDIR /app
+
+FROM base as app
+
+# Wildcard is used as some files may not be present in all repos
+COPY package*.json npm-shrink*.json /app/
+
+RUN npm ci --quiet
+
+COPY . /app
+
+FROM base
+
+COPY --from=app /app /app
+USER node
+
+CMD ["node", "--expose-gc", "app.js"]
diff --git a/services/document-updater/LICENSE b/services/document-updater/LICENSE
new file mode 100644
index 0000000000..ac8619dcb9
--- /dev/null
+++ b/services/document-updater/LICENSE
@@ -0,0 +1,662 @@
+
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc.
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile
new file mode 100644
index 0000000000..7591d2a689
--- /dev/null
+++ b/services/document-updater/Makefile
@@ -0,0 +1,90 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/sharelatex/sharelatex-dev-environment
+
+BUILD_NUMBER ?= local
+BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
+PROJECT_NAME = document-updater
+BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]')
+
+DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
+DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
+ BRANCH_NAME=$(BRANCH_NAME) \
+ PROJECT_NAME=$(PROJECT_NAME) \
+ MOCHA_GREP=${MOCHA_GREP} \
+ docker-compose ${DOCKER_COMPOSE_FLAGS}
+
+DOCKER_COMPOSE_TEST_ACCEPTANCE = \
+ COMPOSE_PROJECT_NAME=test_acceptance_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)
+
+DOCKER_COMPOSE_TEST_UNIT = \
+ COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)
+
+clean:
+ -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
+ -docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
+ -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local
+ -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local
+
+format:
+ $(DOCKER_COMPOSE) run --rm test_unit npm run --silent format
+
+format_fix:
+ $(DOCKER_COMPOSE) run --rm test_unit npm run --silent format:fix
+
+lint:
+ $(DOCKER_COMPOSE) run --rm test_unit npm run --silent lint
+
+test: format lint test_unit test_acceptance
+
+test_unit:
+ifneq (,$(wildcard test/unit))
+ $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit
+ $(MAKE) test_unit_clean
+endif
+
+test_clean: test_unit_clean
+test_unit_clean:
+ifneq (,$(wildcard test/unit))
+ $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0
+endif
+
+test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run
+ $(MAKE) test_acceptance_clean
+
+test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug
+ $(MAKE) test_acceptance_clean
+
+test_acceptance_run:
+ifneq (,$(wildcard test/acceptance))
+ $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance
+endif
+
+test_acceptance_run_debug:
+ifneq (,$(wildcard test/acceptance))
+ $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk
+endif
+
+test_clean: test_acceptance_clean
+test_acceptance_clean:
+ $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0
+
+test_acceptance_pre_run:
+ifneq (,$(wildcard test/acceptance/js/scripts/pre-run))
+ $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
+endif
+
+build:
+ docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
+ --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
+ .
+
+tar:
+ $(DOCKER_COMPOSE) up tar
+
+publish:
+
+ docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
+
+
+.PHONY: clean test test_unit test_acceptance test_clean build publish
diff --git a/services/document-updater/README.md b/services/document-updater/README.md
new file mode 100644
index 0000000000..2e6f0003a5
--- /dev/null
+++ b/services/document-updater/README.md
@@ -0,0 +1,12 @@
+overleaf/document-updater
+===========================
+
+An API for applying incoming updates to documents in real-time.
+
+License
+-------
+
+The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file.
+
+Copyright (c) Overleaf, 2014-2019.
+
diff --git a/services/document-updater/app.js b/services/document-updater/app.js
new file mode 100644
index 0000000000..61f254d7f2
--- /dev/null
+++ b/services/document-updater/app.js
@@ -0,0 +1,258 @@
+const Metrics = require('@overleaf/metrics')
+Metrics.initialize('doc-updater')
+
+const express = require('express')
+const Settings = require('@overleaf/settings')
+const logger = require('logger-sharelatex')
+logger.initialize('document-updater')
+
+logger.logger.addSerializers(require('./app/js/LoggerSerializers'))
+
+if (Settings.sentry != null && Settings.sentry.dsn != null) {
+ logger.initializeErrorReporting(Settings.sentry.dsn)
+}
+
+const RedisManager = require('./app/js/RedisManager')
+const DispatchManager = require('./app/js/DispatchManager')
+const DeleteQueueManager = require('./app/js/DeleteQueueManager')
+const Errors = require('./app/js/Errors')
+const HttpController = require('./app/js/HttpController')
+const mongodb = require('./app/js/mongodb')
+const async = require('async')
+
+const Path = require('path')
+const bodyParser = require('body-parser')
+
// Monitor the mongodb driver bundled with this service.
// Fix: the previous Path.resolve(__dirname, '/node_modules/mongodb') returned
// the absolute path '/node_modules/mongodb' — a path segment with a leading
// '/' resets resolution, so __dirname was silently ignored and the local
// driver was never matched. Path.join keeps the path rooted at __dirname.
Metrics.mongodb.monitor(
  Path.join(__dirname, 'node_modules', 'mongodb'),
  logger
)
// Sample event-loop lag every 100ms.
Metrics.event_loop.monitor(logger, 100)
+
const app = express()
// Record per-request metrics and parse JSON bodies up to the configured limit.
app.use(Metrics.http.monitor(logger))
app.use(bodyParser.json({ limit: Settings.maxJsonRequestSize }))
Metrics.injectMetricsRoute(app)

// Start the background workers that consume the pending-updates redis queues.
DispatchManager.createAndStartDispatchers(Settings.dispatcherCount)
+
// Validate that :project_id and :doc_id route parameters look like
// 24-character lowercase hex strings (Mongo ObjectId format); reject the
// request with an error otherwise.
const OBJECT_ID_REGEX = /^[0-9a-f]{24}$/

app.param('project_id', (req, res, next, projectId) => {
  if (projectId != null && OBJECT_ID_REGEX.test(projectId)) {
    next()
  } else {
    next(new Error('invalid project id'))
  }
})

app.param('doc_id', (req, res, next, docId) => {
  if (docId != null && OBJECT_ID_REGEX.test(docId)) {
    next()
  } else {
    next(new Error('invalid doc id'))
  }
})
+
// Document routes: read, peek (read without side effects), bulk fetch.
app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc)
app.get('/project/:project_id/doc/:doc_id/peek', HttpController.peekDoc)
// temporarily keep the GET method for backwards compatibility
app.get('/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld)
// will migrate to the POST method of get_and_flush_if_old instead
app.post(
  '/project/:project_id/get_and_flush_if_old',
  HttpController.getProjectDocsAndFlushIfOld
)
app.post('/project/:project_id/clearState', HttpController.clearProjectState)
// Write routes: replace doc contents, flush a loaded doc to mongo.
app.post('/project/:project_id/doc/:doc_id', HttpController.setDoc)
app.post(
  '/project/:project_id/doc/:doc_id/flush',
  HttpController.flushDocIfLoaded
)
// Deletion routes: single doc, whole project, or a batch of projects.
app.delete('/project/:project_id/doc/:doc_id', HttpController.deleteDoc)
app.delete('/project/:project_id', HttpController.deleteProject)
app.delete('/project', HttpController.deleteMultipleProjects)
// Project-level structure updates, history resync and flushing.
app.post('/project/:project_id', HttpController.updateProject)
app.post(
  '/project/:project_id/history/resync',
  HttpController.resyncProjectHistory
)
app.post('/project/:project_id/flush', HttpController.flushProject)
// Track-changes routes: accept tracked changes, delete comment threads.
app.post(
  '/project/:project_id/doc/:doc_id/change/:change_id/accept',
  HttpController.acceptChanges
)
app.post(
  '/project/:project_id/doc/:doc_id/change/accept',
  HttpController.acceptChanges
)
app.delete(
  '/project/:project_id/doc/:doc_id/comment/:comment_id',
  HttpController.deleteComment
)

// Maintenance endpoints used by operations tooling.
app.get('/flush_all_projects', HttpController.flushAllProjects)
app.get('/flush_queued_projects', HttpController.flushQueuedProjects)
+
// Report how many docs are currently held in redis, timing the lookup.
app.get('/total', (req, res, next) => {
  const timer = new Metrics.Timer('http.allDocList')
  RedisManager.getCountOfDocsInMemory((err, count) => {
    if (err) {
      next(err)
    } else {
      timer.done()
      res.send({ total: count })
    }
  })
})
+
// Liveness probe: 503 while the service is shutting down, 200 otherwise.
app.get('/status', (req, res) => {
  if (Settings.shuttingDown) {
    return res.sendStatus(503) // Service unavailable
  }
  res.send('document updater is alive')
})
+
// Client for the pub/sub redis instance, used here for health checking only.
const pubsubClient = require('@overleaf/redis-wrapper').createClient(
  Settings.redis.pubsub
)
// Probe the pub/sub redis connection: 200 when healthy, logged 500 otherwise.
app.get('/health_check/redis', (req, res, next) => {
  pubsubClient.healthCheck(error => {
    if (!error) {
      return res.sendStatus(200)
    }
    logger.err({ err: error }, 'failed redis health check')
    res.sendStatus(500)
  })
})
+
// Client for the document-updater redis (may be a cluster); also reused
// below for connection-event logging.
const docUpdaterRedisClient = require('@overleaf/redis-wrapper').createClient(
  Settings.redis.documentupdater
)
// Probe the document-updater redis: 200 when healthy, logged 500 otherwise.
app.get('/health_check/redis_cluster', (req, res, next) => {
  docUpdaterRedisClient.healthCheck(error => {
    if (!error) {
      return res.sendStatus(200)
    }
    logger.err({ err: error }, 'failed redis cluster health check')
    res.sendStatus(500)
  })
})
+
// Combined health check: probe pub/sub redis, document-updater redis and
// mongo in sequence. async.series stops at the first failure, which yields
// a 500; all three passing yields a 200.
app.get('/health_check', (req, res, next) => {
  async.series(
    [
      cb => {
        pubsubClient.healthCheck(error => {
          if (error) {
            logger.err({ err: error }, 'failed redis health check')
          }
          cb(error)
        })
      },
      cb => {
        docUpdaterRedisClient.healthCheck(error => {
          if (error) {
            logger.err({ err: error }, 'failed redis cluster health check')
          }
          cb(error)
        })
      },
      cb => {
        mongodb.healthCheck(error => {
          if (error) {
            logger.err({ err: error }, 'failed mongo health check')
          }
          cb(error)
        })
      },
    ],
    error => {
      if (error) {
        return res.sendStatus(500)
      } else {
        return res.sendStatus(200)
      }
    }
  )
})
+
// Express error handler (registered after all routes): map known error
// types to specific status codes; anything else becomes a logged 500.
app.use((error, req, res, next) => {
  if (error instanceof Errors.NotFoundError) {
    return res.sendStatus(404)
  } else if (error instanceof Errors.OpRangeNotAvailableError) {
    return res.sendStatus(422) // Unprocessable Entity
  } else if (error.statusCode === 413) {
    // body-parser sets statusCode 413 when the JSON body exceeds the limit
    return res.status(413).send('request entity too large')
  } else {
    logger.error({ err: error, req }, 'request errored')
    return res.status(500).send('Oops, something went wrong')
  }
})
+
/**
 * Build a handler for `signal` that flags the service as shutting down
 * (so /status returns 503 and the dispatch workers stop looping) and then
 * exits the process after a 10-second grace period.
 */
function shutdownCleanly(signal) {
  return function () {
    logger.log({ signal }, 'received interrupt, cleaning up')
    Settings.shuttingDown = true
    const GRACE_PERIOD_MS = 10000
    setTimeout(function () {
      logger.log({ signal }, 'shutting down')
      process.exit()
    }, GRACE_PERIOD_MS)
  }
}
+
// Log every connection-lifecycle event emitted by the document-updater
// redis client, to aid debugging of connectivity problems.
const CONNECTION_EVENTS = [
  'connect',
  'ready',
  'error',
  'close',
  'reconnecting',
  'end',
]
for (const eventName of CONNECTION_EVENTS) {
  docUpdaterRedisClient.on(eventName, e => {
    console.log(`redis event: ${eventName} ${e}`) // eslint-disable-line no-console
  })
}
+
// Resolve the listen address: Settings.internal takes precedence over the
// legacy Settings.api location, falling back to port 3003 on localhost.
const port =
  Settings.internal.documentupdater.port ||
  (Settings.api &&
    Settings.api.documentupdater &&
    Settings.api.documentupdater.port) ||
  3003
const host = Settings.internal.documentupdater.host || 'localhost'

if (!module.parent) {
  // Called directly: wait for mongo before accepting traffic, then listen.
  mongodb
    .waitForDb()
    .then(() => {
      app.listen(port, host, function (err) {
        if (err) {
          logger.fatal({ err }, `Cannot bind to ${host}:${port}. Exiting.`)
          process.exit(1)
        }
        logger.info(
          `Document-updater starting up, listening on ${host}:${port}`
        )
        if (Settings.continuousBackgroundFlush) {
          // Optionally drain the flush-and-delete queue in the background.
          logger.info('Starting continuous background flush')
          DeleteQueueManager.startBackgroundFlush()
        }
      })
    })
    .catch(err => {
      logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
      process.exit(1)
    })
}

// Exported for tests / embedding (when required as a module we do not listen).
module.exports = app
+
// Register the graceful shutdown handler for every signal we care about.
const SHUTDOWN_SIGNALS = [
  'SIGINT',
  'SIGHUP',
  'SIGQUIT',
  'SIGUSR1',
  'SIGUSR2',
  'SIGTERM',
  'SIGABRT',
]
SHUTDOWN_SIGNALS.forEach(signal => {
  process.on(signal, shutdownCleanly(signal))
})
diff --git a/services/document-updater/app/js/DeleteQueueManager.js b/services/document-updater/app/js/DeleteQueueManager.js
new file mode 100644
index 0000000000..492f8d3360
--- /dev/null
+++ b/services/document-updater/app/js/DeleteQueueManager.js
@@ -0,0 +1,143 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let DeleteQueueManager
+const Settings = require('@overleaf/settings')
+const RedisManager = require('./RedisManager')
+const ProjectManager = require('./ProjectManager')
+const logger = require('logger-sharelatex')
+const metrics = require('./Metrics')
+const async = require('async')
+
+// Maintain a sorted set of project flushAndDelete requests, ordered by timestamp
+// (ZADD), and process them from oldest to newest. A flushAndDelete request comes
+// from real-time and is triggered when a user leaves a project.
+//
+// The aim is to remove the project from redis 5 minutes after the last request
+// if there has been no activity (document updates) in that time. If there is
+// activity we can expect a further flushAndDelete request when the editing user
+// leaves the project.
+//
+// If a new flushAndDelete request comes in while an existing request is already
+// in the queue we update the timestamp as we can postpone flushing further.
+//
+// Documents are processed by checking the queue, seeing if the first entry is
+// older than 5 minutes, and popping it from the queue in that case.
+
module.exports = DeleteQueueManager = {
  /**
   * Process queued flushAndDelete requests, oldest first, until the queue
   * has no sufficiently-old entries left or we hit a limit.
   *
   * options:
   *   - min_delete_age: only process requests older than this many ms
   *     (a +/-50ms random jitter is applied to the cutoff)
   *   - timeout: stop the sweep after this many ms of wall-clock time
   *   - limit: stop after flushing this many projects
   *
   * callback(error, count) — count is the number of projects flushed.
   */
  flushAndDeleteOldProjects(options, callback) {
    const startTime = Date.now()
    const cutoffTime =
      startTime - options.min_delete_age + 100 * (Math.random() - 0.5)
    let count = 0

    // Flush one project unless any of its docs changed after the queued
    // request (in which case a newer flushAndDelete request is expected).
    // cb(null, true) signals that a flush actually happened.
    const flushProjectIfNotModified = (project_id, flushTimestamp, cb) =>
      ProjectManager.getProjectDocsTimestamps(
        project_id,
        function (err, timestamps) {
          if (err != null) {
            // NOTE(review): this invokes the outer `callback`, aborting the
            // whole sweep, rather than `cb` — looks intentional but confirm.
            return callback(err)
          }
          if (timestamps.length === 0) {
            logger.log(
              { project_id },
              'skipping flush of queued project - no timestamps'
            )
            return cb()
          }
          // are any of the timestamps newer than the time the project was flushed?
          for (const timestamp of Array.from(timestamps)) {
            if (timestamp > flushTimestamp) {
              metrics.inc('queued-delete-skipped')
              logger.debug(
                { project_id, timestamps, flushTimestamp },
                'found newer timestamp, will skip delete'
              )
              return cb()
            }
          }
          logger.log({ project_id, flushTimestamp }, 'flushing queued project')
          return ProjectManager.flushAndDeleteProjectWithLocks(
            project_id,
            { skip_history_flush: false },
            function (err) {
              if (err != null) {
                // best-effort: log the failure but still count it completed
                logger.err({ project_id, err }, 'error flushing queued project')
              }
              metrics.inc('queued-delete-completed')
              return cb(null, true)
            }
          )
        }
      )

    // Recursively pop-and-flush until the queue is drained past the cutoff
    // or the time/count limits are reached.
    var flushNextProject = function () {
      const now = Date.now()
      if (now - startTime > options.timeout) {
        logger.log('hit time limit on flushing old projects')
        return callback(null, count)
      }
      if (count > options.limit) {
        logger.log('hit count limit on flushing old projects')
        return callback(null, count)
      }
      return RedisManager.getNextProjectToFlushAndDelete(
        cutoffTime,
        function (err, project_id, flushTimestamp, queueLength) {
          if (err != null) {
            return callback(err)
          }
          // No project older than the cutoff: the sweep is done.
          if (project_id == null) {
            return callback(null, count)
          }
          logger.log({ project_id, queueLength }, 'flushing queued project')
          metrics.globalGauge('queued-flush-backlog', queueLength)
          return flushProjectIfNotModified(
            project_id,
            flushTimestamp,
            function (err, flushed) {
              if (flushed) {
                count++
              }
              return flushNextProject()
            }
          )
        }
      )
    }

    return flushNextProject()
  },

  /**
   * Continuously sweep the delete queue in the background: run a bounded
   * sweep, then reschedule itself — quickly (10ms) while the queue is busy
   * (>10 projects flushed), slowly (1s) otherwise. Stops rescheduling once
   * Settings.shuttingDown is set.
   */
  startBackgroundFlush() {
    const SHORT_DELAY = 10
    const LONG_DELAY = 1000
    var doFlush = function () {
      if (Settings.shuttingDown) {
        logger.warn('discontinuing background flush due to shutdown')
        return
      }
      return DeleteQueueManager.flushAndDeleteOldProjects(
        {
          timeout: 1000,
          min_delete_age: 3 * 60 * 1000,
          limit: 1000, // high value, to ensure we always flush enough projects
        },
        (err, flushed) =>
          setTimeout(doFlush, flushed > 10 ? SHORT_DELAY : LONG_DELAY)
      )
    }
    return doFlush()
  },
}
diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js
new file mode 100644
index 0000000000..5c017f0d4e
--- /dev/null
+++ b/services/document-updater/app/js/DiffCodec.js
@@ -0,0 +1,40 @@
+const DMP = require('diff-match-patch')
+const dmp = new DMP()
+
+// Do not attempt to produce a diff for more than 100ms
+dmp.Diff_Timeout = 0.1
+
+module.exports = {
+ ADDED: 1,
+ REMOVED: -1,
+ UNCHANGED: 0,
+
+ diffAsShareJsOp(before, after, callback) {
+ const diffs = dmp.diff_main(before.join('\n'), after.join('\n'))
+ dmp.diff_cleanupSemantic(diffs)
+
+ const ops = []
+ let position = 0
+ for (const diff of diffs) {
+ const type = diff[0]
+ const content = diff[1]
+ if (type === this.ADDED) {
+ ops.push({
+ i: content,
+ p: position,
+ })
+ position += content.length
+ } else if (type === this.REMOVED) {
+ ops.push({
+ d: content,
+ p: position,
+ })
+ } else if (type === this.UNCHANGED) {
+ position += content.length
+ } else {
+ throw new Error('Unknown type')
+ }
+ }
+ callback(null, ops)
+ },
+}
diff --git a/services/document-updater/app/js/DispatchManager.js b/services/document-updater/app/js/DispatchManager.js
new file mode 100644
index 0000000000..d567fade2e
--- /dev/null
+++ b/services/document-updater/app/js/DispatchManager.js
@@ -0,0 +1,114 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS202: Simplify dynamic range loops
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let DispatchManager
+const Settings = require('@overleaf/settings')
+const logger = require('logger-sharelatex')
+const Keys = require('./UpdateKeys')
+const redis = require('@overleaf/redis-wrapper')
+const Errors = require('./Errors')
+const _ = require('lodash')
+
+const UpdateManager = require('./UpdateManager')
+const Metrics = require('./Metrics')
+const RateLimitManager = require('./RateLimitManager')
+
module.exports = DispatchManager = {
  /**
   * Create (but do not start) a worker bound to one redis pending-updates
   * list. Shard 0 uses the legacy unsuffixed key; other shards append their
   * number. Each worker blocks on its list and dispatches queued docs
   * through UpdateManager, throttled by the shared RateLimiter.
   */
  createDispatcher(RateLimiter, queueShardNumber) {
    let pendingListKey
    if (queueShardNumber === 0) {
      pendingListKey = 'pending-updates-list'
    } else {
      pendingListKey = `pending-updates-list-${queueShardNumber}`
    }

    // Each dispatcher gets its own redis connection: BLPOP blocks the client.
    const client = redis.createClient(Settings.redis.documentupdater)
    var worker = {
      client,
      // Block (BLPOP with no timeout) until a doc key is queued, then hand
      // the doc off to UpdateManager in the background via the RateLimiter.
      // `callback` fires once the task has been accepted, not completed.
      _waitForUpdateThenDispatchWorker(callback) {
        if (callback == null) {
          callback = function (error) {}
        }
        const timer = new Metrics.Timer('worker.waiting')
        return worker.client.blpop(pendingListKey, 0, function (error, result) {
          // NOTE(review): this logs on every dequeue, including raw
          // error/result — looks like leftover debug logging; confirm.
          logger.log(`getting ${queueShardNumber}`, error, result)
          timer.done()
          if (error != null) {
            return callback(error)
          }
          if (result == null) {
            return callback()
          }
          const [list_name, doc_key] = Array.from(result)
          const [project_id, doc_id] = Array.from(
            Keys.splitProjectIdAndDocId(doc_key)
          )
          // Dispatch this in the background
          const backgroundTask = cb =>
            UpdateManager.processOutstandingUpdatesWithLock(
              project_id,
              doc_id,
              function (error) {
                // log everything except OpRangeNotAvailable errors, these are normal
                if (error != null) {
                  // downgrade OpRangeNotAvailable and "Delete component" errors so they are not sent to sentry
                  const logAsWarning =
                    error instanceof Errors.OpRangeNotAvailableError ||
                    error instanceof Errors.DeleteMismatchError
                  if (logAsWarning) {
                    logger.warn(
                      { err: error, project_id, doc_id },
                      'error processing update'
                    )
                  } else {
                    logger.error(
                      { err: error, project_id, doc_id },
                      'error processing update'
                    )
                  }
                }
                return cb()
              }
            )
          return RateLimiter.run(backgroundTask, callback)
        })
      },

      // Loop forever: wait for an update, dispatch it, recurse. Stops once
      // Settings.shuttingDown is set; a worker error is fatal (rethrown).
      run() {
        if (Settings.shuttingDown) {
          return
        }
        return worker._waitForUpdateThenDispatchWorker(error => {
          if (error != null) {
            logger.error({ err: error }, 'Error in worker process')
            throw error
          } else {
            return worker.run()
          }
        })
      },
    }

    return worker
  },

  /**
   * Start `number` dispatcher workers, one per queue shard, all sharing a
   * single RateLimitManager sized to the dispatcher count.
   */
  createAndStartDispatchers(number) {
    const RateLimiter = new RateLimitManager(number)
    _.times(number, function (shardNumber) {
      return DispatchManager.createDispatcher(RateLimiter, shardNumber).run()
    })
  },
}
diff --git a/services/document-updater/app/js/DocumentManager.js b/services/document-updater/app/js/DocumentManager.js
new file mode 100644
index 0000000000..a79d4aa187
--- /dev/null
+++ b/services/document-updater/app/js/DocumentManager.js
@@ -0,0 +1,808 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let DocumentManager
+const RedisManager = require('./RedisManager')
+const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager')
+const PersistenceManager = require('./PersistenceManager')
+const DiffCodec = require('./DiffCodec')
+const logger = require('logger-sharelatex')
+const Metrics = require('./Metrics')
+const HistoryManager = require('./HistoryManager')
+const RealTimeRedisManager = require('./RealTimeRedisManager')
+const Errors = require('./Errors')
+const RangesManager = require('./RangesManager')
+const async = require('async')
+
+const MAX_UNFLUSHED_AGE = 300 * 1000 // 5 mins, document should be flushed to mongo this time after a change
+
+module.exports = DocumentManager = {
  // Fetch a document's content and metadata.
  //
  // Looks in redis first; on a cache miss, loads the doc from the
  // persistence API (web), stores it in redis (including its history
  // type), and calls back. Callback signature:
  //   (error, lines, version, ranges, pathname, projectHistoryId,
  //    unflushedTime, alreadyLoaded)
  // `alreadyLoaded` is true only when the doc was already in redis;
  // freshly loaded docs call back with unflushedTime=null.
  getDoc(project_id, doc_id, _callback) {
    if (_callback == null) {
      _callback = function (
        error,
        lines,
        version,
        ranges,
        pathname,
        projectHistoryId,
        unflushedTime,
        alreadyLoaded
      ) {}
    }
    const timer = new Metrics.Timer('docManager.getDoc')
    // Wrap the callback so the metrics timer is stopped on every exit path.
    const callback = function (...args) {
      timer.done()
      return _callback(...Array.from(args || []))
    }

    return RedisManager.getDoc(
      project_id,
      doc_id,
      function (
        error,
        lines,
        version,
        ranges,
        pathname,
        projectHistoryId,
        unflushedTime
      ) {
        if (error != null) {
          return callback(error)
        }
        if (lines == null || version == null) {
          logger.log(
            { project_id, doc_id },
            'doc not in redis so getting from persistence API'
          )
          return PersistenceManager.getDoc(
            project_id,
            doc_id,
            function (
              error,
              lines,
              version,
              ranges,
              pathname,
              projectHistoryId,
              projectHistoryType
            ) {
              if (error != null) {
                return callback(error)
              }
              logger.log(
                {
                  project_id,
                  doc_id,
                  lines,
                  version,
                  pathname,
                  projectHistoryId,
                  projectHistoryType,
                },
                'got doc from persistence API'
              )
              return RedisManager.putDocInMemory(
                project_id,
                doc_id,
                lines,
                version,
                ranges,
                pathname,
                projectHistoryId,
                function (error) {
                  if (error != null) {
                    return callback(error)
                  }
                  // Record which history system the doc uses so later
                  // flushes are routed to the right service.
                  return RedisManager.setHistoryType(
                    doc_id,
                    projectHistoryType,
                    function (error) {
                      if (error != null) {
                        return callback(error)
                      }
                      // unflushedTime=null, alreadyLoaded=false: doc was
                      // just loaded and has no pending changes.
                      return callback(
                        null,
                        lines,
                        version,
                        ranges || {},
                        pathname,
                        projectHistoryId,
                        null,
                        false
                      )
                    }
                  )
                }
              )
            }
          )
        } else {
          // Cache hit: return the redis copy as-is.
          return callback(
            null,
            lines,
            version,
            ranges,
            pathname,
            projectHistoryId,
            unflushedTime,
            true
          )
        }
      }
    )
  },
+
  // Fetch a doc plus the ops needed to bring a client from `fromVersion`
  // up to the current version. A `fromVersion` of -1 means "no ops,
  // just the doc" and skips the redis op lookup entirely.
  getDocAndRecentOps(project_id, doc_id, fromVersion, _callback) {
    if (_callback == null) {
      _callback = function (
        error,
        lines,
        version,
        ops,
        ranges,
        pathname,
        projectHistoryId
      ) {}
    }
    const timer = new Metrics.Timer('docManager.getDocAndRecentOps')
    // Wrap the callback so the metrics timer is stopped on every exit path.
    const callback = function (...args) {
      timer.done()
      return _callback(...Array.from(args || []))
    }

    return DocumentManager.getDoc(
      project_id,
      doc_id,
      function (error, lines, version, ranges, pathname, projectHistoryId) {
        if (error != null) {
          return callback(error)
        }
        if (fromVersion === -1) {
          return callback(
            null,
            lines,
            version,
            [],
            ranges,
            pathname,
            projectHistoryId
          )
        } else {
          // May fail with OpRangeNotAvailableError if the requested ops
          // have already been trimmed from redis.
          return RedisManager.getPreviousDocOps(
            doc_id,
            fromVersion,
            version,
            function (error, ops) {
              if (error != null) {
                return callback(error)
              }
              return callback(
                null,
                lines,
                version,
                ops,
                ranges,
                pathname,
                projectHistoryId
              )
            }
          )
        }
      }
    )
  },
+
  // Overwrite a doc's content with `newLines` (e.g. a restore or an
  // external edit). Computes a diff against the current content and
  // applies it as an ordinary ShareJS update so version history stays
  // consistent. `undoing` marks every generated op with u:true so track
  // changes treats it as an undo.
  setDoc(project_id, doc_id, newLines, source, user_id, undoing, _callback) {
    if (_callback == null) {
      _callback = function (error) {}
    }
    const timer = new Metrics.Timer('docManager.setDoc')
    // Wrap the callback so the metrics timer is stopped on every exit path.
    const callback = function (...args) {
      timer.done()
      return _callback(...Array.from(args || []))
    }

    if (newLines == null) {
      return callback(new Error('No lines were provided to setDoc'))
    }

    // Required lazily to avoid a circular dependency at module load time.
    const UpdateManager = require('./UpdateManager')
    return DocumentManager.getDoc(
      project_id,
      doc_id,
      function (
        error,
        oldLines,
        version,
        ranges,
        pathname,
        projectHistoryId,
        unflushedTime,
        alreadyLoaded
      ) {
        if (error != null) {
          return callback(error)
        }

        // Legacy JSON-structured docs (lines are {text: ...} objects)
        // cannot be diffed as plain text, so leave them untouched.
        if (
          oldLines != null &&
          oldLines.length > 0 &&
          oldLines[0].text != null
        ) {
          logger.log(
            { doc_id, project_id, oldLines, newLines },
            'document is JSON so not updating'
          )
          return callback(null)
        }

        logger.log(
          { doc_id, project_id, oldLines, newLines },
          'setting a document via http'
        )
        return DiffCodec.diffAsShareJsOp(
          oldLines,
          newLines,
          function (error, op) {
            if (error != null) {
              return callback(error)
            }
            if (undoing) {
              for (const o of Array.from(op || [])) {
                o.u = true
              } // Turn on undo flag for each op for track changes
            }
            const update = {
              doc: doc_id,
              op,
              v: version,
              meta: {
                type: 'external',
                source,
                user_id,
              },
            }
            return UpdateManager.applyUpdate(
              project_id,
              doc_id,
              update,
              function (error) {
                if (error != null) {
                  return callback(error)
                }
                // If the document was loaded already, then someone has it open
                // in a project, and the usual flushing mechanism will happen.
                // Otherwise we should remove it immediately since nothing else
                // is using it.
                if (alreadyLoaded) {
                  return DocumentManager.flushDocIfLoaded(
                    project_id,
                    doc_id,
                    function (error) {
                      if (error != null) {
                        return callback(error)
                      }
                      return callback(null)
                    }
                  )
                } else {
                  return DocumentManager.flushAndDeleteDoc(
                    project_id,
                    doc_id,
                    {},
                    function (error) {
                      // There is no harm in flushing project history if the previous
                      // call failed and sometimes it is required
                      HistoryManager.flushProjectChangesAsync(project_id)

                      if (error != null) {
                        return callback(error)
                      }
                      return callback(null)
                    }
                  )
                }
              }
            )
          }
        )
      }
    )
  },
+
  // Persist a doc's current redis state to the persistence API (mongo,
  // via web), then clear its unflushed-time marker. A no-op success if
  // the doc is not currently loaded in redis.
  flushDocIfLoaded(project_id, doc_id, _callback) {
    if (_callback == null) {
      _callback = function (error) {}
    }
    const timer = new Metrics.Timer('docManager.flushDocIfLoaded')
    // Wrap the callback so the metrics timer is stopped on every exit path.
    const callback = function (...args) {
      timer.done()
      return _callback(...Array.from(args || []))
    }
    return RedisManager.getDoc(
      project_id,
      doc_id,
      function (
        error,
        lines,
        version,
        ranges,
        pathname,
        projectHistoryId,
        unflushedTime,
        lastUpdatedAt,
        lastUpdatedBy
      ) {
        if (error != null) {
          return callback(error)
        }
        if (lines == null || version == null) {
          logger.log(
            { project_id, doc_id },
            'doc is not loaded so not flushing'
          )
          return callback(null) // TODO: return a flag to bail out, as we go on to remove doc from memory?
        } else {
          logger.log({ project_id, doc_id, version }, 'flushing doc')
          return PersistenceManager.setDoc(
            project_id,
            doc_id,
            lines,
            version,
            ranges,
            lastUpdatedAt,
            lastUpdatedBy,
            function (error) {
              if (error != null) {
                return callback(error)
              }
              // Successfully persisted, so nothing is unflushed any more.
              return RedisManager.clearUnflushedTime(doc_id, callback)
            }
          )
        }
      }
    )
  },
+
  // Flush a doc to the persistence layer (if loaded) and then remove it
  // from redis. options.ignoreFlushErrors: log a warning and continue
  // with the delete even if the flush fails.
  // NOTE(review): unlike sibling methods, `_callback` has no no-op
  // default here — callers must always pass one; confirm before relying
  // on calling this without a callback.
  flushAndDeleteDoc(project_id, doc_id, options, _callback) {
    const timer = new Metrics.Timer('docManager.flushAndDeleteDoc')
    // Wrap the callback so the metrics timer is stopped on every exit path.
    const callback = function (...args) {
      timer.done()
      return _callback(...Array.from(args || []))
    }

    return DocumentManager.flushDocIfLoaded(
      project_id,
      doc_id,
      function (error) {
        if (error != null) {
          if (options.ignoreFlushErrors) {
            logger.warn(
              { project_id, doc_id, err: error },
              'ignoring flush error while deleting document'
            )
          } else {
            return callback(error)
          }
        }

        // Flush in the background since it requires a http request
        HistoryManager.flushDocChangesAsync(project_id, doc_id)

        return RedisManager.removeDocFromMemory(
          project_id,
          doc_id,
          function (error) {
            if (error != null) {
              return callback(error)
            }
            return callback(null)
          }
        )
      }
    )
  },
+
  // Accept a set of tracked changes on a doc: resolve the given change
  // ids in the ranges data and write the updated ranges back to redis.
  // Doc lines and version are unchanged (empty op list is passed).
  acceptChanges(project_id, doc_id, change_ids, _callback) {
    if (change_ids == null) {
      change_ids = []
    }
    if (_callback == null) {
      _callback = function (error) {}
    }
    const timer = new Metrics.Timer('docManager.acceptChanges')
    // Wrap the callback so the metrics timer is stopped on every exit path.
    const callback = function (...args) {
      timer.done()
      return _callback(...Array.from(args || []))
    }

    return DocumentManager.getDoc(
      project_id,
      doc_id,
      function (error, lines, version, ranges) {
        if (error != null) {
          return callback(error)
        }
        if (lines == null || version == null) {
          return callback(
            new Errors.NotFoundError(`document not found: ${doc_id}`)
          )
        }
        return RangesManager.acceptChanges(
          change_ids,
          ranges,
          function (error, new_ranges) {
            if (error != null) {
              return callback(error)
            }
            // Write back with no ops ([]) — only the ranges change.
            return RedisManager.updateDocument(
              project_id,
              doc_id,
              lines,
              version,
              [],
              new_ranges,
              {},
              function (error) {
                if (error != null) {
                  return callback(error)
                }
                return callback()
              }
            )
          }
        )
      }
    )
  },
+
  // Delete a comment (by comment/thread id) from a doc's ranges data
  // and write the updated ranges back to redis. Doc lines and version
  // are unchanged (empty op list is passed).
  deleteComment(project_id, doc_id, comment_id, _callback) {
    if (_callback == null) {
      _callback = function (error) {}
    }
    const timer = new Metrics.Timer('docManager.deleteComment')
    // Wrap the callback so the metrics timer is stopped on every exit path.
    const callback = function (...args) {
      timer.done()
      return _callback(...Array.from(args || []))
    }

    return DocumentManager.getDoc(
      project_id,
      doc_id,
      function (error, lines, version, ranges) {
        if (error != null) {
          return callback(error)
        }
        if (lines == null || version == null) {
          return callback(
            new Errors.NotFoundError(`document not found: ${doc_id}`)
          )
        }
        return RangesManager.deleteComment(
          comment_id,
          ranges,
          function (error, new_ranges) {
            if (error != null) {
              return callback(error)
            }
            // Write back with no ops ([]) — only the ranges change.
            return RedisManager.updateDocument(
              project_id,
              doc_id,
              lines,
              version,
              [],
              new_ranges,
              {},
              function (error) {
                if (error != null) {
                  return callback(error)
                }
                return callback()
              }
            )
          }
        )
      }
    )
  },
+
+ renameDoc(project_id, doc_id, user_id, update, projectHistoryId, _callback) {
+ if (_callback == null) {
+ _callback = function (error) {}
+ }
+ const timer = new Metrics.Timer('docManager.updateProject')
+ const callback = function (...args) {
+ timer.done()
+ return _callback(...Array.from(args || []))
+ }
+
+ return RedisManager.renameDoc(
+ project_id,
+ doc_id,
+ user_id,
+ update,
+ projectHistoryId,
+ callback
+ )
+ },
+
  // Fetch a doc, and opportunistically flush it to the persistence
  // layer if it has had unflushed changes for longer than
  // MAX_UNFLUSHED_AGE. Calls back with (error, lines, version).
  getDocAndFlushIfOld(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (error, doc) {}
    }
    return DocumentManager.getDoc(
      project_id,
      doc_id,
      function (
        error,
        lines,
        version,
        ranges,
        pathname,
        projectHistoryId,
        unflushedTime,
        alreadyLoaded
      ) {
        if (error != null) {
          return callback(error)
        }
        // if doc was already loaded see if it needs to be flushed
        if (
          alreadyLoaded &&
          unflushedTime != null &&
          Date.now() - unflushedTime > MAX_UNFLUSHED_AGE
        ) {
          return DocumentManager.flushDocIfLoaded(
            project_id,
            doc_id,
            function (error) {
              if (error != null) {
                return callback(error)
              }
              return callback(null, lines, version)
            }
          )
        } else {
          return callback(null, lines, version)
        }
      }
    )
  },
+
  // Queue a full-content resync of one doc for the project-history
  // service. Uses the redis copy when loaded; otherwise fetches the doc
  // from the persistence API (without loading it into redis).
  resyncDocContents(project_id, doc_id, callback) {
    logger.log({ project_id, doc_id }, 'start resyncing doc contents')
    return RedisManager.getDoc(
      project_id,
      doc_id,
      function (error, lines, version, ranges, pathname, projectHistoryId) {
        if (error != null) {
          return callback(error)
        }

        if (lines == null || version == null) {
          logger.log(
            { project_id, doc_id },
            'resyncing doc contents - not found in redis - retrieving from web'
          )
          return PersistenceManager.getDoc(
            project_id,
            doc_id,
            function (
              error,
              lines,
              version,
              ranges,
              pathname,
              projectHistoryId
            ) {
              if (error != null) {
                logger.error(
                  { project_id, doc_id, getDocError: error },
                  'resyncing doc contents - error retrieving from web'
                )
                return callback(error)
              }
              // Note: the inner callback parameters shadow the outer
              // redis values; the freshly fetched ones are queued here.
              return ProjectHistoryRedisManager.queueResyncDocContent(
                project_id,
                projectHistoryId,
                doc_id,
                lines,
                version,
                pathname,
                callback
              )
            }
          )
        } else {
          logger.log(
            { project_id, doc_id },
            'resyncing doc contents - doc in redis - will queue in redis'
          )
          return ProjectHistoryRedisManager.queueResyncDocContent(
            project_id,
            projectHistoryId,
            doc_id,
            lines,
            version,
            pathname,
            callback
          )
        }
      }
    )
  },
+
+ getDocWithLock(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error, lines, version) {}
+ }
+ const UpdateManager = require('./UpdateManager')
+ return UpdateManager.lockUpdatesAndDo(
+ DocumentManager.getDoc,
+ project_id,
+ doc_id,
+ callback
+ )
+ },
+
+ getDocAndRecentOpsWithLock(project_id, doc_id, fromVersion, callback) {
+ if (callback == null) {
+ callback = function (
+ error,
+ lines,
+ version,
+ ops,
+ ranges,
+ pathname,
+ projectHistoryId
+ ) {}
+ }
+ const UpdateManager = require('./UpdateManager')
+ return UpdateManager.lockUpdatesAndDo(
+ DocumentManager.getDocAndRecentOps,
+ project_id,
+ doc_id,
+ fromVersion,
+ callback
+ )
+ },
+
+ getDocAndFlushIfOldWithLock(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error, doc) {}
+ }
+ const UpdateManager = require('./UpdateManager')
+ return UpdateManager.lockUpdatesAndDo(
+ DocumentManager.getDocAndFlushIfOld,
+ project_id,
+ doc_id,
+ callback
+ )
+ },
+
+ setDocWithLock(
+ project_id,
+ doc_id,
+ lines,
+ source,
+ user_id,
+ undoing,
+ callback
+ ) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const UpdateManager = require('./UpdateManager')
+ return UpdateManager.lockUpdatesAndDo(
+ DocumentManager.setDoc,
+ project_id,
+ doc_id,
+ lines,
+ source,
+ user_id,
+ undoing,
+ callback
+ )
+ },
+
+ flushDocIfLoadedWithLock(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const UpdateManager = require('./UpdateManager')
+ return UpdateManager.lockUpdatesAndDo(
+ DocumentManager.flushDocIfLoaded,
+ project_id,
+ doc_id,
+ callback
+ )
+ },
+
+ flushAndDeleteDocWithLock(project_id, doc_id, options, callback) {
+ const UpdateManager = require('./UpdateManager')
+ return UpdateManager.lockUpdatesAndDo(
+ DocumentManager.flushAndDeleteDoc,
+ project_id,
+ doc_id,
+ options,
+ callback
+ )
+ },
+
+ acceptChangesWithLock(project_id, doc_id, change_ids, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const UpdateManager = require('./UpdateManager')
+ return UpdateManager.lockUpdatesAndDo(
+ DocumentManager.acceptChanges,
+ project_id,
+ doc_id,
+ change_ids,
+ callback
+ )
+ },
+
+ deleteCommentWithLock(project_id, doc_id, thread_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const UpdateManager = require('./UpdateManager')
+ return UpdateManager.lockUpdatesAndDo(
+ DocumentManager.deleteComment,
+ project_id,
+ doc_id,
+ thread_id,
+ callback
+ )
+ },
+
+ renameDocWithLock(
+ project_id,
+ doc_id,
+ user_id,
+ update,
+ projectHistoryId,
+ callback
+ ) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const UpdateManager = require('./UpdateManager')
+ return UpdateManager.lockUpdatesAndDo(
+ DocumentManager.renameDoc,
+ project_id,
+ doc_id,
+ user_id,
+ update,
+ projectHistoryId,
+ callback
+ )
+ },
+
+ resyncDocContentsWithLock(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const UpdateManager = require('./UpdateManager')
+ return UpdateManager.lockUpdatesAndDo(
+ DocumentManager.resyncDocContents,
+ project_id,
+ doc_id,
+ callback
+ )
+ },
+}
diff --git a/services/document-updater/app/js/Errors.js b/services/document-updater/app/js/Errors.js
new file mode 100644
index 0000000000..78afd925bf
--- /dev/null
+++ b/services/document-updater/app/js/Errors.js
@@ -0,0 +1,45 @@
+/* eslint-disable
+ no-proto,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+let Errors
+var NotFoundError = function (message) {
+ const error = new Error(message)
+ error.name = 'NotFoundError'
+ error.__proto__ = NotFoundError.prototype
+ return error
+}
+NotFoundError.prototype.__proto__ = Error.prototype
+
+var OpRangeNotAvailableError = function (message) {
+ const error = new Error(message)
+ error.name = 'OpRangeNotAvailableError'
+ error.__proto__ = OpRangeNotAvailableError.prototype
+ return error
+}
+OpRangeNotAvailableError.prototype.__proto__ = Error.prototype
+
+var ProjectStateChangedError = function (message) {
+ const error = new Error(message)
+ error.name = 'ProjectStateChangedError'
+ error.__proto__ = ProjectStateChangedError.prototype
+ return error
+}
+ProjectStateChangedError.prototype.__proto__ = Error.prototype
+
+var DeleteMismatchError = function (message) {
+ const error = new Error(message)
+ error.name = 'DeleteMismatchError'
+ error.__proto__ = DeleteMismatchError.prototype
+ return error
+}
+DeleteMismatchError.prototype.__proto__ = Error.prototype
+
+module.exports = Errors = {
+ NotFoundError,
+ OpRangeNotAvailableError,
+ ProjectStateChangedError,
+ DeleteMismatchError,
+}
diff --git a/services/document-updater/app/js/HistoryManager.js b/services/document-updater/app/js/HistoryManager.js
new file mode 100644
index 0000000000..107e81d979
--- /dev/null
+++ b/services/document-updater/app/js/HistoryManager.js
@@ -0,0 +1,263 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let HistoryManager
+const async = require('async')
+const logger = require('logger-sharelatex')
+const request = require('request')
+const Settings = require('@overleaf/settings')
+const HistoryRedisManager = require('./HistoryRedisManager')
+const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager')
+const RedisManager = require('./RedisManager')
+const metrics = require('./Metrics')
+
+module.exports = HistoryManager = {
+ flushDocChangesAsync(project_id, doc_id) {
+ if (
+ (Settings.apis != null ? Settings.apis.trackchanges : undefined) == null
+ ) {
+ logger.warn(
+ { doc_id },
+ 'track changes API is not configured, so not flushing'
+ )
+ return
+ }
+ return RedisManager.getHistoryType(
+ doc_id,
+ function (err, projectHistoryType) {
+ if (err != null) {
+ logger.warn({ err, doc_id }, 'error getting history type')
+ }
+ // if there's an error continue and flush to track-changes for safety
+ if (
+ Settings.disableDoubleFlush &&
+ projectHistoryType === 'project-history'
+ ) {
+ return logger.debug(
+ { doc_id, projectHistoryType },
+ 'skipping track-changes flush'
+ )
+ } else {
+ metrics.inc('history-flush', 1, { status: 'track-changes' })
+ const url = `${Settings.apis.trackchanges.url}/project/${project_id}/doc/${doc_id}/flush`
+ logger.log(
+ { project_id, doc_id, url, projectHistoryType },
+ 'flushing doc in track changes api'
+ )
+ return request.post(url, function (error, res, body) {
+ if (error != null) {
+ return logger.error(
+ { error, doc_id, project_id },
+ 'track changes doc to track changes api'
+ )
+ } else if (res.statusCode < 200 && res.statusCode >= 300) {
+ return logger.error(
+ { doc_id, project_id },
+ `track changes api returned a failure status code: ${res.statusCode}`
+ )
+ }
+ })
+ }
+ }
+ )
+ },
+
+ // flush changes in the background
+ flushProjectChangesAsync(project_id) {
+ if (
+ !__guard__(
+ Settings.apis != null ? Settings.apis.project_history : undefined,
+ x => x.enabled
+ )
+ ) {
+ return
+ }
+ return HistoryManager.flushProjectChanges(
+ project_id,
+ { background: true },
+ function () {}
+ )
+ },
+
+ // flush changes and callback (for when we need to know the queue is flushed)
+ flushProjectChanges(project_id, options, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ if (
+ !__guard__(
+ Settings.apis != null ? Settings.apis.project_history : undefined,
+ x => x.enabled
+ )
+ ) {
+ return callback()
+ }
+ if (options.skip_history_flush) {
+ logger.log({ project_id }, 'skipping flush of project history')
+ return callback()
+ }
+ metrics.inc('history-flush', 1, { status: 'project-history' })
+ const url = `${Settings.apis.project_history.url}/project/${project_id}/flush`
+ const qs = {}
+ if (options.background) {
+ qs.background = true
+ } // pass on the background flush option if present
+ logger.log({ project_id, url, qs }, 'flushing doc in project history api')
+ return request.post({ url, qs }, function (error, res, body) {
+ if (error != null) {
+ logger.error(
+ { error, project_id },
+ 'project history doc to track changes api'
+ )
+ return callback(error)
+ } else if (res.statusCode < 200 && res.statusCode >= 300) {
+ logger.error(
+ { project_id },
+ `project history api returned a failure status code: ${res.statusCode}`
+ )
+ return callback(error)
+ } else {
+ return callback()
+ }
+ })
+ },
+
+ FLUSH_DOC_EVERY_N_OPS: 100,
+ FLUSH_PROJECT_EVERY_N_OPS: 500,
+
  // Record newly applied ops for the history systems and trigger
  // threshold-based background flushes.
  //   doc_ops_length     - total ops queued for this doc (track-changes);
  //                        undefined means track-changes is not in use.
  //   project_ops_length - total ops queued for the project (project-history).
  // Flushes fire whenever the queue length crosses a multiple of the
  // relevant FLUSH_*_EVERY_N_OPS threshold.
  recordAndFlushHistoryOps(
    project_id,
    doc_id,
    ops,
    doc_ops_length,
    project_ops_length,
    callback
  ) {
    if (ops == null) {
      ops = []
    }
    if (callback == null) {
      callback = function (error) {}
    }
    if (ops.length === 0) {
      return callback()
    }

    // record updates for project history
    if (
      __guard__(
        Settings.apis != null ? Settings.apis.project_history : undefined,
        x => x.enabled
      )
    ) {
      if (
        HistoryManager.shouldFlushHistoryOps(
          project_ops_length,
          ops.length,
          HistoryManager.FLUSH_PROJECT_EVERY_N_OPS
        )
      ) {
        // Do this in the background since it uses HTTP and so may be too
        // slow to wait for when processing a doc update.
        logger.log(
          { project_ops_length, project_id },
          'flushing project history api'
        )
        HistoryManager.flushProjectChangesAsync(project_id)
      }
    }

    // if the doc_ops_length is undefined it means the project is not using track-changes
    // so we can bail out here
    if (typeof doc_ops_length === 'undefined') {
      logger.debug(
        { project_id, doc_id },
        'skipping flush to track-changes, only using project-history'
      )
      return callback()
    }

    // record updates for track-changes
    return HistoryRedisManager.recordDocHasHistoryOps(
      project_id,
      doc_id,
      ops,
      function (error) {
        if (error != null) {
          return callback(error)
        }
        if (
          HistoryManager.shouldFlushHistoryOps(
            doc_ops_length,
            ops.length,
            HistoryManager.FLUSH_DOC_EVERY_N_OPS
          )
        ) {
          // Do this in the background since it uses HTTP and so may be too
          // slow to wait for when processing a doc update.
          logger.log(
            { doc_ops_length, doc_id, project_id },
            'flushing track changes api'
          )
          HistoryManager.flushDocChangesAsync(project_id, doc_id)
        }
        return callback()
      }
    )
  },
+
+ shouldFlushHistoryOps(length, ops_length, threshold) {
+ if (!length) {
+ return false
+ } // don't flush unless we know the length
+ // We want to flush every 100 ops, i.e. 100, 200, 300, etc
+ // Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these
+ // ops. If we've changed, then we've gone over a multiple of 100 and should flush.
+ // (Most of the time, we will only hit 100 and then flushing will put us back to 0)
+ const previousLength = length - ops_length
+ const prevBlock = Math.floor(previousLength / threshold)
+ const newBlock = Math.floor(length / threshold)
+ return newBlock !== prevBlock
+ },
+
+ MAX_PARALLEL_REQUESTS: 4,
+
  // Resync a whole project with the project-history service: queue the
  // project structure (docs + files), then queue the full content of
  // each doc, at most MAX_PARALLEL_REQUESTS docs at a time.
  resyncProjectHistory(project_id, projectHistoryId, docs, files, callback) {
    return ProjectHistoryRedisManager.queueResyncProjectStructure(
      project_id,
      projectHistoryId,
      docs,
      files,
      function (error) {
        if (error != null) {
          return callback(error)
        }
        // Required lazily to avoid a circular dependency at load time.
        const DocumentManager = require('./DocumentManager')
        const resyncDoc = (doc, cb) =>
          DocumentManager.resyncDocContentsWithLock(project_id, doc.doc, cb)
        return async.eachLimit(
          docs,
          HistoryManager.MAX_PARALLEL_REQUESTS,
          resyncDoc,
          callback
        )
      }
    )
  },
+}
+
// decaffeinate's safe-navigation shim: apply `transform` only when
// `value` is neither undefined nor null, otherwise yield undefined
// (the moral equivalent of `value?.` in newer JavaScript).
function __guard__(value, transform) {
  return value != null ? transform(value) : undefined
}
diff --git a/services/document-updater/app/js/HistoryRedisManager.js b/services/document-updater/app/js/HistoryRedisManager.js
new file mode 100644
index 0000000000..1979b89013
--- /dev/null
+++ b/services/document-updater/app/js/HistoryRedisManager.js
@@ -0,0 +1,45 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let HistoryRedisManager
+const Settings = require('@overleaf/settings')
+const rclient = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.history
+)
+const Keys = Settings.redis.history.key_schema
+const logger = require('logger-sharelatex')
+
module.exports = HistoryRedisManager = {
  // Mark a doc as having pending history ops by adding it to the
  // project's docsWithHistoryOps redis set, so the track-changes
  // flusher knows to pick it up. `ops` itself is not stored here; it is
  // only checked to be non-empty.
  recordDocHasHistoryOps(project_id, doc_id, ops, callback) {
    if (ops == null) {
      ops = []
    }
    if (callback == null) {
      callback = function (error) {}
    }
    if (ops.length === 0) {
      return callback(new Error('cannot push no ops')) // This should never be called with no ops, but protect against a redis error if we sent an empty array to rpush
    }
    logger.log({ project_id, doc_id }, 'marking doc in project for history ops')
    return rclient.sadd(
      Keys.docsWithHistoryOps({ project_id }),
      doc_id,
      function (error) {
        if (error != null) {
          return callback(error)
        }
        return callback()
      }
    )
  },
}
diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js
new file mode 100644
index 0000000000..4ea7a00d4c
--- /dev/null
+++ b/services/document-updater/app/js/HttpController.js
@@ -0,0 +1,407 @@
+const DocumentManager = require('./DocumentManager')
+const HistoryManager = require('./HistoryManager')
+const ProjectManager = require('./ProjectManager')
+const RedisManager = require('./RedisManager')
+const Errors = require('./Errors')
+const logger = require('logger-sharelatex')
+const Settings = require('@overleaf/settings')
+const Metrics = require('./Metrics')
+const ProjectFlusher = require('./ProjectFlusher')
+const DeleteQueueManager = require('./DeleteQueueManager')
+const async = require('async')
+
// HTTP request handlers for the document-updater service; each named
// export is wired to an express route elsewhere.
module.exports = {
  getDoc,
  peekDoc,
  getProjectDocsAndFlushIfOld,
  clearProjectState,
  setDoc,
  flushDocIfLoaded,
  deleteDoc,
  flushProject,
  deleteProject,
  deleteMultipleProjects,
  acceptChanges,
  deleteComment,
  updateProject,
  resyncProjectHistory,
  flushAllProjects,
  flushQueuedProjects,
}
+
// GET a doc's lines, version, ranges and the ops since ?fromVersion.
// When fromVersion is omitted it defaults to -1, meaning "no ops".
// Responds 404 (via NotFoundError) if the doc cannot be found.
function getDoc(req, res, next) {
  let fromVersion
  const docId = req.params.doc_id
  const projectId = req.params.project_id
  logger.log({ projectId, docId }, 'getting doc via http')
  const timer = new Metrics.Timer('http.getDoc')

  if (req.query.fromVersion != null) {
    fromVersion = parseInt(req.query.fromVersion, 10)
  } else {
    fromVersion = -1
  }

  DocumentManager.getDocAndRecentOpsWithLock(
    projectId,
    docId,
    fromVersion,
    (error, lines, version, ops, ranges, pathname) => {
      timer.done()
      if (error) {
        return next(error)
      }
      logger.log({ projectId, docId }, 'got doc via http')
      if (lines == null || version == null) {
        return next(new Errors.NotFoundError('document not found'))
      }
      res.json({
        id: docId,
        lines,
        version,
        ops,
        ranges,
        pathname,
      })
    }
  )
}
+
+// return the doc from redis if present, but don't load it from mongo
+function peekDoc(req, res, next) {
+ const docId = req.params.doc_id
+ const projectId = req.params.project_id
+ logger.log({ projectId, docId }, 'peeking at doc via http')
+ RedisManager.getDoc(projectId, docId, function (error, lines, version) {
+ if (error) {
+ return next(error)
+ }
+ if (lines == null || version == null) {
+ return next(new Errors.NotFoundError('document not found'))
+ }
+ res.json({ id: docId, lines, version })
+ })
+}
+
// Total character count of a document: the length of every line plus
// one per line for the newline that joins/terminates it.
function _getTotalSizeOfLines(lines) {
  return lines.reduce((total, line) => total + line.length + 1, 0)
}
+
// GET all of a project's docs (flushing stale ones), excluding any the
// client already holds at a matching version. ?state is the client's
// project state hash; a mismatch yields 409 Conflict.
function getProjectDocsAndFlushIfOld(req, res, next) {
  const projectId = req.params.project_id
  const projectStateHash = req.query.state
  // exclude is string of existing docs "id:version,id:version,..."
  const excludeItems =
    req.query.exclude != null ? req.query.exclude.split(',') : []
  logger.log({ projectId, exclude: excludeItems }, 'getting docs via http')
  const timer = new Metrics.Timer('http.getAllDocs')
  const excludeVersions = {}
  for (const item of excludeItems) {
    const [id, version] = item.split(':')
    excludeVersions[id] = version
  }
  logger.log(
    { projectId, projectStateHash, excludeVersions },
    'excluding versions'
  )
  ProjectManager.getProjectDocsAndFlushIfOld(
    projectId,
    projectStateHash,
    excludeVersions,
    (error, result) => {
      timer.done()
      if (error instanceof Errors.ProjectStateChangedError) {
        res.sendStatus(409) // conflict
      } else if (error) {
        next(error)
      } else {
        logger.log(
          {
            projectId,
            result: result.map(doc => `${doc._id}:${doc.v}`),
          },
          'got docs via http'
        )
        res.send(result)
      }
    }
  )
}
+
// Drop the cached project state hash so the next docs fetch rebuilds it
// from scratch.
function clearProjectState(req, res, next) {
  const projectId = req.params.project_id
  const timer = new Metrics.Timer('http.clearProjectState')
  logger.log({ projectId }, 'clearing project state via http')
  ProjectManager.clearProjectState(projectId, error => {
    timer.done()
    if (error) {
      return next(error)
    }
    res.sendStatus(200)
  })
}
+
// POST new content for a doc. Rejects with 406 when the total character
// count exceeds Settings.max_doc_length; otherwise delegates to
// DocumentManager.setDocWithLock and responds 204 on success.
function setDoc(req, res, next) {
  const docId = req.params.doc_id
  const projectId = req.params.project_id
  const { lines, source, user_id: userId, undoing } = req.body
  const lineSize = _getTotalSizeOfLines(lines)
  if (lineSize > Settings.max_doc_length) {
    logger.log(
      { projectId, docId, source, lineSize, userId },
      'document too large, returning 406 response'
    )
    return res.sendStatus(406)
  }
  logger.log(
    { projectId, docId, lines, source, userId, undoing },
    'setting doc via http'
  )
  const timer = new Metrics.Timer('http.setDoc')
  DocumentManager.setDocWithLock(
    projectId,
    docId,
    lines,
    source,
    userId,
    undoing,
    error => {
      timer.done()
      if (error) {
        return next(error)
      }
      logger.log({ projectId, docId }, 'set doc via http')
      res.sendStatus(204) // No Content
    }
  )
}
+
// POST: flush one doc to the persistence layer if it is loaded in
// redis; 204 on success (including the doc-not-loaded no-op case).
function flushDocIfLoaded(req, res, next) {
  const docId = req.params.doc_id
  const projectId = req.params.project_id
  logger.log({ projectId, docId }, 'flushing doc via http')
  const timer = new Metrics.Timer('http.flushDoc')
  DocumentManager.flushDocIfLoadedWithLock(projectId, docId, error => {
    timer.done()
    if (error) {
      return next(error)
    }
    logger.log({ projectId, docId }, 'flushed doc via http')
    res.sendStatus(204) // No Content
  })
}
+
// DELETE a doc: flush it and remove it from redis.
// ?ignore_flush_errors=true proceeds with the delete even if the flush
// fails. Project history is always flushed afterwards, success or not.
function deleteDoc(req, res, next) {
  const docId = req.params.doc_id
  const projectId = req.params.project_id
  const ignoreFlushErrors = req.query.ignore_flush_errors === 'true'
  const timer = new Metrics.Timer('http.deleteDoc')
  logger.log({ projectId, docId }, 'deleting doc via http')
  DocumentManager.flushAndDeleteDocWithLock(
    projectId,
    docId,
    { ignoreFlushErrors },
    error => {
      timer.done()
      // There is no harm in flushing project history if the previous call
      // failed and sometimes it is required
      HistoryManager.flushProjectChangesAsync(projectId)

      if (error) {
        return next(error)
      }
      logger.log({ projectId, docId }, 'deleted doc via http')
      res.sendStatus(204) // No Content
    }
  )
}
+
function flushProject(req, res, next) {
  // Flush every loaded doc in a project to the web API.
  const projectId = req.params.project_id
  logger.log({ projectId }, 'flushing project via http')
  const timer = new Metrics.Timer('http.flushProject')
  ProjectManager.flushProjectWithLocks(projectId, error => {
    timer.done()
    if (error) {
      next(error)
    } else {
      logger.log({ projectId }, 'flushed project via http')
      res.sendStatus(204) // No Content
    }
  })
}
+
function deleteProject(req, res, next) {
  // Flush a whole project and remove it from redis. With ?background=true
  // the work is queued instead of done inline; ?shutdown=true skips the
  // history flush (used when real-time shuts down).
  const projectId = req.params.project_id
  logger.log({ projectId }, 'deleting project via http')
  const background = Boolean(req.query.background) // allow non-urgent flushes to be queued
  const options = {}
  if (background) {
    options.background = true
  }
  if (req.query.shutdown) {
    options.skip_history_flush = true // don't flush history when realtime shuts down
  }
  if (background) {
    ProjectManager.queueFlushAndDeleteProject(projectId, error => {
      if (error) {
        return next(error)
      }
      logger.log({ projectId }, 'queue delete of project via http')
      res.sendStatus(204) // No Content
    })
  } else {
    const timer = new Metrics.Timer('http.deleteProject')
    ProjectManager.flushAndDeleteProjectWithLocks(projectId, options, error => {
      timer.done()
      if (error) {
        return next(error)
      }
      logger.log({ projectId }, 'deleted project via http')
      res.sendStatus(204) // No Content
    })
  }
}
+
function deleteMultipleProjects(req, res, next) {
  // Queue a flush-and-delete for each project id in the request body, one
  // at a time, replying once every queue insert has been accepted.
  const projectIds = req.body.project_ids || []
  logger.log({ projectIds }, 'deleting multiple projects via http')
  const queueOne = (projectId, cb) => {
    logger.log({ projectId }, 'queue delete of project via http')
    ProjectManager.queueFlushAndDeleteProject(projectId, cb)
  }
  async.eachSeries(projectIds, queueOne, error => {
    if (error) {
      return next(error)
    }
    res.sendStatus(204) // No Content
  })
}
+
function acceptChanges(req, res, next) {
  // Accept one or more tracked changes on a doc. Change ids come from the
  // request body, falling back to the single :change_id URL parameter.
  const { project_id: projectId, doc_id: docId } = req.params
  const changeIds =
    req.body.change_ids == null ? [req.params.change_id] : req.body.change_ids
  logger.log(
    { projectId, docId },
    `accepting ${changeIds.length} changes via http`
  )
  const timer = new Metrics.Timer('http.acceptChanges')
  DocumentManager.acceptChangesWithLock(projectId, docId, changeIds, error => {
    timer.done()
    if (error) {
      return next(error)
    }
    logger.log(
      { projectId, docId },
      `accepted ${changeIds.length} changes via http`
    )
    res.sendStatus(204) // No Content
  })
}
+
function deleteComment(req, res, next) {
  // Remove a comment (range annotation) from a doc.
  const {
    project_id: projectId,
    doc_id: docId,
    comment_id: commentId,
  } = req.params
  logger.log({ projectId, docId, commentId }, 'deleting comment via http')
  const timer = new Metrics.Timer('http.deleteComment')
  const done = error => {
    timer.done()
    if (error) {
      return next(error)
    }
    logger.log({ projectId, docId, commentId }, 'deleted comment via http')
    res.sendStatus(204) // No Content
  }
  DocumentManager.deleteCommentWithLock(projectId, docId, commentId, done)
}
+
function updateProject(req, res, next) {
  // Apply a batch of project structure updates (renames/adds/deletes) under
  // the project lock.
  const timer = new Metrics.Timer('http.updateProject')
  const projectId = req.params.project_id
  const { projectHistoryId, userId, updates = [], version } = req.body
  logger.log({ projectId, updates, version }, 'updating project via http')
  const onUpdated = error => {
    timer.done()
    if (error) {
      return next(error)
    }
    logger.log({ projectId }, 'updated project via http')
    res.sendStatus(204) // No Content
  }
  ProjectManager.updateProjectWithLocks(
    projectId,
    projectHistoryId,
    userId,
    updates,
    version,
    onUpdated
  )
}
+
function resyncProjectHistory(req, res, next) {
  // Queue a full project history resync (project structure and doc contents).
  const projectId = req.params.project_id
  const { projectHistoryId, docs, files } = req.body

  logger.log(
    { projectId, docs, files },
    'queuing project history resync via http'
  )
  const onQueued = error => {
    if (error) {
      return next(error)
    }
    logger.log({ projectId }, 'queued project history resync via http')
    res.sendStatus(204)
  }
  HistoryManager.resyncProjectHistory(
    projectId,
    projectHistoryId,
    docs,
    files,
    onQueued
  )
}
+
function flushAllProjects(req, res, next) {
  // Flush every project currently loaded in redis to the web API. This can
  // take a while, so the response timeout is raised to 5 minutes.
  res.setTimeout(5 * 60 * 1000)
  // Express query params arrive as strings: parse the numeric ones
  // explicitly, and only treat a truthy, non-'false' dryRun as a dry run.
  // Previously `req.query.dryRun || false` made ?dryRun=false enable
  // dry-run mode, because the string 'false' is truthy.
  const options = {
    limit: parseInt(req.query.limit, 10) || 1000,
    concurrency: parseInt(req.query.concurrency, 10) || 5,
    dryRun: Boolean(req.query.dryRun) && req.query.dryRun !== 'false',
  }
  ProjectFlusher.flushAllProjects(options, (err, projectIds) => {
    if (err) {
      logger.err({ err }, 'error bulk flushing projects')
      res.sendStatus(500)
    } else {
      // with dryRun this is the list of projects that WOULD be flushed
      res.send(projectIds)
    }
  })
}
+
function flushQueuedProjects(req, res, next) {
  // Flush and delete projects that were queued for deletion (e.g. via
  // deleteProject?background=true). Long-running: 10 minute timeout.
  res.setTimeout(10 * 60 * 1000)
  // Express query params arrive as strings — parse them so the downstream
  // numeric comparisons (age/limit checks) don't operate on strings.
  const options = {
    limit: parseInt(req.query.limit, 10) || 1000,
    timeout: 5 * 60 * 1000,
    min_delete_age: parseInt(req.query.min_delete_age, 10) || 5 * 60 * 1000,
  }
  DeleteQueueManager.flushAndDeleteOldProjects(options, (err, flushed) => {
    if (err) {
      logger.err({ err }, 'error flushing old projects')
      res.sendStatus(500)
    } else {
      logger.log({ flushed }, 'flush of queued projects completed')
      res.send({ flushed })
    }
  })
}
diff --git a/services/document-updater/app/js/LockManager.js b/services/document-updater/app/js/LockManager.js
new file mode 100644
index 0000000000..70447bad3f
--- /dev/null
+++ b/services/document-updater/app/js/LockManager.js
@@ -0,0 +1,177 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-return-assign,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let LockManager
+const metrics = require('./Metrics')
+const Settings = require('@overleaf/settings')
+const redis = require('@overleaf/redis-wrapper')
+const rclient = redis.createClient(Settings.redis.lock)
+const keys = Settings.redis.lock.key_schema
+const logger = require('logger-sharelatex')
+const os = require('os')
+const crypto = require('crypto')
+
+const Profiler = require('./Profiler')
+
+const HOST = os.hostname()
+const PID = process.pid
+const RND = crypto.randomBytes(4).toString('hex')
+let COUNT = 0
+
+const MAX_REDIS_REQUEST_LENGTH = 5000 // 5 seconds
+
// Distributed per-doc lock backed by redis, following the single-instance
// Redlock recipe: the lock key stores a unique value so it can only be
// released by the process that took it.
module.exports = LockManager = {
  LOCK_TEST_INTERVAL: 50, // 50ms between each test of the lock
  MAX_TEST_INTERVAL: 1000, // back off to 1s between each test of the lock
  MAX_LOCK_WAIT_TIME: 10000, // 10s maximum time to spend trying to get the lock
  LOCK_TTL: 30, // seconds. Time until lock auto expires in redis.

  // Use a signed lock value as described in
  // http://redis.io/topics/distlock#correct-implementation-with-a-single-instance
  // to prevent accidental unlocking by multiple processes
  randomLock() {
    const time = Date.now()
    return `locked:host=${HOST}:pid=${PID}:random=${RND}:time=${time}:count=${COUNT++}`
  },

  // Lua script: delete the lock key only if it still holds our lock value,
  // so we never release a lock that expired and was re-acquired elsewhere.
  unlockScript:
    'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end',

  // Try to take the lock once via SET NX with a TTL.
  // Calls back with (err, gotLock, lockValue); lockValue is only provided on
  // success and must be passed to releaseLock later.
  tryLock(doc_id, callback) {
    if (callback == null) {
      callback = function (err, isFree) {}
    }
    const lockValue = LockManager.randomLock()
    const key = keys.blockingKey({ doc_id })
    const profile = new Profiler('tryLock', { doc_id, key, lockValue })
    return rclient.set(
      key,
      lockValue,
      'EX',
      this.LOCK_TTL,
      'NX',
      function (err, gotLock) {
        if (err != null) {
          return callback(err)
        }
        if (gotLock === 'OK') {
          metrics.inc('doc-not-blocking')
          const timeTaken = profile.log('got lock').end()
          // If acquiring the lock took a large fraction of its TTL, give it
          // back rather than risk working with a lock about to expire.
          if (timeTaken > MAX_REDIS_REQUEST_LENGTH) {
            // took too long, so try to free the lock
            return LockManager.releaseLock(
              doc_id,
              lockValue,
              function (err, result) {
                if (err != null) {
                  return callback(err)
                } // error freeing lock
                return callback(null, false)
              }
            ) // tell caller they didn't get the lock
          } else {
            return callback(null, true, lockValue)
          }
        } else {
          metrics.inc('doc-blocking')
          profile.log('doc is locked').end()
          return callback(null, false)
        }
      }
    )
  },

  // Take the lock, retrying with exponential backoff (LOCK_TEST_INTERVAL
  // doubling up to MAX_TEST_INTERVAL) until MAX_LOCK_WAIT_TIME has elapsed,
  // at which point a Timeout error (with err.doc_id attached) is returned.
  // Calls back with (error, lockValue).
  getLock(doc_id, callback) {
    let attempt
    if (callback == null) {
      callback = function (error, lockValue) {}
    }
    const startTime = Date.now()
    let testInterval = LockManager.LOCK_TEST_INTERVAL
    const profile = new Profiler('getLock', { doc_id })
    return (attempt = function () {
      if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) {
        const e = new Error('Timeout')
        e.doc_id = doc_id
        profile.log('timeout').end()
        return callback(e)
      }

      return LockManager.tryLock(doc_id, function (error, gotLock, lockValue) {
        if (error != null) {
          return callback(error)
        }
        profile.log('tryLock')
        if (gotLock) {
          profile.end()
          return callback(null, lockValue)
        } else {
          setTimeout(attempt, testInterval)
          // back off when the lock is taken to avoid overloading
          return (testInterval = Math.min(
            testInterval * 2,
            LockManager.MAX_TEST_INTERVAL
          ))
        }
      })
    })()
  },

  // Check whether the lock is currently free without taking it.
  // Calls back with (err, isFree).
  checkLock(doc_id, callback) {
    if (callback == null) {
      callback = function (err, isFree) {}
    }
    const key = keys.blockingKey({ doc_id })
    return rclient.exists(key, function (err, exists) {
      if (err != null) {
        return callback(err)
      }
      exists = parseInt(exists)
      if (exists === 1) {
        metrics.inc('doc-blocking')
        return callback(null, false)
      } else {
        metrics.inc('doc-not-blocking')
        return callback(null, true)
      }
    })
  },

  // Release a lock previously obtained via tryLock/getLock. The lockValue
  // must match what is stored in redis; if it does not (the lock timed out
  // and may have been re-taken by another process) an error is returned.
  releaseLock(doc_id, lockValue, callback) {
    const key = keys.blockingKey({ doc_id })
    const profile = new Profiler('releaseLock', { doc_id, key, lockValue })
    return rclient.eval(
      LockManager.unlockScript,
      1,
      key,
      lockValue,
      function (err, result) {
        if (err != null) {
          return callback(err)
        } else if (result != null && result !== 1) {
          // successful unlock should release exactly one key
          profile.log('unlockScript:expired-lock').end()
          // NOTE(review): `err` is always null on this branch, so the
          // `redis_err` field in this log line never carries information.
          logger.error(
            { doc_id, key, lockValue, redis_err: err, redis_result: result },
            'unlocking error'
          )
          metrics.inc('unlock-error')
          return callback(new Error('tried to release timed out lock'))
        } else {
          profile.log('unlockScript:ok').end()
          return callback(null, result)
        }
      }
    )
  },
}
diff --git a/services/document-updater/app/js/LoggerSerializers.js b/services/document-updater/app/js/LoggerSerializers.js
new file mode 100644
index 0000000000..ac9605e757
--- /dev/null
+++ b/services/document-updater/app/js/LoggerSerializers.js
@@ -0,0 +1,67 @@
+/* eslint-disable
+ no-return-assign,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const _ = require('lodash')
+
// Log serializer: swap a string/array for its length so full doc contents
// never reach the logs. Values without a truthy .length (null, undefined,
// empty strings/arrays, numbers, ...) are passed through unchanged.
const showLength = function (thing) {
  if (thing == null) {
    return thing
  }
  return thing.length ? thing.length : thing
}
+
// Log serializer for OT updates: replaces the payload of each insert (i),
// delete (d) and comment (c) op with its length, so update text never ends
// up in the logs. Updates without an op array pass through untouched.
// (decaffeinate DS103 cleanup: the __guard__ helper calls are replaced with
// equivalent explicit null checks.)
const showUpdateLength = function (update) {
  if (update == null || !(update.op instanceof Array)) {
    return update
  }
  const copy = _.cloneDeep(update)
  copy.op.forEach(function (element, index) {
    // Each check fires only when the field is present with a non-null
    // .length, matching the original __guard__ semantics exactly.
    if (element != null && element.i != null && element.i.length != null) {
      copy.op[index].i = element.i.length
    }
    if (element != null && element.d != null && element.d.length != null) {
      copy.op[index].d = element.d.length
    }
    if (element != null && element.c != null && element.c.length != null) {
      copy.op[index].c = element.c.length
    }
  })
  return copy
}
+
// Serializers handed to the logger so that bulky doc contents are logged
// as lengths rather than as full text.
module.exports = {
  // replace long values with their length
  lines: showLength,
  oldLines: showLength,
  newLines: showLength,
  docLines: showLength,
  newDocLines: showLength,
  ranges: showLength,
  // updates get per-op summarisation (insert/delete/comment text -> length)
  update: showUpdateLength,
}
+
// Decaffeinate helper: apply `transform` to `value` unless it is null or
// undefined, in which case return undefined (a safe-navigation shim).
function __guard__(value, transform) {
  if (value === undefined || value === null) {
    return undefined
  }
  return transform(value)
}
diff --git a/services/document-updater/app/js/Metrics.js b/services/document-updater/app/js/Metrics.js
new file mode 100644
index 0000000000..f0e57794fd
--- /dev/null
+++ b/services/document-updater/app/js/Metrics.js
@@ -0,0 +1,3 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+module.exports = require('@overleaf/metrics')
diff --git a/services/document-updater/app/js/PersistenceManager.js b/services/document-updater/app/js/PersistenceManager.js
new file mode 100644
index 0000000000..d7df831683
--- /dev/null
+++ b/services/document-updater/app/js/PersistenceManager.js
@@ -0,0 +1,200 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unsafe-negation,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let PersistenceManager
+const Settings = require('@overleaf/settings')
+const Errors = require('./Errors')
+const Metrics = require('./Metrics')
+const logger = require('logger-sharelatex')
+const request = require('requestretry').defaults({
+ maxAttempts: 2,
+ retryDelay: 10,
+})
+
+// We have to be quick with HTTP calls because we're holding a lock that
+// expires after 30 seconds. We can't let any errors in the rest of the stack
+// hold us up, and need to bail out quickly if there is a problem.
+const MAX_HTTP_REQUEST_LENGTH = 5000 // 5 seconds
+
// Record request metrics for a web API call, with special handling for
// connection timeouts (https://github.com/request/request#timeouts) and for
// requestretry's `.attempts` counters on the error/response objects.
// (decaffeinate DS205 cleanup: the status IIFE is replaced with a plain
// if/else chain, and implicit returns are dropped.)
const updateMetric = function (method, error, response) {
  // find the status, with special handling for connection timeouts
  let status
  if (error != null && error.connect === true) {
    status = `${error.code} (connect)`
  } else if (error != null) {
    status = error.code
  } else if (response != null) {
    status = response.statusCode
  }
  Metrics.inc(method, 1, { status })
  // requestretry sets .attempts > 1 when it had to retry
  if (error != null && error.attempts > 1) {
    Metrics.inc(`${method}-retries`, 1, { status: 'error' })
  }
  if (response != null && response.attempts > 1) {
    Metrics.inc(`${method}-retries`, 1, { status: 'success' })
  }
}
+
+module.exports = PersistenceManager = {
+ getDoc(project_id, doc_id, _callback) {
+ if (_callback == null) {
+ _callback = function (
+ error,
+ lines,
+ version,
+ ranges,
+ pathname,
+ projectHistoryId,
+ projectHistoryType
+ ) {}
+ }
+ const timer = new Metrics.Timer('persistenceManager.getDoc')
+ const callback = function (...args) {
+ timer.done()
+ return _callback(...Array.from(args || []))
+ }
+
+ const urlPath = `/project/${project_id}/doc/${doc_id}`
+ return request(
+ {
+ url: `${Settings.apis.web.url}${urlPath}`,
+ method: 'GET',
+ headers: {
+ accept: 'application/json',
+ },
+ auth: {
+ user: Settings.apis.web.user,
+ pass: Settings.apis.web.pass,
+ sendImmediately: true,
+ },
+ jar: false,
+ timeout: MAX_HTTP_REQUEST_LENGTH,
+ },
+ function (error, res, body) {
+ updateMetric('getDoc', error, res)
+ if (error != null) {
+ logger.error(
+ { err: error, project_id, doc_id },
+ 'web API request failed'
+ )
+ return callback(new Error('error connecting to web API'))
+ }
+ if (res.statusCode >= 200 && res.statusCode < 300) {
+ try {
+ body = JSON.parse(body)
+ } catch (e) {
+ return callback(e)
+ }
+ if (body.lines == null) {
+ return callback(new Error('web API response had no doc lines'))
+ }
+ if (body.version == null || !body.version instanceof Number) {
+ return callback(
+ new Error('web API response had no valid doc version')
+ )
+ }
+ if (body.pathname == null) {
+ return callback(
+ new Error('web API response had no valid doc pathname')
+ )
+ }
+ return callback(
+ null,
+ body.lines,
+ body.version,
+ body.ranges,
+ body.pathname,
+ body.projectHistoryId,
+ body.projectHistoryType
+ )
+ } else if (res.statusCode === 404) {
+ return callback(
+ new Errors.NotFoundError(`doc not not found: ${urlPath}`)
+ )
+ } else {
+ return callback(
+ new Error(`error accessing web API: ${urlPath} ${res.statusCode}`)
+ )
+ }
+ }
+ )
+ },
+
+ setDoc(
+ project_id,
+ doc_id,
+ lines,
+ version,
+ ranges,
+ lastUpdatedAt,
+ lastUpdatedBy,
+ _callback
+ ) {
+ if (_callback == null) {
+ _callback = function (error) {}
+ }
+ const timer = new Metrics.Timer('persistenceManager.setDoc')
+ const callback = function (...args) {
+ timer.done()
+ return _callback(...Array.from(args || []))
+ }
+
+ const urlPath = `/project/${project_id}/doc/${doc_id}`
+ return request(
+ {
+ url: `${Settings.apis.web.url}${urlPath}`,
+ method: 'POST',
+ json: {
+ lines,
+ ranges,
+ version,
+ lastUpdatedBy,
+ lastUpdatedAt,
+ },
+ auth: {
+ user: Settings.apis.web.user,
+ pass: Settings.apis.web.pass,
+ sendImmediately: true,
+ },
+ jar: false,
+ timeout: MAX_HTTP_REQUEST_LENGTH,
+ },
+ function (error, res, body) {
+ updateMetric('setDoc', error, res)
+ if (error != null) {
+ logger.error(
+ { err: error, project_id, doc_id },
+ 'web API request failed'
+ )
+ return callback(new Error('error connecting to web API'))
+ }
+ if (res.statusCode >= 200 && res.statusCode < 300) {
+ return callback(null)
+ } else if (res.statusCode === 404) {
+ return callback(
+ new Errors.NotFoundError(`doc not not found: ${urlPath}`)
+ )
+ } else {
+ return callback(
+ new Error(`error accessing web API: ${urlPath} ${res.statusCode}`)
+ )
+ }
+ }
+ )
+ },
+}
diff --git a/services/document-updater/app/js/Profiler.js b/services/document-updater/app/js/Profiler.js
new file mode 100644
index 0000000000..6f7a66a7aa
--- /dev/null
+++ b/services/document-updater/app/js/Profiler.js
@@ -0,0 +1,62 @@
+/* eslint-disable
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let Profiler
+const Settings = require('@overleaf/settings')
+const logger = require('logger-sharelatex')
+
// Difference between two process.hrtime() tuples ([seconds, nanoseconds]),
// returned as whole milliseconds (floored).
const deltaMs = function (ta, tb) {
  const [aSec, aNs] = ta
  const [bSec, bNs] = tb
  return Math.floor(((aSec - bSec) * 1e9 + (aNs - bNs)) * 1e-6)
}
+
+module.exports = Profiler = (function () {
+ Profiler = class Profiler {
+ static initClass() {
+ this.prototype.LOG_CUTOFF_TIME = 1000
+ }
+
+ constructor(name, args) {
+ this.name = name
+ this.args = args
+ this.t0 = this.t = process.hrtime()
+ this.start = new Date()
+ this.updateTimes = []
+ }
+
+ log(label) {
+ const t1 = process.hrtime()
+ const dtMilliSec = deltaMs(t1, this.t)
+ this.t = t1
+ this.updateTimes.push([label, dtMilliSec]) // timings in ms
+ return this // make it chainable
+ }
+
+ end(message) {
+ const totalTime = deltaMs(this.t, this.t0)
+ if (totalTime > this.LOG_CUTOFF_TIME) {
+ // log anything greater than cutoff
+ const args = {}
+ for (const k in this.args) {
+ const v = this.args[k]
+ args[k] = v
+ }
+ args.updateTimes = this.updateTimes
+ args.start = this.start
+ args.end = new Date()
+ logger.log(args, this.name)
+ }
+ return totalTime
+ }
+ }
+ Profiler.initClass()
+ return Profiler
+})()
diff --git a/services/document-updater/app/js/ProjectFlusher.js b/services/document-updater/app/js/ProjectFlusher.js
new file mode 100644
index 0000000000..12f885eb3c
--- /dev/null
+++ b/services/document-updater/app/js/ProjectFlusher.js
@@ -0,0 +1,135 @@
+/* eslint-disable
+ camelcase,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const request = require('request')
+const Settings = require('@overleaf/settings')
+const RedisManager = require('./RedisManager')
+const { rclient } = RedisManager
+const docUpdaterKeys = Settings.redis.documentupdater.key_schema
+const async = require('async')
+const ProjectManager = require('./ProjectManager')
+const _ = require('lodash')
+const logger = require('logger-sharelatex')
+
// Bulk-flush helper: scans redis for loaded projects and flushes (and
// deletes) them via ProjectManager. Used by the /flush_all_projects route.
var ProjectFlusher = {
  // iterate over keys asynchronously using redis scan (non-blocking)
  // handle all the cluster nodes or single redis server
  _getKeys(pattern, limit, callback) {
    const nodes = (typeof rclient.nodes === 'function'
      ? rclient.nodes('master')
      : undefined) || [rclient]
    const doKeyLookupForNode = (node, cb) =>
      ProjectFlusher._getKeysFromNode(node, pattern, limit, cb)
    return async.concatSeries(nodes, doKeyLookupForNode, callback)
  },

  // SCAN a single redis node for keys matching `pattern`, collecting up to
  // `limit` unique keys (default 1000) in batches, with a 10ms pause
  // between batches so we don't hammer redis.
  _getKeysFromNode(node, pattern, limit, callback) {
    if (limit == null) {
      limit = 1000
    }
    let cursor = 0 // redis iterator
    const keySet = {} // use hash to avoid duplicate results
    const batchSize = limit != null ? Math.min(limit, 1000) : 1000
    // scan over all keys looking for pattern
    var doIteration = (
      cb // avoid hitting redis too hard
    ) =>
      node.scan(
        cursor,
        'MATCH',
        pattern,
        'COUNT',
        batchSize,
        function (error, reply) {
          let keys
          if (error != null) {
            return callback(error)
          }
          ;[cursor, keys] = Array.from(reply)
          for (const key of Array.from(keys)) {
            keySet[key] = true
          }
          keys = Object.keys(keySet)
          const noResults = cursor === '0' // redis returns string results not numeric
          const limitReached = limit != null && keys.length >= limit
          if (noResults || limitReached) {
            return callback(null, keys)
          } else {
            return setTimeout(doIteration, 10)
          }
        }
      )
    return doIteration()
  },

  // extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b
  // or docsInProject:{57fd0b1f53a8396d22b2c24b} (for redis cluster)
  // NOTE(review): assumes every key contains a 24-char hex object id — a
  // non-matching key would make `m[1]` throw. Verify against the key schema.
  _extractIds(keyList) {
    const ids = (() => {
      const result = []
      for (const key of Array.from(keyList)) {
        const m = key.match(/:\{?([0-9a-f]{24})\}?/) // extract object id
        result.push(m[1])
      }
      return result
    })()
    return ids
  },

  // Flush (and delete from redis) every project found by the key scan.
  // options: { limit, concurrency, dryRun }. With dryRun the matching
  // project ids are returned without flushing anything. Calls back with
  // { success, failure } lists of project ids (reflectAll ensures all jobs
  // run even when some fail).
  flushAllProjects(options, callback) {
    logger.log({ options }, 'flushing all projects')
    return ProjectFlusher._getKeys(
      docUpdaterKeys.docsInProject({ project_id: '*' }),
      options.limit,
      function (error, project_keys) {
        if (error != null) {
          logger.err({ err: error }, 'error getting keys for flushing')
          return callback(error)
        }
        const project_ids = ProjectFlusher._extractIds(project_keys)
        if (options.dryRun) {
          return callback(null, project_ids)
        }
        const jobs = _.map(
          project_ids,
          project_id => cb =>
            ProjectManager.flushAndDeleteProjectWithLocks(
              project_id,
              { background: true },
              cb
            )
        )
        return async.parallelLimit(
          async.reflectAll(jobs),
          options.concurrency,
          function (error, results) {
            const success = []
            const failure = []
            _.each(results, function (result, i) {
              if (result.error != null) {
                return failure.push(project_ids[i])
              } else {
                return success.push(project_ids[i])
              }
            })
            logger.log({ success, failure }, 'finished flushing all projects')
            return callback(error, { success, failure })
          }
        )
      }
    )
  },
}

module.exports = ProjectFlusher
diff --git a/services/document-updater/app/js/ProjectHistoryRedisManager.js b/services/document-updater/app/js/ProjectHistoryRedisManager.js
new file mode 100644
index 0000000000..45e98238f4
--- /dev/null
+++ b/services/document-updater/app/js/ProjectHistoryRedisManager.js
@@ -0,0 +1,176 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS201: Simplify complex destructure assignments
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let ProjectHistoryRedisManager
+const Settings = require('@overleaf/settings')
+const projectHistoryKeys = __guard__(
+ Settings.redis != null ? Settings.redis.project_history : undefined,
+ x => x.key_schema
+)
+const rclient = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.project_history
+)
+const logger = require('logger-sharelatex')
+const metrics = require('./Metrics')
+
// Serialises project-history updates and pushes them onto the per-project
// redis queue consumed by the project-history service.
module.exports = ProjectHistoryRedisManager = {
  // Push serialised update(s) onto the project history queue.
  // Variadic: queueOps(project_id, op1, ..., opN[, callback]) — the last
  // argument is treated as the callback when present. Calls back with the
  // queue length after the push.
  queueOps(project_id, ...rest) {
    // Record metric for ops pushed onto queue
    const adjustedLength = Math.max(rest.length, 1)
    const ops = rest.slice(0, adjustedLength - 1)
    const val = rest[adjustedLength - 1]
    const callback = val != null ? val : function (error, projectUpdateCount) {}
    for (const op of Array.from(ops)) {
      metrics.summary('redis.projectHistoryOps', op.length, { status: 'push' })
    }
    const multi = rclient.multi()
    // Push the ops onto the project history queue
    multi.rpush(
      projectHistoryKeys.projectHistoryOps({ project_id }),
      ...Array.from(ops)
    )
    // To record the age of the oldest op on the queue set a timestamp if not
    // already present (SETNX).
    multi.setnx(
      projectHistoryKeys.projectHistoryFirstOpTimestamp({ project_id }),
      Date.now()
    )
    return multi.exec(function (error, result) {
      if (error != null) {
        return callback(error)
      }
      // return the number of entries pushed onto the project history queue
      return callback(null, result[0])
    })
  },

  // Queue a rename of a doc or file. entity_type ('doc'/'file') becomes the
  // key under which entity_id is stored on the update.
  queueRenameEntity(
    project_id,
    projectHistoryId,
    entity_type,
    entity_id,
    user_id,
    projectUpdate,
    callback
  ) {
    projectUpdate = {
      pathname: projectUpdate.pathname,
      new_pathname: projectUpdate.newPathname,
      meta: {
        user_id,
        ts: new Date(),
      },
      version: projectUpdate.version,
      projectHistoryId,
    }
    projectUpdate[entity_type] = entity_id

    logger.log(
      { project_id, projectUpdate },
      'queue rename operation to project-history'
    )
    const jsonUpdate = JSON.stringify(projectUpdate)

    return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback)
  },

  // Queue the addition of a doc or file to the project.
  queueAddEntity(
    project_id,
    projectHistoryId,
    entity_type,
    entitiy_id,
    user_id,
    projectUpdate,
    callback
  ) {
    if (callback == null) {
      callback = function (error) {}
    }
    projectUpdate = {
      pathname: projectUpdate.pathname,
      docLines: projectUpdate.docLines,
      url: projectUpdate.url,
      meta: {
        user_id,
        ts: new Date(),
      },
      version: projectUpdate.version,
      projectHistoryId,
    }
    projectUpdate[entity_type] = entitiy_id

    logger.log(
      { project_id, projectUpdate },
      'queue add operation to project-history'
    )
    const jsonUpdate = JSON.stringify(projectUpdate)

    return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback)
  },

  // Queue a full project structure resync (the docs/files listings).
  queueResyncProjectStructure(
    project_id,
    projectHistoryId,
    docs,
    files,
    callback
  ) {
    logger.log({ project_id, docs, files }, 'queue project structure resync')
    const projectUpdate = {
      resyncProjectStructure: { docs, files },
      projectHistoryId,
      meta: {
        ts: new Date(),
      },
    }
    const jsonUpdate = JSON.stringify(projectUpdate)
    return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback)
  },

  // Queue a content resync for a single doc (lines joined into one string).
  queueResyncDocContent(
    project_id,
    projectHistoryId,
    doc_id,
    lines,
    version,
    pathname,
    callback
  ) {
    logger.log(
      { project_id, doc_id, lines, version, pathname },
      'queue doc content resync'
    )
    const projectUpdate = {
      resyncDocContent: {
        content: lines.join('\n'),
        version,
      },
      projectHistoryId,
      path: pathname,
      doc: doc_id,
      meta: {
        ts: new Date(),
      },
    }
    const jsonUpdate = JSON.stringify(projectUpdate)
    return ProjectHistoryRedisManager.queueOps(project_id, jsonUpdate, callback)
  },
}
+
// Decaffeinate helper: safe-navigation shim. Returns transform(value) when
// value is neither null nor undefined, otherwise undefined.
function __guard__(value, transform) {
  return value == null ? undefined : transform(value)
}
diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js
new file mode 100644
index 0000000000..20f79f3d08
--- /dev/null
+++ b/services/document-updater/app/js/ProjectManager.js
@@ -0,0 +1,301 @@
// ProjectManager coordinates project-wide operations (flushing, deletion,
// doc-timestamp queries and project-structure updates) across the docs that
// document-updater holds in redis. All exported functions take error-first
// callbacks.
const RedisManager = require('./RedisManager')
const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager')
const DocumentManager = require('./DocumentManager')
const HistoryManager = require('./HistoryManager')
const async = require('async')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const Errors = require('./Errors')

// Function declarations below are hoisted, so this export block can safely
// appear before the definitions.
module.exports = {
  flushProjectWithLocks,
  flushAndDeleteProjectWithLocks,
  queueFlushAndDeleteProject,
  getProjectDocsTimestamps,
  getProjectDocsAndFlushIfOld,
  clearProjectState,
  updateProjectWithLocks,
}
+
/**
 * Flush every loaded doc in `projectId` to the persistence layer.
 *
 * Individual doc failures are logged and collected so that every doc is
 * still attempted; a single generic error is reported once all docs have
 * been processed. Docs that were deleted under us only produce a warning.
 *
 * @param {string} projectId
 * @param {Function} _callback - error-first callback
 */
function flushProjectWithLocks(projectId, _callback) {
  const timer = new Metrics.Timer('projectManager.flushProjectWithLocks')
  const callback = (...args) => {
    timer.done()
    _callback(...args)
  }

  RedisManager.getDocIdsInProject(projectId, (error, docIds) => {
    if (error) {
      return callback(error)
    }
    const errors = []
    const jobs = docIds.map(docId => cb => {
      DocumentManager.flushDocIfLoadedWithLock(projectId, docId, err => {
        if (err instanceof Errors.NotFoundError) {
          // A doc deleted while we were flushing is expected; not a failure.
          logger.warn(
            { err, projectId, docId },
            'found deleted doc when flushing'
          )
        } else if (err) {
          logger.error({ err, projectId, docId }, 'error flushing doc')
          errors.push(err)
        }
        cb()
      })
    })

    logger.log({ projectId, docIds }, 'flushing docs')
    async.series(jobs, () => {
      if (errors.length > 0) {
        callback(new Error('Errors flushing docs. See log for details'))
      } else {
        callback(null)
      }
    })
  })
}
+
/**
 * Flush and remove every doc in `projectId` from redis, then force a full
 * project-history flush.
 *
 * Doc-level failures are logged and collected so every doc is attempted;
 * a single generic error is reported at the end.
 *
 * @param {string} projectId
 * @param {Object} options - passed through to HistoryManager.flushProjectChanges
 * @param {Function} _callback - error-first callback
 */
function flushAndDeleteProjectWithLocks(projectId, options, _callback) {
  const timer = new Metrics.Timer(
    'projectManager.flushAndDeleteProjectWithLocks'
  )
  const callback = (...args) => {
    timer.done()
    _callback(...args)
  }

  RedisManager.getDocIdsInProject(projectId, (error, docIds) => {
    if (error) {
      return callback(error)
    }
    const errors = []
    const jobs = docIds.map(docId => cb => {
      DocumentManager.flushAndDeleteDocWithLock(projectId, docId, {}, err => {
        if (err) {
          logger.error({ err, projectId, docId }, 'error deleting doc')
          errors.push(err)
        }
        cb()
      })
    })

    logger.log({ projectId, docIds }, 'deleting docs')
    async.series(jobs, () =>
      // When deleting the project here we want to ensure that project
      // history is completely flushed because the project may be
      // deleted in web after this call completes, and so further
      // attempts to flush would fail after that.
      HistoryManager.flushProjectChanges(projectId, options, error => {
        if (errors.length > 0) {
          callback(new Error('Errors deleting docs. See log for details'))
        } else if (error) {
          callback(error)
        } else {
          callback(null)
        }
      })
    )
  })
}
+
/**
 * Enqueue `projectId` for background flush-and-delete instead of doing the
 * work inline. Bumps the `queued-delete` metric on success.
 */
function queueFlushAndDeleteProject(projectId, callback) {
  RedisManager.queueFlushAndDeleteProject(projectId, error => {
    if (error) {
      logger.error(
        { projectId, error },
        'error adding project to flush and delete queue'
      )
      callback(error)
    } else {
      Metrics.inc('queued-delete')
      callback()
    }
  })
}
+
/**
 * Fetch the last-updated timestamps for every doc currently loaded in
 * `projectId`. Yields an empty list when no docs are loaded.
 */
function getProjectDocsTimestamps(projectId, callback) {
  RedisManager.getDocIdsInProject(projectId, (error, docIds) => {
    if (error) {
      return callback(error)
    }
    if (docIds.length === 0) {
      // Nothing loaded; skip the redis timestamp lookup entirely.
      return callback(null, [])
    }
    RedisManager.getDocTimestamps(docIds, (error, timestamps) => {
      if (error) {
        return callback(error)
      }
      callback(null, timestamps)
    })
  })
}
+
/**
 * Return `{ _id, lines, v }` for every doc in the project, flushing docs
 * whose cached content is stale.
 *
 * Fails with ProjectStateChangedError when `projectStateHash` does not match
 * the stored project state, since the set of docs may then be out of date.
 *
 * @param {string} projectId
 * @param {string} projectStateHash - client's view of the project state
 * @param {Object} excludeVersions - currently unused here
 * @param {Function} _callback - error-first callback yielding the doc list
 */
function getProjectDocsAndFlushIfOld(
  projectId,
  projectStateHash,
  excludeVersions,
  _callback
) {
  const timer = new Metrics.Timer('projectManager.getProjectDocsAndFlushIfOld')
  const callback = (...args) => {
    timer.done()
    _callback(...args)
  }

  RedisManager.checkOrSetProjectState(
    projectId,
    projectStateHash,
    (error, projectStateChanged) => {
      if (error) {
        logger.error(
          { err: error, projectId },
          'error getting/setting project state in getProjectDocsAndFlushIfOld'
        )
        return callback(error)
      }
      if (projectStateChanged) {
        // We can't return docs if the project structure has changed.
        // NOTE(review): called without `new`; presumably the Errors factory
        // supports plain calls — confirm against Errors.js.
        return callback(
          Errors.ProjectStateChangedError('project state changed')
        )
      }
      // Project structure hasn't changed; serve doc content from redis.
      RedisManager.getDocIdsInProject(projectId, (error, docIds) => {
        if (error) {
          logger.error(
            { err: error, projectId },
            'error getting doc ids in getProjectDocs'
          )
          return callback(error)
        }
        // Fetch each doc's lines, flushing stale content as we go.
        const jobs = docIds.map(docId => cb => {
          DocumentManager.getDocAndFlushIfOldWithLock(
            projectId,
            docId,
            (err, lines, version) => {
              if (err) {
                logger.error(
                  { err, projectId, docId },
                  'error getting project doc lines in getProjectDocsAndFlushIfOld'
                )
                return cb(err)
              }
              // Minimal doc object to return to the caller.
              cb(null, { _id: docId, lines, v: version })
            }
          )
        })
        async.series(jobs, (error, docs) => {
          if (error) {
            return callback(error)
          }
          callback(null, docs)
        })
      })
    }
  )
}
+
/**
 * Forget the stored project state hash for `projectId`, forcing the next
 * getProjectDocsAndFlushIfOld call to treat the project as changed.
 */
function clearProjectState(projectId, callback) {
  RedisManager.clearProjectState(projectId, callback)
}
+
/**
 * Apply a batch of project-structure `updates` (add/rename of docs/files)
 * in order, queueing each for project-history, and trigger an async history
 * flush once the queued ops grow large enough.
 *
 * Each update is stamped with a `version` of "projectVersion.subversion" so
 * multiple operations within one project version stay ordered.
 *
 * @param {string} projectId
 * @param {string} projectHistoryId
 * @param {string} userId
 * @param {Array<Object>} updates - each has `type` and `id` (plus payload)
 * @param {string|number} projectVersion
 * @param {Function} _callback - error-first callback
 */
function updateProjectWithLocks(
  projectId,
  projectHistoryId,
  userId,
  updates,
  projectVersion,
  _callback
) {
  const timer = new Metrics.Timer('projectManager.updateProject')
  const callback = (...args) => {
    timer.done()
    _callback(...args)
  }

  let projectSubversion = 0 // project versions can have multiple operations
  let projectOpsLength = 0

  function handleUpdate(update, cb) {
    update.version = `${projectVersion}.${projectSubversion++}`
    // Shared completion handler: remember the queue length so we can decide
    // whether to flush, then report any error.
    const onQueued = (error, count) => {
      projectOpsLength = count
      cb(error)
    }
    switch (update.type) {
      case 'add-doc':
        ProjectHistoryRedisManager.queueAddEntity(
          projectId,
          projectHistoryId,
          'doc',
          update.id,
          userId,
          update,
          onQueued
        )
        break
      case 'rename-doc':
        // Renaming a doc also updates its cached pathname, so it goes via
        // DocumentManager rather than straight to the history queue.
        DocumentManager.renameDocWithLock(
          projectId,
          update.id,
          userId,
          update,
          projectHistoryId,
          onQueued
        )
        break
      case 'add-file':
        ProjectHistoryRedisManager.queueAddEntity(
          projectId,
          projectHistoryId,
          'file',
          update.id,
          userId,
          update,
          onQueued
        )
        break
      case 'rename-file':
        ProjectHistoryRedisManager.queueRenameEntity(
          projectId,
          projectHistoryId,
          'file',
          update.id,
          userId,
          update,
          onQueued
        )
        break
      default:
        cb(new Error(`Unknown update type: ${update.type}`))
    }
  }

  async.eachSeries(updates, handleUpdate, error => {
    if (error) {
      return callback(error)
    }
    if (
      HistoryManager.shouldFlushHistoryOps(
        projectOpsLength,
        updates.length,
        HistoryManager.FLUSH_PROJECT_EVERY_N_OPS
      )
    ) {
      HistoryManager.flushProjectChangesAsync(projectId)
    }
    callback()
  })
}
diff --git a/services/document-updater/app/js/RangesManager.js b/services/document-updater/app/js/RangesManager.js
new file mode 100644
index 0000000000..0de39134de
--- /dev/null
+++ b/services/document-updater/app/js/RangesManager.js
@@ -0,0 +1,163 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let RangesManager
+const RangesTracker = require('./RangesTracker')
+const logger = require('logger-sharelatex')
+const _ = require('lodash')
+
+module.exports = RangesManager = {
+ MAX_COMMENTS: 500,
+ MAX_CHANGES: 2000,
+
+ applyUpdate(project_id, doc_id, entries, updates, newDocLines, callback) {
+ let error
+ if (entries == null) {
+ entries = {}
+ }
+ if (updates == null) {
+ updates = []
+ }
+ if (callback == null) {
+ callback = function (error, new_entries, ranges_were_collapsed) {}
+ }
+ const { changes, comments } = _.cloneDeep(entries)
+ const rangesTracker = new RangesTracker(changes, comments)
+ const emptyRangeCountBefore = RangesManager._emptyRangesCount(rangesTracker)
+ for (const update of Array.from(updates)) {
+ rangesTracker.track_changes = !!update.meta.tc
+ if (update.meta.tc) {
+ rangesTracker.setIdSeed(update.meta.tc)
+ }
+ for (const op of Array.from(update.op)) {
+ try {
+ rangesTracker.applyOp(op, {
+ user_id: update.meta != null ? update.meta.user_id : undefined,
+ })
+ } catch (error1) {
+ error = error1
+ return callback(error)
+ }
+ }
+ }
+
+ if (
+ (rangesTracker.changes != null
+ ? rangesTracker.changes.length
+ : undefined) > RangesManager.MAX_CHANGES ||
+ (rangesTracker.comments != null
+ ? rangesTracker.comments.length
+ : undefined) > RangesManager.MAX_COMMENTS
+ ) {
+ return callback(new Error('too many comments or tracked changes'))
+ }
+
+ try {
+ // This is a consistency check that all of our ranges and
+ // comments still match the corresponding text
+ rangesTracker.validate(newDocLines.join('\n'))
+ } catch (error2) {
+ error = error2
+ logger.error(
+ { err: error, project_id, doc_id, newDocLines, updates },
+ 'error validating ranges'
+ )
+ return callback(error)
+ }
+
+ const emptyRangeCountAfter = RangesManager._emptyRangesCount(rangesTracker)
+ const rangesWereCollapsed = emptyRangeCountAfter > emptyRangeCountBefore
+ const response = RangesManager._getRanges(rangesTracker)
+ logger.log(
+ {
+ project_id,
+ doc_id,
+ changesCount:
+ response.changes != null ? response.changes.length : undefined,
+ commentsCount:
+ response.comments != null ? response.comments.length : undefined,
+ rangesWereCollapsed,
+ },
+ 'applied updates to ranges'
+ )
+ return callback(null, response, rangesWereCollapsed)
+ },
+
+ acceptChanges(change_ids, ranges, callback) {
+ if (callback == null) {
+ callback = function (error, ranges) {}
+ }
+ const { changes, comments } = ranges
+ logger.log(`accepting ${change_ids.length} changes in ranges`)
+ const rangesTracker = new RangesTracker(changes, comments)
+ rangesTracker.removeChangeIds(change_ids)
+ const response = RangesManager._getRanges(rangesTracker)
+ return callback(null, response)
+ },
+
+ deleteComment(comment_id, ranges, callback) {
+ if (callback == null) {
+ callback = function (error, ranges) {}
+ }
+ const { changes, comments } = ranges
+ logger.log({ comment_id }, 'deleting comment in ranges')
+ const rangesTracker = new RangesTracker(changes, comments)
+ rangesTracker.removeCommentId(comment_id)
+ const response = RangesManager._getRanges(rangesTracker)
+ return callback(null, response)
+ },
+
+ _getRanges(rangesTracker) {
+ // Return the minimal data structure needed, since most documents won't have any
+ // changes or comments
+ let response = {}
+ if (
+ (rangesTracker.changes != null
+ ? rangesTracker.changes.length
+ : undefined) > 0
+ ) {
+ if (response == null) {
+ response = {}
+ }
+ response.changes = rangesTracker.changes
+ }
+ if (
+ (rangesTracker.comments != null
+ ? rangesTracker.comments.length
+ : undefined) > 0
+ ) {
+ if (response == null) {
+ response = {}
+ }
+ response.comments = rangesTracker.comments
+ }
+ return response
+ },
+
+ _emptyRangesCount(ranges) {
+ let count = 0
+ for (const comment of Array.from(ranges.comments || [])) {
+ if (comment.op.c === '') {
+ count++
+ }
+ }
+ for (const change of Array.from(ranges.changes || [])) {
+ if (change.op.i != null) {
+ if (change.op.i === '') {
+ count++
+ }
+ }
+ }
+ return count
+ },
+}
diff --git a/services/document-updater/app/js/RangesTracker.js b/services/document-updater/app/js/RangesTracker.js
new file mode 100644
index 0000000000..2cc8869d99
--- /dev/null
+++ b/services/document-updater/app/js/RangesTracker.js
@@ -0,0 +1,849 @@
+/* eslint-disable
+ camelcase,
+ no-return-assign,
+ no-undef,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// This file is shared between document-updater and web, so that the server and client share
+// an identical track changes implementation. Do not edit it directly in web or document-updater,
+// instead edit it at https://github.com/sharelatex/ranges-tracker, where it has a suite of tests
+const load = function () {
+ let RangesTracker
+ return (RangesTracker = class RangesTracker {
+ // The purpose of this class is to track a set of inserts and deletes to a document, like
+ // track changes in Word. We store these as a set of ShareJs style ranges:
+ // {i: "foo", p: 42} # Insert 'foo' at offset 42
+ // {d: "bar", p: 37} # Delete 'bar' at offset 37
+ // We only track the inserts and deletes, not the whole document, but by being given all
+ // updates that are applied to a document, we can update these appropriately.
+ //
+ // Note that the set of inserts and deletes we store applies to the document as-is at the moment.
+ // So inserts correspond to text which is in the document, while deletes correspond to text which
+ // is no longer there, so their lengths do not affect the position of later offsets.
+ // E.g.
+ // this is the current text of the document
+ // |-----| |
+ // {i: "current ", p:12} -^ ^- {d: "old ", p: 31}
+ //
+ // Track changes rules (should be consistent with Word):
+ // * When text is inserted at a delete, the text goes to the left of the delete
+ // I.e. "foo|bar" -> "foobaz|bar", where | is the delete, and 'baz' is inserted
+ // * Deleting content flagged as 'inserted' does not create a new delete marker, it only
+ // removes the insert marker. E.g.
+ // * "abdefghijkl" -> "abfghijkl" when 'de' is deleted. No delete marker added
+ // |---| <- inserted |-| <- inserted
+ // * Deletes overlapping regular text and inserted text will insert a delete marker for the
+ // regular text:
+ // "abcdefghijkl" -> "abcdejkl" when 'fghi' is deleted
+ // |----| |--||
+ // ^- inserted 'bcdefg' \ ^- deleted 'hi'
+ // \--inserted 'bcde'
+ // * Deletes overlapping other deletes are merged. E.g.
+ // "abcghijkl" -> "ahijkl" when 'bcg' is deleted
+ // | <- delete 'def' | <- delete 'bcdefg'
+ // * Deletes by another user will consume deletes by the first user
+ // * Inserts by another user will not combine with inserts by the first user. If they are in the
+ // middle of a previous insert by the first user, the original insert will be split into two.
    // `changes` and `comments` are stored by reference and mutated in place
    // as ops are applied; callers must pass copies if they need isolation.
    constructor(changes, comments) {
      if (changes == null) {
        changes = []
      }
      this.changes = changes
      if (comments == null) {
        comments = []
      }
      this.comments = comments
      this.setIdSeed(RangesTracker.generateIdSeed())
      this.resetDirtyState()
    }
+
    // Current 18-character id seed used as the prefix for newId().
    getIdSeed() {
      return this.id_seed
    }
+
    // Set the id seed and restart the per-seed increment counter.
    // (Returns the assignment result — a decaffeinate artifact.)
    setIdSeed(seed) {
      this.id_seed = seed
      return (this.id_increment = 0)
    }
+
    static generateIdSeed() {
      // Generate the first 18 characters of a Mongo ObjectId, leaving 6 for the increment part
      // Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js
      // Each component is zero-padded to a fixed width:
      // 8 (timestamp) + 6 (machine) + 4 (pid) = 18 hex characters.
      const pid = Math.floor(Math.random() * 32767).toString(16)
      const machine = Math.floor(Math.random() * 16777216).toString(16)
      const timestamp = Math.floor(new Date().valueOf() / 1000).toString(16)
      return (
        '00000000'.substr(0, 8 - timestamp.length) +
        timestamp +
        '000000'.substr(0, 6 - machine.length) +
        machine +
        '0000'.substr(0, 4 - pid.length) +
        pid
      )
    }
+
    // A full 24-character id: a fresh seed plus the first increment value.
    static generateId() {
      return this.generateIdSeed() + '000001'
    }
+
    // Next id from the current seed: seed + zero-padded hex increment.
    newId() {
      this.id_increment++
      const increment = this.id_increment.toString(16)
      const id =
        this.id_seed + '000000'.substr(0, 6 - increment.length) + increment
      return id
    }
+
+ getComment(comment_id) {
+ let comment = null
+ for (const c of Array.from(this.comments)) {
+ if (c.id === comment_id) {
+ comment = c
+ break
+ }
+ }
+ return comment
+ }
+
    // Remove the comment with the given id, if present, and mark it dirty.
    removeCommentId(comment_id) {
      const comment = this.getComment(comment_id)
      if (comment == null) {
        return
      }
      this.comments = this.comments.filter(c => c.id !== comment_id)
      return this._markAsDirty(comment, 'comment', 'removed')
    }
+
+ moveCommentId(comment_id, position, text) {
+ return (() => {
+ const result = []
+ for (const comment of Array.from(this.comments)) {
+ if (comment.id === comment_id) {
+ comment.op.p = position
+ comment.op.c = text
+ result.push(this._markAsDirty(comment, 'comment', 'moved'))
+ } else {
+ result.push(undefined)
+ }
+ }
+ return result
+ })()
+ }
+
+ getChange(change_id) {
+ let change = null
+ for (const c of Array.from(this.changes)) {
+ if (c.id === change_id) {
+ change = c
+ break
+ }
+ }
+ return change
+ }
+
+ getChanges(change_ids) {
+ const changes_response = []
+ const ids_map = {}
+
+ for (const change_id of Array.from(change_ids)) {
+ ids_map[change_id] = true
+ }
+
+ for (const change of Array.from(this.changes)) {
+ if (ids_map[change.id]) {
+ delete ids_map[change.id]
+ changes_response.push(change)
+ }
+ }
+
+ return changes_response
+ }
+
    // Remove a single tracked change by id (no-op when absent).
    removeChangeId(change_id) {
      const change = this.getChange(change_id)
      if (change == null) {
        return
      }
      return this._removeChange(change)
    }
+
+ removeChangeIds(change_to_remove_ids) {
+ if (
+ !(change_to_remove_ids != null
+ ? change_to_remove_ids.length
+ : undefined) > 0
+ ) {
+ return
+ }
+ const i = this.changes.length
+ const remove_change_id = {}
+ for (const change_id of Array.from(change_to_remove_ids)) {
+ remove_change_id[change_id] = true
+ }
+
+ const remaining_changes = []
+
+ for (const change of Array.from(this.changes)) {
+ if (remove_change_id[change.id]) {
+ delete remove_change_id[change.id]
+ this._markAsDirty(change, 'change', 'removed')
+ } else {
+ remaining_changes.push(change)
+ }
+ }
+
+ return (this.changes = remaining_changes)
+ }
+
    // Consistency check: every tracked insert and every comment must match
    // the corresponding span of `text`. Throws on the first mismatch;
    // returns true when everything lines up. (Deletes reference text that is
    // no longer in the document, so they cannot be checked here.)
    validate(text) {
      let content
      for (const change of Array.from(this.changes)) {
        if (change.op.i != null) {
          content = text.slice(change.op.p, change.op.p + change.op.i.length)
          if (content !== change.op.i) {
            throw new Error(
              `Change (${JSON.stringify(
                change
              )}) doesn't match text (${JSON.stringify(content)})`
            )
          }
        }
      }
      for (const comment of Array.from(this.comments)) {
        content = text.slice(comment.op.p, comment.op.p + comment.op.c.length)
        if (content !== comment.op.c) {
          throw new Error(
            `Comment (${JSON.stringify(
              comment
            )}) doesn't match text (${JSON.stringify(content)})`
          )
        }
      }
      return true
    }
+
    // Update our tracked changes/comments to reflect an op that has already
    // been applied to the document text. Ops are ShareJS-style:
    //   {i, p} insert, {d, p} delete, {c, p, t} comment.
    // `metadata` gains a timestamp when one is missing. Throws on an
    // unrecognised op shape.
    applyOp(op, metadata) {
      if (metadata == null) {
        metadata = {}
      }
      if (metadata.ts == null) {
        metadata.ts = new Date()
      }
      // Apply an op that has been applied to the document to our changes to keep them up to date
      if (op.i != null) {
        this.applyInsertToChanges(op, metadata)
        return this.applyInsertToComments(op)
      } else if (op.d != null) {
        this.applyDeleteToChanges(op, metadata)
        return this.applyDeleteToComments(op)
      } else if (op.c != null) {
        return this.addComment(op, metadata)
      } else {
        throw new Error('unknown op type')
      }
    }
+
+ applyOps(ops, metadata) {
+ if (metadata == null) {
+ metadata = {}
+ }
+ return Array.from(ops).map(op => this.applyOp(op, metadata))
+ }
+
    // Add a comment op, or — when a comment with the same thread id (op.t)
    // is already tracked — move the existing one instead. Returns the
    // tracked comment object.
    addComment(op, metadata) {
      const existing = this.getComment(op.t)
      if (existing != null) {
        this.moveCommentId(op.t, op.p, op.c)
        return existing
      } else {
        let comment
        this.comments.push(
          (comment = {
            id: op.t || this.newId(),
            op: {
              // Copy because we'll modify in place
              c: op.c,
              p: op.p,
              t: op.t,
            },
            metadata,
          })
        )
        this._markAsDirty(comment, 'comment', 'added')
        return comment
      }
    }
+
    // Shift or grow comments to account for an insert of op.i at op.p:
    // comments at or after the insert move right; comments spanning the
    // insert get the inserted text spliced into their content.
    applyInsertToComments(op) {
      return (() => {
        const result = []
        for (const comment of Array.from(this.comments)) {
          if (op.p <= comment.op.p) {
            comment.op.p += op.i.length
            result.push(this._markAsDirty(comment, 'comment', 'moved'))
          } else if (op.p < comment.op.p + comment.op.c.length) {
            const offset = op.p - comment.op.p
            comment.op.c =
              // decaffeinate artifact: `+(offset - 1) + 1 || undefined` is
              // equivalent to plain `offset` here, since this branch
              // guarantees offset >= 1 — verify before simplifying
              comment.op.c.slice(0, +(offset - 1) + 1 || undefined) +
              op.i +
              comment.op.c.slice(offset)
            result.push(this._markAsDirty(comment, 'comment', 'moved'))
          } else {
            result.push(undefined)
          }
        }
        return result
      })()
    }
+
    // Update comments for a delete of op.d at op.p: comments entirely after
    // the delete shift left; comments overlapping it have the deleted span
    // removed from their content. Throws when the op's deleted text
    // disagrees with the comment's recorded content.
    applyDeleteToComments(op) {
      const op_start = op.p
      const op_length = op.d.length
      const op_end = op.p + op_length
      return (() => {
        const result = []
        for (const comment of Array.from(this.comments)) {
          const comment_start = comment.op.p
          const comment_end = comment.op.p + comment.op.c.length
          const comment_length = comment_end - comment_start
          if (op_end <= comment_start) {
            // delete is fully before comment
            comment.op.p -= op_length
            result.push(this._markAsDirty(comment, 'comment', 'moved'))
          } else if (op_start >= comment_end) {
            // delete is fully after comment, nothing to do
          } else {
            // delete and comment overlap
            var remaining_after, remaining_before
            if (op_start <= comment_start) {
              remaining_before = ''
            } else {
              remaining_before = comment.op.c.slice(0, op_start - comment_start)
            }
            if (op_end >= comment_end) {
              remaining_after = ''
            } else {
              remaining_after = comment.op.c.slice(op_end - comment_start)
            }

            // Check deleted content matches delete op
            const deleted_comment = comment.op.c.slice(
              remaining_before.length,
              comment_length - remaining_after.length
            )
            const offset = Math.max(0, comment_start - op_start)
            const deleted_op_content = op.d
              .slice(offset)
              .slice(0, deleted_comment.length)
            if (deleted_comment !== deleted_op_content) {
              throw new Error('deleted content does not match comment content')
            }

            comment.op.p = Math.min(comment_start, op_start)
            comment.op.c = remaining_before + remaining_after
            result.push(this._markAsDirty(comment, 'comment', 'moved'))
          }
        }
        return result
      })()
    }
+
    // Update the tracked changes for an insert of op.i at op.p. Handles:
    // shifting later deletes/inserts right, cancelling a delete when the
    // insert is an undo of it, merging adjacent same-user inserts (only when
    // track_changes is on), and splitting another user's insert when this
    // user types inside it. Ordering of the branches below is significant.
    applyInsertToChanges(op, metadata) {
      let change
      const op_start = op.p
      const op_length = op.i.length
      const op_end = op.p + op_length
      const undoing = !!op.u

      let already_merged = false
      let previous_change = null
      const moved_changes = []
      const remove_changes = []
      const new_changes = []

      for (let i = 0; i < this.changes.length; i++) {
        change = this.changes[i]
        const change_start = change.op.p

        if (change.op.d != null) {
          // Shift any deletes after this along by the length of this insert
          if (op_start < change_start) {
            change.op.p += op_length
            moved_changes.push(change)
          } else if (op_start === change_start) {
            // If we are undoing, then we want to cancel any existing delete ranges if we can.
            // Check if the insert matches the start of the delete, and just remove it from the delete instead if so.
            if (
              undoing &&
              change.op.d.length >= op.i.length &&
              change.op.d.slice(0, op.i.length) === op.i
            ) {
              change.op.d = change.op.d.slice(op.i.length)
              change.op.p += op.i.length
              if (change.op.d === '') {
                remove_changes.push(change)
              } else {
                moved_changes.push(change)
              }
              already_merged = true
            } else {
              change.op.p += op_length
              moved_changes.push(change)
            }
          }
        } else if (change.op.i != null) {
          var offset
          const change_end = change_start + change.op.i.length
          const is_change_overlapping =
            op_start >= change_start && op_start <= change_end

          // Only merge inserts if they are from the same user
          const is_same_user = metadata.user_id === change.metadata.user_id

          // If we are undoing, then our changes will be removed from any delete ops just after. In that case, if there is also
          // an insert op just before, then we shouldn't append it to this insert, but instead only cancel the following delete.
          // E.g.
          // foo|<--- about to insert 'b' here
          // inserted 'foo' --^ ^-- deleted 'bar'
          // should become just 'foo' not 'foob' (with the delete marker becoming just 'ar'), .
          const next_change = this.changes[i + 1]
          const is_op_adjacent_to_next_delete =
            next_change != null &&
            next_change.op.d != null &&
            op.p === change_end &&
            next_change.op.p === op.p
          const will_op_cancel_next_delete =
            undoing &&
            is_op_adjacent_to_next_delete &&
            next_change.op.d.slice(0, op.i.length) === op.i

          // If there is a delete at the start of the insert, and we're inserting
          // at the start, we SHOULDN'T merge since the delete acts as a partition.
          // The previous op will be the delete, but it's already been shifted by this insert
          //
          // I.e.
          // Originally: |-- existing insert --|
          //             | <- existing delete at same offset
          //
          // Now:        |-- existing insert --| <- not shifted yet
          //             |-- this insert --|| <- existing delete shifted along to end of this op
          //
          // After:      |-- existing insert --|
          //             |-- this insert --|| <- existing delete
          //
          // Without the delete, the inserts would be merged.
          const is_insert_blocked_by_delete =
            previous_change != null &&
            previous_change.op.d != null &&
            previous_change.op.p === op_end

          // If the insert is overlapping another insert, either at the beginning in the middle or touching the end,
          // then we merge them into one.
          if (
            this.track_changes &&
            is_change_overlapping &&
            !is_insert_blocked_by_delete &&
            !already_merged &&
            !will_op_cancel_next_delete &&
            is_same_user
          ) {
            offset = op_start - change_start
            change.op.i =
              change.op.i.slice(0, offset) + op.i + change.op.i.slice(offset)
            change.metadata.ts = metadata.ts
            already_merged = true
            moved_changes.push(change)
          } else if (op_start <= change_start) {
            // If we're fully before the other insert we can just shift the other insert by our length.
            // If they are touching, and should have been merged, they will have been above.
            // If not merged above, then it must be blocked by a delete, and will be after this insert, so we shift it along as well
            change.op.p += op_length
            moved_changes.push(change)
          } else if (
            (!is_same_user || !this.track_changes) &&
            change_start < op_start &&
            op_start < change_end
          ) {
            // This user is inserting inside a change by another user, so we need to split the
            // other user's change into one before and after this one.
            offset = op_start - change_start
            const before_content = change.op.i.slice(0, offset)
            const after_content = change.op.i.slice(offset)

            // The existing change can become the 'before' change
            change.op.i = before_content
            moved_changes.push(change)

            // Create a new op afterwards
            const after_change = {
              op: {
                i: after_content,
                p: change_start + offset + op_length,
              },
              metadata: {},
            }
            for (const key in change.metadata) {
              const value = change.metadata[key]
              after_change.metadata[key] = value
            }
            new_changes.push(after_change)
          }
        }

        previous_change = change
      }

      // Record this insert as its own tracked change unless it was merged
      // into (or cancelled against) an existing one above.
      if (this.track_changes && !already_merged) {
        this._addOp(op, metadata)
      }
      for ({ op, metadata } of Array.from(new_changes)) {
        this._addOp(op, metadata)
      }

      for (change of Array.from(remove_changes)) {
        this._removeChange(change)
      }

      return (() => {
        const result = []
        for (change of Array.from(moved_changes)) {
          result.push(this._markAsDirty(change, 'change', 'moved'))
        }
        return result
      })()
    }
+
    // Update the tracked changes for a delete of op.d at op.p. Inserts
    // overlapping the delete are trimmed (the two cancel out where they
    // overlap); overlapping deletes are merged into this op when tracking
    // changes. Modifications to the op are collected first and applied after
    // the loop so offsets are not disturbed mid-iteration.
    applyDeleteToChanges(op, metadata) {
      let change
      const op_start = op.p
      const op_length = op.d.length
      const op_end = op.p + op_length
      const remove_changes = []
      let moved_changes = []

      // We might end up modifying our delete op if it merges with existing deletes, or cancels out
      // with an existing insert. Since we might do multiple modifications, we record them and do
      // all the modifications after looping through the existing changes, so as not to mess up the
      // offset indexes as we go.
      const op_modifications = []
      for (change of Array.from(this.changes)) {
        var change_start
        if (change.op.i != null) {
          change_start = change.op.p
          const change_end = change_start + change.op.i.length
          if (op_end <= change_start) {
            // Shift ops after us back by our length
            change.op.p -= op_length
            moved_changes.push(change)
          } else if (op_start >= change_end) {
            // Delete is after insert, nothing to do
          } else {
            // When the new delete overlaps an insert, we should remove the part of the insert that
            // is now deleted, and also remove the part of the new delete that overlapped. I.e.
            // the two cancel out where they overlap.
            var delete_remaining_after,
              delete_remaining_before,
              insert_remaining_after,
              insert_remaining_before
            if (op_start >= change_start) {
              //                            |-- existing insert --|
              // insert_remaining_before -> |.....||-- new delete --|
              delete_remaining_before = ''
              insert_remaining_before = change.op.i.slice(
                0,
                op_start - change_start
              )
            } else {
              // delete_remaining_before -> |.....||-- existing insert --|
              //                            |-- new delete --|
              delete_remaining_before = op.d.slice(0, change_start - op_start)
              insert_remaining_before = ''
            }

            if (op_end <= change_end) {
              //    |-- existing insert --|
              // |-- new delete --||.....| <- insert_remaining_after
              delete_remaining_after = ''
              insert_remaining_after = change.op.i.slice(op_end - change_start)
            } else {
              // |-- existing insert --||.....| <- delete_remaining_after
              //    |-- new delete --|
              delete_remaining_after = op.d.slice(change_end - op_start)
              insert_remaining_after = ''
            }

            const insert_remaining =
              insert_remaining_before + insert_remaining_after
            if (insert_remaining.length > 0) {
              change.op.i = insert_remaining
              change.op.p = Math.min(change_start, op_start)
              change.metadata.ts = metadata.ts
              moved_changes.push(change)
            } else {
              remove_changes.push(change)
            }

            // We know what we want to preserve of our delete op before (delete_remaining_before) and what we want to preserve
            // afterwards (delete_remaining_after). Now we need to turn that into a modification which deletes the
            // chunk in the middle not covered by these.
            const delete_removed_length =
              op.d.length -
              delete_remaining_before.length -
              delete_remaining_after.length
            const delete_removed_start = delete_remaining_before.length
            const modification = {
              d: op.d.slice(
                delete_removed_start,
                delete_removed_start + delete_removed_length
              ),
              p: delete_removed_start,
            }
            if (modification.d.length > 0) {
              op_modifications.push(modification)
            }
          }
        } else if (change.op.d != null) {
          change_start = change.op.p
          if (
            op_end < change_start ||
            (!this.track_changes && op_end === change_start)
          ) {
            // Shift ops after us back by our length.
            // If we're tracking changes, it must be strictly before, since we'll merge
            // below if they are touching. Otherwise, touching is fine.
            change.op.p -= op_length
            moved_changes.push(change)
          } else if (op_start <= change_start && change_start <= op_end) {
            if (this.track_changes) {
              // If we overlap a delete, add it in our content, and delete the existing change.
              // It's easier to do it this way, rather than modifying the existing delete in case
              // we overlap many deletes and we'd need to track that. We have a workaround to
              // update the delete in place if possible below.
              const offset = change_start - op_start
              op_modifications.push({ i: change.op.d, p: offset })
              remove_changes.push(change)
            } else {
              change.op.p = op_start
              moved_changes.push(change)
            }
          }
        }
      }

      // Copy rather than modify because we still need to apply it to comments
      op = {
        p: op.p,
        d: this._applyOpModifications(op.d, op_modifications),
      }

      for (change of Array.from(remove_changes)) {
        // This is a bit of hack to avoid removing one delete and replacing it with another.
        // If we don't do this, it causes the UI to flicker
        if (
          op.d.length > 0 &&
          change.op.d != null &&
          op.p <= change.op.p &&
          change.op.p <= op.p + op.d.length
        ) {
          change.op.p = op.p
          change.op.d = op.d
          change.metadata = metadata
          moved_changes.push(change)
          op.d = '' // stop it being added
        } else {
          this._removeChange(change)
        }
      }

      if (this.track_changes && op.d.length > 0) {
        this._addOp(op, metadata)
      } else {
        // It's possible that we deleted an insert between two other inserts. I.e.
        // If we delete 'user_2 insert' in:
        //   |-- user_1 insert --||-- user_2 insert --||-- user_1 insert --|
        // it becomes:
        //   |-- user_1 insert --||-- user_1 insert --|
        // We need to merge these together again
        const results = this._scanAndMergeAdjacentUpdates()
        moved_changes = moved_changes.concat(results.moved_changes)
        for (change of Array.from(results.remove_changes)) {
          this._removeChange(change)
          moved_changes = moved_changes.filter(c => c !== change)
        }
      }

      return (() => {
        const result = []
        for (change of Array.from(moved_changes)) {
          result.push(this._markAsDirty(change, 'change', 'moved'))
        }
        return result
      })()
    }
+
+ _addOp(op, metadata) {
+ const change = {
+ id: this.newId(),
+ op: this._clone(op), // Don't take a reference to the existing op since we'll modify this in place with future changes
+ metadata: this._clone(metadata),
+ }
+ this.changes.push(change)
+
+ // Keep ops in order of offset, with deletes before inserts
+ this.changes.sort(function (c1, c2) {
+ const result = c1.op.p - c2.op.p
+ if (result !== 0) {
+ return result
+ } else if (c1.op.i != null && c2.op.d != null) {
+ return 1
+ } else if (c1.op.d != null && c2.op.i != null) {
+ return -1
+ } else {
+ return 0
+ }
+ })
+
+ return this._markAsDirty(change, 'change', 'added')
+ }
+
+ _removeChange(change) {
+ this.changes = this.changes.filter(c => c.id !== change.id)
+ return this._markAsDirty(change, 'change', 'removed')
+ }
+
+ _applyOpModifications(content, op_modifications) {
+ // Put in descending position order, with deleting first if at the same offset
+ // (Inserting first would modify the content that the delete will delete)
+ op_modifications.sort(function (a, b) {
+ const result = b.p - a.p
+ if (result !== 0) {
+ return result
+ } else if (a.i != null && b.d != null) {
+ return 1
+ } else if (a.d != null && b.i != null) {
+ return -1
+ } else {
+ return 0
+ }
+ })
+
+ for (const modification of Array.from(op_modifications)) {
+ if (modification.i != null) {
+ content =
+ content.slice(0, modification.p) +
+ modification.i +
+ content.slice(modification.p)
+ } else if (modification.d != null) {
+ if (
+ content.slice(
+ modification.p,
+ modification.p + modification.d.length
+ ) !== modification.d
+ ) {
+ throw new Error(
+ `deleted content does not match. content: ${JSON.stringify(
+ content
+ )}; modification: ${JSON.stringify(modification)}`
+ )
+ }
+ content =
+ content.slice(0, modification.p) +
+ content.slice(modification.p + modification.d.length)
+ }
+ }
+ return content
+ }
+
  // Scan this.changes for touching inserts or co-located deletes by the same
  // user and merge each pair into the earlier change. Returns the changes
  // that grew (moved_changes) and the ones folded away (remove_changes);
  // the caller is responsible for actually removing the latter.
  _scanAndMergeAdjacentUpdates() {
    // This should only need calling when deleting an update between two
    // other updates. There's no other way to get two adjacent updates from the
    // same user, since they would be merged on insert.
    let previous_change = null
    const remove_changes = []
    const moved_changes = []
    for (const change of Array.from(this.changes)) {
      if (
        (previous_change != null ? previous_change.op.i : undefined) != null &&
        change.op.i != null
      ) {
        // Two consecutive inserts: merge only if they touch end-to-start
        // and share an author.
        const previous_change_end =
          previous_change.op.p + previous_change.op.i.length
        const previous_change_user_id = previous_change.metadata.user_id
        const change_start = change.op.p
        const change_user_id = change.metadata.user_id
        if (
          previous_change_end === change_start &&
          previous_change_user_id === change_user_id
        ) {
          remove_changes.push(change)
          previous_change.op.i += change.op.i
          moved_changes.push(previous_change)
        }
        // NOTE(review): when the inner merge condition fails, previous_change
        // is not advanced to this insert — confirm that is intentional.
      } else if (
        (previous_change != null ? previous_change.op.d : undefined) != null &&
        change.op.d != null &&
        previous_change.op.p === change.op.p
      ) {
        // Merge adjacent deletes
        previous_change.op.d += change.op.d
        remove_changes.push(change)
        moved_changes.push(previous_change)
      } else {
        // Only update to the current change if we haven't removed it.
        previous_change = change
      }
    }
    return { moved_changes, remove_changes }
  }
+
+ resetDirtyState() {
+ return (this._dirtyState = {
+ comment: {
+ moved: {},
+ removed: {},
+ added: {},
+ },
+ change: {
+ moved: {},
+ removed: {},
+ added: {},
+ },
+ })
+ }
+
  // Accessor for the dirty-state bookkeeping populated by _markAsDirty()
  // and reset by resetDirtyState().
  getDirtyState() {
    return this._dirtyState
  }
+
+ _markAsDirty(object, type, action) {
+ return (this._dirtyState[type][action][object.id] = object)
+ }
+
+ _clone(object) {
+ const clone = {}
+ for (const k in object) {
+ const v = object[k]
+ clone[k] = v
+ }
+ return clone
+ }
+ })
+}
+
// UMD-style export: register with an AMD loader when `define` is available
// (browser), otherwise export directly for CommonJS (Node).
if (typeof define !== 'undefined' && define !== null) {
  define([], load)
} else {
  module.exports = load()
}
diff --git a/services/document-updater/app/js/RateLimitManager.js b/services/document-updater/app/js/RateLimitManager.js
new file mode 100644
index 0000000000..48e9b0b8f1
--- /dev/null
+++ b/services/document-updater/app/js/RateLimitManager.js
@@ -0,0 +1,80 @@
+/* eslint-disable
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let RateLimiter
+const Settings = require('@overleaf/settings')
+const logger = require('logger-sharelatex')
+const Metrics = require('./Metrics')
+
// Adaptive concurrency limiter for background tasks.
//
// Tasks run immediately while fewer than CurrentWorkerLimit are active;
// at or above the limit the caller's callback is held until the task
// finishes. The limit creeps up (+0.1) each time a rate-limited task
// completes without error, and decays (x0.9, never below BaseWorkerCount)
// while running under the limit. Limits are fractional; they are rounded
// up with Math.ceil for metrics/logging.
module.exports = RateLimiter = class RateLimiter {
  // number: base (and initial) worker limit; defaults to 10 when not given.
  constructor(number) {
    if (number == null) {
      number = 10
    }
    this.ActiveWorkerCount = 0
    this.CurrentWorkerLimit = number
    this.BaseWorkerCount = number
  }

  // Raise the limit slightly and report it. Note: there is no upper bound
  // on CurrentWorkerLimit in this class.
  _adjustLimitUp() {
    this.CurrentWorkerLimit += 0.1 // allow target worker limit to increase gradually
    return Metrics.gauge('currentLimit', Math.ceil(this.CurrentWorkerLimit))
  }

  // Decay the limit by 10%, but never below the configured base.
  _adjustLimitDown() {
    this.CurrentWorkerLimit = Math.max(
      this.BaseWorkerCount,
      this.CurrentWorkerLimit * 0.9
    )
    logger.log(
      { currentLimit: Math.ceil(this.CurrentWorkerLimit) },
      'reducing rate limit'
    )
    return Metrics.gauge('currentLimit', Math.ceil(this.CurrentWorkerLimit))
  }

  // Run `task`, keeping the active-worker gauge up to date; `callback`
  // fires when the task completes (with the task's error, if any).
  _trackAndRun(task, callback) {
    if (callback == null) {
      callback = function () {}
    }
    this.ActiveWorkerCount++
    Metrics.gauge('processingUpdates', this.ActiveWorkerCount)
    return task(err => {
      this.ActiveWorkerCount--
      Metrics.gauge('processingUpdates', this.ActiveWorkerCount)
      return callback(err)
    })
  }

  // Run `task` subject to the limit. Below the limit the task runs in the
  // background and `callback` is invoked immediately (task errors are then
  // not reported to the caller); at or above the limit, `callback` is
  // deferred until the task completes.
  run(task, callback) {
    if (this.ActiveWorkerCount < this.CurrentWorkerLimit) {
      this._trackAndRun(task) // below the limit, just put the task in the background
      callback() // return immediately
      if (this.CurrentWorkerLimit > this.BaseWorkerCount) {
        return this._adjustLimitDown()
      }
    } else {
      logger.log(
        {
          active: this.ActiveWorkerCount,
          currentLimit: Math.ceil(this.CurrentWorkerLimit),
        },
        'hit rate limit'
      )
      return this._trackAndRun(task, err => {
        if (err == null) {
          this._adjustLimitUp()
        } // don't increment rate limit if there was an error
        return callback(err)
      }) // only return after task completes
    }
  }
}
diff --git a/services/document-updater/app/js/RealTimeRedisManager.js b/services/document-updater/app/js/RealTimeRedisManager.js
new file mode 100644
index 0000000000..af359ef227
--- /dev/null
+++ b/services/document-updater/app/js/RealTimeRedisManager.js
@@ -0,0 +1,87 @@
+/* eslint-disable
+ camelcase,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let RealTimeRedisManager
+const Settings = require('@overleaf/settings')
+const rclient = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.documentupdater
+)
+const pubsubClient = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.pubsub
+)
+const Keys = Settings.redis.documentupdater.key_schema
+const logger = require('logger-sharelatex')
+const os = require('os')
+const crypto = require('crypto')
+const metrics = require('./Metrics')
+
+const HOST = os.hostname()
+const RND = crypto.randomBytes(4).toString('hex') // generate a random key for this process
+let COUNT = 0
+
+const MAX_OPS_PER_ITERATION = 8 // process a limited number of ops for safety
+
module.exports = RealTimeRedisManager = {
  // Atomically fetch (lrange) and remove (ltrim) up to MAX_OPS_PER_ITERATION
  // pending updates for a doc, then call back with the parsed update objects.
  getPendingUpdatesForDoc(doc_id, callback) {
    const multi = rclient.multi()
    multi.lrange(Keys.pendingUpdates({ doc_id }), 0, MAX_OPS_PER_ITERATION - 1)
    multi.ltrim(Keys.pendingUpdates({ doc_id }), MAX_OPS_PER_ITERATION, -1)
    return multi.exec(function (error, replys) {
      let jsonUpdate
      if (error != null) {
        return callback(error)
      }
      const jsonUpdates = replys[0]
      // Metrics are recorded for every popped update up front, even if a
      // later JSON.parse fails below.
      for (jsonUpdate of Array.from(jsonUpdates)) {
        // record metric for each update removed from queue
        metrics.summary('redis.pendingUpdates', jsonUpdate.length, {
          status: 'pop',
        })
      }
      const updates = []
      for (jsonUpdate of Array.from(jsonUpdates)) {
        var update
        try {
          update = JSON.parse(jsonUpdate)
        } catch (e) {
          // A malformed entry aborts the whole batch; the entries were
          // already trimmed from the queue at this point.
          return callback(e)
        }
        updates.push(update)
      }
      return callback(error, updates)
    })
  },

  // Current length of the pending-updates queue for a doc.
  getUpdatesLength(doc_id, callback) {
    return rclient.llen(Keys.pendingUpdates({ doc_id }), callback)
  },

  // Publish `data` on the applied-ops pub/sub channel, tagged with a
  // process-unique message id (hostname + random key + counter).
  sendData(data) {
    // create a unique message id using a counter
    const message_id = `doc:${HOST}:${RND}-${COUNT++}`
    if (data != null) {
      data._id = message_id
    }

    const blob = JSON.stringify(data)
    metrics.summary('redis.publish.applied-ops', blob.length)

    // publish on separate channels for individual projects and docs when
    // configured (needs realtime to be configured for this too).
    if (Settings.publishOnIndividualChannels) {
      return pubsubClient.publish(`applied-ops:${data.doc_id}`, blob)
    } else {
      return pubsubClient.publish('applied-ops', blob)
    }
  },
}
diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js
new file mode 100644
index 0000000000..11ff1f8fcc
--- /dev/null
+++ b/services/document-updater/app/js/RedisManager.js
@@ -0,0 +1,744 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS201: Simplify complex destructure assignments
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let RedisManager
+const Settings = require('@overleaf/settings')
+const rclient = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.documentupdater
+)
+const logger = require('logger-sharelatex')
+const metrics = require('./Metrics')
+const Errors = require('./Errors')
+const crypto = require('crypto')
+const async = require('async')
+const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager')
+
+// Sometimes Redis calls take an unexpectedly long time. We have to be
+// quick with Redis calls because we're holding a lock that expires
+// after 30 seconds. We can't let any errors in the rest of the stack
+// hold us up, and need to bail out quickly if there is a problem.
+const MAX_REDIS_REQUEST_LENGTH = 5000 // 5 seconds
+
+// Make times easy to read
+const minutes = 60 // seconds for Redis expire
+
+const logHashErrors =
+ Settings.documentupdater != null
+ ? Settings.documentupdater.logHashErrors
+ : undefined
+const logHashReadErrors = logHashErrors != null ? logHashErrors.read : undefined
+
+const MEGABYTES = 1024 * 1024
+const MAX_RANGES_SIZE = 3 * MEGABYTES
+
+const keys = Settings.redis.documentupdater.key_schema
+const historyKeys = Settings.redis.history.key_schema // note: this is track changes, not project-history
+
+module.exports = RedisManager = {
+ rclient,
+
  // Write a doc's lines, version, ranges and project metadata into Redis,
  // and register the doc in the project's doc set. Rejects payloads that
  // contain null bytes or exceed Settings.max_doc_length.
  putDocInMemory(
    project_id,
    doc_id,
    docLines,
    version,
    ranges,
    pathname,
    projectHistoryId,
    _callback
  ) {
    const timer = new metrics.Timer('redis.put-doc')
    const callback = function (error) {
      timer.done()
      return _callback(error)
    }
    docLines = JSON.stringify(docLines)
    if (docLines.indexOf('\u0000') !== -1) {
      const error = new Error('null bytes found in doc lines')
      // this check was added to catch memory corruption in JSON.stringify.
      // It sometimes returned null bytes at the end of the string.
      logger.error({ err: error, doc_id, docLines }, error.message)
      return callback(error)
    }
    // Do a cheap size check on the serialized blob.
    if (docLines.length > Settings.max_doc_length) {
      const docSize = docLines.length
      const err = new Error('blocking doc insert into redis: doc is too large')
      logger.error({ project_id, doc_id, err, docSize }, err.message)
      return callback(err)
    }
    // sha1 of the serialized lines, checked again on read to detect corruption
    const docHash = RedisManager._computeHash(docLines)
    // record bytes sent to redis
    metrics.summary('redis.docLines', docLines.length, { status: 'set' })
    logger.log(
      { project_id, doc_id, version, docHash, pathname, projectHistoryId },
      'putting doc in redis'
    )
    return RedisManager._serializeRanges(ranges, function (error, ranges) {
      if (error != null) {
        logger.error({ err: error, doc_id, project_id }, error.message)
        return callback(error)
      }
      // update docsInProject set before writing doc contents
      rclient.sadd(keys.docsInProject({ project_id }), doc_id, error => {
        if (error) return callback(error)

        rclient.mset(
          {
            [keys.docLines({ doc_id })]: docLines,
            [keys.projectKey({ doc_id })]: project_id,
            [keys.docVersion({ doc_id })]: version,
            [keys.docHash({ doc_id })]: docHash,
            [keys.ranges({ doc_id })]: ranges,
            [keys.pathname({ doc_id })]: pathname,
            [keys.projectHistoryId({ doc_id })]: projectHistoryId,
          },
          callback
        )
      })
    })
  },
+
  // Delete every Redis key belonging to a doc, remove the doc from its
  // project's doc set and clear the project state key so project-level
  // caches revalidate.
  removeDocFromMemory(project_id, doc_id, _callback) {
    logger.log({ project_id, doc_id }, 'removing doc from redis')
    const callback = function (err) {
      if (err != null) {
        logger.err({ project_id, doc_id, err }, 'error removing doc from redis')
        return _callback(err)
      } else {
        logger.log({ project_id, doc_id }, 'removed doc from redis')
        return _callback()
      }
    }

    let multi = rclient.multi()
    // Read the doc length before deletion so the freed bytes can be reported
    multi.strlen(keys.docLines({ doc_id }))
    multi.del(
      keys.docLines({ doc_id }),
      keys.projectKey({ doc_id }),
      keys.docVersion({ doc_id }),
      keys.docHash({ doc_id }),
      keys.ranges({ doc_id }),
      keys.pathname({ doc_id }),
      keys.projectHistoryId({ doc_id }),
      keys.projectHistoryType({ doc_id }),
      keys.unflushedTime({ doc_id }),
      keys.lastUpdatedAt({ doc_id }),
      keys.lastUpdatedBy({ doc_id })
    )
    return multi.exec(function (error, response) {
      if (error != null) {
        return callback(error)
      }
      const length = response != null ? response[0] : undefined
      if (length > 0) {
        // record bytes freed in redis
        metrics.summary('redis.docLines', length, { status: 'del' })
      }
      multi = rclient.multi()
      multi.srem(keys.docsInProject({ project_id }), doc_id)
      multi.del(keys.projectState({ project_id }))
      return multi.exec(callback)
    })
  },
+
+ checkOrSetProjectState(project_id, newState, callback) {
+ if (callback == null) {
+ callback = function (error, stateChanged) {}
+ }
+ const multi = rclient.multi()
+ multi.getset(keys.projectState({ project_id }), newState)
+ multi.expire(keys.projectState({ project_id }), 30 * minutes)
+ return multi.exec(function (error, response) {
+ if (error != null) {
+ return callback(error)
+ }
+ logger.log(
+ { project_id, newState, oldState: response[0] },
+ 'checking project state'
+ )
+ return callback(null, response[0] !== newState)
+ })
+ },
+
+ clearProjectState(project_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return rclient.del(keys.projectState({ project_id }), callback)
+ },
+
  // Load a doc's full state from Redis in one mget: lines, version, hash,
  // owning project, ranges, pathname, history id and last-update metadata.
  // Verifies the stored sha1 hash, checks the doc belongs to `project_id`,
  // and bails out if the Redis round trip took too long (we hold a lock
  // with a limited lifetime).
  getDoc(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (
        error,
        lines,
        version,
        ranges,
        pathname,
        projectHistoryId,
        unflushedTime
      ) {}
    }
    const timer = new metrics.Timer('redis.get-doc')
    // Order here must match the destructuring of the reply below.
    const collectKeys = [
      keys.docLines({ doc_id }),
      keys.docVersion({ doc_id }),
      keys.docHash({ doc_id }),
      keys.projectKey({ doc_id }),
      keys.ranges({ doc_id }),
      keys.pathname({ doc_id }),
      keys.projectHistoryId({ doc_id }),
      keys.unflushedTime({ doc_id }),
      keys.lastUpdatedAt({ doc_id }),
      keys.lastUpdatedBy({ doc_id }),
    ]
    rclient.mget(...collectKeys, (error, ...rest) => {
      let [
        docLines,
        version,
        storedHash,
        doc_project_id,
        ranges,
        pathname,
        projectHistoryId,
        unflushedTime,
        lastUpdatedAt,
        lastUpdatedBy,
      ] = Array.from(rest[0])
      const timeSpan = timer.done()
      if (error != null) {
        return callback(error)
      }
      // check if request took too long and bail out. only do this for
      // get, because it is the first call in each update, so if this
      // passes we'll assume others have a reasonable chance to succeed.
      if (timeSpan > MAX_REDIS_REQUEST_LENGTH) {
        error = new Error('redis getDoc exceeded timeout')
        return callback(error)
      }
      // record bytes loaded from redis
      if (docLines != null) {
        metrics.summary('redis.docLines', docLines.length, { status: 'get' })
      }
      // check sha1 hash value if present; a mismatch is logged but does not
      // fail the request
      if (docLines != null && storedHash != null) {
        const computedHash = RedisManager._computeHash(docLines)
        if (logHashReadErrors && computedHash !== storedHash) {
          logger.error(
            {
              project_id,
              doc_id,
              doc_project_id,
              computedHash,
              storedHash,
              docLines,
            },
            'hash mismatch on retrieved document'
          )
        }
      }

      try {
        docLines = JSON.parse(docLines)
        ranges = RedisManager._deserializeRanges(ranges)
      } catch (e) {
        return callback(e)
      }

      version = parseInt(version || 0, 10)
      // check doc is in requested project
      if (doc_project_id != null && doc_project_id !== project_id) {
        logger.error(
          { project_id, doc_id, doc_project_id },
          'doc not in project'
        )
        return callback(new Errors.NotFoundError('document not found'))
      }

      if (projectHistoryId != null) {
        projectHistoryId = parseInt(projectHistoryId)
      }

      callback(
        null,
        docLines,
        version,
        ranges,
        pathname,
        projectHistoryId,
        unflushedTime,
        lastUpdatedAt,
        lastUpdatedBy
      )
    })
  },
+
+ getDocVersion(doc_id, callback) {
+ if (callback == null) {
+ callback = function (error, version, projectHistoryType) {}
+ }
+ return rclient.mget(
+ keys.docVersion({ doc_id }),
+ keys.projectHistoryType({ doc_id }),
+ function (error, result) {
+ if (error != null) {
+ return callback(error)
+ }
+ let [version, projectHistoryType] = Array.from(result || [])
+ version = parseInt(version, 10)
+ return callback(null, version, projectHistoryType)
+ }
+ )
+ },
+
+ getDocLines(doc_id, callback) {
+ if (callback == null) {
+ callback = function (error, version) {}
+ }
+ return rclient.get(keys.docLines({ doc_id }), function (error, docLines) {
+ if (error != null) {
+ return callback(error)
+ }
+ return callback(null, docLines)
+ })
+ },
+
  // Fetch previously-applied ops [start..end] (doc versions) for a doc.
  // Translates version numbers into list indices using the current version
  // and the list length, and fails with OpRangeNotAvailableError when the
  // requested range has already been trimmed out of Redis.
  getPreviousDocOps(doc_id, start, end, callback) {
    if (callback == null) {
      callback = function (error, jsonOps) {}
    }
    const timer = new metrics.Timer('redis.get-prev-docops')
    return rclient.llen(keys.docOps({ doc_id }), function (error, length) {
      if (error != null) {
        return callback(error)
      }
      return rclient.get(
        keys.docVersion({ doc_id }),
        function (error, version) {
          if (error != null) {
            return callback(error)
          }
          version = parseInt(version, 10)
          // The list holds the last `length` ops, ending at `version`
          const first_version_in_redis = version - length

          if (start < first_version_in_redis || end > version) {
            error = new Errors.OpRangeNotAvailableError(
              'doc ops range is not loaded in redis'
            )
            logger.warn(
              { err: error, doc_id, length, version, start, end },
              'doc ops range is not loaded in redis'
            )
            return callback(error)
          }

          // Convert versions to 0-based list offsets (end may be -1,
          // meaning "to the latest op")
          start = start - first_version_in_redis
          if (end > -1) {
            end = end - first_version_in_redis
          }

          if (isNaN(start) || isNaN(end)) {
            error = new Error('inconsistent version or lengths')
            logger.error(
              { err: error, doc_id, length, version, start, end },
              'inconsistent version or length'
            )
            return callback(error)
          }

          return rclient.lrange(
            keys.docOps({ doc_id }),
            start,
            end,
            function (error, jsonOps) {
              let ops
              if (error != null) {
                return callback(error)
              }
              try {
                ops = jsonOps.map(jsonOp => JSON.parse(jsonOp))
              } catch (e) {
                return callback(e)
              }
              // Bail out if the overall Redis round trips took too long
              // (we are holding a lock with a limited lifetime)
              const timeSpan = timer.done()
              if (timeSpan > MAX_REDIS_REQUEST_LENGTH) {
                error = new Error('redis getPreviousDocOps exceeded timeout')
                return callback(error)
              }
              return callback(null, ops)
            }
          )
        }
      )
    })
  },
+
+ getHistoryType(doc_id, callback) {
+ if (callback == null) {
+ callback = function (error, projectHistoryType) {}
+ }
+ return rclient.get(
+ keys.projectHistoryType({ doc_id }),
+ function (error, projectHistoryType) {
+ if (error != null) {
+ return callback(error)
+ }
+ return callback(null, projectHistoryType)
+ }
+ )
+ },
+
+ setHistoryType(doc_id, projectHistoryType, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return rclient.set(
+ keys.projectHistoryType({ doc_id }),
+ projectHistoryType,
+ callback
+ )
+ },
+
  // How long the per-doc ops list lives without updates, and how many
  // recent ops we retain for catch-up.
  DOC_OPS_TTL: 60 * minutes,
  DOC_OPS_MAX_LENGTH: 100,
  // Persist an updated doc (lines, version, ranges, metadata) plus the ops
  // that produced it, after checking the version advanced by exactly the
  // number of applied ops. Queues the ops for track-changes and/or
  // project-history depending on configuration.
  updateDocument(
    project_id,
    doc_id,
    docLines,
    newVersion,
    appliedOps,
    ranges,
    updateMeta,
    callback
  ) {
    if (appliedOps == null) {
      appliedOps = []
    }
    if (callback == null) {
      callback = function (error) {}
    }
    return RedisManager.getDocVersion(
      doc_id,
      function (error, currentVersion, projectHistoryType) {
        if (error != null) {
          return callback(error)
        }
        if (currentVersion + appliedOps.length !== newVersion) {
          error = new Error(`Version mismatch. '${doc_id}' is corrupted.`)
          logger.error(
            {
              err: error,
              doc_id,
              currentVersion,
              newVersion,
              opsLength: appliedOps.length,
            },
            'version mismatch'
          )
          return callback(error)
        }

        const jsonOps = appliedOps.map(op => JSON.stringify(op))
        for (const op of Array.from(jsonOps)) {
          if (op.indexOf('\u0000') !== -1) {
            error = new Error('null bytes found in jsonOps')
            // this check was added to catch memory corruption in JSON.stringify
            logger.error({ err: error, doc_id, jsonOps }, error.message)
            return callback(error)
          }
        }

        const newDocLines = JSON.stringify(docLines)
        if (newDocLines.indexOf('\u0000') !== -1) {
          error = new Error('null bytes found in doc lines')
          // this check was added to catch memory corruption in JSON.stringify
          logger.error({ err: error, doc_id, newDocLines }, error.message)
          return callback(error)
        }
        // Do a cheap size check on the serialized blob.
        if (newDocLines.length > Settings.max_doc_length) {
          const err = new Error('blocking doc update: doc is too large')
          const docSize = newDocLines.length
          logger.error({ project_id, doc_id, err, docSize }, err.message)
          return callback(err)
        }
        const newHash = RedisManager._computeHash(newDocLines)

        const opVersions = appliedOps.map(op => (op != null ? op.v : undefined))
        logger.log(
          {
            doc_id,
            version: newVersion,
            hash: newHash,
            op_versions: opVersions,
          },
          'updating doc in redis'
        )
        // record bytes sent to redis in update
        metrics.summary('redis.docLines', newDocLines.length, {
          status: 'update',
        })
        return RedisManager._serializeRanges(ranges, function (error, ranges) {
          if (error != null) {
            logger.error({ err: error, doc_id }, error.message)
            return callback(error)
          }
          if (ranges != null && ranges.indexOf('\u0000') !== -1) {
            error = new Error('null bytes found in ranges')
            // this check was added to catch memory corruption in JSON.stringify
            logger.error({ err: error, doc_id, ranges }, error.message)
            return callback(error)
          }
          const multi = rclient.multi()
          multi.mset({
            [keys.docLines({ doc_id })]: newDocLines,
            [keys.docVersion({ doc_id })]: newVersion,
            [keys.docHash({ doc_id })]: newHash,
            [keys.ranges({ doc_id })]: ranges,
            [keys.lastUpdatedAt({ doc_id })]: Date.now(),
            [keys.lastUpdatedBy({ doc_id })]: updateMeta && updateMeta.user_id,
          })
          multi.ltrim(
            keys.docOps({ doc_id }),
            -RedisManager.DOC_OPS_MAX_LENGTH,
            -1
          ) // result index 1
          // push the ops last so we can get the lengths at fixed index position 4
          if (jsonOps.length > 0) {
            multi.rpush(keys.docOps({ doc_id }), ...Array.from(jsonOps)) // result index 2
            // expire must come after rpush since before it will be a no-op if the list is empty
            multi.expire(keys.docOps({ doc_id }), RedisManager.DOC_OPS_TTL) // result index 3
            if (projectHistoryType === 'project-history') {
              metrics.inc('history-queue', 1, { status: 'skip-track-changes' })
              logger.log(
                { doc_id },
                'skipping push of uncompressed ops for project using project-history'
              )
            } else {
              // project is using old track-changes history service
              metrics.inc('history-queue', 1, { status: 'track-changes' })
              multi.rpush(
                historyKeys.uncompressedHistoryOps({ doc_id }),
                ...Array.from(jsonOps)
              ) // result index 4, read as result[4] below
            }
            // Set the unflushed timestamp to the current time if the doc
            // hasn't been modified before (the content in mongo has been
            // valid up to this point). Otherwise leave it alone ("NX" flag).
            multi.set(keys.unflushedTime({ doc_id }), Date.now(), 'NX')
          }
          return multi.exec(function (error, result) {
            let docUpdateCount
            if (error != null) {
              return callback(error)
            }

            if (projectHistoryType === 'project-history') {
              docUpdateCount = undefined // only using project history, don't bother with track-changes
            } else {
              // project is using old track-changes history service
              // result[4] is the rpush reply: the history queue length
              docUpdateCount = result[4]
            }

            if (
              jsonOps.length > 0 &&
              __guard__(
                Settings.apis != null
                  ? Settings.apis.project_history
                  : undefined,
                x => x.enabled
              )
            ) {
              metrics.inc('history-queue', 1, { status: 'project-history' })
              return ProjectHistoryRedisManager.queueOps(
                project_id,
                ...Array.from(jsonOps),
                (error, projectUpdateCount) =>
                  callback(null, docUpdateCount, projectUpdateCount)
              )
            } else {
              return callback(null, docUpdateCount)
            }
          })
        })
      }
    )
  },
+
  // Handle a doc rename: when the doc is loaded in Redis, update its stored
  // pathname first; in both cases queue a rename entity event for
  // project-history.
  renameDoc(project_id, doc_id, user_id, update, projectHistoryId, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return RedisManager.getDoc(
      project_id,
      doc_id,
      function (error, lines, version) {
        if (error != null) {
          return callback(error)
        }

        // lines/version both present means the doc is currently in Redis
        if (lines != null && version != null) {
          return rclient.set(
            keys.pathname({ doc_id }),
            update.newPathname,
            function (error) {
              if (error != null) {
                return callback(error)
              }
              return ProjectHistoryRedisManager.queueRenameEntity(
                project_id,
                projectHistoryId,
                'doc',
                doc_id,
                user_id,
                update,
                callback
              )
            }
          )
        } else {
          return ProjectHistoryRedisManager.queueRenameEntity(
            project_id,
            projectHistoryId,
            'doc',
            doc_id,
            user_id,
            update,
            callback
          )
        }
      }
    )
  },
+
+ clearUnflushedTime(doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return rclient.del(keys.unflushedTime({ doc_id }), callback)
+ },
+
+ getDocIdsInProject(project_id, callback) {
+ if (callback == null) {
+ callback = function (error, doc_ids) {}
+ }
+ return rclient.smembers(keys.docsInProject({ project_id }), callback)
+ },
+
+ getDocTimestamps(doc_ids, callback) {
+ // get lastupdatedat timestamps for an array of doc_ids
+ if (callback == null) {
+ callback = function (error, result) {}
+ }
+ return async.mapSeries(
+ doc_ids,
+ (doc_id, cb) => rclient.get(keys.lastUpdatedAt({ doc_id }), cb),
+ callback
+ )
+ },
+
+ queueFlushAndDeleteProject(project_id, callback) {
+ // store the project id in a sorted set ordered by time with a random offset to smooth out spikes
+ const SMOOTHING_OFFSET =
+ Settings.smoothingOffset > 0
+ ? Math.round(Settings.smoothingOffset * Math.random())
+ : 0
+ return rclient.zadd(
+ keys.flushAndDeleteQueue(),
+ Date.now() + SMOOTHING_OFFSET,
+ project_id,
+ callback
+ )
+ },
+
  // Pop the oldest queued flush-and-delete whose scheduled time is before
  // `cutoffTime`. Calls back with no arguments when nothing is due, else
  // with (null, project_id, timestamp, remainingQueueLength).
  getNextProjectToFlushAndDelete(cutoffTime, callback) {
    // find the oldest queued flush that is before the cutoff time
    if (callback == null) {
      callback = function (error, key, timestamp) {}
    }
    return rclient.zrangebyscore(
      keys.flushAndDeleteQueue(),
      0,
      cutoffTime,
      'WITHSCORES',
      'LIMIT',
      0,
      1,
      function (err, reply) {
        if (err != null) {
          return callback(err)
        }
        if (!(reply != null ? reply.length : undefined)) {
          return callback()
        } // return if no projects ready to be processed
        // pop the oldest entry (get and remove in a multi)
        const multi = rclient.multi()
        // Poor man's version of ZPOPMIN, which is only available in Redis 5.
        // NOTE(review): the entry popped here is the rank-0 element at exec
        // time, which may differ from the one found above if the queue
        // changed concurrently — confirm this race is acceptable.
        multi.zrange(keys.flushAndDeleteQueue(), 0, 0, 'WITHSCORES')
        multi.zremrangebyrank(keys.flushAndDeleteQueue(), 0, 0)
        multi.zcard(keys.flushAndDeleteQueue()) // the total length of the queue (for metrics)
        return multi.exec(function (err, reply) {
          if (err != null) {
            return callback(err)
          }
          if (!(reply != null ? reply.length : undefined)) {
            return callback()
          }
          const [key, timestamp] = Array.from(reply[0])
          const queueLength = reply[2]
          return callback(null, key, timestamp, queueLength)
        })
      }
    )
  },
+
+ _serializeRanges(ranges, callback) {
+ if (callback == null) {
+ callback = function (error, serializedRanges) {}
+ }
+ let jsonRanges = JSON.stringify(ranges)
+ if (jsonRanges != null && jsonRanges.length > MAX_RANGES_SIZE) {
+ return callback(new Error('ranges are too large'))
+ }
+ if (jsonRanges === '{}') {
+ // Most doc will have empty ranges so don't fill redis with lots of '{}' keys
+ jsonRanges = null
+ }
+ return callback(null, jsonRanges)
+ },
+
+ _deserializeRanges(ranges) {
+ if (ranges == null || ranges === '') {
+ return {}
+ } else {
+ return JSON.parse(ranges)
+ }
+ },
+
+ _computeHash(docLines) {
+ // use sha1 checksum of doclines to detect data corruption.
+ //
+ // note: must specify 'utf8' encoding explicitly, as the default is
+ // binary in node < v5
+ return crypto.createHash('sha1').update(docLines, 'utf8').digest('hex')
+ },
+}
+
// decaffeinate's null-safe access helper: apply `transform` to `value`
// when it is neither undefined nor null, otherwise yield undefined.
function __guard__(value, transform) {
  if (typeof value === 'undefined' || value === null) {
    return undefined
  }
  return transform(value)
}
diff --git a/services/document-updater/app/js/ShareJsDB.js b/services/document-updater/app/js/ShareJsDB.js
new file mode 100644
index 0000000000..65e234d085
--- /dev/null
+++ b/services/document-updater/app/js/ShareJsDB.js
@@ -0,0 +1,85 @@
+/* eslint-disable
+ camelcase,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let ShareJsDB
+const Keys = require('./UpdateKeys')
+const RedisManager = require('./RedisManager')
+const Errors = require('./Errors')
+
+// In-memory "database" adapter handed to ShareJS for a single doc.
+// It serves the snapshot/ops from the lines/version captured at construction
+// time (and redis for historic ops), and collects ops ShareJS writes back in
+// this.appliedOps rather than persisting them itself.
+module.exports = ShareJsDB = class ShareJsDB {
+ constructor(project_id, doc_id, lines, version) {
+ this.project_id = project_id
+ this.doc_id = doc_id
+ this.lines = lines
+ this.version = version
+ this.appliedOps = {}
+ // ShareJS calls this detached from the instance, so we need to
+ // bind it to keep our context that can access @appliedOps
+ this.writeOp = this._writeOp.bind(this)
+ }
+
+ // Fetch ops [start, end) for a doc; end == null means "to the latest".
+ // Backed by RedisManager.getPreviousDocOps.
+ getOps(doc_key, start, end, callback) {
+ if (start === end) {
+ return callback(null, [])
+ }
+
+ // In redis, lrange values are inclusive.
+ if (end != null) {
+ end--
+ } else {
+ end = -1
+ }
+
+ const [project_id, doc_id] = Array.from(
+ Keys.splitProjectIdAndDocId(doc_key)
+ )
+ return RedisManager.getPreviousDocOps(doc_id, start, end, callback)
+ }
+
+ // Record an op ShareJS wants to persist; we only accumulate it in
+ // this.appliedOps (keyed by doc_key) for the caller to flush later.
+ _writeOp(doc_key, opData, callback) {
+ if (this.appliedOps[doc_key] == null) {
+ this.appliedOps[doc_key] = []
+ }
+ this.appliedOps[doc_key].push(opData)
+ return callback()
+ }
+
+ // Return the snapshot captured at construction; any other doc_key is an
+ // error since this instance only ever represents one doc.
+ getSnapshot(doc_key, callback) {
+ if (
+ doc_key !== Keys.combineProjectIdAndDocId(this.project_id, this.doc_id)
+ ) {
+ return callback(
+ new Errors.NotFoundError(
+ `unexpected doc_key ${doc_key}, expected ${Keys.combineProjectIdAndDocId(
+ this.project_id,
+ this.doc_id
+ )}`
+ )
+ )
+ } else {
+ return callback(null, {
+ snapshot: this.lines.join('\n'),
+ v: parseInt(this.version, 10),
+ type: 'text',
+ })
+ }
+ }
+
+ // To be able to remove a doc from the ShareJS memory
+ // we need to call Model::delete, which calls this
+ // method on the database. However, we will handle removing
+ // it from Redis ourselves
+ delete(docName, dbMeta, callback) {
+ return callback()
+ }
+}
diff --git a/services/document-updater/app/js/ShareJsUpdateManager.js b/services/document-updater/app/js/ShareJsUpdateManager.js
new file mode 100644
index 0000000000..79b92dfb79
--- /dev/null
+++ b/services/document-updater/app/js/ShareJsUpdateManager.js
@@ -0,0 +1,145 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let ShareJsUpdateManager
+const ShareJsModel = require('./sharejs/server/model')
+const ShareJsDB = require('./ShareJsDB')
+const logger = require('logger-sharelatex')
+const Settings = require('@overleaf/settings')
+const Keys = require('./UpdateKeys')
+const { EventEmitter } = require('events')
+const util = require('util')
+const RealTimeRedisManager = require('./RealTimeRedisManager')
+const crypto = require('crypto')
+const metrics = require('./Metrics')
+const Errors = require('./Errors')
+
+ShareJsModel.prototype = {}
+util.inherits(ShareJsModel, EventEmitter)
+
+const MAX_AGE_OF_OP = 80
+
+// Applies a single client update to a doc via the ShareJS OT engine,
+// using a fresh model per call backed by ShareJsDB.
+module.exports = ShareJsUpdateManager = {
+ // Build a throwaway ShareJS model for one doc, seeded with the given
+ // lines/version and capped by Settings.max_doc_length / MAX_AGE_OF_OP.
+ getNewShareJsModel(project_id, doc_id, lines, version) {
+ const db = new ShareJsDB(project_id, doc_id, lines, version)
+ const model = new ShareJsModel(db, {
+ maxDocLength: Settings.max_doc_length,
+ maximumAge: MAX_AGE_OF_OP,
+ })
+ model.db = db
+ return model
+ },
+
+ // Apply `update` to the doc (lines@version). Calls back with
+ // (error, updatedDocLines, newVersion, appliedOps). Maps specific ShareJS
+ // string errors to metrics/typed errors, enforces the max doc size, and
+ // verifies the client-supplied content hash when no other op intervened.
+ applyUpdate(project_id, doc_id, update, lines, version, callback) {
+ if (callback == null) {
+ callback = function (error, updatedDocLines) {}
+ }
+ logger.log({ project_id, doc_id, update }, 'applying sharejs updates')
+ const jobs = []
+ // record the update version before it is modified
+ const incomingUpdateVersion = update.v
+ // We could use a global model for all docs, but we're hitting issues with the
+ // internal state of ShareJS not being accessible for clearing caches, and
+ // getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee)
+ // This adds a small but hopefully acceptable overhead (~12ms per 1000 updates on
+ // my 2009 MBP).
+ const model = this.getNewShareJsModel(project_id, doc_id, lines, version)
+ this._listenForOps(model)
+ const doc_key = Keys.combineProjectIdAndDocId(project_id, doc_id)
+ return model.applyOp(doc_key, update, function (error) {
+ if (error != null) {
+ // NOTE: ShareJS reports these failures as plain strings, not Errors.
+ if (error === 'Op already submitted') {
+ // Duplicate submission: mark it and rebroadcast rather than fail.
+ metrics.inc('sharejs.already-submitted')
+ logger.warn(
+ { project_id, doc_id, update },
+ 'op has already been submitted'
+ )
+ update.dup = true
+ ShareJsUpdateManager._sendOp(project_id, doc_id, update)
+ } else if (/^Delete component/.test(error)) {
+ metrics.inc('sharejs.delete-mismatch')
+ logger.warn(
+ { project_id, doc_id, update, shareJsErr: error },
+ 'sharejs delete does not match'
+ )
+ error = new Errors.DeleteMismatchError(
+ 'Delete component does not match'
+ )
+ return callback(error)
+ } else {
+ metrics.inc('sharejs.other-error')
+ return callback(error)
+ }
+ }
+ logger.log({ project_id, doc_id, error }, 'applied update')
+ return model.getSnapshot(doc_key, (error, data) => {
+ if (error != null) {
+ return callback(error)
+ }
+ // Reject (without persisting) updates that push the doc over the
+ // configured size limit.
+ const docSizeAfter = data.snapshot.length
+ if (docSizeAfter > Settings.max_doc_length) {
+ const docSizeBefore = lines.join('\n').length
+ const err = new Error(
+ 'blocking persistence of ShareJs update: doc size exceeds limits'
+ )
+ logger.error(
+ { project_id, doc_id, err, docSizeBefore, docSizeAfter },
+ err.message
+ )
+ metrics.inc('sharejs.other-error')
+ const publicError = 'Update takes doc over max doc size'
+ return callback(publicError)
+ }
+ // only check hash when present and no other updates have been applied
+ if (update.hash != null && incomingUpdateVersion === version) {
+ const ourHash = ShareJsUpdateManager._computeHash(data.snapshot)
+ if (ourHash !== update.hash) {
+ metrics.inc('sharejs.hash-fail')
+ return callback(new Error('Invalid hash'))
+ } else {
+ // sampled metric (0.001) to keep the success counter cheap
+ metrics.inc('sharejs.hash-pass', 0.001)
+ }
+ }
+ const docLines = data.snapshot.split(/\r\n|\n|\r/)
+ return callback(
+ null,
+ docLines,
+ data.v,
+ model.db.appliedOps[doc_key] || []
+ )
+ })
+ })
+ },
+
+ // Forward every op the model applies to the real-time channel.
+ _listenForOps(model) {
+ return model.on('applyOp', function (doc_key, opData) {
+ const [project_id, doc_id] = Array.from(
+ Keys.splitProjectIdAndDocId(doc_key)
+ )
+ return ShareJsUpdateManager._sendOp(project_id, doc_id, opData)
+ })
+ },
+
+ // Publish a single op to clients via RealTimeRedisManager.
+ _sendOp(project_id, doc_id, op) {
+ return RealTimeRedisManager.sendData({ project_id, doc_id, op })
+ },
+
+ // Git-blob-style sha1 of the content ("blob <len>\0<content>"), matching
+ // the hash the client sends in update.hash.
+ _computeHash(content) {
+ return crypto
+ .createHash('sha1')
+ .update('blob ' + content.length + '\x00')
+ .update(content, 'utf8')
+ .digest('hex')
+ },
+}
diff --git a/services/document-updater/app/js/SnapshotManager.js b/services/document-updater/app/js/SnapshotManager.js
new file mode 100644
index 0000000000..3fd87b4540
--- /dev/null
+++ b/services/document-updater/app/js/SnapshotManager.js
@@ -0,0 +1,87 @@
+/* eslint-disable
+ camelcase,
+ no-return-assign,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let SnapshotManager
+const { db, ObjectId } = require('./mongodb')
+
+// Persists point-in-time doc snapshots (lines + ranges) to the mongo
+// docSnapshots collection, e.g. before an update collapses tracked ranges.
+module.exports = SnapshotManager = {
+ // Insert one snapshot document. Calls back with an error if the ids are
+ // not valid ObjectIds, otherwise with the insertOne result.
+ recordSnapshot(
+ project_id,
+ doc_id,
+ version,
+ pathname,
+ lines,
+ ranges,
+ callback
+ ) {
+ try {
+ project_id = ObjectId(project_id)
+ doc_id = ObjectId(doc_id)
+ } catch (error) {
+ return callback(error)
+ }
+ db.docSnapshots.insertOne(
+ {
+ project_id,
+ doc_id,
+ version,
+ lines,
+ pathname,
+ ranges: SnapshotManager.jsonRangesToMongo(ranges),
+ ts: new Date(),
+ },
+ callback
+ )
+ },
+ // Suggested indexes:
+ // db.docSnapshots.createIndex({project_id:1, doc_id:1})
+ // db.docSnapshots.createIndex({ts:1},{expiresAfterSeconds: 30*24*3600)) # expires after 30 days
+
+ // Convert JSON-ish ranges (string ids/timestamps) into mongo-native types
+ // (ObjectId / Date) in place; returns the same object, or null for null.
+ jsonRangesToMongo(ranges) {
+ if (ranges == null) {
+ return null
+ }
+
+ const updateMetadata = function (metadata) {
+ if ((metadata != null ? metadata.ts : undefined) != null) {
+ metadata.ts = new Date(metadata.ts)
+ }
+ if ((metadata != null ? metadata.user_id : undefined) != null) {
+ return (metadata.user_id = SnapshotManager._safeObjectId(
+ metadata.user_id
+ ))
+ }
+ }
+
+ for (const change of Array.from(ranges.changes || [])) {
+ change.id = SnapshotManager._safeObjectId(change.id)
+ updateMetadata(change.metadata)
+ }
+ for (const comment of Array.from(ranges.comments || [])) {
+ comment.id = SnapshotManager._safeObjectId(comment.id)
+ // comment.op.t is a thread id; convert it too when present
+ if ((comment.op != null ? comment.op.t : undefined) != null) {
+ comment.op.t = SnapshotManager._safeObjectId(comment.op.t)
+ }
+ updateMetadata(comment.metadata)
+ }
+ return ranges
+ },
+
+ // Best-effort ObjectId conversion: fall back to the raw value rather than
+ // failing the whole snapshot on a malformed id.
+ _safeObjectId(data) {
+ try {
+ return ObjectId(data)
+ } catch (error) {
+ return data
+ }
+ },
+}
diff --git a/services/document-updater/app/js/UpdateKeys.js b/services/document-updater/app/js/UpdateKeys.js
new file mode 100644
index 0000000000..67e85e65c9
--- /dev/null
+++ b/services/document-updater/app/js/UpdateKeys.js
@@ -0,0 +1,13 @@
+/* eslint-disable
+ camelcase,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+// Helpers for the composite "project_id:doc_id" key used by ShareJS.
+module.exports = {
+ // Build the "project_id:doc_id" doc key.
+ combineProjectIdAndDocId(project_id, doc_id) {
+ return `${project_id}:${doc_id}`
+ },
+ // Split a doc key back into [project_id, doc_id].
+ splitProjectIdAndDocId(project_and_doc_id) {
+ return project_and_doc_id.split(':')
+ },
+}
diff --git a/services/document-updater/app/js/UpdateManager.js b/services/document-updater/app/js/UpdateManager.js
new file mode 100644
index 0000000000..bf30db74b7
--- /dev/null
+++ b/services/document-updater/app/js/UpdateManager.js
@@ -0,0 +1,417 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS201: Simplify complex destructure assignments
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let UpdateManager
+const LockManager = require('./LockManager')
+const RedisManager = require('./RedisManager')
+const RealTimeRedisManager = require('./RealTimeRedisManager')
+const ShareJsUpdateManager = require('./ShareJsUpdateManager')
+const HistoryManager = require('./HistoryManager')
+const Settings = require('@overleaf/settings')
+const _ = require('lodash')
+const async = require('async')
+const logger = require('logger-sharelatex')
+const Metrics = require('./Metrics')
+const Errors = require('./Errors')
+const DocumentManager = require('./DocumentManager')
+const RangesManager = require('./RangesManager')
+const SnapshotManager = require('./SnapshotManager')
+const Profiler = require('./Profiler')
+
+module.exports = UpdateManager = {
+ // Drain and apply all pending updates for a doc (no locking here —
+ // callers are expected to hold the doc lock). Timed for metrics.
+ processOutstandingUpdates(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const timer = new Metrics.Timer('updateManager.processOutstandingUpdates')
+ return UpdateManager.fetchAndApplyUpdates(
+ project_id,
+ doc_id,
+ function (error) {
+ timer.done()
+ if (error != null) {
+ return callback(error)
+ }
+ return callback()
+ }
+ )
+ },
+
+ // Like processOutstandingUpdates, but acquires the doc lock first.
+ // A no-op (successful callback) when the lock is already held elsewhere;
+ // re-checks the queue after releasing the lock in case updates arrived.
+ processOutstandingUpdatesWithLock(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const profile = new Profiler('processOutstandingUpdatesWithLock', {
+ project_id,
+ doc_id,
+ })
+ return LockManager.tryLock(doc_id, (error, gotLock, lockValue) => {
+ if (error != null) {
+ return callback(error)
+ }
+ if (!gotLock) {
+ return callback()
+ }
+ profile.log('tryLock')
+ return UpdateManager.processOutstandingUpdates(
+ project_id,
+ doc_id,
+ function (error) {
+ if (error != null) {
+ // release the lock before reporting the error
+ return UpdateManager._handleErrorInsideLock(
+ doc_id,
+ lockValue,
+ error,
+ callback
+ )
+ }
+ profile.log('processOutstandingUpdates')
+ return LockManager.releaseLock(doc_id, lockValue, error => {
+ if (error != null) {
+ return callback(error)
+ }
+ profile.log('releaseLock').end()
+ return UpdateManager.continueProcessingUpdatesWithLock(
+ project_id,
+ doc_id,
+ callback
+ )
+ })
+ }
+ )
+ })
+ },
+
+ // If more updates queued up while we held the lock, go around again;
+ // otherwise we're done.
+ continueProcessingUpdatesWithLock(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return RealTimeRedisManager.getUpdatesLength(doc_id, (error, length) => {
+ if (error != null) {
+ return callback(error)
+ }
+ if (length > 0) {
+ return UpdateManager.processOutstandingUpdatesWithLock(
+ project_id,
+ doc_id,
+ callback
+ )
+ } else {
+ return callback()
+ }
+ })
+ },
+
+ // Pop all pending updates for the doc from redis and apply them one at a
+ // time, in order, via UpdateManager.applyUpdate.
+ fetchAndApplyUpdates(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const profile = new Profiler('fetchAndApplyUpdates', { project_id, doc_id })
+ return RealTimeRedisManager.getPendingUpdatesForDoc(
+ doc_id,
+ (error, updates) => {
+ if (error != null) {
+ return callback(error)
+ }
+ logger.log(
+ { project_id, doc_id, count: updates.length },
+ 'processing updates'
+ )
+ if (updates.length === 0) {
+ return callback()
+ }
+ profile.log('getPendingUpdatesForDoc')
+ const doUpdate = (update, cb) =>
+ UpdateManager.applyUpdate(project_id, doc_id, update, function (err) {
+ profile.log('applyUpdate')
+ return cb(err)
+ })
+ const finalCallback = function (err) {
+ profile.log('async done').end()
+ return callback(err)
+ }
+ // eachSeries: updates must be applied strictly in order
+ return async.eachSeries(updates, doUpdate, finalCallback)
+ }
+ )
+ },
+
+ // Apply one update end-to-end: sanitize it, load the doc, run it through
+ // ShareJS, update tracked ranges, persist to redis, and flush ops to the
+ // history systems. Any error is also broadcast to clients via redis
+ // before being passed to the caller.
+ applyUpdate(project_id, doc_id, update, _callback) {
+ if (_callback == null) {
+ _callback = function (error) {}
+ }
+ const callback = function (error) {
+ if (error != null) {
+ // notify clients about the failed update before reporting it
+ RealTimeRedisManager.sendData({
+ project_id,
+ doc_id,
+ error: error.message || error,
+ })
+ profile.log('sendData')
+ }
+ profile.end()
+ return _callback(error)
+ }
+
+ var profile = new Profiler('applyUpdate', { project_id, doc_id })
+ UpdateManager._sanitizeUpdate(update)
+ profile.log('sanitizeUpdate')
+ return DocumentManager.getDoc(
+ project_id,
+ doc_id,
+ function (error, lines, version, ranges, pathname, projectHistoryId) {
+ profile.log('getDoc')
+ if (error != null) {
+ return callback(error)
+ }
+ if (lines == null || version == null) {
+ return callback(
+ new Errors.NotFoundError(`document not found: ${doc_id}`)
+ )
+ }
+ const previousVersion = version
+ return ShareJsUpdateManager.applyUpdate(
+ project_id,
+ doc_id,
+ update,
+ lines,
+ version,
+ function (error, updatedDocLines, version, appliedOps) {
+ profile.log('sharejs.applyUpdate')
+ if (error != null) {
+ return callback(error)
+ }
+ return RangesManager.applyUpdate(
+ project_id,
+ doc_id,
+ ranges,
+ appliedOps,
+ updatedDocLines,
+ function (error, new_ranges, ranges_were_collapsed) {
+ UpdateManager._addProjectHistoryMetadataToOps(
+ appliedOps,
+ pathname,
+ projectHistoryId,
+ lines
+ )
+ profile.log('RangesManager.applyUpdate')
+ if (error != null) {
+ return callback(error)
+ }
+ return RedisManager.updateDocument(
+ project_id,
+ doc_id,
+ updatedDocLines,
+ version,
+ appliedOps,
+ new_ranges,
+ update.meta,
+ function (error, doc_ops_length, project_ops_length) {
+ profile.log('RedisManager.updateDocument')
+ if (error != null) {
+ return callback(error)
+ }
+ return HistoryManager.recordAndFlushHistoryOps(
+ project_id,
+ doc_id,
+ appliedOps,
+ doc_ops_length,
+ project_ops_length,
+ function (error) {
+ profile.log('recordAndFlushHistoryOps')
+ if (error != null) {
+ return callback(error)
+ }
+ if (ranges_were_collapsed) {
+ logger.log(
+ {
+ project_id,
+ doc_id,
+ previousVersion,
+ lines,
+ ranges,
+ update,
+ },
+ 'update collapsed some ranges, snapshotting previous content'
+ )
+ // Do this last, since it's a mongo call, and so potentially longest running
+ // If it overruns the lock, it's ok, since all of our redis work is done
+ return SnapshotManager.recordSnapshot(
+ project_id,
+ doc_id,
+ previousVersion,
+ pathname,
+ lines,
+ ranges,
+ function (error) {
+ if (error != null) {
+ logger.error(
+ {
+ err: error,
+ project_id,
+ doc_id,
+ version,
+ lines,
+ ranges,
+ },
+ 'error recording snapshot'
+ )
+ return callback(error)
+ } else {
+ return callback()
+ }
+ }
+ )
+ } else {
+ return callback()
+ }
+ }
+ )
+ }
+ )
+ }
+ )
+ }
+ )
+ }
+ )
+ },
+
+ // Run `method(project_id, doc_id, ...args, cb)` while holding the doc
+ // lock, first draining any pending updates. The trailing element of
+ // `rest` is the caller's callback; after releasing the lock we also kick
+ // off another round of update processing (fire-and-forget).
+ lockUpdatesAndDo(method, project_id, doc_id, ...rest) {
+ const adjustedLength = Math.max(rest.length, 1)
+ const args = rest.slice(0, adjustedLength - 1)
+ const callback = rest[adjustedLength - 1]
+ const profile = new Profiler('lockUpdatesAndDo', { project_id, doc_id })
+ return LockManager.getLock(doc_id, function (error, lockValue) {
+ profile.log('getLock')
+ if (error != null) {
+ return callback(error)
+ }
+ return UpdateManager.processOutstandingUpdates(
+ project_id,
+ doc_id,
+ function (error) {
+ if (error != null) {
+ return UpdateManager._handleErrorInsideLock(
+ doc_id,
+ lockValue,
+ error,
+ callback
+ )
+ }
+ profile.log('processOutstandingUpdates')
+ return method(
+ project_id,
+ doc_id,
+ ...Array.from(args),
+ function (error, ...response_args) {
+ if (error != null) {
+ return UpdateManager._handleErrorInsideLock(
+ doc_id,
+ lockValue,
+ error,
+ callback
+ )
+ }
+ profile.log('method')
+ return LockManager.releaseLock(
+ doc_id,
+ lockValue,
+ function (error) {
+ if (error != null) {
+ return callback(error)
+ }
+ profile.log('releaseLock').end()
+ callback(null, ...Array.from(response_args))
+ // We held the lock for a while so updates might have queued up
+ return UpdateManager.continueProcessingUpdatesWithLock(
+ project_id,
+ doc_id
+ )
+ }
+ )
+ }
+ )
+ }
+ )
+ })
+ },
+
+ // Release the lock and then surface the original error; any error from
+ // releaseLock itself is intentionally discarded.
+ _handleErrorInsideLock(doc_id, lockValue, original_error, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return LockManager.releaseLock(doc_id, lockValue, lock_error =>
+ callback(original_error)
+ )
+ },
+
+ // Strip lone UTF-16 surrogate halves from inserted text (mutates and
+ // returns `update`), replacing them with U+FFFD.
+ _sanitizeUpdate(update) {
+ // In Javascript, characters are 16-bits wide. It does not understand surrogates as characters.
+ //
+ // From Wikipedia (http://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane):
+ // "The High Surrogates (U+D800–U+DBFF) and Low Surrogate (U+DC00–U+DFFF) codes are reserved
+ // for encoding non-BMP characters in UTF-16 by using a pair of 16-bit codes: one High Surrogate
+ // and one Low Surrogate. A single surrogate code point will never be assigned a character.""
+ //
+ // The main offender seems to be \uD835 as a stand alone character, which would be the first
+ // 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm).
+ // Something must be going on client side that is screwing up the encoding and splitting the
+ // two 16-bit characters so that \uD835 is standalone.
+ for (const op of Array.from(update.op || [])) {
+ if (op.i != null) {
+ // Replace high and low surrogate characters with 'replacement character' (\uFFFD)
+ op.i = op.i.replace(/[\uD800-\uDFFF]/g, '\uFFFD')
+ }
+ }
+ return update
+ },
+
+ // Annotate each update with project-history metadata: projectHistoryId,
+ // pathname, and the doc length (in characters, including newlines) as it
+ // was just before that update applied. `lines` is the pre-update content.
+ _addProjectHistoryMetadataToOps(updates, pathname, projectHistoryId, lines) {
+ let doc_length = _.reduce(lines, (chars, line) => chars + line.length, 0)
+ doc_length += lines.length - 1 // count newline characters
+ return updates.forEach(function (update) {
+ update.projectHistoryId = projectHistoryId
+ if (!update.meta) {
+ update.meta = {}
+ }
+ update.meta.pathname = pathname
+ update.meta.doc_length = doc_length
+ // Each update may contain multiple ops, i.e.
+ // [{
+ // ops: [{i: "foo", p: 4}, {d: "bar", p:8}]
+ // }, {
+ // ops: [{d: "baz", p: 40}, {i: "qux", p:8}]
+ // }]
+ // We want to include the doc_length at the start of each update,
+ // before it's ops are applied. However, we need to track any
+ // changes to it for the next update.
+ return (() => {
+ const result = []
+ for (const op of Array.from(update.op)) {
+ if (op.i != null) {
+ doc_length += op.i.length
+ }
+ if (op.d != null) {
+ result.push((doc_length -= op.d.length))
+ } else {
+ result.push(undefined)
+ }
+ }
+ return result
+ })()
+ })
+ },
+}
diff --git a/services/document-updater/app/js/mongodb.js b/services/document-updater/app/js/mongodb.js
new file mode 100644
index 0000000000..b265e99426
--- /dev/null
+++ b/services/document-updater/app/js/mongodb.js
@@ -0,0 +1,37 @@
+const Settings = require('@overleaf/settings')
+const { MongoClient, ObjectId } = require('mongodb')
+
+// Single shared connection promise; all helpers await it lazily.
+const clientPromise = MongoClient.connect(
+ Settings.mongo.url,
+ Settings.mongo.options
+)
+
+// Throw if the mongo server does not answer a ping.
+async function healthCheck() {
+ const internalDb = (await clientPromise).db()
+ const res = await internalDb.command({ ping: 1 })
+ if (!res.ok) {
+ throw new Error('failed mongo ping')
+ }
+}
+
+let setupDbPromise
+// Idempotent: the first call kicks off setupDb(); later calls await the
+// same promise, so collections are only wired up once.
+async function waitForDb() {
+ if (!setupDbPromise) {
+ setupDbPromise = setupDb()
+ }
+ await setupDbPromise
+}
+
+// Collection handles are attached to this object by setupDb(); callers must
+// waitForDb() before using them.
+const db = {}
+async function setupDb() {
+ const internalDb = (await clientPromise).db()
+
+ db.docSnapshots = internalDb.collection('docSnapshots')
+}
+
+module.exports = {
+ db,
+ ObjectId,
+ // callback-style wrapper for callers not using promises
+ healthCheck: require('util').callbackify(healthCheck),
+ waitForDb,
+}
diff --git a/services/document-updater/app/js/sharejs/README.md b/services/document-updater/app/js/sharejs/README.md
new file mode 100644
index 0000000000..22e68842dd
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/README.md
@@ -0,0 +1,48 @@
+This directory contains all the operational transform code. Each file defines a type.
+
+Most of the types in here are for testing or demonstration. The only types which are sent to the webclient
+are `text` and `json`.
+
+
+# An OT type
+
+All OT types have the following fields:
+
+`name`: _(string)_ Name of the type. Should match the filename.
+`create() -> snapshot`: Function which creates and returns a new document snapshot
+
+`apply(snapshot, op) -> snapshot`: A function which creates a new document snapshot with the op applied
+`transform(op1, op2, side) -> op1'`: OT transform function.
+
+Given op1, op2, `apply(s, op2, transform(op1, op2, 'left')) == apply(s, op1, transform(op2, op1, 'right'))`.
+
+Transform and apply must never modify their arguments.
+
+
+Optional properties:
+
+`tp2`: _(bool)_ True if the transform function supports TP2. This allows p2p architectures to work.
+`compose(op1, op2) -> op`: Create and return a new op which has the same effect as op1 + op2.
+`serialize(snapshot) -> JSON object`: Serialize a document to something we can JSON.stringify()
+`deserialize(object) -> snapshot`: Deserialize a JSON object into the document's internal snapshot format
+`prune(op1', op2, side) -> op1`: Inverse transform function. Only required for TP2 types.
+`normalize(op) -> op`: Fix up an op to make it valid. Eg, remove skips of size zero.
+`api`: _(object)_ Set of helper methods which will be mixed in to the client document object for manipulating documents. See below.
+
+
+# Examples
+
+`count` and `simple` are two trivial OT type definitions if you want to take a look. JSON defines
+the ot-for-JSON type (see the wiki for documentation) and all the text types define different text
+implementations. (I still have no idea which one I like the most, and they're fun to write!)
+
+
+# API
+
+Types can also define API functions. These methods are mixed into the client's Doc object when a document is created.
+You can use them to help construct ops programmatically (so users don't need to understand how ops are structured).
+
+For example, the three text types defined here (text, text-composable and text-tp2) all provide the text API, supplying
+`.insert()`, `.del()`, `.getLength` and `.getText` methods.
+
+See text-api.coffee for an example.
diff --git a/services/document-updater/app/js/sharejs/count.js b/services/document-updater/app/js/sharejs/count.js
new file mode 100644
index 0000000000..246f6b7985
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/count.js
@@ -0,0 +1,37 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment]
+
+exports.name = 'count'
+// Snapshot is a plain number, starting at 1.
+exports.create = () => 1
+
+// Apply [expectedSnapshot, increment] to the snapshot, asserting the
+// expected value matches.
+exports.apply = function (snapshot, op) {
+ const [v, inc] = Array.from(op)
+ if (snapshot !== v) {
+ throw new Error(`Op ${v} != snapshot ${snapshot}`)
+ }
+ return snapshot + inc
+}
+
+// transform op1 by op2. Return transformed version of op1.
+exports.transform = function (op1, op2) {
+ if (op1[0] !== op2[0]) {
+ throw new Error(`Op1 ${op1[0]} != op2 ${op2[0]}`)
+ }
+ return [op1[0] + op2[1], op1[1]]
+}
+
+// Compose two consecutive ops into one with the combined increment.
+exports.compose = function (op1, op2) {
+ if (op1[0] + op1[1] !== op2[0]) {
+ throw new Error(`Op1 ${op1} + 1 != op2 ${op2}`)
+ }
+ return [op1[0], op1[1] + op2[1]]
+}
+
+// For fuzz testing: returns [op, resulting doc].
+exports.generateRandomOp = doc => [[doc, 1], doc + 1]
diff --git a/services/document-updater/app/js/sharejs/helpers.js b/services/document-updater/app/js/sharejs/helpers.js
new file mode 100644
index 0000000000..b1ab3c2a26
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/helpers.js
@@ -0,0 +1,116 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// These methods let you build a transform function from a transformComponent function
+// for OT types like text and JSON in which operations are lists of components
+// and transforming them requires N^2 work.
+
+// Add transform and transformX functions for an OT type which has transformComponent defined.
+// transformComponent(destination array, component, other component, side)
+let bootstrapTransform
+// Build type.transform / type.transformX for an OT type from its
+// component-level transform, validity check, and append helpers.
+exports._bt = bootstrapTransform = function (
+ type,
+ transformComponent,
+ checkValidOp,
+ append
+) {
+ let transformX
+ // Transform left by right into destLeft, and right by left into destRight.
+ const transformComponentX = function (left, right, destLeft, destRight) {
+ transformComponent(destLeft, left, right, 'left')
+ return transformComponent(destRight, right, left, 'right')
+ }
+
+ // Transform leftOp and rightOp past each other. Returns [leftOp', rightOp'].
+ type.transformX =
+ type.transformX =
+ transformX =
+ function (leftOp, rightOp) {
+ checkValidOp(leftOp)
+ checkValidOp(rightOp)
+
+ const newRightOp = []
+
+ for (let rightComponent of Array.from(rightOp)) {
+ // Generate newLeftOp by composing leftOp by rightComponent
+ const newLeftOp = []
+
+ let k = 0
+ while (k < leftOp.length) {
+ var l
+ const nextC = []
+ transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC)
+ k++
+
+ if (nextC.length === 1) {
+ rightComponent = nextC[0]
+ } else if (nextC.length === 0) {
+ // rightComponent was swallowed; copy the rest of leftOp through.
+ for (l of Array.from(leftOp.slice(k))) {
+ append(newLeftOp, l)
+ }
+ rightComponent = null
+ break
+ } else {
+ // Recurse.
+ const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC))
+ for (l of Array.from(l_)) {
+ append(newLeftOp, l)
+ }
+ for (const r of Array.from(r_)) {
+ append(newRightOp, r)
+ }
+ rightComponent = null
+ break
+ }
+ }
+
+ if (rightComponent != null) {
+ append(newRightOp, rightComponent)
+ }
+ leftOp = newLeftOp
+ }
+
+ return [leftOp, newRightOp]
+ }
+
+ // Transforms op with specified type ('left' or 'right') by otherOp.
+ return (type.transform = type.transform =
+ function (op, otherOp, type) {
+ let _
+ if (type !== 'left' && type !== 'right') {
+ throw new Error("type must be 'left' or 'right'")
+ }
+
+ if (otherOp.length === 0) {
+ return op
+ }
+
+ // TODO: Benchmark with and without this line. I _think_ it'll make a big difference...?
+ if (op.length === 1 && otherOp.length === 1) {
+ return transformComponent([], op[0], otherOp[0], type)
+ }
+
+ if (type === 'left') {
+ let left
+ ;[left, _] = Array.from(transformX(op, otherOp))
+ return left
+ } else {
+ let right
+ ;[_, right] = Array.from(transformX(otherOp, op))
+ return right
+ }
+ })
+}
+
+// Only export for the server build; the browser bundle (WEB) inlines this.
+if (typeof WEB === 'undefined') {
+ exports.bootstrapTransform = bootstrapTransform
+}
diff --git a/services/document-updater/app/js/sharejs/index.js b/services/document-updater/app/js/sharejs/index.js
new file mode 100644
index 0000000000..7e3d6bbf26
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/index.js
@@ -0,0 +1,25 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+
+// Require an OT type module and expose it under its declared name; also
+// load its optional companion "<file>-api" module, silently ignoring a
+// missing one (not every type ships an API mixin).
+const register = function (file) {
+ const type = require(file)
+ exports[type.name] = type
+ try {
+ return require(`${file}-api`)
+ } catch (error) {}
+}
+
+// Import all the built-in types.
+register('./simple')
+register('./count')
+
+register('./text')
+register('./text-composable')
+register('./text-tp2')
+
+register('./json')
diff --git a/services/document-updater/app/js/sharejs/json-api.js b/services/document-updater/app/js/sharejs/json-api.js
new file mode 100644
index 0000000000..f429f2b397
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/json-api.js
@@ -0,0 +1,357 @@
+/* eslint-disable
+ camelcase,
+ no-undef,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// API for JSON OT
+
// Server-side: load the json OT type directly. In the browser build the
// type object is reached through the shared exports instead (see bottom of
// json.js), so `json` stays undefined there.
let json
if (typeof WEB === 'undefined') {
  json = require('./json')
}

// Browser build: wrap the existing extendDoc so API extensions registered
// later are mirrored onto SubDoc instances as well as the main document.
if (typeof WEB !== 'undefined' && WEB !== null) {
  const { extendDoc } = exports
  exports.extendDoc = function (name, fn) {
    SubDoc.prototype[name] = fn
    return extendDoc(name, fn)
  }
}
+
// Normalise a rest-args path: callers may pass either a single array
// (which *is* the path) or the path segments as separate arguments.
const depath = function (path) {
  const isWrappedArray = path.length === 1 && path[0].constructor === Array
  return isWrappedArray ? path[0] : path
}
+
// A lightweight view onto one subtree of a JSON document. Every call is
// forwarded to the owning doc with this view's path prefixed.
class SubDoc {
  constructor(doc, path) {
    this.doc = doc
    this.path = path
  }

  // Narrow the view to a deeper path (relative to this one).
  at(...path) {
    const childPath = this.path.concat(depath(path))
    return this.doc.at(childPath)
  }

  // Read the current value under this view.
  get() {
    return this.doc.getAt(this.path)
  }

  // Set/replace the value here — for objects and lists.
  set(value, cb) {
    return this.doc.setAt(this.path, value, cb)
  }

  // Insert `value` at `pos` — for strings and lists.
  insert(pos, value, cb) {
    return this.doc.insertAt(this.path, pos, value, cb)
  }

  // Delete `length` characters starting at `pos` — for strings.
  // (Note the owning doc takes length before pos.)
  del(pos, length, cb) {
    return this.doc.deleteTextAt(this.path, length, pos, cb)
  }

  // Remove the element at this path — for objects and lists.
  remove(cb) {
    return this.doc.removeAt(this.path, cb)
  }

  // Append to the end of the current value.
  push(value, cb) {
    const end = this.get().length
    return this.insert(end, value, cb)
  }

  // Move a list element from index `from` to index `to`.
  move(from, to, cb) {
    return this.doc.moveAt(this.path, from, to, cb)
  }

  // Add `amount` to the number at this path.
  add(amount, cb) {
    return this.doc.addAt(this.path, amount, cb)
  }

  // Listen for `event` at this path; returns a listener handle.
  on(event, cb) {
    return this.doc.addListener(this.path, event, cb)
  }

  // Remove a listener handle previously returned by `on`.
  removeListener(l) {
    return this.doc.removeListener(l)
  }

  // text API compatibility
  getLength() {
    return this.get().length
  }

  getText() {
    return this.get()
  }
}
+
// Walk `path` into `snapshot`, returning the parent element plus the final
// key so that `elem[key]` addresses the target. The snapshot is wrapped in
// a `{data: ...}` container, which makes the empty path addressable too.
// Throws 'bad path' if any intermediate element is missing.
const traverse = function (snapshot, path) {
  let elem = { data: snapshot }
  let key = 'data'
  for (const segment of path) {
    elem = elem[key]
    key = segment
    if (typeof elem === 'undefined') {
      throw new Error('bad path')
    }
  }
  return { elem, key }
}
+
// Strict element-wise equality of two paths.
const pathEquals = function (p1, p2) {
  return p1.length === p2.length && p1.every((segment, i) => segment === p2[i])
}
+
// Mixin installed onto document objects (`this` is the doc) providing the
// JSON-type API: path-addressed reads, mutators that build and submit OT
// components, and per-path event listeners that track structural changes.
json.api = {
  provides: { json: true },

  // Return a SubDoc view rooted at the given path.
  at(...path) {
    return new SubDoc(this, depath(path))
  },

  // Whole-document accessors.
  get() {
    return this.snapshot
  },
  set(value, cb) {
    return this.setAt([], value, cb)
  },

  // Read the value at `path` in the current snapshot.
  getAt(path) {
    const { elem, key } = traverse(this.snapshot, path)
    return elem[key]
  },

  // Insert or replace the value at `path`: li/ld for list parents,
  // oi/od for object parents.
  setAt(path, value, cb) {
    const { elem, key } = traverse(this.snapshot, path)
    const op = { p: path }
    if (elem.constructor === Array) {
      op.li = value
      if (typeof elem[key] !== 'undefined') {
        op.ld = elem[key]
      }
    } else if (typeof elem === 'object') {
      op.oi = value
      if (typeof elem[key] !== 'undefined') {
        op.od = elem[key]
      }
    } else {
      throw new Error('bad path')
    }
    return this.submitOp([op], cb)
  },

  // Delete the element at `path` (ld for lists, od for objects).
  removeAt(path, cb) {
    const { elem, key } = traverse(this.snapshot, path)
    if (typeof elem[key] === 'undefined') {
      throw new Error('no element at that path')
    }
    const op = { p: path }
    if (elem.constructor === Array) {
      op.ld = elem[key]
    } else if (typeof elem === 'object') {
      op.od = elem[key]
    } else {
      throw new Error('bad path')
    }
    return this.submitOp([op], cb)
  },

  // Insert `value` at position `pos` inside the list (li) or string (si)
  // at `path`. NOTE(review): if the target is neither, the component is
  // submitted with only a path — confirm this is intended upstream.
  insertAt(path, pos, value, cb) {
    const { elem, key } = traverse(this.snapshot, path)
    const op = { p: path.concat(pos) }
    if (elem[key].constructor === Array) {
      op.li = value
    } else if (typeof elem[key] === 'string') {
      op.si = value
    }
    return this.submitOp([op], cb)
  },

  // Move the list element at index `from` to index `to`.
  moveAt(path, from, to, cb) {
    const op = [{ p: path.concat(from), lm: to }]
    return this.submitOp(op, cb)
  },

  // Add `amount` to the number at `path`.
  addAt(path, amount, cb) {
    const op = [{ p: path, na: amount }]
    return this.submitOp(op, cb)
  },

  // Delete `length` characters at `pos` from the string at `path`; the
  // deleted text is captured into the op so it can be inverted/validated.
  deleteTextAt(path, length, pos, cb) {
    const { elem, key } = traverse(this.snapshot, path)
    const op = [{ p: path.concat(pos), sd: elem[key].slice(pos, pos + length) }]
    return this.submitOp(op, cb)
  },

  // Register a listener for `event` at `path`; returns a handle usable
  // with removeListener.
  addListener(path, event, cb) {
    const l = { path, event, cb }
    this._listeners.push(l)
    return l
  },
  removeListener(l) {
    const i = this._listeners.indexOf(l)
    if (i < 0) {
      return false
    }
    this._listeners.splice(i, 1)
    return true
  },
  // Hooked up when the doc is made live: sets up the listener list and two
  // handlers — 'change' rebases listener paths past structural edits, and
  // 'remoteop' dispatches events to listeners whose paths match.
  _register() {
    this._listeners = []
    this.on('change', function (op) {
      return (() => {
        const result = []
        for (const c of Array.from(op)) {
          var i
          if (c.na !== undefined || c.si !== undefined || c.sd !== undefined) {
            // no change to structure
            continue
          }
          var to_remove = []
          for (i = 0; i < this._listeners.length; i++) {
            // Transform a dummy op by the incoming op to work out what
            // should happen to the listener.
            const l = this._listeners[i]
            const dummy = { p: l.path, na: 0 }
            const xformed = this.type.transformComponent([], dummy, c, 'left')
            if (xformed.length === 0) {
              // The op was transformed to noop, so we should delete the listener.
              to_remove.push(i)
            } else if (xformed.length === 1) {
              // The op remained, so grab its new path into the listener.
              l.path = xformed[0].p
            } else {
              throw new Error(
                "Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components."
              )
            }
          }
          // Remove from highest index down so earlier splices don't shift
          // the indices still pending removal.
          to_remove.sort((a, b) => b - a)
          result.push(
            (() => {
              const result1 = []
              for (i of Array.from(to_remove)) {
                result1.push(this._listeners.splice(i, 1))
              }
              return result1
            })()
          )
        }
        return result
      })()
    })
    return this.on('remoteop', function (op) {
      return (() => {
        const result = []
        for (var c of Array.from(op)) {
          // For na the listener path is the full component path; for other
          // component kinds the last segment is the position being edited.
          var match_path =
            c.na === undefined ? c.p.slice(0, c.p.length - 1) : c.p
          result.push(
            (() => {
              const result1 = []
              for (const { path, event, cb } of Array.from(this._listeners)) {
                var common
                if (pathEquals(path, match_path)) {
                  switch (event) {
                    case 'insert':
                      if (c.li !== undefined && c.ld === undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.li))
                      } else if (c.oi !== undefined && c.od === undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.oi))
                      } else if (c.si !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.si))
                      } else {
                        result1.push(undefined)
                      }
                      break
                    case 'delete':
                      if (c.li === undefined && c.ld !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.ld))
                      } else if (c.oi === undefined && c.od !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.od))
                      } else if (c.sd !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.sd))
                      } else {
                        result1.push(undefined)
                      }
                      break
                    case 'replace':
                      if (c.li !== undefined && c.ld !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.ld, c.li))
                      } else if (c.oi !== undefined && c.od !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.od, c.oi))
                      } else {
                        result1.push(undefined)
                      }
                      break
                    case 'move':
                      if (c.lm !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.lm))
                      } else {
                        result1.push(undefined)
                      }
                      break
                    case 'add':
                      if (c.na !== undefined) {
                        result1.push(cb(c.na))
                      } else {
                        result1.push(undefined)
                      }
                      break
                    default:
                      result1.push(undefined)
                  }
                } else if (
                  (common = this.type.commonPath(match_path, path)) != null
                ) {
                  // Not an exact match, but the paths share an ancestor:
                  // fire 'child op' listeners with the path relative to
                  // the listener.
                  if (event === 'child op') {
                    if (
                      match_path.length === path.length &&
                      path.length === common
                    ) {
                      throw new Error(
                        "paths match length and have commonality, but aren't equal?"
                      )
                    }
                    const child_path = c.p.slice(common + 1)
                    result1.push(cb(child_path, c))
                  } else {
                    result1.push(undefined)
                  }
                } else {
                  result1.push(undefined)
                }
              }
              return result1
            })()
          )
        }
        return result
      })()
    })
  },
}
diff --git a/services/document-updater/app/js/sharejs/json.js b/services/document-updater/app/js/sharejs/json.js
new file mode 100644
index 0000000000..14c3cbb519
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/json.js
@@ -0,0 +1,630 @@
+/* eslint-disable
+ no-return-assign,
+ no-undef,
+ no-useless-catch,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// This is the implementation of the JSON OT type.
+//
+// Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations
+
// The text OT type is needed to transform embedded string (si/sd) ops.
// Browser build: read it off the shared exports; Node: require it.
let text
if (typeof WEB !== 'undefined' && WEB !== null) {
  ;({ text } = exports.types)
} else {
  text = require('./text')
}
+
// The JSON OT type object. An op is an array of components; each component
// has a path `p` plus one of: si/sd (string insert/delete), oi/od (object
// insert/delete), li/ld (list insert/delete), lm (list move), na (number add).
const json = {}

json.name = 'json'

// A freshly created JSON document is null.
json.create = () => null

// Build the inverse of a single component: swap each insert with its
// matching delete (and vice versa), negate number-adds, and reverse moves.
json.invertComponent = function (c) {
  const inverted = { p: c.p }
  if (c.si !== undefined) {
    inverted.sd = c.si
  }
  if (c.sd !== undefined) {
    inverted.si = c.sd
  }
  if (c.oi !== undefined) {
    inverted.od = c.oi
  }
  if (c.od !== undefined) {
    inverted.oi = c.od
  }
  if (c.li !== undefined) {
    inverted.ld = c.li
  }
  if (c.ld !== undefined) {
    inverted.li = c.ld
  }
  if (c.na !== undefined) {
    inverted.na = -c.na
  }
  if (c.lm !== undefined) {
    // A move is undone by moving back from the destination to the source.
    inverted.lm = c.p[c.p.length - 1]
    inverted.p = c.p.slice(0, c.p.length - 1).concat([c.lm])
  }
  return inverted
}

// Invert a whole op: invert each component, in reverse order.
json.invert = op => op.slice().reverse().map(json.invertComponent)

// Validation hook — currently a no-op.
json.checkValidOp = function (op) {}

const isArray = o => Object.prototype.toString.call(o) === '[object Array]'

// Assert that `elem` is a list; used before applying li/ld/lm components.
json.checkList = function (elem) {
  if (!isArray(elem)) {
    throw new Error('Referenced element not a list')
  }
}

// Assert that `elem` is a plain object; used before applying oi/od.
json.checkObj = function (elem) {
  if (elem.constructor !== Object) {
    throw new Error(
      `Referenced element not an object (it was ${JSON.stringify(elem)})`
    )
  }
}
+
// Apply `op` to `snapshot` and return the resulting document. Both the op
// and the snapshot are deep-cloned first, so the caller's values are never
// mutated. Throws if a component does not match the document's structure.
json.apply = function (snapshot, op) {
  json.checkValidOp(op)
  op = clone(op)

  // Wrap the snapshot so components with an empty path can address the root.
  const container = { data: clone(snapshot) }

  try {
    for (let i = 0; i < op.length; i++) {
      const c = op[i]
      let parent = null
      let parentkey = null
      let elem = container
      let key = 'data'

      // Walk the path, tracking the parent so string ops can replace the
      // whole string value in place.
      for (const p of Array.from(c.p)) {
        parent = elem
        parentkey = key
        elem = elem[key]
        key = p

        if (parent == null) {
          throw new Error('Path invalid')
        }
      }

      if (c.na !== undefined) {
        // Number add
        if (typeof elem[key] !== 'number') {
          throw new Error('Referenced element not a number')
        }
        elem[key] += c.na
      } else if (c.si !== undefined) {
        // String insert
        if (typeof elem !== 'string') {
          throw new Error(
            `Referenced element not a string (it was ${JSON.stringify(elem)})`
          )
        }
        parent[parentkey] = elem.slice(0, key) + c.si + elem.slice(key)
      } else if (c.sd !== undefined) {
        // String delete
        if (typeof elem !== 'string') {
          throw new Error('Referenced element not a string')
        }
        if (elem.slice(key, key + c.sd.length) !== c.sd) {
          throw new Error('Deleted string does not match')
        }
        parent[parentkey] = elem.slice(0, key) + elem.slice(key + c.sd.length)
      } else if (c.li !== undefined && c.ld !== undefined) {
        // List replace
        json.checkList(elem)

        // Should check the list element matches c.ld
        elem[key] = c.li
      } else if (c.li !== undefined) {
        // List insert
        json.checkList(elem)

        elem.splice(key, 0, c.li)
      } else if (c.ld !== undefined) {
        // List delete
        json.checkList(elem)

        // Should check the list element matches c.ld here too.
        elem.splice(key, 1)
      } else if (c.lm !== undefined) {
        // List move
        json.checkList(elem)
        if (c.lm !== key) {
          const e = elem[key]
          // Remove it...
          elem.splice(key, 1)
          // And insert it back.
          elem.splice(c.lm, 0, e)
        }
      } else if (c.oi !== undefined) {
        // Object insert / replace
        json.checkObj(elem)

        // Should check that elem[key] == c.od
        elem[key] = c.oi
      } else if (c.od !== undefined) {
        // Object delete
        json.checkObj(elem)

        // Should check that elem[key] == c.od
        delete elem[key]
      } else {
        throw new Error('invalid / missing instruction in op')
      }
    }
  } catch (error) {
    // TODO: Roll back all already applied changes. Write tests before implementing this code.
    throw error
  }

  return container.data
}
+
// Checks if two paths, p1 and p2 match. With `ignoreLast` set, the final
// segment of each path is allowed to differ.
json.pathMatches = function (p1, p2, ignoreLast) {
  if (p1.length !== p2.length) {
    return false
  }

  const limit = ignoreLast ? p1.length - 1 : p1.length
  for (let i = 0; i < limit; i++) {
    if (p1[i] !== p2[i]) {
      return false
    }
  }

  return true
}
+
// Append component `c` to op `dest`, merging it into the final component
// of `dest` when both target the same path: adjacent number-adds collapse,
// an insert immediately deleted again cancels out, and an object delete
// followed by an insert becomes a replace.
json.append = function (dest, c) {
  let last
  c = clone(c)
  if (
    dest.length !== 0 &&
    json.pathMatches(c.p, (last = dest[dest.length - 1]).p)
  ) {
    if (last.na !== undefined && c.na !== undefined) {
      // Collapse two consecutive number-adds into one.
      return (dest[dest.length - 1] = { p: last.p, na: last.na + c.na })
    } else if (
      last.li !== undefined &&
      c.li === undefined &&
      c.ld === last.li
    ) {
      // insert immediately followed by delete becomes a noop.
      if (last.ld !== undefined) {
        // leave the delete part of the replace
        return delete last.li
      } else {
        return dest.pop()
      }
    } else if (
      last.od !== undefined &&
      last.oi === undefined &&
      c.oi !== undefined &&
      c.od === undefined
    ) {
      // Fold an object insert into the preceding object delete (a replace).
      return (last.oi = c.oi)
    } else if (c.lm !== undefined && c.p[c.p.length - 1] === c.lm) {
      // A move onto its own position is a no-op; drop it.
      return null // don't do anything
    } else {
      return dest.push(c)
    }
  } else {
    return dest.push(c)
  }
}
+
// Compose two ops into one with the same effect as applying op1 then op2.
// Adjacent components are merged where possible via json.append.
json.compose = function (op1, op2) {
  json.checkValidOp(op1)
  json.checkValidOp(op2)

  const composed = clone(op1)
  for (const component of op2) {
    json.append(composed, component)
  }
  return composed
}
+
// Normalize an op (or a bare component) into canonical array form: wrap a
// lone component, default missing paths to [], and merge adjacent
// components via json.append.
json.normalize = function (op) {
  const normalized = []
  const components = isArray(op) ? op : [op]

  for (const c of components) {
    if (c.p == null) {
      c.p = []
    }
    json.append(normalized, c)
  }

  return normalized
}
+
// hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming
// we have browser support for JSON.
// http://jsperf.com/cloning-an-object/12
// (Declared as a hoisted function so it is usable throughout this module.)
function clone(o) {
  return JSON.parse(JSON.stringify(o))
}
+
// Find the depth of the deepest common ancestor of two paths, ignoring the
// last segment of each (an implicit 'data' root is prepended first).
// Returns -1 when only the root is shared, the shared depth otherwise, and
// implicitly `undefined` when the paths diverge before p2 is exhausted —
// callers test the result with `!= null`.
json.commonPath = function (p1, p2) {
  p1 = p1.slice()
  p2 = p2.slice()
  p1.unshift('data')
  p2.unshift('data')
  // Drop the final segment of each path; only ancestors matter here.
  p1 = p1.slice(0, p1.length - 1)
  p2 = p2.slice(0, p2.length - 1)
  if (p2.length === 0) {
    return -1
  }
  let i = 0
  // NOTE(review): the bounds check runs after the element comparison; this
  // relies on `p1[i]` being undefined (and thus unequal) past the end.
  while (p1[i] === p2[i] && i < p1.length) {
    i++
    if (i === p2.length) {
      return i - 1
    }
  }
}
+
// transform c so it applies to a document with otherC applied.
// The transformed component(s) are appended onto `dest`, which is returned.
// `type` is 'left' or 'right' and breaks ties so that the two sides of a
// concurrent edit converge on the same result.
json.transformComponent = function (dest, c, otherC, type) {
  let oc
  c = clone(c)
  // Hack: give number-add components a fake trailing path segment so the
  // common-path logic below treats them like ops on a child element.
  if (c.na !== undefined) {
    c.p.push(0)
  }
  if (otherC.na !== undefined) {
    otherC.p.push(0)
  }

  const common = json.commonPath(c.p, otherC.p)
  const common2 = json.commonPath(otherC.p, c.p)

  const cplength = c.p.length
  const otherCplength = otherC.p.length

  if (c.na !== undefined) {
    c.p.pop()
  } // hax
  if (otherC.na !== undefined) {
    otherC.p.pop()
  }

  if (otherC.na) {
    // The other op is a number-add. If it targets data inside something we
    // are deleting, rewrite our captured ld/od value to include its effect.
    if (
      common2 != null &&
      otherCplength >= cplength &&
      otherC.p[common2] === c.p[common2]
    ) {
      if (c.ld !== undefined) {
        oc = clone(otherC)
        oc.p = oc.p.slice(cplength)
        c.ld = json.apply(clone(c.ld), [oc])
      } else if (c.od !== undefined) {
        oc = clone(otherC)
        oc.p = oc.p.slice(cplength)
        c.od = json.apply(clone(c.od), [oc])
      }
    }
    json.append(dest, c)
    return dest
  }

  // If the other op edits inside a subtree we are deleting, fold its effect
  // into our captured ld/od value so the inverse stays correct.
  if (
    common2 != null &&
    otherCplength > cplength &&
    c.p[common2] === otherC.p[common2]
  ) {
    // transform based on c
    if (c.ld !== undefined) {
      oc = clone(otherC)
      oc.p = oc.p.slice(cplength)
      c.ld = json.apply(clone(c.ld), [oc])
    } else if (c.od !== undefined) {
      oc = clone(otherC)
      oc.p = oc.p.slice(cplength)
      c.od = json.apply(clone(c.od), [oc])
    }
  }

  if (common != null) {
    let from, p, to
    const commonOperand = cplength === otherCplength
    // transform based on otherC
    if (otherC.na !== undefined) {
      // this case is handled above due to icky path hax
    } else if (otherC.si !== undefined || otherC.sd !== undefined) {
      // String op vs string op - pass through to text type
      if (c.si !== undefined || c.sd !== undefined) {
        if (!commonOperand) {
          throw new Error('must be a string?')
        }

        // Convert an op component to a text op component
        const convert = function (component) {
          const newC = { p: component.p[component.p.length - 1] }
          if (component.si) {
            newC.i = component.si
          } else {
            newC.d = component.sd
          }
          return newC
        }

        const tc1 = convert(c)
        const tc2 = convert(otherC)

        const res = []
        text._tc(res, tc1, tc2, type)
        // Convert the transformed text components back into json components.
        for (const tc of Array.from(res)) {
          const jc = { p: c.p.slice(0, common) }
          jc.p.push(tc.p)
          if (tc.i != null) {
            jc.si = tc.i
          }
          if (tc.d != null) {
            jc.sd = tc.d
          }
          json.append(dest, jc)
        }
        return dest
      }
    } else if (otherC.li !== undefined && otherC.ld !== undefined) {
      if (otherC.p[common] === c.p[common]) {
        // noop
        if (!commonOperand) {
          // we're below the deleted element, so -> noop
          return dest
        } else if (c.ld !== undefined) {
          // we're trying to delete the same element, -> noop
          if (c.li !== undefined && type === 'left') {
            // we're both replacing one element with another. only one can
            // survive!
            c.ld = clone(otherC.li)
          } else {
            return dest
          }
        }
      }
    } else if (otherC.li !== undefined) {
      if (
        c.li !== undefined &&
        c.ld === undefined &&
        commonOperand &&
        c.p[common] === otherC.p[common]
      ) {
        // in li vs. li, left wins.
        if (type === 'right') {
          c.p[common]++
        }
      } else if (otherC.p[common] <= c.p[common]) {
        c.p[common]++
      }

      if (c.lm !== undefined) {
        if (commonOperand) {
          // otherC edits the same list we edit
          if (otherC.p[common] <= c.lm) {
            c.lm++
          }
        }
      }
      // changing c.from is handled above.
    } else if (otherC.ld !== undefined) {
      if (c.lm !== undefined) {
        if (commonOperand) {
          if (otherC.p[common] === c.p[common]) {
            // they deleted the thing we're trying to move
            return dest
          }
          // otherC edits the same list we edit
          p = otherC.p[common]
          from = c.p[common]
          to = c.lm
          if (p < to || (p === to && from < to)) {
            c.lm--
          }
        }
      }

      if (otherC.p[common] < c.p[common]) {
        c.p[common]--
      } else if (otherC.p[common] === c.p[common]) {
        if (otherCplength < cplength) {
          // we're below the deleted element, so -> noop
          return dest
        } else if (c.ld !== undefined) {
          if (c.li !== undefined) {
            // we're replacing, they're deleting. we become an insert.
            delete c.ld
          } else {
            // we're trying to delete the same element, -> noop
            return dest
          }
        }
      }
    } else if (otherC.lm !== undefined) {
      if (c.lm !== undefined && cplength === otherCplength) {
        // lm vs lm, here we go!
        from = c.p[common]
        to = c.lm
        const otherFrom = otherC.p[common]
        const otherTo = otherC.lm
        if (otherFrom !== otherTo) {
          // if otherFrom == otherTo, we don't need to change our op.

          // where did my thing go?
          if (from === otherFrom) {
            // they moved it! tie break.
            if (type === 'left') {
              c.p[common] = otherTo
              if (from === to) {
                // ugh
                c.lm = otherTo
              }
            } else {
              return dest
            }
          } else {
            // they moved around it
            if (from > otherFrom) {
              c.p[common]--
            }
            if (from > otherTo) {
              c.p[common]++
            } else if (from === otherTo) {
              if (otherFrom > otherTo) {
                c.p[common]++
                if (from === to) {
                  // ugh, again
                  c.lm++
                }
              }
            }

            // step 2: where am i going to put it?
            if (to > otherFrom) {
              c.lm--
            } else if (to === otherFrom) {
              if (to > from) {
                c.lm--
              }
            }
            if (to > otherTo) {
              c.lm++
            } else if (to === otherTo) {
              // if we're both moving in the same direction, tie break
              if (
                (otherTo > otherFrom && to > from) ||
                (otherTo < otherFrom && to < from)
              ) {
                if (type === 'right') {
                  c.lm++
                }
              } else {
                if (to > from) {
                  c.lm++
                } else if (to === otherFrom) {
                  c.lm--
                }
              }
            }
          }
        }
      } else if (c.li !== undefined && c.ld === undefined && commonOperand) {
        // li
        from = otherC.p[common]
        to = otherC.lm
        p = c.p[common]
        if (p > from) {
          c.p[common]--
        }
        if (p > to) {
          c.p[common]++
        }
      } else {
        // ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath
        // the lm
        //
        // i.e. things care about where their item is after the move.
        from = otherC.p[common]
        to = otherC.lm
        p = c.p[common]
        if (p === from) {
          c.p[common] = to
        } else {
          if (p > from) {
            c.p[common]--
          }
          if (p > to) {
            c.p[common]++
          } else if (p === to) {
            if (from > to) {
              c.p[common]++
            }
          }
        }
      }
    } else if (otherC.oi !== undefined && otherC.od !== undefined) {
      if (c.p[common] === otherC.p[common]) {
        if (c.oi !== undefined && commonOperand) {
          // we inserted where someone else replaced
          if (type === 'right') {
            // left wins
            return dest
          } else {
            // we win, make our op replace what they inserted
            c.od = otherC.oi
          }
        } else {
          // -> noop if the other component is deleting the same object (or any
          // parent)
          return dest
        }
      }
    } else if (otherC.oi !== undefined) {
      if (c.oi !== undefined && c.p[common] === otherC.p[common]) {
        // left wins if we try to insert at the same place
        if (type === 'left') {
          json.append(dest, { p: c.p, od: otherC.oi })
        } else {
          return dest
        }
      }
    } else if (otherC.od !== undefined) {
      if (c.p[common] === otherC.p[common]) {
        if (!commonOperand) {
          return dest
        }
        if (c.oi !== undefined) {
          delete c.od
        } else {
          return dest
        }
      }
    }
  }

  json.append(dest, c)
  return dest
}
+
// Hook the type into its environment: browser builds attach it to the
// shared exports and use the inlined _bt helper; Node exports the module
// and wires up transform/prune via the helpers bootstrap.
if (typeof WEB !== 'undefined' && WEB !== null) {
  if (!exports.types) {
    exports.types = {}
  }

  // This is kind of awful - come up with a better way to hook this helper code up.
  exports._bt(json, json.transformComponent, json.checkValidOp, json.append)

  // [] is used to prevent closure from renaming types.text
  exports.types.json = json
} else {
  module.exports = json

  require('./helpers').bootstrapTransform(
    json,
    json.transformComponent,
    json.checkValidOp,
    json.append
  )
}
diff --git a/services/document-updater/app/js/sharejs/model.js b/services/document-updater/app/js/sharejs/model.js
new file mode 100644
index 0000000000..aebcd8d549
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/model.js
@@ -0,0 +1,883 @@
+/* eslint-disable
+ no-console,
+ no-return-assign,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS104: Avoid inline assignments
+ * DS204: Change includes calls to have a more natural evaluation order
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// The model of all the ops. Responsible for applying & transforming remote deltas
+// and managing the storage layer.
+//
+// Actual storage is handled by the database wrappers in db/*, wrapped by DocCache
+
// Forward declaration; the constructor function is assigned to both this
// binding and module.exports below (decaffeinate-style).
let Model
const { EventEmitter } = require('events')

const queue = require('./syncqueue')
const types = require('../types')

// Portable array check used instead of Array.isArray in this module.
const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
+
+// This constructor creates a new Model object. There will be one model object
+// per server context.
+//
+// The model object is responsible for a lot of things:
+//
+// - It manages the interactions with the database
+// - It maintains (in memory) a set of all active documents
+// - It calls out to the OT functions when necessary
+//
+// The model is an event emitter. It emits the following events:
+//
+// create(docName, data): A document has been created with the specified name & data
+module.exports = Model = function (db, options) {
+ // db can be null if the user doesn't want persistance.
+
+ let getOps
+ if (!(this instanceof Model)) {
+ return new Model(db, options)
+ }
+
+ const model = this
+
+ if (options == null) {
+ options = {}
+ }
+
+ // This is a cache of 'live' documents.
+ //
+ // The cache is a map from docName -> {
+ // ops:[{op, meta}]
+ // snapshot
+ // type
+ // v
+ // meta
+ // eventEmitter
+ // reapTimer
+ // committedVersion: v
+ // snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant
+ // dbMeta: database specific data
+ // opQueue: syncQueue for processing ops
+ // }
+ //
+ // The ops list contains the document's last options.numCachedOps ops. (Or all
+ // of them if we're using a memory store).
+ //
+ // Documents are stored in this set so long as the document has been accessed in
+ // the last few seconds (options.reapTime) OR at least one client has the document
+ // open. I don't know if I should keep open (but not being edited) documents live -
+ // maybe if a client has a document open but the document isn't being edited, I should
+ // flush it from the cache.
+ //
+ // In any case, the API to model is designed such that if we want to change that later
+ // it should be pretty easy to do so without any external-to-the-model code changes.
+ const docs = {}
+
+ // This is a map from docName -> [callback]. It is used when a document hasn't been
+ // cached and multiple getSnapshot() / getVersion() requests come in. All requests
+ // are added to the callback list and called when db.getSnapshot() returns.
+ //
+ // callback(error, snapshot data)
+ const awaitingGetSnapshot = {}
+
+ // The time that documents which no clients have open will stay in the cache.
+ // Should be > 0.
+ if (options.reapTime == null) {
+ options.reapTime = 3000
+ }
+
+ // The number of operations the cache holds before reusing the space
+ if (options.numCachedOps == null) {
+ options.numCachedOps = 10
+ }
+
+ // This option forces documents to be reaped, even when there's no database backend.
+ // This is useful when you don't care about persistance and don't want to gradually
+ // fill memory.
+ //
+ // You might want to set reapTime to a day or something.
+ if (options.forceReaping == null) {
+ options.forceReaping = false
+ }
+
+ // Until I come up with a better strategy, we'll save a copy of the document snapshot
+ // to the database every ~20 submitted ops.
+ if (options.opsBeforeCommit == null) {
+ options.opsBeforeCommit = 20
+ }
+
+ // It takes some processing time to transform client ops. The server will punt ops back to the
+ // client to transform if they're too old.
+ if (options.maximumAge == null) {
+ options.maximumAge = 40
+ }
+
+ // **** Cache API methods
+
+ // Its important that all ops are applied in order. This helper method creates the op submission queue
+ // for a single document. This contains the logic for transforming & applying ops.
+ const makeOpQueue = (docName, doc) =>
+ queue(function (opData, callback) {
+ if (!(opData.v >= 0)) {
+ return callback('Version missing')
+ }
+ if (opData.v > doc.v) {
+ return callback('Op at future version')
+ }
+
+ // Punt the transforming work back to the client if the op is too old.
+ if (opData.v + options.maximumAge < doc.v) {
+ return callback('Op too old')
+ }
+
+ if (!opData.meta) {
+ opData.meta = {}
+ }
+ opData.meta.ts = Date.now()
+
+ // We'll need to transform the op to the current version of the document. This
+ // calls the callback immediately if opVersion == doc.v.
+ return getOps(docName, opData.v, doc.v, function (error, ops) {
+ let snapshot
+ if (error) {
+ return callback(error)
+ }
+
+ if (doc.v - opData.v !== ops.length) {
+ // This should never happen. It indicates that we didn't get all the ops we
+ // asked for. Its important that the submitted op is correctly transformed.
+ console.error(
+ `Could not get old ops in model for document ${docName}`
+ )
+ console.error(
+ `Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops`
+ )
+ return callback('Internal error')
+ }
+
+ if (ops.length > 0) {
+ try {
+ // If there's enough ops, it might be worth spinning this out into a webworker thread.
+ for (const oldOp of Array.from(ops)) {
+ // Dup detection works by sending the id(s) the op has been submitted with previously.
+ // If the id matches, we reject it. The client can also detect the op has been submitted
+ // already if it sees its own previous id in the ops it sees when it does catchup.
+ if (
+ oldOp.meta.source &&
+ opData.dupIfSource &&
+ Array.from(opData.dupIfSource).includes(oldOp.meta.source)
+ ) {
+ return callback('Op already submitted')
+ }
+
+ opData.op = doc.type.transform(opData.op, oldOp.op, 'left')
+ opData.v++
+ }
+ } catch (error1) {
+ error = error1
+ console.error(error.stack)
+ return callback(error.message)
+ }
+ }
+
+ try {
+ snapshot = doc.type.apply(doc.snapshot, opData.op)
+ } catch (error2) {
+ error = error2
+ console.error(error.stack)
+ return callback(error.message)
+ }
+
+ // The op data should be at the current version, and the new document data should be at
+ // the next version.
+ //
+ // This should never happen in practice, but its a nice little check to make sure everything
+ // is hunky-dory.
+ if (opData.v !== doc.v) {
+ // This should never happen.
+ console.error(
+ 'Version mismatch detected in model. File a ticket - this is a bug.'
+ )
+ console.error(`Expecting ${opData.v} == ${doc.v}`)
+ return callback('Internal error')
+ }
+
+ // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta}
+ const writeOp =
+ (db != null ? db.writeOp : undefined) ||
+ ((docName, newOpData, callback) => callback())
+
+ return writeOp(docName, opData, function (error) {
+ if (error) {
+ // The user should probably know about this.
+ console.warn(`Error writing ops to database: ${error}`)
+ return callback(error)
+ }
+
+ __guardMethod__(options.stats, 'writeOp', o => o.writeOp())
+
+ // This is needed when we emit the 'change' event, below.
+ const oldSnapshot = doc.snapshot
+
+ // All the heavy lifting is now done. Finally, we'll update the cache with the new data
+ // and (maybe!) save a new document snapshot to the database.
+
+ doc.v = opData.v + 1
+ doc.snapshot = snapshot
+
+ doc.ops.push(opData)
+ if (db && doc.ops.length > options.numCachedOps) {
+ doc.ops.shift()
+ }
+
+ model.emit('applyOp', docName, opData, snapshot, oldSnapshot)
+ doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot)
+
+ // The callback is called with the version of the document at which the op was applied.
+ // This is the op.v after transformation, and its doc.v - 1.
+ callback(null, opData.v)
+
+ // I need a decent strategy here for deciding whether or not to save the snapshot.
+ //
+ // The 'right' strategy looks something like "Store the snapshot whenever the snapshot
+ // is smaller than the accumulated op data". For now, I'll just store it every 20
+ // ops or something. (Configurable with doc.committedVersion)
+ if (
+ !doc.snapshotWriteLock &&
+ doc.committedVersion + options.opsBeforeCommit <= doc.v
+ ) {
+ return tryWriteSnapshot(docName, function (error) {
+ if (error) {
+ return console.warn(
+ `Error writing snapshot ${error}. This is nonfatal`
+ )
+ }
+ })
+ }
+ })
+ })
+ })
+
+ // Add the data for the given docName to the cache. The named document shouldn't already
+ // exist in the doc set.
+ //
+ // Returns the new doc.
+ const add = function (docName, error, data, committedVersion, ops, dbMeta) {
+ let callback, doc
+ const callbacks = awaitingGetSnapshot[docName]
+ delete awaitingGetSnapshot[docName]
+
+ if (error) {
+ if (callbacks) {
+ for (callback of Array.from(callbacks)) {
+ callback(error)
+ }
+ }
+ } else {
+ doc = docs[docName] = {
+ snapshot: data.snapshot,
+ v: data.v,
+ type: data.type,
+ meta: data.meta,
+
+ // Cache of ops
+ ops: ops || [],
+
+ eventEmitter: new EventEmitter(),
+
+ // Timer before the document will be invalidated from the cache (if the document has no
+ // listeners)
+ reapTimer: null,
+
+ // Version of the snapshot that's in the database
+ committedVersion: committedVersion != null ? committedVersion : data.v,
+ snapshotWriteLock: false,
+ dbMeta,
+ }
+
+ doc.opQueue = makeOpQueue(docName, doc)
+
+ refreshReapingTimeout(docName)
+ model.emit('add', docName, data)
+ if (callbacks) {
+ for (callback of Array.from(callbacks)) {
+ callback(null, doc)
+ }
+ }
+ }
+
+ return doc
+ }
+
+ // This is a little helper wrapper around db.getOps. It does two things:
+ //
+ // - If there's no database set, it returns an error to the callback
+ // - It adds version numbers to each op returned from the database
+ // (These can be inferred from context so the DB doesn't store them, but its useful to have them).
+ const getOpsInternal = function (docName, start, end, callback) {
+ if (!db) {
+ return typeof callback === 'function'
+ ? callback('Document does not exist')
+ : undefined
+ }
+
+ return db.getOps(docName, start, end, function (error, ops) {
+ if (error) {
+ return typeof callback === 'function' ? callback(error) : undefined
+ }
+
+ let v = start
+ for (const op of Array.from(ops)) {
+ op.v = v++
+ }
+
+ return typeof callback === 'function' ? callback(null, ops) : undefined
+ })
+ }
+
+ // Load the named document into the cache. This function is re-entrant.
+ //
+ // The callback is called with (error, doc)
+ const load = function (docName, callback) {
+ if (docs[docName]) {
+ // The document is already loaded. Return immediately.
+ __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot'))
+ return callback(null, docs[docName])
+ }
+
+ // We're a memory store. If we don't have it, nobody does.
+ if (!db) {
+ return callback('Document does not exist')
+ }
+
+ const callbacks = awaitingGetSnapshot[docName]
+
+ // The document is being loaded already. Add ourselves as a callback.
+ if (callbacks) {
+ return callbacks.push(callback)
+ }
+
+ __guardMethod__(options.stats, 'cacheMiss', o1 =>
+ o1.cacheMiss('getSnapshot')
+ )
+
+ // The document isn't loaded and isn't being loaded. Load it.
+ awaitingGetSnapshot[docName] = [callback]
+ return db.getSnapshot(docName, function (error, data, dbMeta) {
+ if (error) {
+ return add(docName, error)
+ }
+
+ const type = types[data.type]
+ if (!type) {
+ console.warn(`Type '${data.type}' missing`)
+ return callback('Type not found')
+ }
+ data.type = type
+
+ const committedVersion = data.v
+
+ // The server can close without saving the most recent document snapshot.
+ // In this case, there are extra ops which need to be applied before
+ // returning the snapshot.
+ return getOpsInternal(docName, data.v, null, function (error, ops) {
+ if (error) {
+ return callback(error)
+ }
+
+ if (ops.length > 0) {
+ console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`)
+
+ try {
+ for (const op of Array.from(ops)) {
+ data.snapshot = type.apply(data.snapshot, op.op)
+ data.v++
+ }
+ } catch (e) {
+ // This should never happen - it indicates that what's in the
+ // database is invalid.
+ console.error(`Op data invalid for ${docName}: ${e.stack}`)
+ return callback('Op data invalid')
+ }
+ }
+
+ model.emit('load', docName, data)
+ return add(docName, error, data, committedVersion, ops, dbMeta)
+ })
+ })
+ }
+
+ // This makes sure the cache contains a document. If the doc cache doesn't contain
+ // a document, it is loaded from the database and stored.
+ //
+ // Documents are stored so long as either:
+ // - They have been accessed within the past #{PERIOD}
+ // - At least one client has the document open
+ var refreshReapingTimeout = function (docName) {
+ const doc = docs[docName]
+ if (!doc) {
+ return
+ }
+
+ // I want to let the clients list be updated before this is called.
+ return process.nextTick(function () {
+ // This is an awkward way to find out the number of clients on a document. If this
+ // causes performance issues, add a numClients field to the document.
+ //
+ // The first check is because it's possible that between refreshReapingTimeout being called and this
+ // event being fired, someone called delete() on the document and hence the doc is something else now.
+ if (
+ doc === docs[docName] &&
+ doc.eventEmitter.listeners('op').length === 0 &&
+ (db || options.forceReaping) &&
+ doc.opQueue.busy === false
+ ) {
+ let reapTimer
+ clearTimeout(doc.reapTimer)
+ return (doc.reapTimer = reapTimer =
+ setTimeout(
+ () =>
+ tryWriteSnapshot(docName, function () {
+ // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
+ // in the middle of applying an operation, don't reap.
+ if (
+ docs[docName].reapTimer === reapTimer &&
+ doc.opQueue.busy === false
+ ) {
+ return delete docs[docName]
+ }
+ }),
+ options.reapTime
+ ))
+ }
+ })
+ }
+
+ var tryWriteSnapshot = function (docName, callback) {
+ if (!db) {
+ return typeof callback === 'function' ? callback() : undefined
+ }
+
+ const doc = docs[docName]
+
+ // The doc is closed
+ if (!doc) {
+ return typeof callback === 'function' ? callback() : undefined
+ }
+
+ // The document is already saved.
+ if (doc.committedVersion === doc.v) {
+ return typeof callback === 'function' ? callback() : undefined
+ }
+
+ if (doc.snapshotWriteLock) {
+ return typeof callback === 'function'
+ ? callback('Another snapshot write is in progress')
+ : undefined
+ }
+
+ doc.snapshotWriteLock = true
+
+ __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot())
+
+ const writeSnapshot =
+ (db != null ? db.writeSnapshot : undefined) ||
+ ((docName, docData, dbMeta, callback) => callback())
+
+ const data = {
+ v: doc.v,
+ meta: doc.meta,
+ snapshot: doc.snapshot,
+ // The database doesn't know about object types.
+ type: doc.type.name,
+ }
+
+ // Commit snapshot.
+ return writeSnapshot(docName, data, doc.dbMeta, function (error, dbMeta) {
+ doc.snapshotWriteLock = false
+
+ // We have to use data.v here because the version in the doc could
+ // have been updated between the call to writeSnapshot() and now.
+ doc.committedVersion = data.v
+ doc.dbMeta = dbMeta
+
+ return typeof callback === 'function' ? callback(error) : undefined
+ })
+ }
+
+ // *** Model interface methods
+
+ // Create a new document.
+ //
+ // data should be {snapshot, type, [meta]}. The version of a new document is 0.
+ this.create = function (docName, type, meta, callback) {
+ if (typeof meta === 'function') {
+ ;[meta, callback] = Array.from([{}, meta])
+ }
+
+ if (docName.match(/\//)) {
+ return typeof callback === 'function'
+ ? callback('Invalid document name')
+ : undefined
+ }
+ if (docs[docName]) {
+ return typeof callback === 'function'
+ ? callback('Document already exists')
+ : undefined
+ }
+
+ if (typeof type === 'string') {
+ type = types[type]
+ }
+ if (!type) {
+ return typeof callback === 'function'
+ ? callback('Type not found')
+ : undefined
+ }
+
+ const data = {
+ snapshot: type.create(),
+ type: type.name,
+ meta: meta || {},
+ v: 0,
+ }
+
+ const done = function (error, dbMeta) {
+ // dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something.
+ if (error) {
+ return typeof callback === 'function' ? callback(error) : undefined
+ }
+
+ // From here on we'll store the object version of the type name.
+ data.type = type
+ add(docName, null, data, 0, [], dbMeta)
+ model.emit('create', docName, data)
+ return typeof callback === 'function' ? callback() : undefined
+ }
+
+ if (db) {
+ return db.create(docName, data, done)
+ } else {
+ return done()
+ }
+ }
+
+ // Permanently deletes the specified document.
+ // If listeners are attached, they are removed.
+ //
+ // The callback is called with (error) if there was an error. If error is null / undefined, the
+ // document was deleted.
+ //
+ // WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the
+ // deletion. Subsequent op submissions will fail).
+ this.delete = function (docName, callback) {
+ const doc = docs[docName]
+
+ if (doc) {
+ clearTimeout(doc.reapTimer)
+ delete docs[docName]
+ }
+
+ const done = function (error) {
+ if (!error) {
+ model.emit('delete', docName)
+ }
+ return typeof callback === 'function' ? callback(error) : undefined
+ }
+
+ if (db) {
+ return db.delete(docName, doc != null ? doc.dbMeta : undefined, done)
+ } else {
+ return done(!doc ? 'Document does not exist' : undefined)
+ }
+ }
+
+ // This gets all operations from [start...end]. (That is, its not inclusive.)
+ //
+ // end can be null. This means 'get me all ops from start'.
+ //
+ // Each op returned is in the form {op:o, meta:m, v:version}.
+ //
+ // Callback is called with (error, [ops])
+ //
+ // If the document does not exist, getOps doesn't necessarily return an error. This is because
+ // it's awkward to figure out whether or not the document exists for things
+ // like the redis database backend. I guess it's a bit gross having this inconsistent
+ // with the other DB calls, but it's certainly convenient.
+ //
+ // Use getVersion() to determine if a document actually exists, if that's what you're
+ // after.
+ this.getOps = getOps = function (docName, start, end, callback) {
+ // getOps will only use the op cache if it's there. It won't fill the op cache in.
+ if (!(start >= 0)) {
+ throw new Error('start must be 0+')
+ }
+
+ if (typeof end === 'function') {
+ ;[end, callback] = Array.from([null, end])
+ }
+
+ const ops = docs[docName] != null ? docs[docName].ops : undefined
+
+ if (ops) {
+ const version = docs[docName].v
+
+ // Ops contains an array of ops. The last op in the list is the last op applied
+ if (end == null) {
+ end = version
+ }
+ start = Math.min(start, end)
+
+ if (start === end) {
+ return callback(null, [])
+ }
+
+ // Base is the version number of the oldest op we have cached
+ const base = version - ops.length
+
+ // If the database is null, we'll trim to the ops we do have and hope that's enough.
+ if (start >= base || db === null) {
+ refreshReapingTimeout(docName)
+ if (options.stats != null) {
+ options.stats.cacheHit('getOps')
+ }
+
+ return callback(null, ops.slice(start - base, end - base))
+ }
+ }
+
+ if (options.stats != null) {
+ options.stats.cacheMiss('getOps')
+ }
+
+ return getOpsInternal(docName, start, end, callback)
+ }
+
+ // Gets the snapshot data for the specified document.
+ // getSnapshot(docName, callback)
+ // Callback is called with (error, {v: , type: , snapshot: , meta: })
+ this.getSnapshot = (docName, callback) =>
+ load(docName, (error, doc) =>
+ callback(
+ error,
+ doc
+ ? { v: doc.v, type: doc.type, snapshot: doc.snapshot, meta: doc.meta }
+ : undefined
+ )
+ )
+
+ // Gets the latest version # of the document.
+ // getVersion(docName, callback)
+ // callback is called with (error, version).
+ this.getVersion = (docName, callback) =>
+ load(docName, (error, doc) =>
+ callback(error, doc != null ? doc.v : undefined)
+ )
+
+ // Apply an op to the specified document.
+ // The callback is passed (error, applied version #)
+ // opData = {op:op, v:v, meta:metadata}
+ //
+ // Ops are queued before being applied so that the following code applies op C before op B:
+ // model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB
+ // model.applyOp 'doc', OPC
+ this.applyOp = (
+ docName,
+ opData,
+ callback // All the logic for this is in makeOpQueue, above.
+ ) =>
+ load(docName, function (error, doc) {
+ if (error) {
+ return callback(error)
+ }
+
+ return process.nextTick(() =>
+ doc.opQueue(opData, function (error, newVersion) {
+ refreshReapingTimeout(docName)
+ return typeof callback === 'function'
+ ? callback(error, newVersion)
+ : undefined
+ })
+ )
+ })
+
+ // TODO: store (some) metadata in DB
+ // TODO: op and meta should be combinable in the op that gets sent
+ this.applyMetaOp = function (docName, metaOpData, callback) {
+ const { path, value } = metaOpData.meta
+
+ if (!isArray(path)) {
+ return typeof callback === 'function'
+ ? callback('path should be an array')
+ : undefined
+ }
+
+ return load(docName, function (error, doc) {
+ if (error != null) {
+ return typeof callback === 'function' ? callback(error) : undefined
+ } else {
+ let applied = false
+ switch (path[0]) {
+ case 'shout':
+ doc.eventEmitter.emit('op', metaOpData)
+ applied = true
+ break
+ }
+
+ if (applied) {
+ model.emit('applyMetaOp', docName, path, value)
+ }
+ return typeof callback === 'function'
+ ? callback(null, doc.v)
+ : undefined
+ }
+ })
+ }
+
+ // Listen to all ops from the specified version. If version is in the past, all
+ // ops since that version are sent immediately to the listener.
+ //
+ // The callback is called once the listener is attached, but before any ops have been passed
+ // to the listener.
+ //
+ // This will _not_ edit the document metadata.
+ //
+ // If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour
+ // might change in a future version.
+ //
+ // version is the document version at which the document is opened. It can be left out if you want to open
+ // the document at the most recent version.
+ //
+ // listener is called with (opData) each time an op is applied.
+ //
+ // callback(error, openedVersion)
+ this.listen = function (docName, version, listener, callback) {
+ if (typeof version === 'function') {
+ ;[version, listener, callback] = Array.from([null, version, listener])
+ }
+
+ return load(docName, function (error, doc) {
+ if (error) {
+ return typeof callback === 'function' ? callback(error) : undefined
+ }
+
+ clearTimeout(doc.reapTimer)
+
+ if (version != null) {
+ return getOps(docName, version, null, function (error, data) {
+ if (error) {
+ return typeof callback === 'function' ? callback(error) : undefined
+ }
+
+ doc.eventEmitter.on('op', listener)
+ if (typeof callback === 'function') {
+ callback(null, version)
+ }
+ return (() => {
+ const result = []
+ for (const op of Array.from(data)) {
+ var needle
+ listener(op)
+
+ // The listener may well remove itself during the catchup phase. If this happens, break early.
+ // This is done in a quite inefficient way. (O(n) where n = #listeners on doc)
+ if (
+ ((needle = listener),
+ !Array.from(doc.eventEmitter.listeners('op')).includes(needle))
+ ) {
+ break
+ } else {
+ result.push(undefined)
+ }
+ }
+ return result
+ })()
+ })
+ } else {
+ // Version is null / undefined. Just add the listener.
+ doc.eventEmitter.on('op', listener)
+ return typeof callback === 'function'
+ ? callback(null, doc.v)
+ : undefined
+ }
+ })
+ }
+
+ // Remove a listener for a particular document.
+ //
+ // removeListener(docName, listener)
+ //
+ // This is synchronous.
+ this.removeListener = function (docName, listener) {
+ // The document should already be loaded.
+ const doc = docs[docName]
+ if (!doc) {
+ throw new Error('removeListener called but document not loaded')
+ }
+
+ doc.eventEmitter.removeListener('op', listener)
+ return refreshReapingTimeout(docName)
+ }
+
+ // Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed -
+ // sharejs will happily replay uncommitted ops when documents are re-opened anyway.
+ this.flush = function (callback) {
+ if (!db) {
+ return typeof callback === 'function' ? callback() : undefined
+ }
+
+ let pendingWrites = 0
+
+ for (const docName in docs) {
+ const doc = docs[docName]
+ if (doc.committedVersion < doc.v) {
+ pendingWrites++
+ // I'm hoping writeSnapshot will always happen in another thread.
+ tryWriteSnapshot(docName, () =>
+ process.nextTick(function () {
+ pendingWrites--
+ if (pendingWrites === 0) {
+ return typeof callback === 'function' ? callback() : undefined
+ }
+ })
+ )
+ }
+ }
+
+ // If nothing was queued, terminate immediately.
+ if (pendingWrites === 0) {
+ return typeof callback === 'function' ? callback() : undefined
+ }
+ }
+
+ // Close the database connection. This is needed so nodejs can shut down cleanly.
+ this.closeDb = function () {
+ __guardMethod__(db, 'close', o => o.close())
+ return (db = null)
+ }
+}
+
+// Model inherits from EventEmitter.
+Model.prototype = new EventEmitter()
+
+function __guardMethod__(obj, methodName, transform) {
+ if (
+ typeof obj !== 'undefined' &&
+ obj !== null &&
+ typeof obj[methodName] === 'function'
+ ) {
+ return transform(obj, methodName)
+ } else {
+ return undefined
+ }
+}
diff --git a/services/document-updater/app/js/sharejs/server/model.js b/services/document-updater/app/js/sharejs/server/model.js
new file mode 100644
index 0000000000..a5682f71e3
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/server/model.js
@@ -0,0 +1,890 @@
+/* eslint-disable
+ no-console,
+ no-return-assign,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS104: Avoid inline assignments
+ * DS204: Change includes calls to have a more natural evaluation order
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// The model of all the ops. Responsible for applying & transforming remote deltas
+// and managing the storage layer.
+//
+// Actual storage is handled by the database wrappers in db/*, wrapped by DocCache
+
+let Model
+const { EventEmitter } = require('events')
+
+const queue = require('./syncqueue')
+const types = require('../types')
+
+const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
+
+// This constructor creates a new Model object. There will be one model object
+// per server context.
+//
+// The model object is responsible for a lot of things:
+//
+// - It manages the interactions with the database
+// - It maintains (in memory) a set of all active documents
+// - It calls out to the OT functions when necessary
+//
+// The model is an event emitter. It emits the following events:
+//
+// create(docName, data): A document has been created with the specified name & data
+module.exports = Model = function (db, options) {
+ // db can be null if the user doesn't want persistence.
+
+ let getOps
+ if (!(this instanceof Model)) {
+ return new Model(db, options)
+ }
+
+ const model = this
+
+ if (options == null) {
+ options = {}
+ }
+
+ // This is a cache of 'live' documents.
+ //
+ // The cache is a map from docName -> {
+ // ops:[{op, meta}]
+ // snapshot
+ // type
+ // v
+ // meta
+ // eventEmitter
+ // reapTimer
+ // committedVersion: v
+ // snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant
+ // dbMeta: database specific data
+ // opQueue: syncQueue for processing ops
+ // }
+ //
+ // The ops list contains the document's last options.numCachedOps ops. (Or all
+ // of them if we're using a memory store).
+ //
+ // Documents are stored in this set so long as the document has been accessed in
+ // the last few seconds (options.reapTime) OR at least one client has the document
+ // open. I don't know if I should keep open (but not being edited) documents live -
+ // maybe if a client has a document open but the document isn't being edited, I should
+ // flush it from the cache.
+ //
+ // In any case, the API to model is designed such that if we want to change that later
+ // it should be pretty easy to do so without any external-to-the-model code changes.
+ const docs = {}
+
+ // This is a map from docName -> [callback]. It is used when a document hasn't been
+ // cached and multiple getSnapshot() / getVersion() requests come in. All requests
+ // are added to the callback list and called when db.getSnapshot() returns.
+ //
+ // callback(error, snapshot data)
+ const awaitingGetSnapshot = {}
+
+ // The time that documents which no clients have open will stay in the cache.
+ // Should be > 0.
+ if (options.reapTime == null) {
+ options.reapTime = 3000
+ }
+
+ // The number of operations the cache holds before reusing the space
+ if (options.numCachedOps == null) {
+ options.numCachedOps = 10
+ }
+
+ // This option forces documents to be reaped, even when there's no database backend.
+ // This is useful when you don't care about persistence and don't want to gradually
+ // fill memory.
+ //
+ // You might want to set reapTime to a day or something.
+ if (options.forceReaping == null) {
+ options.forceReaping = false
+ }
+
+ // Until I come up with a better strategy, we'll save a copy of the document snapshot
+ // to the database every ~20 submitted ops.
+ if (options.opsBeforeCommit == null) {
+ options.opsBeforeCommit = 20
+ }
+
+ // It takes some processing time to transform client ops. The server will punt ops back to the
+ // client to transform if they're too old.
+ if (options.maximumAge == null) {
+ options.maximumAge = 40
+ }
+
+ // **** Cache API methods
+
+ // It's important that all ops are applied in order. This helper method creates the op submission queue
+ // for a single document. This contains the logic for transforming & applying ops.
+ const makeOpQueue = (docName, doc) =>
+ queue(function (opData, callback) {
+ if (!(opData.v >= 0)) {
+ return callback('Version missing')
+ }
+ if (opData.v > doc.v) {
+ return callback('Op at future version')
+ }
+
+ // Punt the transforming work back to the client if the op is too old.
+ if (opData.v + options.maximumAge < doc.v) {
+ return callback('Op too old')
+ }
+
+ if (!opData.meta) {
+ opData.meta = {}
+ }
+ opData.meta.ts = Date.now()
+
+ // We'll need to transform the op to the current version of the document. This
+ // calls the callback immediately if opVersion == doc.v.
+ return getOps(docName, opData.v, doc.v, function (error, ops) {
+ let snapshot
+ if (error) {
+ return callback(error)
+ }
+
+ if (doc.v - opData.v !== ops.length) {
+ // This should never happen. It indicates that we didn't get all the ops we
+ // asked for. It's important that the submitted op is correctly transformed.
+ console.error(
+ `Could not get old ops in model for document ${docName}`
+ )
+ console.error(
+ `Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops`
+ )
+ return callback('Internal error')
+ }
+
+ if (ops.length > 0) {
+ try {
+ // If there's enough ops, it might be worth spinning this out into a webworker thread.
+ for (const oldOp of Array.from(ops)) {
+ // Dup detection works by sending the id(s) the op has been submitted with previously.
+ // If the id matches, we reject it. The client can also detect the op has been submitted
+ // already if it sees its own previous id in the ops it sees when it does catchup.
+ if (
+ oldOp.meta.source &&
+ opData.dupIfSource &&
+ Array.from(opData.dupIfSource).includes(oldOp.meta.source)
+ ) {
+ return callback('Op already submitted')
+ }
+
+ opData.op = doc.type.transform(opData.op, oldOp.op, 'left')
+ opData.v++
+ }
+ } catch (error1) {
+ error = error1
+ console.error(error.stack)
+ return callback(error.message)
+ }
+ }
+
+ try {
+ snapshot = doc.type.apply(doc.snapshot, opData.op)
+ } catch (error2) {
+ error = error2
+ console.error(error.stack)
+ return callback(error.message)
+ }
+
+ if (
+ options.maxDocLength != null &&
+ doc.snapshot.length > options.maxDocLength
+ ) {
+ return callback('Update takes doc over max doc size')
+ }
+
+ // The op data should be at the current version, and the new document data should be at
+ // the next version.
+ //
+ // This should never happen in practice, but it's a nice little check to make sure everything
+ // is hunky-dory.
+ if (opData.v !== doc.v) {
+ // This should never happen.
+ console.error(
+ 'Version mismatch detected in model. File a ticket - this is a bug.'
+ )
+ console.error(`Expecting ${opData.v} == ${doc.v}`)
+ return callback('Internal error')
+ }
+
+ // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta}
+ const writeOp =
+ (db != null ? db.writeOp : undefined) ||
+ ((docName, newOpData, callback) => callback())
+
+ return writeOp(docName, opData, function (error) {
+ if (error) {
+ // The user should probably know about this.
+ console.warn(`Error writing ops to database: ${error}`)
+ return callback(error)
+ }
+
+ __guardMethod__(options.stats, 'writeOp', o => o.writeOp())
+
+ // This is needed when we emit the 'change' event, below.
+ const oldSnapshot = doc.snapshot
+
+ // All the heavy lifting is now done. Finally, we'll update the cache with the new data
+ // and (maybe!) save a new document snapshot to the database.
+
+ doc.v = opData.v + 1
+ doc.snapshot = snapshot
+
+ doc.ops.push(opData)
+ if (db && doc.ops.length > options.numCachedOps) {
+ doc.ops.shift()
+ }
+
+ model.emit('applyOp', docName, opData, snapshot, oldSnapshot)
+ doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot)
+
+ // The callback is called with the version of the document at which the op was applied.
+ // This is the op.v after transformation, and its doc.v - 1.
+ callback(null, opData.v)
+
+ // I need a decent strategy here for deciding whether or not to save the snapshot.
+ //
+ // The 'right' strategy looks something like "Store the snapshot whenever the snapshot
+ // is smaller than the accumulated op data". For now, I'll just store it every 20
+ // ops or something. (Configurable with options.opsBeforeCommit)
+ if (
+ !doc.snapshotWriteLock &&
+ doc.committedVersion + options.opsBeforeCommit <= doc.v
+ ) {
+ return tryWriteSnapshot(docName, function (error) {
+ if (error) {
+ return console.warn(
+ `Error writing snapshot ${error}. This is nonfatal`
+ )
+ }
+ })
+ }
+ })
+ })
+ })
+
+ // Add the data for the given docName to the cache. The named document shouldn't already
+ // exist in the doc set.
+ //
+ // Returns the new doc.
+ const add = function (docName, error, data, committedVersion, ops, dbMeta) {
+ let callback, doc
+ const callbacks = awaitingGetSnapshot[docName]
+ delete awaitingGetSnapshot[docName]
+
+ if (error) {
+ if (callbacks) {
+ for (callback of Array.from(callbacks)) {
+ callback(error)
+ }
+ }
+ } else {
+ doc = docs[docName] = {
+ snapshot: data.snapshot,
+ v: data.v,
+ type: data.type,
+ meta: data.meta,
+
+ // Cache of ops
+ ops: ops || [],
+
+ eventEmitter: new EventEmitter(),
+
+ // Timer before the document will be invalidated from the cache (if the document has no
+ // listeners)
+ reapTimer: null,
+
+ // Version of the snapshot that's in the database
+ committedVersion: committedVersion != null ? committedVersion : data.v,
+ snapshotWriteLock: false,
+ dbMeta,
+ }
+
+ doc.opQueue = makeOpQueue(docName, doc)
+
+ refreshReapingTimeout(docName)
+ model.emit('add', docName, data)
+ if (callbacks) {
+ for (callback of Array.from(callbacks)) {
+ callback(null, doc)
+ }
+ }
+ }
+
+ return doc
+ }
+
+ // This is a little helper wrapper around db.getOps. It does two things:
+ //
+ // - If there's no database set, it returns an error to the callback
+ // - It adds version numbers to each op returned from the database
+ // (These can be inferred from context so the DB doesn't store them, but its useful to have them).
+ const getOpsInternal = function (docName, start, end, callback) {
+ if (!db) {
+ return typeof callback === 'function'
+ ? callback('Document does not exist')
+ : undefined
+ }
+
+ return db.getOps(docName, start, end, function (error, ops) {
+ if (error) {
+ return typeof callback === 'function' ? callback(error) : undefined
+ }
+
+ let v = start
+ for (const op of Array.from(ops)) {
+ op.v = v++
+ }
+
+ return typeof callback === 'function' ? callback(null, ops) : undefined
+ })
+ }
+
+ // Load the named document into the cache. This function is re-entrant.
+ //
+ // The callback is called with (error, doc)
+ const load = function (docName, callback) {
+ if (docs[docName]) {
+ // The document is already loaded. Return immediately.
+ __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot'))
+ return callback(null, docs[docName])
+ }
+
+ // We're a memory store. If we don't have it, nobody does.
+ if (!db) {
+ return callback('Document does not exist')
+ }
+
+ const callbacks = awaitingGetSnapshot[docName]
+
+ // The document is being loaded already. Add ourselves as a callback.
+ if (callbacks) {
+ return callbacks.push(callback)
+ }
+
+ __guardMethod__(options.stats, 'cacheMiss', o1 =>
+ o1.cacheMiss('getSnapshot')
+ )
+
+ // The document isn't loaded and isn't being loaded. Load it.
+ awaitingGetSnapshot[docName] = [callback]
+ return db.getSnapshot(docName, function (error, data, dbMeta) {
+ if (error) {
+ return add(docName, error)
+ }
+
+ const type = types[data.type]
+ if (!type) {
+ console.warn(`Type '${data.type}' missing`)
+ return callback('Type not found')
+ }
+ data.type = type
+
+ const committedVersion = data.v
+
+ // The server can close without saving the most recent document snapshot.
+ // In this case, there are extra ops which need to be applied before
+ // returning the snapshot.
+ return getOpsInternal(docName, data.v, null, function (error, ops) {
+ if (error) {
+ return callback(error)
+ }
+
+ if (ops.length > 0) {
+ console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`)
+
+ try {
+ for (const op of Array.from(ops)) {
+ data.snapshot = type.apply(data.snapshot, op.op)
+ data.v++
+ }
+ } catch (e) {
+ // This should never happen - it indicates that what's in the
+ // database is invalid.
+ console.error(`Op data invalid for ${docName}: ${e.stack}`)
+ return callback('Op data invalid')
+ }
+ }
+
+ model.emit('load', docName, data)
+ return add(docName, error, data, committedVersion, ops, dbMeta)
+ })
+ })
+ }
+
+ // This makes sure the cache contains a document. If the doc cache doesn't contain
+ // a document, it is loaded from the database and stored.
+ //
+ // Documents are stored so long as either:
+ // - They have been accessed within the past #{PERIOD}
+ // - At least one client has the document open
+ var refreshReapingTimeout = function (docName) {
+ const doc = docs[docName]
+ if (!doc) {
+ return
+ }
+
+ // I want to let the clients list be updated before this is called.
+ return process.nextTick(function () {
+ // This is an awkward way to find out the number of clients on a document. If this
+ // causes performance issues, add a numClients field to the document.
+ //
+ // The first check is because it's possible that between refreshReapingTimeout being called and this
+ // event being fired, someone called delete() on the document and hence the doc is something else now.
+ if (
+ doc === docs[docName] &&
+ doc.eventEmitter.listeners('op').length === 0 &&
+ (db || options.forceReaping) &&
+ doc.opQueue.busy === false
+ ) {
+ let reapTimer
+ clearTimeout(doc.reapTimer)
+ return (doc.reapTimer = reapTimer =
+ setTimeout(
+ () =>
+ tryWriteSnapshot(docName, function () {
+ // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
+ // in the middle of applying an operation, don't reap.
+ if (
+ docs[docName].reapTimer === reapTimer &&
+ doc.opQueue.busy === false
+ ) {
+ return delete docs[docName]
+ }
+ }),
+ options.reapTime
+ ))
+ }
+ })
+ }
+
  // Persist the current snapshot of docName to the database, if one is needed.
  //
  // callback (optional) is called with (error). It is called immediately (with
  // no error) when there is no db, the doc is not loaded, or the snapshot is
  // already up to date; it is called with a string error if another snapshot
  // write is already in flight for this doc.
  var tryWriteSnapshot = function (docName, callback) {
    // No database configured - nothing to persist.
    if (!db) {
      return typeof callback === 'function' ? callback() : undefined
    }

    const doc = docs[docName]

    // The doc is closed
    if (!doc) {
      return typeof callback === 'function' ? callback() : undefined
    }

    // The document is already saved.
    if (doc.committedVersion === doc.v) {
      return typeof callback === 'function' ? callback() : undefined
    }

    // Only one snapshot write may be in flight per document.
    if (doc.snapshotWriteLock) {
      return typeof callback === 'function'
        ? callback('Another snapshot write is in progress')
        : undefined
    }

    doc.snapshotWriteLock = true

    // Optional stats hook: options.stats.writeSnapshot(), if provided.
    __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot())

    // Fall back to a no-op writer if the db doesn't implement writeSnapshot.
    const writeSnapshot =
      (db != null ? db.writeSnapshot : undefined) ||
      ((docName, docData, dbMeta, callback) => callback())

    // Capture the doc state now; the doc may advance while the write runs.
    const data = {
      v: doc.v,
      meta: doc.meta,
      snapshot: doc.snapshot,
      // The database doesn't know about object types.
      type: doc.type.name,
    }

    // Commit snapshot.
    return writeSnapshot(docName, data, doc.dbMeta, function (error, dbMeta) {
      doc.snapshotWriteLock = false

      // We have to use data.v here because the version in the doc could
      // have been updated between the call to writeSnapshot() and now.
      // NOTE(review): committedVersion is updated even when `error` is set,
      // marking a failed write as committed - presumably inherited from
      // upstream ShareJS; verify before changing.
      doc.committedVersion = data.v
      doc.dbMeta = dbMeta

      return typeof callback === 'function' ? callback(error) : undefined
    })
  }
+
+ // *** Model interface methods
+
+ // Create a new document.
+ //
+ // data should be {snapshot, type, [meta]}. The version of a new document is 0.
+ this.create = function (docName, type, meta, callback) {
+ if (typeof meta === 'function') {
+ ;[meta, callback] = Array.from([{}, meta])
+ }
+
+ if (docName.match(/\//)) {
+ return typeof callback === 'function'
+ ? callback('Invalid document name')
+ : undefined
+ }
+ if (docs[docName]) {
+ return typeof callback === 'function'
+ ? callback('Document already exists')
+ : undefined
+ }
+
+ if (typeof type === 'string') {
+ type = types[type]
+ }
+ if (!type) {
+ return typeof callback === 'function'
+ ? callback('Type not found')
+ : undefined
+ }
+
+ const data = {
+ snapshot: type.create(),
+ type: type.name,
+ meta: meta || {},
+ v: 0,
+ }
+
+ const done = function (error, dbMeta) {
+ // dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something.
+ if (error) {
+ return typeof callback === 'function' ? callback(error) : undefined
+ }
+
+ // From here on we'll store the object version of the type name.
+ data.type = type
+ add(docName, null, data, 0, [], dbMeta)
+ model.emit('create', docName, data)
+ return typeof callback === 'function' ? callback() : undefined
+ }
+
+ if (db) {
+ return db.create(docName, data, done)
+ } else {
+ return done()
+ }
+ }
+
+ // Perminantly deletes the specified document.
+ // If listeners are attached, they are removed.
+ //
+ // The callback is called with (error) if there was an error. If error is null / undefined, the
+ // document was deleted.
+ //
+ // WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the
+ // deletion. Subsequent op submissions will fail).
+ this.delete = function (docName, callback) {
+ const doc = docs[docName]
+
+ if (doc) {
+ clearTimeout(doc.reapTimer)
+ delete docs[docName]
+ }
+
+ const done = function (error) {
+ if (!error) {
+ model.emit('delete', docName)
+ }
+ return typeof callback === 'function' ? callback(error) : undefined
+ }
+
+ if (db) {
+ return db.delete(docName, doc != null ? doc.dbMeta : undefined, done)
+ } else {
+ return done(!doc ? 'Document does not exist' : undefined)
+ }
+ }
+
  // This gets all operations from [start...end]. (That is, its not inclusive.)
  //
  // end can be null. This means 'get me all ops from start'.
  //
  // Each op returned is in the form {op:o, meta:m, v:version}.
  //
  // Callback is called with (error, [ops])
  //
  // If the document does not exist, getOps doesn't necessarily return an error. This is because
  // its awkward to figure out whether or not the document exists for things
  // like the redis database backend. I guess its a bit gross having this inconsistant
  // with the other DB calls, but its certainly convenient.
  //
  // Use getVersion() to determine if a document actually exists, if thats what you're
  // after.
  this.getOps = getOps = function (docName, start, end, callback) {
    // getOps will only use the op cache if its there. It won't fill the op cache in.
    if (!(start >= 0)) {
      throw new Error('start must be 0+')
    }

    // Allow getOps(docName, start, callback): end defaults to null (= head).
    if (typeof end === 'function') {
      ;[end, callback] = Array.from([null, end])
    }

    const ops = docs[docName] != null ? docs[docName].ops : undefined

    if (ops) {
      const version = docs[docName].v

      // Ops contains an array of ops. The last op in the list is the last op applied
      if (end == null) {
        end = version
      }
      // Clamp so an over-long range never reads past the head version.
      start = Math.min(start, end)

      if (start === end) {
        return callback(null, [])
      }

      // Base is the version number of the oldest op we have cached
      const base = version - ops.length

      // If the database is null, we'll trim to the ops we do have and hope thats enough.
      // NOTE(review): this is a strict `db === null` check; a model constructed
      // with db undefined falls through to getOpsInternal below - confirm that
      // is the intended behaviour.
      if (start >= base || db === null) {
        refreshReapingTimeout(docName)
        if (options.stats != null) {
          options.stats.cacheHit('getOps')
        }

        return callback(null, ops.slice(start - base, end - base))
      }
    }

    if (options.stats != null) {
      options.stats.cacheMiss('getOps')
    }

    // Cache miss: defer to getOpsInternal (defined earlier in this file).
    return getOpsInternal(docName, start, end, callback)
  }
+
+ // Gets the snapshot data for the specified document.
+ // getSnapshot(docName, callback)
+ // Callback is called with (error, {v: , type: , snapshot: , meta: })
+ this.getSnapshot = (docName, callback) =>
+ load(docName, (error, doc) =>
+ callback(
+ error,
+ doc
+ ? { v: doc.v, type: doc.type, snapshot: doc.snapshot, meta: doc.meta }
+ : undefined
+ )
+ )
+
+ // Gets the latest version # of the document.
+ // getVersion(docName, callback)
+ // callback is called with (error, version).
+ this.getVersion = (docName, callback) =>
+ load(docName, (error, doc) =>
+ callback(error, doc != null ? doc.v : undefined)
+ )
+
+ // Apply an op to the specified document.
+ // The callback is passed (error, applied version #)
+ // opData = {op:op, v:v, meta:metadata}
+ //
+ // Ops are queued before being applied so that the following code applies op C before op B:
+ // model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB
+ // model.applyOp 'doc', OPC
+ this.applyOp = (
+ docName,
+ opData,
+ callback // All the logic for this is in makeOpQueue, above.
+ ) =>
+ load(docName, function (error, doc) {
+ if (error) {
+ return callback(error)
+ }
+
+ return process.nextTick(() =>
+ doc.opQueue(opData, function (error, newVersion) {
+ refreshReapingTimeout(docName)
+ return typeof callback === 'function'
+ ? callback(error, newVersion)
+ : undefined
+ })
+ )
+ })
+
+ // TODO: store (some) metadata in DB
+ // TODO: op and meta should be combineable in the op that gets sent
+ this.applyMetaOp = function (docName, metaOpData, callback) {
+ const { path, value } = metaOpData.meta
+
+ if (!isArray(path)) {
+ return typeof callback === 'function'
+ ? callback('path should be an array')
+ : undefined
+ }
+
+ return load(docName, function (error, doc) {
+ if (error != null) {
+ return typeof callback === 'function' ? callback(error) : undefined
+ } else {
+ let applied = false
+ switch (path[0]) {
+ case 'shout':
+ doc.eventEmitter.emit('op', metaOpData)
+ applied = true
+ break
+ }
+
+ if (applied) {
+ model.emit('applyMetaOp', docName, path, value)
+ }
+ return typeof callback === 'function'
+ ? callback(null, doc.v)
+ : undefined
+ }
+ })
+ }
+
  // Listen to all ops from the specified version. If version is in the past, all
  // ops since that version are sent immediately to the listener.
  //
  // The callback is called once the listener is attached, but before any ops have been passed
  // to the listener.
  //
  // This will _not_ edit the document metadata.
  //
  // If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour
  // might change in a future version.
  //
  // version is the document version at which the document is opened. It can be left out if you want to open
  // the document at the most recent version.
  //
  // listener is called with (opData) each time an op is applied.
  //
  // callback(error, openedVersion)
  this.listen = function (docName, version, listener, callback) {
    // Allow listen(docName, listener, callback): open at the latest version.
    if (typeof version === 'function') {
      ;[version, listener, callback] = Array.from([null, version, listener])
    }

    return load(docName, function (error, doc) {
      if (error) {
        return typeof callback === 'function' ? callback(error) : undefined
      }

      // An attached listener keeps the doc cached: cancel any pending reap.
      clearTimeout(doc.reapTimer)

      if (version != null) {
        // Catchup: fetch the ops since `version`, attach, ack, then replay.
        return getOps(docName, version, null, function (error, data) {
          if (error) {
            return typeof callback === 'function' ? callback(error) : undefined
          }

          // Attach and ack before replaying the backlog.
          doc.eventEmitter.on('op', listener)
          if (typeof callback === 'function') {
            callback(null, version)
          }
          return (() => {
            const result = []
            for (const op of Array.from(data)) {
              var needle
              listener(op)

              // The listener may well remove itself during the catchup phase. If this happens, break early.
              // This is done in a quite inefficient way. (O(n) where n = #listeners on doc)
              if (
                ((needle = listener),
                !Array.from(doc.eventEmitter.listeners('op')).includes(needle))
              ) {
                break
              } else {
                result.push(undefined)
              }
            }
            return result
          })()
        })
      } else {
        // Version is null / undefined. Just add the listener.
        doc.eventEmitter.on('op', listener)
        return typeof callback === 'function'
          ? callback(null, doc.v)
          : undefined
      }
    })
  }
+
+ // Remove a listener for a particular document.
+ //
+ // removeListener(docName, listener)
+ //
+ // This is synchronous.
+ this.removeListener = function (docName, listener) {
+ // The document should already be loaded.
+ const doc = docs[docName]
+ if (!doc) {
+ throw new Error('removeListener called but document not loaded')
+ }
+
+ doc.eventEmitter.removeListener('op', listener)
+ return refreshReapingTimeout(docName)
+ }
+
  // Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed -
  // sharejs will happily replay uncommitted ops when documents are re-opened anyway.
  //
  // callback (optional) is called with no arguments once every dirty doc has
  // been written (or immediately if there is no db / nothing is dirty).
  this.flush = function (callback) {
    if (!db) {
      return typeof callback === 'function' ? callback() : undefined
    }

    let pendingWrites = 0

    for (const docName in docs) {
      const doc = docs[docName]
      // Only docs with ops applied since the last committed snapshot are dirty.
      if (doc.committedVersion < doc.v) {
        pendingWrites++
        // I'm hoping writeSnapshot will always happen in another thread.
        // The nextTick defers the decrement past this loop, so pendingWrites
        // cannot hit 0 before every dirty doc has been counted.
        tryWriteSnapshot(docName, () =>
          process.nextTick(function () {
            pendingWrites--
            if (pendingWrites === 0) {
              return typeof callback === 'function' ? callback() : undefined
            }
          })
        )
      }
    }

    // If nothing was queued, terminate immediately.
    if (pendingWrites === 0) {
      return typeof callback === 'function' ? callback() : undefined
    }
  }
+
+ // Close the database connection. This is needed so nodejs can shut down cleanly.
+ this.closeDb = function () {
+ __guardMethod__(db, 'close', o => o.close())
+ return (db = null)
+ }
+}
+
// Model inherits from EventEmitter.
// NOTE(review): assigning a single `new EventEmitter()` instance as the
// prototype means any state the EventEmitter constructor sets up lives on the
// shared prototype. Object.create(EventEmitter.prototype) is the safer idiom;
// left as-is to preserve the decaffeinated upstream behaviour - confirm before
// changing.
Model.prototype = new EventEmitter()
+
// decaffeinate helper for CoffeeScript's soaked method call (obj?.method?()):
// invoke `transform(obj, methodName)` only when `obj` is non-null and has a
// callable property `methodName`; otherwise return undefined.
function __guardMethod__(obj, methodName, transform) {
  const callable =
    typeof obj !== 'undefined' &&
    obj !== null &&
    typeof obj[methodName] === 'function'
  return callable ? transform(obj, methodName) : undefined
}
diff --git a/services/document-updater/app/js/sharejs/server/syncqueue.js b/services/document-updater/app/js/sharejs/server/syncqueue.js
new file mode 100644
index 0000000000..7b83c5b436
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/server/syncqueue.js
@@ -0,0 +1,60 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// A synchronous processing queue. The queue calls process on the arguments,
+// ensuring that process() is only executing once at a time.
+//
+// process(data, callback) _MUST_ eventually call its callback.
+//
+// Example:
+//
+// queue = require 'syncqueue'
+//
+// fn = queue (data, callback) ->
+// asyncthing data, ->
+// callback(321)
+//
+// fn(1)
+// fn(2)
+// fn(3, (result) -> console.log(result))
+//
+// ^--- async thing will only be running once at any time.
+
+module.exports = function (process) {
+ if (typeof process !== 'function') {
+ throw new Error('process is not a function')
+ }
+ const queue = []
+
+ const enqueue = function (data, callback) {
+ queue.push([data, callback])
+ return flush()
+ }
+
+ enqueue.busy = false
+
+ var flush = function () {
+ if (enqueue.busy || queue.length === 0) {
+ return
+ }
+
+ enqueue.busy = true
+ const [data, callback] = Array.from(queue.shift())
+ return process(data, function (...result) {
+ // TODO: Make this not use varargs - varargs are really slow.
+ enqueue.busy = false
+ // This is called after busy = false so a user can check if enqueue.busy is set in the callback.
+ if (callback) {
+ callback.apply(null, result)
+ }
+ return flush()
+ })
+ }
+
+ return enqueue
+}
diff --git a/services/document-updater/app/js/sharejs/simple.js b/services/document-updater/app/js/sharejs/simple.js
new file mode 100644
index 0000000000..41f7eed285
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/simple.js
@@ -0,0 +1,54 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// This is a really simple OT type. Its not compiled with the web client, but it could be.
+//
+// Its mostly included for demonstration purposes and its used in a lot of unit tests.
+//
+// This defines a really simple text OT type which only allows inserts. (No deletes).
+//
+// Ops look like:
+// {position:#, text:"asdf"}
+//
+// Document snapshots look like:
+// {str:string}
+
+module.exports = {
+ // The name of the OT type. The type is stored in types[type.name]. The name can be
+ // used in place of the actual type in all the API methods.
+ name: 'simple',
+
+ // Create a new document snapshot
+ create() {
+ return { str: '' }
+ },
+
+ // Apply the given op to the document snapshot. Returns the new snapshot.
+ //
+ // The original snapshot should not be modified.
+ apply(snapshot, op) {
+ if (!(op.position >= 0 && op.position <= snapshot.str.length)) {
+ throw new Error('Invalid position')
+ }
+
+ let { str } = snapshot
+ str = str.slice(0, op.position) + op.text + str.slice(op.position)
+ return { str }
+ },
+
+ // transform op1 by op2. Return transformed version of op1.
+ // sym describes the symmetry of the op. Its 'left' or 'right' depending on whether the
+ // op being transformed comes from the client or the server.
+ transform(op1, op2, sym) {
+ let pos = op1.position
+ if (op2.position < pos || (op2.position === pos && sym === 'left')) {
+ pos += op2.text.length
+ }
+
+ return { position: pos, text: op1.text }
+ },
+}
diff --git a/services/document-updater/app/js/sharejs/syncqueue.js b/services/document-updater/app/js/sharejs/syncqueue.js
new file mode 100644
index 0000000000..7b83c5b436
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/syncqueue.js
@@ -0,0 +1,60 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// A synchronous processing queue. The queue calls process on the arguments,
+// ensuring that process() is only executing once at a time.
+//
+// process(data, callback) _MUST_ eventually call its callback.
+//
+// Example:
+//
+// queue = require 'syncqueue'
+//
+// fn = queue (data, callback) ->
+// asyncthing data, ->
+// callback(321)
+//
+// fn(1)
+// fn(2)
+// fn(3, (result) -> console.log(result))
+//
+// ^--- async thing will only be running once at any time.
+
// Build a serialising queue around `process`. The returned function accepts
// (data, callback); items are handed to `process` strictly one at a time, in
// FIFO order. `process(data, callback)` MUST eventually call its callback.
// NOTE(review): the parameter shadows Node's global `process` inside this
// module scope - intentional upstream naming, but worth knowing when editing.
module.exports = function (process) {
  if (typeof process !== 'function') {
    throw new Error('process is not a function')
  }
  // FIFO backlog of [data, callback] pairs.
  const queue = []

  const enqueue = function (data, callback) {
    queue.push([data, callback])
    return flush()
  }

  // True while an item is being processed; readable by callers.
  enqueue.busy = false

  var flush = function () {
    if (enqueue.busy || queue.length === 0) {
      return
    }

    enqueue.busy = true
    const [data, callback] = Array.from(queue.shift())
    return process(data, function (...result) {
      // TODO: Make this not use varargs - varargs are really slow.
      enqueue.busy = false
      // This is called after busy = false so a user can check if enqueue.busy is set in the callback.
      if (callback) {
        callback.apply(null, result)
      }
      return flush()
    })
  }

  return enqueue
}
diff --git a/services/document-updater/app/js/sharejs/text-api.js b/services/document-updater/app/js/sharejs/text-api.js
new file mode 100644
index 0000000000..aa2beef446
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/text-api.js
@@ -0,0 +1,52 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
// Text document API for the plain-text OT type ('text'). Mixed into document
// objects of that type; `this` is the document wrapper, which supplies
// this.snapshot, this.submitOp() and this.on()/this.emit().

let text
if (typeof WEB !== 'undefined' && WEB !== null) {
  // In the browser bundle the type registry is collected on `exports`,
  // matching text-composable-api.js / text-tp2-api.js. Previously this branch
  // was missing, leaving `text` undefined and crashing on `text.api = ...`.
  text = exports.types.text
} else {
  text = require('./text')
}

text.api = {
  provides: { text: true },

  // The number of characters in the string
  getLength() {
    return this.snapshot.length
  },

  // Get the text contents of a document
  getText() {
    return this.snapshot
  },

  // Insert `text` at `pos`. Submits the op and returns it.
  insert(pos, text, callback) {
    const op = [{ p: pos, i: text }]

    this.submitOp(op, callback)
    return op
  },

  // Delete `length` characters at `pos`. Submits the op and returns it.
  del(pos, length, callback) {
    const op = [{ p: pos, d: this.snapshot.slice(pos, pos + length) }]

    this.submitOp(op, callback)
    return op
  },

  // Translate incoming remote ops into 'insert'/'delete' events on the doc.
  _register() {
    return this.on('remoteop', function (op) {
      return Array.from(op).map(component =>
        component.i !== undefined
          ? this.emit('insert', component.p, component.i)
          : this.emit('delete', component.p, component.d)
      )
    })
  },
}
diff --git a/services/document-updater/app/js/sharejs/text-composable-api.js b/services/document-updater/app/js/sharejs/text-composable-api.js
new file mode 100644
index 0000000000..122e119ae4
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/text-composable-api.js
@@ -0,0 +1,76 @@
+/* eslint-disable
+ no-undef,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
// Text document API for the 'text-composable' OT type. Mixed into document
// objects of that type; `this` is the document wrapper, which supplies
// this.snapshot, this.submitOp() and this.on()/this.emit().

let type
if (typeof WEB !== 'undefined' && WEB !== null) {
  // Browser bundle: the type registry is collected on `exports`.
  type = exports.types['text-composable']
} else {
  type = require('./text-composable')
}

type.api = {
  provides: { text: true },

  // The number of characters in the string
  getLength() {
    return this.snapshot.length
  },

  // Get the text contents of a document
  getText() {
    return this.snapshot
  },

  // Insert `text` at `pos`. Submits the normalized op and returns it.
  insert(pos, text, callback) {
    const op = type.normalize([pos, { i: text }, this.snapshot.length - pos])

    this.submitOp(op, callback)
    return op
  },

  // Delete `length` characters at `pos`. Submits the normalized op and returns it.
  del(pos, length, callback) {
    const op = type.normalize([
      pos,
      { d: this.snapshot.slice(pos, pos + length) },
      this.snapshot.length - pos - length,
    ])

    this.submitOp(op, callback)
    return op
  },

  // Translate incoming remote ops into 'insert'/'delete' events,
  // tracking the current document position while walking the op.
  _register() {
    return this.on('remoteop', function (op) {
      let pos = 0
      return (() => {
        const result = []
        for (const component of Array.from(op)) {
          if (typeof component === 'number') {
            // Skip: advance the position.
            result.push((pos += component))
          } else if (component.i !== undefined) {
            this.emit('insert', pos, component.i)
            result.push((pos += component.i.length))
          } else {
            // delete
            // We don't increment pos, because the position
            // specified is after the delete has happened.
            result.push(this.emit('delete', pos, component.d))
          }
        }
        return result
      })()
    })
  },
}
diff --git a/services/document-updater/app/js/sharejs/text-composable.js b/services/document-updater/app/js/sharejs/text-composable.js
new file mode 100644
index 0000000000..58c4df2b28
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/text-composable.js
@@ -0,0 +1,399 @@
+/* eslint-disable
+ no-cond-assign,
+ no-return-assign,
+ no-undef,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// An alternate composable implementation for text. This is much closer
+// to the implementation used by google wave.
+//
+// Ops are lists of components which iterate over the whole document.
+// Components are either:
+// A number N: Skip N characters in the original document
+// {i:'str'}: Insert 'str' at the current position in the document
+// {d:'str'}: Delete 'str', which appears at the current position in the document
+//
+// Eg: [3, {i:'hi'}, 5, {d:'internet'}]
+//
+// Snapshots are strings.
+
// Forward declaration; assigned below alongside exports._makeAppend.
let makeAppend
// Debug hooks, disabled by default. Re-point at require('util') to trace.
const p = function () {} // require('util').debug
const i = function () {} // require('util').inspect

// In the browser bundle (WEB defined) the type is collected on a local object;
// on the server it is published as this file's module exports.
const exports = typeof WEB !== 'undefined' && WEB !== null ? {} : module.exports

exports.name = 'text-composable'

// A fresh document is the empty string.
exports.create = () => ''
+
+// -------- Utility methods
+
// Validate an op. Throws unless it is an array of components where every
// component is a non-empty insert {i}, a non-empty delete {d}, or a positive
// skip number, with no two adjacent skip numbers (those should be merged).
// Returns an array of the components (decaffeinate artifact; callers ignore it).
const checkOp = function (op) {
  if (!Array.isArray(op)) {
    throw new Error('Op must be an array of components')
  }
  // Previous component, used to reject adjacent skips.
  let last = null
  return (() => {
    const result = []
    for (const c of Array.from(op)) {
      if (typeof c === 'object') {
        // Insert/delete component: must carry non-empty text in i or d.
        if (
          (c.i == null || !(c.i.length > 0)) &&
          (c.d == null || !(c.d.length > 0))
        ) {
          throw new Error(`Invalid op component: ${i(c)}`)
        }
      } else {
        if (typeof c !== 'number') {
          throw new Error('Op components must be objects or numbers')
        }
        if (!(c > 0)) {
          throw new Error('Skip components must be a positive number')
        }
        if (typeof last === 'number') {
          throw new Error('Adjacent skip components should be added')
        }
      }

      result.push((last = c))
    }
    return result
  })()
}
+
+// Makes a function for appending components to a given op.
+// Exported for the randomOpGenerator.
+exports._makeAppend = makeAppend = op =>
+ function (component) {
+ if (component === 0 || component.i === '' || component.d === '') {
+ } else if (op.length === 0) {
+ return op.push(component)
+ } else if (
+ typeof component === 'number' &&
+ typeof op[op.length - 1] === 'number'
+ ) {
+ return (op[op.length - 1] += component)
+ } else if (component.i != null && op[op.length - 1].i != null) {
+ return (op[op.length - 1].i += component.i)
+ } else if (component.d != null && op[op.length - 1].d != null) {
+ return (op[op.length - 1].d += component.d)
+ } else {
+ return op.push(component)
+ }
+ }
+
+// checkOp op
+
// Makes 2 functions for taking components from the start of an op, and for peeking
// at the next op that could be taken.
const makeTake = function (op) {
  // The index of the next component to take
  let idx = 0
  // The offset into the component
  let offset = 0

  // Take up to length n from the front of op. If n is null, take the next
  // op component. If indivisableField == 'd', delete components won't be separated.
  // If indivisableField == 'i', insert components won't be separated.
  // Returns a number (skip), an {i}/{d} component, or null at end of op.
  const take = function (n, indivisableField) {
    let c
    // The whole op has been consumed.
    if (idx === op.length) {
      return null
    }
    // assert.notStrictEqual op.length, i, 'The op is too short to traverse the document'

    if (typeof op[idx] === 'number') {
      // Skip component: either consume the rest of it...
      if (n == null || op[idx] - offset <= n) {
        c = op[idx] - offset
        ++idx
        offset = 0
        return c
      } else {
        // ...or take n and leave the remainder for the next call.
        offset += n
        return n
      }
    } else {
      // Take from the string
      const field = op[idx].i ? 'i' : 'd'
      c = {}
      if (
        n == null ||
        op[idx][field].length - offset <= n ||
        field === indivisableField
      ) {
        // Take all remaining text (always, when this field is indivisible).
        c[field] = op[idx][field].slice(offset)
        ++idx
        offset = 0
      } else {
        // Split the text: take n characters and advance the offset.
        c[field] = op[idx][field].slice(offset, offset + n)
        offset += n
      }
      return c
    }
  }

  // Peek at the component take() would read next (ignores the partial offset).
  const peekType = () => op[idx]

  return [take, peekType]
}
+
// Length of a single op component: a skip counts its own numeric value, an
// insert or delete counts the length of its text.
const componentLength = function (component) {
  if (typeof component === 'number') {
    return component
  }
  return component.i != null ? component.i.length : component.d.length
}
+
+// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate
+// adjacent inserts and deletes.
+exports.normalize = function (op) {
+ const newOp = []
+ const append = makeAppend(newOp)
+ for (const component of Array.from(op)) {
+ append(component)
+ }
+ return newOp
+}
+
+// Apply the op to the string. Returns the new string.
+exports.apply = function (str, op) {
+ p(`Applying ${i(op)} to '${str}'`)
+ if (typeof str !== 'string') {
+ throw new Error('Snapshot should be a string')
+ }
+ checkOp(op)
+
+ const pos = 0
+ const newDoc = []
+
+ for (const component of Array.from(op)) {
+ if (typeof component === 'number') {
+ if (component > str.length) {
+ throw new Error('The op is too long for this document')
+ }
+ newDoc.push(str.slice(0, component))
+ str = str.slice(component)
+ } else if (component.i != null) {
+ newDoc.push(component.i)
+ } else {
+ if (component.d !== str.slice(0, component.d.length)) {
+ throw new Error(
+ `The deleted text '${
+ component.d
+ }' doesn't match the next characters in the document '${str.slice(
+ 0,
+ component.d.length
+ )}'`
+ )
+ }
+ str = str.slice(component.d.length)
+ }
+ }
+
+ if (str !== '') {
+ throw new Error("The applied op doesn't traverse the entire document")
+ }
+
+ return newDoc.join('')
+}
+
+// transform op1 by op2. Return transformed version of op1.
+// op1 and op2 are unchanged by transform.
+exports.transform = function (op, otherOp, side) {
+ let component
+ if (side !== 'left' && side !== 'right') {
+ throw new Error(`side (${side} must be 'left' or 'right'`)
+ }
+
+ checkOp(op)
+ checkOp(otherOp)
+ const newOp = []
+
+ const append = makeAppend(newOp)
+ const [take, peek] = Array.from(makeTake(op))
+
+ for (component of Array.from(otherOp)) {
+ var chunk, length
+ if (typeof component === 'number') {
+ // Skip
+ length = component
+ while (length > 0) {
+ chunk = take(length, 'i')
+ if (chunk === null) {
+ throw new Error(
+ 'The op traverses more elements than the document has'
+ )
+ }
+
+ append(chunk)
+ if (typeof chunk !== 'object' || chunk.i == null) {
+ length -= componentLength(chunk)
+ }
+ }
+ } else if (component.i != null) {
+ // Insert
+ if (side === 'left') {
+ // The left insert should go first.
+ const o = peek()
+ if (o != null ? o.i : undefined) {
+ append(take())
+ }
+ }
+
+ // Otherwise, skip the inserted text.
+ append(component.i.length)
+ } else {
+ // Delete.
+ // assert.ok component.d
+ ;({ length } = component.d)
+ while (length > 0) {
+ chunk = take(length, 'i')
+ if (chunk === null) {
+ throw new Error(
+ 'The op traverses more elements than the document has'
+ )
+ }
+
+ if (typeof chunk === 'number') {
+ length -= chunk
+ } else if (chunk.i != null) {
+ append(chunk)
+ } else {
+ // assert.ok chunk.d
+ // The delete is unnecessary now.
+ length -= chunk.d.length
+ }
+ }
+ }
+ }
+
+ // Append extras from op1
+ while ((component = take())) {
+ if ((component != null ? component.i : undefined) == null) {
+ throw new Error(`Remaining fragments in the op: ${i(component)}`)
+ }
+ append(component)
+ }
+
+ return newOp
+}
+
// Compose 2 ops into 1 op.
// op2's skips and deletes measure the document as it stands *after* op1, so
// op1's deletes pass straight through without being consumed by op2's lengths.
exports.compose = function (op1, op2) {
  let component
  p(`COMPOSE ${i(op1)} + ${i(op2)}`)
  checkOp(op1)
  checkOp(op2)

  const result = []

  const append = makeAppend(result)
  const [take, _] = Array.from(makeTake(op1))

  // Walk op2, consuming the matching extent of op1 as we go.
  for (component of Array.from(op2)) {
    var chunk, length
    if (typeof component === 'number') {
      // Skip
      length = component
      while (length > 0) {
        chunk = take(length, 'd')
        if (chunk === null) {
          throw new Error(
            'The op traverses more elements than the document has'
          )
        }

        append(chunk)
        // op1's deletes don't occupy length in the post-op1 document, so only
        // non-delete chunks count against the skip being matched.
        if (typeof chunk !== 'object' || chunk.d == null) {
          length -= componentLength(chunk)
        }
      }
    } else if (component.i != null) {
      // Insert
      append({ i: component.i })
    } else {
      // Delete
      let offset = 0
      while (offset < component.d.length) {
        chunk = take(component.d.length - offset, 'd')
        if (chunk === null) {
          throw new Error(
            'The op traverses more elements than the document has'
          )
        }

        // If its delete, append it. If its skip, drop it and decrease length. If its insert, check the strings match, drop it and decrease length.
        if (typeof chunk === 'number') {
          append({ d: component.d.slice(offset, offset + chunk) })
          offset += chunk
        } else if (chunk.i != null) {
          if (component.d.slice(offset, offset + chunk.i.length) !== chunk.i) {
            throw new Error("The deleted text doesn't match the inserted text")
          }
          offset += chunk.i.length
          // The ops cancel each other out.
        } else {
          // Delete
          append(chunk)
        }
      }
    }
  }

  // Append extras from op1
  while ((component = take())) {
    if ((component != null ? component.d : undefined) == null) {
      throw new Error(`Trailing stuff in op1 ${i(component)}`)
    }
    append(component)
  }

  return result
}
+
// Invert a single component: skips invert to themselves, an insert becomes a
// delete of the same text, and a delete becomes the matching insert.
const invertComponent = function (c) {
  if (typeof c === 'number') {
    return c
  }
  return c.i != null ? { d: c.i } : { i: c.d }
}
+
+// Invert an op
+exports.invert = function (op) {
+ const result = []
+ const append = makeAppend(result)
+
+ for (const component of Array.from(op)) {
+ append(invertComponent(component))
+ }
+
+ return result
+}
+
// In the browser, also register this type on the global window.ot.types
// registry so non-bundled clients can find it.
if (typeof window !== 'undefined' && window !== null) {
  if (!window.ot) {
    window.ot = {}
  }
  if (!window.ot.types) {
    window.ot.types = {}
  }
  // NOTE(review): registered under 'text' although exports.name is
  // 'text-composable' - presumably intentional for the demo bundle, but
  // verify it doesn't clobber the plain text type.
  window.ot.types.text = exports
}
diff --git a/services/document-updater/app/js/sharejs/text-tp2-api.js b/services/document-updater/app/js/sharejs/text-tp2-api.js
new file mode 100644
index 0000000000..8ffbd60c50
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/text-tp2-api.js
@@ -0,0 +1,133 @@
+/* eslint-disable
+ no-undef,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// Text document API for text-tp2
+
+let type
+if (typeof WEB !== 'undefined' && WEB !== null) {
+ type = exports.types['text-tp2']
+} else {
+ type = require('./text-tp2')
+}
+
+const { _takeDoc: takeDoc, _append: append } = type
+
+const appendSkipChars = (op, doc, pos, maxlength) =>
+ (() => {
+ const result = []
+ while (
+ (maxlength === undefined || maxlength > 0) &&
+ pos.index < doc.data.length
+ ) {
+ const part = takeDoc(doc, pos, maxlength, true)
+ if (maxlength !== undefined && typeof part === 'string') {
+ maxlength -= part.length
+ }
+ result.push(append(op, part.length || part))
+ }
+ return result
+ })()
+
+type.api = {
+ provides: { text: true },
+
+ // The number of characters in the string
+ getLength() {
+ return this.snapshot.charLength
+ },
+
+ // Flatten a document into a string
+ getText() {
+ const strings = Array.from(this.snapshot.data).filter(
+ elem => typeof elem === 'string'
+ )
+ return strings.join('')
+ },
+
+ insert(pos, text, callback) {
+ if (pos === undefined) {
+ pos = 0
+ }
+
+ const op = []
+ const docPos = { index: 0, offset: 0 }
+
+ appendSkipChars(op, this.snapshot, docPos, pos)
+ append(op, { i: text })
+ appendSkipChars(op, this.snapshot, docPos)
+
+ this.submitOp(op, callback)
+ return op
+ },
+
+ del(pos, length, callback) {
+ const op = []
+ const docPos = { index: 0, offset: 0 }
+
+ appendSkipChars(op, this.snapshot, docPos, pos)
+
+ while (length > 0) {
+ const part = takeDoc(this.snapshot, docPos, length, true)
+ if (typeof part === 'string') {
+ append(op, { d: part.length })
+ length -= part.length
+ } else {
+ append(op, part)
+ }
+ }
+
+ appendSkipChars(op, this.snapshot, docPos)
+
+ this.submitOp(op, callback)
+ return op
+ },
+
+ _register() {
+ // Interpret received ops + generate more detailed events for them
+ return this.on('remoteop', function (op, snapshot) {
+ let textPos = 0
+ const docPos = { index: 0, offset: 0 }
+
+ for (const component of Array.from(op)) {
+ var part, remainder
+ if (typeof component === 'number') {
+ // Skip
+ remainder = component
+ while (remainder > 0) {
+ part = takeDoc(snapshot, docPos, remainder)
+ if (typeof part === 'string') {
+ textPos += part.length
+ }
+ remainder -= part.length || part
+ }
+ } else if (component.i !== undefined) {
+ // Insert
+ if (typeof component.i === 'string') {
+ this.emit('insert', textPos, component.i)
+ textPos += component.i.length
+ }
+ } else {
+ // Delete
+ remainder = component.d
+ while (remainder > 0) {
+ part = takeDoc(snapshot, docPos, remainder)
+ if (typeof part === 'string') {
+ this.emit('delete', textPos, part)
+ }
+ remainder -= part.length || part
+ }
+ }
+ }
+ })
+ },
+}
diff --git a/services/document-updater/app/js/sharejs/text-tp2.js b/services/document-updater/app/js/sharejs/text-tp2.js
new file mode 100644
index 0000000000..67a70439c7
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/text-tp2.js
@@ -0,0 +1,497 @@
+/* eslint-disable
+ no-cond-assign,
+ no-return-assign,
+ no-undef,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// A TP2 implementation of text, following this spec:
+// http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README
+//
+// A document is made up of a string and a set of tombstones inserted throughout
+// the string. For example, 'some ', (2 tombstones), 'string'.
+//
+// This is encoded in a document as: {s:'some string', t:[5, -2, 6]}
+//
+// Ops are lists of components which iterate over the whole document.
+// Components are either:
+// N: Skip N characters in the original document
+// {i:'str'}: Insert 'str' at the current position in the document
+// {i:N}: Insert N tombstones at the current position in the document
+// {d:N}: Delete (tombstone) N characters at the current position in the document
+//
+// Eg: [3, {i:'hi'}, 5, {d:8}]
+//
+// Snapshots are lists with characters and tombstones. Characters are stored in strings
+// and adjacent tombstones are flattened into numbers.
+//
+// Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters)
+// would be represented by a document snapshot of ['Hello ', 5, 'world']
+
+let append, appendDoc, takeDoc
+var type = {
+ name: 'text-tp2',
+ tp2: true,
+ create() {
+ return { charLength: 0, totalLength: 0, positionCache: [], data: [] }
+ },
+ serialize(doc) {
+ if (!doc.data) {
+ throw new Error('invalid doc snapshot')
+ }
+ return doc.data
+ },
+ deserialize(data) {
+ const doc = type.create()
+ doc.data = data
+
+ for (const component of Array.from(data)) {
+ if (typeof component === 'string') {
+ doc.charLength += component.length
+ doc.totalLength += component.length
+ } else {
+ doc.totalLength += component
+ }
+ }
+
+ return doc
+ },
+}
+
+const checkOp = function (op) {
+ if (!Array.isArray(op)) {
+ throw new Error('Op must be an array of components')
+ }
+ let last = null
+ return (() => {
+ const result = []
+ for (const c of Array.from(op)) {
+ if (typeof c === 'object') {
+ if (c.i !== undefined) {
+ if (
+ (typeof c.i !== 'string' || !(c.i.length > 0)) &&
+ (typeof c.i !== 'number' || !(c.i > 0))
+ ) {
+ throw new Error('Inserts must insert a string or a +ive number')
+ }
+ } else if (c.d !== undefined) {
+ if (typeof c.d !== 'number' || !(c.d > 0)) {
+ throw new Error('Deletes must be a +ive number')
+ }
+ } else {
+ throw new Error('Operation component must define .i or .d')
+ }
+ } else {
+ if (typeof c !== 'number') {
+ throw new Error('Op components must be objects or numbers')
+ }
+ if (!(c > 0)) {
+ throw new Error('Skip components must be a positive number')
+ }
+ if (typeof last === 'number') {
+ throw new Error('Adjacent skip components should be combined')
+ }
+ }
+
+ result.push((last = c))
+ }
+ return result
+ })()
+}
+
+// Take the next part from the specified position in a document snapshot.
+// position = {index, offset}. It will be updated.
+type._takeDoc = takeDoc = function (
+ doc,
+ position,
+ maxlength,
+ tombsIndivisible
+) {
+ if (position.index >= doc.data.length) {
+ throw new Error('Operation goes past the end of the document')
+ }
+
+ const part = doc.data[position.index]
+ // peel off data[0]
+ const result =
+ typeof part === 'string'
+ ? maxlength !== undefined
+ ? part.slice(position.offset, position.offset + maxlength)
+ : part.slice(position.offset)
+ : maxlength === undefined || tombsIndivisible
+ ? part - position.offset
+ : Math.min(maxlength, part - position.offset)
+
+ const resultLen = result.length || result
+
+ if ((part.length || part) - position.offset > resultLen) {
+ position.offset += resultLen
+ } else {
+ position.index++
+ position.offset = 0
+ }
+
+ return result
+}
+
+// Append a part to the end of a document
+type._appendDoc = appendDoc = function (doc, p) {
+ if (p === 0 || p === '') {
+ return
+ }
+
+ if (typeof p === 'string') {
+ doc.charLength += p.length
+ doc.totalLength += p.length
+ } else {
+ doc.totalLength += p
+ }
+
+ const { data } = doc
+ if (data.length === 0) {
+ data.push(p)
+ } else if (typeof data[data.length - 1] === typeof p) {
+ data[data.length - 1] += p
+ } else {
+ data.push(p)
+ }
+}
+
+// Apply the op to the document. The document is not modified in the process.
+type.apply = function (doc, op) {
+ if (
+ doc.totalLength === undefined ||
+ doc.charLength === undefined ||
+ doc.data.length === undefined
+ ) {
+ throw new Error('Snapshot is invalid')
+ }
+
+ checkOp(op)
+
+ const newDoc = type.create()
+ const position = { index: 0, offset: 0 }
+
+ for (const component of Array.from(op)) {
+ var part, remainder
+ if (typeof component === 'number') {
+ remainder = component
+ while (remainder > 0) {
+ part = takeDoc(doc, position, remainder)
+
+ appendDoc(newDoc, part)
+ remainder -= part.length || part
+ }
+ } else if (component.i !== undefined) {
+ appendDoc(newDoc, component.i)
+ } else if (component.d !== undefined) {
+ remainder = component.d
+ while (remainder > 0) {
+ part = takeDoc(doc, position, remainder)
+ remainder -= part.length || part
+ }
+ appendDoc(newDoc, component.d)
+ }
+ }
+
+ return newDoc
+}
+
+// Append an op component to the end of the specified op.
+// Exported for the randomOpGenerator.
+type._append = append = function (op, component) {
+ if (
+ component === 0 ||
+ component.i === '' ||
+ component.i === 0 ||
+ component.d === 0
+ ) {
+ } else if (op.length === 0) {
+ return op.push(component)
+ } else {
+ const last = op[op.length - 1]
+ if (typeof component === 'number' && typeof last === 'number') {
+ return (op[op.length - 1] += component)
+ } else if (
+ component.i !== undefined &&
+ last.i != null &&
+ typeof last.i === typeof component.i
+ ) {
+ return (last.i += component.i)
+ } else if (component.d !== undefined && last.d != null) {
+ return (last.d += component.d)
+ } else {
+ return op.push(component)
+ }
+ }
+}
+
+// Makes 2 functions for taking components from the start of an op, and for peeking
+// at the next op that could be taken.
+const makeTake = function (op) {
+ // The index of the next component to take
+ let index = 0
+ // The offset into the component
+ let offset = 0
+
+ // Take up to length maxlength from the op. If maxlength is not defined, there is no max.
+ // If insertsIndivisible is true, inserts (& insert tombstones) won't be separated.
+ //
+ // Returns null when op is fully consumed.
+ const take = function (maxlength, insertsIndivisible) {
+ let current
+ if (index === op.length) {
+ return null
+ }
+
+ const e = op[index]
+ if (
+ typeof (current = e) === 'number' ||
+ typeof (current = e.i) === 'number' ||
+ (current = e.d) !== undefined
+ ) {
+ let c
+ if (
+ maxlength == null ||
+ current - offset <= maxlength ||
+ (insertsIndivisible && e.i !== undefined)
+ ) {
+ // Return the rest of the current element.
+ c = current - offset
+ ++index
+ offset = 0
+ } else {
+ offset += maxlength
+ c = maxlength
+ }
+ if (e.i !== undefined) {
+ return { i: c }
+ } else if (e.d !== undefined) {
+ return { d: c }
+ } else {
+ return c
+ }
+ } else {
+ // Take from the inserted string
+ let result
+ if (
+ maxlength == null ||
+ e.i.length - offset <= maxlength ||
+ insertsIndivisible
+ ) {
+ result = { i: e.i.slice(offset) }
+ ++index
+ offset = 0
+ } else {
+ result = { i: e.i.slice(offset, offset + maxlength) }
+ offset += maxlength
+ }
+ return result
+ }
+ }
+
+ const peekType = () => op[index]
+
+ return [take, peekType]
+}
+
+// Find and return the length of an op component
+const componentLength = function (component) {
+ if (typeof component === 'number') {
+ return component
+ } else if (typeof component.i === 'string') {
+ return component.i.length
+ } else {
+ // This should work because c.d and c.i must be +ive.
+ return component.d || component.i
+ }
+}
+
+// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate
+// adjacent inserts and deletes.
+type.normalize = function (op) {
+ const newOp = []
+ for (const component of Array.from(op)) {
+ append(newOp, component)
+ }
+ return newOp
+}
+
+// This is a helper method to transform and prune. goForwards is true for transform, false for prune.
+const transformer = function (op, otherOp, goForwards, side) {
+ let component
+ checkOp(op)
+ checkOp(otherOp)
+ const newOp = []
+
+ const [take, peek] = Array.from(makeTake(op))
+
+ for (component of Array.from(otherOp)) {
+ var chunk
+ let length = componentLength(component)
+
+ if (component.i !== undefined) {
+ // Insert text or tombs
+ if (goForwards) {
+ // transform - insert skips over inserted parts
+ if (side === 'left') {
+ // The left insert should go first.
+ while (__guard__(peek(), x => x.i) !== undefined) {
+ append(newOp, take())
+ }
+ }
+
+ // In any case, skip the inserted text.
+ append(newOp, length)
+ } else {
+ // Prune. Remove skips for inserts.
+ while (length > 0) {
+ chunk = take(length, true)
+
+ if (chunk === null) {
+ throw new Error('The transformed op is invalid')
+ }
+ if (chunk.d !== undefined) {
+ throw new Error(
+ 'The transformed op deletes locally inserted characters - it cannot be purged of the insert.'
+ )
+ }
+
+ if (typeof chunk === 'number') {
+ length -= chunk
+ } else {
+ append(newOp, chunk)
+ }
+ }
+ }
+ } else {
+ // Skip or delete
+ while (length > 0) {
+ chunk = take(length, true)
+ if (chunk === null) {
+ throw new Error(
+ 'The op traverses more elements than the document has'
+ )
+ }
+
+ append(newOp, chunk)
+ if (!chunk.i) {
+ length -= componentLength(chunk)
+ }
+ }
+ }
+ }
+
+ // Append extras from op1
+ while ((component = take())) {
+ if (component.i === undefined) {
+ throw new Error(`Remaining fragments in the op: ${component}`)
+ }
+ append(newOp, component)
+ }
+
+ return newOp
+}
+
+// transform op1 by op2. Return transformed version of op1.
+// op1 and op2 are unchanged by transform.
+// side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op.
+type.transform = function (op, otherOp, side) {
+ if (side !== 'left' && side !== 'right') {
+ throw new Error(`side (${side}) should be 'left' or 'right'`)
+ }
+ return transformer(op, otherOp, true, side)
+}
+
+// Prune is the inverse of transform.
+type.prune = (op, otherOp) => transformer(op, otherOp, false)
+
+// Compose 2 ops into 1 op.
+type.compose = function (op1, op2) {
+ let component
+ if (op1 === null || op1 === undefined) {
+ return op2
+ }
+
+ checkOp(op1)
+ checkOp(op2)
+
+ const result = []
+
+ const [take, _] = Array.from(makeTake(op1))
+
+ for (component of Array.from(op2)) {
+ var chunk, length
+ if (typeof component === 'number') {
+ // Skip
+ // Just copy from op1.
+ length = component
+ while (length > 0) {
+ chunk = take(length)
+ if (chunk === null) {
+ throw new Error(
+ 'The op traverses more elements than the document has'
+ )
+ }
+
+ append(result, chunk)
+ length -= componentLength(chunk)
+ }
+ } else if (component.i !== undefined) {
+ // Insert
+ append(result, { i: component.i })
+ } else {
+ // Delete
+ length = component.d
+ while (length > 0) {
+ chunk = take(length)
+ if (chunk === null) {
+ throw new Error(
+ 'The op traverses more elements than the document has'
+ )
+ }
+
+ const chunkLength = componentLength(chunk)
+ if (chunk.i !== undefined) {
+ append(result, { i: chunkLength })
+ } else {
+ append(result, { d: chunkLength })
+ }
+
+ length -= chunkLength
+ }
+ }
+ }
+
+ // Append extras from op1
+ while ((component = take())) {
+ if (component.i === undefined) {
+ throw new Error(`Remaining fragments in op1: ${component}`)
+ }
+ append(result, component)
+ }
+
+ return result
+}
+
+if (typeof WEB !== 'undefined' && WEB !== null) {
+ exports.types['text-tp2'] = type
+} else {
+ module.exports = type
+}
+
+function __guard__(value, transform) {
+ return typeof value !== 'undefined' && value !== null
+ ? transform(value)
+ : undefined
+}
diff --git a/services/document-updater/app/js/sharejs/text.js b/services/document-updater/app/js/sharejs/text.js
new file mode 100644
index 0000000000..54f7094c45
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/text.js
@@ -0,0 +1,314 @@
+/* eslint-disable
+ camelcase,
+ no-return-assign,
+ no-undef,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// A simple text implementation
+//
+// Operations are lists of components.
+// Each component either inserts or deletes at a specified position in the document.
+//
+// Components are either:
+// {i:'str', p:100}: Insert 'str' at position 100 in the document
+// {d:'str', p:100}: Delete 'str' at position 100 in the document
+//
+// Components in an operation are executed sequentially, so the position of components
+// assumes previous components have already executed.
+//
+// Eg: This op:
+// [{i:'abc', p:0}]
+// is equivalent to this op:
+// [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}]
+
+// NOTE: The global scope here is shared with other sharejs files when built with closure.
+// Be careful what ends up in your namespace.
+
+let append, transformComponent
+const text = {}
+
+text.name = 'text'
+
+text.create = () => ''
+
+const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos)
+
+const checkValidComponent = function (c) {
+ if (typeof c.p !== 'number') {
+ throw new Error('component missing position field')
+ }
+
+ const i_type = typeof c.i
+ const d_type = typeof c.d
+ if (!((i_type === 'string') ^ (d_type === 'string'))) {
+ throw new Error('component needs an i or d field')
+ }
+
+ if (!(c.p >= 0)) {
+ throw new Error('position cannot be negative')
+ }
+}
+
+const checkValidOp = function (op) {
+ for (const c of Array.from(op)) {
+ checkValidComponent(c)
+ }
+ return true
+}
+
+text.apply = function (snapshot, op) {
+ checkValidOp(op)
+ for (const component of Array.from(op)) {
+ if (component.i != null) {
+ snapshot = strInject(snapshot, component.p, component.i)
+ } else {
+ const deleted = snapshot.slice(
+ component.p,
+ component.p + component.d.length
+ )
+ if (component.d !== deleted) {
+ throw new Error(
+ `Delete component '${component.d}' does not match deleted text '${deleted}'`
+ )
+ }
+ snapshot =
+ snapshot.slice(0, component.p) +
+ snapshot.slice(component.p + component.d.length)
+ }
+ }
+
+ return snapshot
+}
+
+// Exported for use by the random op generator.
+//
+// For simplicity, this version of append does not compress adjacent inserts and deletes of
+// the same text. It would be nice to change that at some stage.
+text._append = append = function (newOp, c) {
+ if (c.i === '' || c.d === '') {
+ return
+ }
+ if (newOp.length === 0) {
+ return newOp.push(c)
+ } else {
+ const last = newOp[newOp.length - 1]
+
+ // Compose the insert into the previous insert if possible
+ if (
+ last.i != null &&
+ c.i != null &&
+ last.p <= c.p &&
+ c.p <= last.p + last.i.length
+ ) {
+ return (newOp[newOp.length - 1] = {
+ i: strInject(last.i, c.p - last.p, c.i),
+ p: last.p,
+ })
+ } else if (
+ last.d != null &&
+ c.d != null &&
+ c.p <= last.p &&
+ last.p <= c.p + c.d.length
+ ) {
+ return (newOp[newOp.length - 1] = {
+ d: strInject(c.d, last.p - c.p, last.d),
+ p: c.p,
+ })
+ } else {
+ return newOp.push(c)
+ }
+ }
+}
+
+text.compose = function (op1, op2) {
+ checkValidOp(op1)
+ checkValidOp(op2)
+
+ const newOp = op1.slice()
+ for (const c of Array.from(op2)) {
+ append(newOp, c)
+ }
+
+ return newOp
+}
+
+// Attempt to compress the op components together 'as much as possible'.
+// This implementation preserves order and preserves create/delete pairs.
+text.compress = op => text.compose([], op)
+
+text.normalize = function (op) {
+ const newOp = []
+
+ // Normalize should allow ops which are a single (unwrapped) component:
+ // {i:'asdf', p:23}.
+ // There's no good way to test if something is an array:
+ // http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/
+ // so this is probably the least bad solution.
+ if (op.i != null || op.p != null) {
+ op = [op]
+ }
+
+ for (const c of Array.from(op)) {
+ if (c.p == null) {
+ c.p = 0
+ }
+ append(newOp, c)
+ }
+
+ return newOp
+}
+
+// This helper method transforms a position by an op component.
+//
+// If c is an insert, insertAfter specifies whether the transform
+// is pushed after the insert (true) or before it (false).
+//
+// insertAfter is optional for deletes.
+const transformPosition = function (pos, c, insertAfter) {
+ if (c.i != null) {
+ if (c.p < pos || (c.p === pos && insertAfter)) {
+ return pos + c.i.length
+ } else {
+ return pos
+ }
+ } else {
+ // I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length))
+ // but I think its harder to read that way, and it compiles using ternary operators anyway
+ // so its no slower written like this.
+ if (pos <= c.p) {
+ return pos
+ } else if (pos <= c.p + c.d.length) {
+ return c.p
+ } else {
+ return pos - c.d.length
+ }
+ }
+}
+
+// Helper method to transform a cursor position as a result of an op.
+//
+// Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position
+// is pushed after an insert (true) or before it (false).
+text.transformCursor = function (position, op, side) {
+ const insertAfter = side === 'right'
+ for (const c of Array.from(op)) {
+ position = transformPosition(position, c, insertAfter)
+ }
+ return position
+}
+
+// Transform an op component by another op component. Asymmetric.
+// The result will be appended to destination.
+//
+// exported for use in JSON type
+text._tc = transformComponent = function (dest, c, otherC, side) {
+ checkValidOp([c])
+ checkValidOp([otherC])
+
+ if (c.i != null) {
+ append(dest, {
+ i: c.i,
+ p: transformPosition(c.p, otherC, side === 'right'),
+ })
+ } else {
+ // Delete
+ if (otherC.i != null) {
+ // delete vs insert
+ let s = c.d
+ if (c.p < otherC.p) {
+ append(dest, { d: s.slice(0, otherC.p - c.p), p: c.p })
+ s = s.slice(otherC.p - c.p)
+ }
+ if (s !== '') {
+ append(dest, { d: s, p: c.p + otherC.i.length })
+ }
+ } else {
+ // Delete vs delete
+ if (c.p >= otherC.p + otherC.d.length) {
+ append(dest, { d: c.d, p: c.p - otherC.d.length })
+ } else if (c.p + c.d.length <= otherC.p) {
+ append(dest, c)
+ } else {
+ // They overlap somewhere.
+ const newC = { d: '', p: c.p }
+ if (c.p < otherC.p) {
+ newC.d = c.d.slice(0, otherC.p - c.p)
+ }
+ if (c.p + c.d.length > otherC.p + otherC.d.length) {
+ newC.d += c.d.slice(otherC.p + otherC.d.length - c.p)
+ }
+
+ // This is entirely optional - just for a check that the deleted
+ // text in the two ops matches
+ const intersectStart = Math.max(c.p, otherC.p)
+ const intersectEnd = Math.min(
+ c.p + c.d.length,
+ otherC.p + otherC.d.length
+ )
+ const cIntersect = c.d.slice(intersectStart - c.p, intersectEnd - c.p)
+ const otherIntersect = otherC.d.slice(
+ intersectStart - otherC.p,
+ intersectEnd - otherC.p
+ )
+ if (cIntersect !== otherIntersect) {
+ throw new Error(
+ 'Delete ops delete different text in the same region of the document'
+ )
+ }
+
+ if (newC.d !== '') {
+ // This could be rewritten similarly to insert v delete, above.
+ newC.p = transformPosition(newC.p, otherC)
+ append(dest, newC)
+ }
+ }
+ }
+ }
+
+ return dest
+}
+
+const invertComponent = function (c) {
+ if (c.i != null) {
+ return { d: c.i, p: c.p }
+ } else {
+ return { i: c.d, p: c.p }
+ }
+}
+
+// No need to use append for invert, because the components won't be able to
+// cancel with one another.
+text.invert = op =>
+ Array.from(op.slice().reverse()).map(c => invertComponent(c))
+
+if (typeof WEB !== 'undefined' && WEB !== null) {
+ if (!exports.types) {
+ exports.types = {}
+ }
+
+ // This is kind of awful - come up with a better way to hook this helper code up.
+ bootstrapTransform(text, transformComponent, checkValidOp, append)
+
+ // [] is used to prevent closure from renaming types.text
+ exports.types.text = text
+} else {
+ module.exports = text
+
+ // The text type really shouldn't need this - it should be possible to define
+ // an efficient transform function by making a sort of transform map and passing each
+ // op component through it.
+ require('./helpers').bootstrapTransform(
+ text,
+ transformComponent,
+ checkValidOp,
+ append
+ )
+}
diff --git a/services/document-updater/app/js/sharejs/types/count.js b/services/document-updater/app/js/sharejs/types/count.js
new file mode 100644
index 0000000000..246f6b7985
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/count.js
@@ -0,0 +1,37 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment]
+
+exports.name = 'count'
+exports.create = () => 1
+
+exports.apply = function (snapshot, op) {
+ const [v, inc] = Array.from(op)
+ if (snapshot !== v) {
+ throw new Error(`Op ${v} != snapshot ${snapshot}`)
+ }
+ return snapshot + inc
+}
+
+// transform op1 by op2. Return transformed version of op1.
+exports.transform = function (op1, op2) {
+ if (op1[0] !== op2[0]) {
+ throw new Error(`Op1 ${op1[0]} != op2 ${op2[0]}`)
+ }
+ return [op1[0] + op2[1], op1[1]]
+}
+
+exports.compose = function (op1, op2) {
+ if (op1[0] + op1[1] !== op2[0]) {
+ throw new Error(`Op1 ${op1} + 1 != op2 ${op2}`)
+ }
+ return [op1[0], op1[1] + op2[1]]
+}
+
+exports.generateRandomOp = doc => [[doc, 1], doc + 1]
diff --git a/services/document-updater/app/js/sharejs/types/helpers.js b/services/document-updater/app/js/sharejs/types/helpers.js
new file mode 100644
index 0000000000..b1ab3c2a26
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/helpers.js
@@ -0,0 +1,116 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// These methods let you build a transform function from a transformComponent function
+// for OT types like text and JSON in which operations are lists of components
+// and transforming them requires N^2 work.
+
+// Add transform and transformX functions for an OT type which has transformComponent defined.
+// transformComponent(destination array, component, other component, side)
+let bootstrapTransform
+exports._bt = bootstrapTransform = function (
+ type,
+ transformComponent,
+ checkValidOp,
+ append
+) {
+ let transformX
+ const transformComponentX = function (left, right, destLeft, destRight) {
+ transformComponent(destLeft, left, right, 'left')
+ return transformComponent(destRight, right, left, 'right')
+ }
+
+ // Transforms rightOp by leftOp. Returns [leftOp', rightOp']
+ type.transformX =
+ type.transformX =
+ transformX =
+ function (leftOp, rightOp) {
+ checkValidOp(leftOp)
+ checkValidOp(rightOp)
+
+ const newRightOp = []
+
+ for (let rightComponent of Array.from(rightOp)) {
+ // Generate newLeftOp by composing leftOp by rightComponent
+ const newLeftOp = []
+
+ let k = 0
+ while (k < leftOp.length) {
+ var l
+ const nextC = []
+ transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC)
+ k++
+
+ if (nextC.length === 1) {
+ rightComponent = nextC[0]
+ } else if (nextC.length === 0) {
+ for (l of Array.from(leftOp.slice(k))) {
+ append(newLeftOp, l)
+ }
+ rightComponent = null
+ break
+ } else {
+ // Recurse.
+ const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC))
+ for (l of Array.from(l_)) {
+ append(newLeftOp, l)
+ }
+ for (const r of Array.from(r_)) {
+ append(newRightOp, r)
+ }
+ rightComponent = null
+ break
+ }
+ }
+
+ if (rightComponent != null) {
+ append(newRightOp, rightComponent)
+ }
+ leftOp = newLeftOp
+ }
+
+ return [leftOp, newRightOp]
+ }
+
+ // Transforms op with specified type ('left' or 'right') by otherOp.
+ return (type.transform = type.transform =
+ function (op, otherOp, type) {
+ let _
+ if (type !== 'left' && type !== 'right') {
+ throw new Error("type must be 'left' or 'right'")
+ }
+
+ if (otherOp.length === 0) {
+ return op
+ }
+
+ // TODO: Benchmark with and without this line. I _think_ it'll make a big difference...?
+ if (op.length === 1 && otherOp.length === 1) {
+ return transformComponent([], op[0], otherOp[0], type)
+ }
+
+ if (type === 'left') {
+ let left
+ ;[left, _] = Array.from(transformX(op, otherOp))
+ return left
+ } else {
+ let right
+ ;[_, right] = Array.from(transformX(otherOp, op))
+ return right
+ }
+ })
+}
+
+if (typeof WEB === 'undefined') {
+ exports.bootstrapTransform = bootstrapTransform
+}
diff --git a/services/document-updater/app/js/sharejs/types/index.js b/services/document-updater/app/js/sharejs/types/index.js
new file mode 100644
index 0000000000..7e3d6bbf26
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/index.js
@@ -0,0 +1,25 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+
+const register = function (file) {
+ const type = require(file)
+ exports[type.name] = type
+ try {
+ return require(`${file}-api`)
+ } catch (error) {}
+}
+
+// Import all the built-in types.
+register('./simple')
+register('./count')
+
+register('./text')
+register('./text-composable')
+register('./text-tp2')
+
+register('./json')
diff --git a/services/document-updater/app/js/sharejs/types/json-api.js b/services/document-updater/app/js/sharejs/types/json-api.js
new file mode 100644
index 0000000000..f429f2b397
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/json-api.js
@@ -0,0 +1,357 @@
+/* eslint-disable
+ camelcase,
+ no-undef,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// API for JSON OT
+
// Server-side (CommonJS) build: load the JSON OT type so the API below can
// be attached onto it. In the browser build the type lives on exports.
let json
if (typeof WEB === 'undefined') {
  json = require('./json')
}

if (typeof WEB !== 'undefined' && WEB !== null) {
  // Browser build: wrap extendDoc so any new doc extension method is also
  // mirrored onto SubDoc instances.
  const { extendDoc } = exports
  exports.extendDoc = function (name, fn) {
    SubDoc.prototype[name] = fn
    return extendDoc(name, fn)
  }
}
+
// Normalise a varargs path: callers may pass either the keys as separate
// arguments, or a single array containing all the keys.
const depath = function (path) {
  const isSingleArray = path.length === 1 && path[0].constructor === Array
  return isSingleArray ? path[0] : path
}
+
// A live view onto one subtree of a JSON document. Every operation simply
// forwards to the owning document, scoped by this view's path.
class SubDoc {
  constructor(doc, path) {
    this.doc = doc
    this.path = path
  }

  // Narrow the view to a child path (varargs or a single array of keys).
  at(...childPath) {
    return this.doc.at(this.path.concat(depath(childPath)))
  }

  // Read the current value under this path.
  get() {
    return this.doc.getAt(this.path)
  }

  // Replace the value here (objects and lists).
  set(value, callback) {
    return this.doc.setAt(this.path, value, callback)
  }

  // Insert into the string or list viewed here.
  insert(position, value, callback) {
    return this.doc.insertAt(this.path, position, value, callback)
  }

  // Delete `count` characters starting at `position` (strings).
  del(position, count, callback) {
    return this.doc.deleteTextAt(this.path, count, position, callback)
  }

  // Remove the element at this path (objects and lists).
  remove(callback) {
    return this.doc.removeAt(this.path, callback)
  }

  // Append to the end of the list/string viewed here.
  push(value, callback) {
    return this.insert(this.get().length, value, callback)
  }

  // Move a list element from one index to another.
  move(from, to, callback) {
    return this.doc.moveAt(this.path, from, to, callback)
  }

  // Add `amount` to the number at this path.
  add(amount, callback) {
    return this.doc.addAt(this.path, amount, callback)
  }

  // Listen for an event scoped to this path; returns the listener handle.
  on(event, callback) {
    return this.doc.addListener(this.path, event, callback)
  }

  removeListener(listener) {
    return this.doc.removeListener(listener)
  }

  // text API compatibility
  getLength() {
    return this.get().length
  }

  getText() {
    return this.get()
  }
}
+
// Walk `path` down from the snapshot and return the final container plus
// the key addressing the target inside it. The snapshot is wrapped in a
// { data: ... } shell so that an empty path still yields a (container, key)
// pair. Throws 'bad path' when an intermediate element is missing.
const traverse = function (snapshot, path) {
  let elem = { data: snapshot }
  let key = 'data'
  for (const step of path) {
    elem = elem[key]
    key = step
    if (typeof elem === 'undefined') {
      throw new Error('bad path')
    }
  }
  return { elem, key }
}
+
// Do two paths address exactly the same location?
const pathEquals = (p1, p2) =>
  p1.length === p2.length && p1.every((part, i) => part === p2[i])
+
// The JSON document API mixed into client docs. Paths are arrays of keys;
// mutators build a single op component against this.snapshot and submit it
// via submitOp. `_register` keeps path-scoped listeners up to date as ops
// arrive.
json.api = {
  provides: { json: true },

  // Return a SubDoc view scoped to the given path.
  at(...path) {
    return new SubDoc(this, depath(path))
  },

  // The whole document.
  get() {
    return this.snapshot
  },
  // Replace the whole document.
  set(value, cb) {
    return this.setAt([], value, cb)
  },

  // Read the value at a path.
  getAt(path) {
    const { elem, key } = traverse(this.snapshot, path)
    return elem[key]
  },

  // Set (insert or replace) the value at a path, emitting a list or object
  // op depending on the container's type.
  setAt(path, value, cb) {
    const { elem, key } = traverse(this.snapshot, path)
    const op = { p: path }
    if (elem.constructor === Array) {
      op.li = value
      // Include the replaced element so the op is invertible.
      if (typeof elem[key] !== 'undefined') {
        op.ld = elem[key]
      }
    } else if (typeof elem === 'object') {
      op.oi = value
      if (typeof elem[key] !== 'undefined') {
        op.od = elem[key]
      }
    } else {
      throw new Error('bad path')
    }
    return this.submitOp([op], cb)
  },

  // Delete the element at a path (objects and lists).
  removeAt(path, cb) {
    const { elem, key } = traverse(this.snapshot, path)
    if (typeof elem[key] === 'undefined') {
      throw new Error('no element at that path')
    }
    const op = { p: path }
    if (elem.constructor === Array) {
      op.ld = elem[key]
    } else if (typeof elem === 'object') {
      op.od = elem[key]
    } else {
      throw new Error('bad path')
    }
    return this.submitOp([op], cb)
  },

  // Insert `value` at `pos` inside the list or string at `path`.
  // NOTE(review): if the target is neither a list nor a string the op is
  // submitted with no li/si field — presumably invalid; confirm upstream.
  insertAt(path, pos, value, cb) {
    const { elem, key } = traverse(this.snapshot, path)
    const op = { p: path.concat(pos) }
    if (elem[key].constructor === Array) {
      op.li = value
    } else if (typeof elem[key] === 'string') {
      op.si = value
    }
    return this.submitOp([op], cb)
  },

  // Move a list element from index `from` to index `to`.
  moveAt(path, from, to, cb) {
    const op = [{ p: path.concat(from), lm: to }]
    return this.submitOp(op, cb)
  },

  // Add `amount` to the number at `path`.
  addAt(path, amount, cb) {
    const op = [{ p: path, na: amount }]
    return this.submitOp(op, cb)
  },

  // Delete `length` characters at `pos` in the string at `path`. The
  // deleted text is copied into the op so it is invertible.
  deleteTextAt(path, length, pos, cb) {
    const { elem, key } = traverse(this.snapshot, path)
    const op = [{ p: path.concat(pos), sd: elem[key].slice(pos, pos + length) }]
    return this.submitOp(op, cb)
  },

  // Register a path-scoped listener; returns the handle used to remove it.
  addListener(path, event, cb) {
    const l = { path, event, cb }
    this._listeners.push(l)
    return l
  },
  removeListener(l) {
    const i = this._listeners.indexOf(l)
    if (i < 0) {
      return false
    }
    this._listeners.splice(i, 1)
    return true
  },
  // Wire up the internal listener bookkeeping: on every change, listener
  // paths are re-mapped through the incoming op; on every remote op, the
  // matching listeners are invoked.
  _register() {
    this._listeners = []
    this.on('change', function (op) {
      return (() => {
        const result = []
        for (const c of Array.from(op)) {
          var i
          if (c.na !== undefined || c.si !== undefined || c.sd !== undefined) {
            // no change to structure
            continue
          }
          var to_remove = []
          for (i = 0; i < this._listeners.length; i++) {
            // Transform a dummy op by the incoming op to work out what
            // should happen to the listener.
            const l = this._listeners[i]
            const dummy = { p: l.path, na: 0 }
            const xformed = this.type.transformComponent([], dummy, c, 'left')
            if (xformed.length === 0) {
              // The op was transformed to noop, so we should delete the listener.
              to_remove.push(i)
            } else if (xformed.length === 1) {
              // The op remained, so grab its new path into the listener.
              l.path = xformed[0].p
            } else {
              throw new Error(
                "Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components."
              )
            }
          }
          // Remove from the highest index down so earlier splices don't
          // shift the indices still pending removal.
          to_remove.sort((a, b) => b - a)
          result.push(
            (() => {
              const result1 = []
              for (i of Array.from(to_remove)) {
                result1.push(this._listeners.splice(i, 1))
              }
              return result1
            })()
          )
        }
        return result
      })()
    })
    return this.on('remoteop', function (op) {
      return (() => {
        const result = []
        for (var c of Array.from(op)) {
          // For na the path addresses the number itself; for everything
          // else the listener path is the parent container.
          var match_path =
            c.na === undefined ? c.p.slice(0, c.p.length - 1) : c.p
          result.push(
            (() => {
              const result1 = []
              for (const { path, event, cb } of Array.from(this._listeners)) {
                var common
                if (pathEquals(path, match_path)) {
                  // Exact match: dispatch by the event the listener asked for.
                  switch (event) {
                    case 'insert':
                      if (c.li !== undefined && c.ld === undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.li))
                      } else if (c.oi !== undefined && c.od === undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.oi))
                      } else if (c.si !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.si))
                      } else {
                        result1.push(undefined)
                      }
                      break
                    case 'delete':
                      if (c.li === undefined && c.ld !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.ld))
                      } else if (c.oi === undefined && c.od !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.od))
                      } else if (c.sd !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.sd))
                      } else {
                        result1.push(undefined)
                      }
                      break
                    case 'replace':
                      if (c.li !== undefined && c.ld !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.ld, c.li))
                      } else if (c.oi !== undefined && c.od !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.od, c.oi))
                      } else {
                        result1.push(undefined)
                      }
                      break
                    case 'move':
                      if (c.lm !== undefined) {
                        result1.push(cb(c.p[c.p.length - 1], c.lm))
                      } else {
                        result1.push(undefined)
                      }
                      break
                    case 'add':
                      if (c.na !== undefined) {
                        result1.push(cb(c.na))
                      } else {
                        result1.push(undefined)
                      }
                      break
                    default:
                      result1.push(undefined)
                  }
                } else if (
                  (common = this.type.commonPath(match_path, path)) != null
                ) {
                  // Not an exact match, but the op touches a descendant of
                  // the listener path: fire 'child op' listeners.
                  if (event === 'child op') {
                    if (
                      match_path.length === path.length &&
                      path.length === common
                    ) {
                      throw new Error(
                        "paths match length and have commonality, but aren't equal?"
                      )
                    }
                    const child_path = c.p.slice(common + 1)
                    result1.push(cb(child_path, c))
                  } else {
                    result1.push(undefined)
                  }
                } else {
                  result1.push(undefined)
                }
              }
              return result1
            })()
          )
        }
        return result
      })()
    })
  },
}
diff --git a/services/document-updater/app/js/sharejs/types/json.js b/services/document-updater/app/js/sharejs/types/json.js
new file mode 100644
index 0000000000..14c3cbb519
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/json.js
@@ -0,0 +1,630 @@
+/* eslint-disable
+ no-return-assign,
+ no-undef,
+ no-useless-catch,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// This is the implementation of the JSON OT type.
+//
+// Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations
+
// The plain-text OT type: string components inside JSON documents are
// delegated to it (see transformComponent). In the browser build it comes
// off exports.types; on the server it is required directly.
let text
if (typeof WEB !== 'undefined' && WEB !== null) {
  ;({ text } = exports.types)
} else {
  text = require('./text')
}

const json = {}

json.name = 'json'

// A new JSON document starts out as null.
json.create = () => null
+
// Build the inverse of a single op component: each insert becomes the
// matching delete (and vice versa), a number-add is negated, and a list
// move is reversed.
json.invertComponent = function (c) {
  const inverted = { p: c.p }
  const swaps = [
    ['si', 'sd'],
    ['sd', 'si'],
    ['oi', 'od'],
    ['od', 'oi'],
    ['li', 'ld'],
    ['ld', 'li'],
  ]
  for (const [from, to] of swaps) {
    if (c[from] !== undefined) {
      inverted[to] = c[from]
    }
  }
  if (c.na !== undefined) {
    inverted.na = -c.na
  }
  if (c.lm !== undefined) {
    // Move the element back from its destination to its origin.
    inverted.lm = c.p[c.p.length - 1]
    inverted.p = c.p.slice(0, c.p.length - 1).concat([c.lm])
  }
  return inverted
}
+
// Invert an op: reverse the component order and invert each component.
json.invert = op =>
  op
    .slice()
    .reverse()
    .map(c => json.invertComponent(c))

// All op shapes are currently accepted; validation is a no-op.
json.checkValidOp = function (op) {}

// Portable array test via Object.prototype.toString.
const isArray = o => Object.prototype.toString.call(o) === '[object Array]'

// Assert that a referenced element is a list.
json.checkList = function (elem) {
  if (isArray(elem)) {
    return
  }
  throw new Error('Referenced element not a list')
}

// Assert that a referenced element is a plain object.
json.checkObj = function (elem) {
  if (elem.constructor === Object) {
    return
  }
  throw new Error(
    `Referenced element not an object (it was ${JSON.stringify(elem)})`
  )
}
+
// Apply `op` to `snapshot` and return the resulting document. Both the op
// and the snapshot are cloned first, so neither input is mutated; the
// snapshot is wrapped in a `container` object so components addressing the
// document root still have a parent to write into.
json.apply = function (snapshot, op) {
  json.checkValidOp(op)
  op = clone(op)

  const container = { data: clone(snapshot) }

  try {
    for (let i = 0; i < op.length; i++) {
      const c = op[i]
      let parent = null
      let parentkey = null
      let elem = container
      let key = 'data'

      // Walk the component's path. Afterwards `elem` is the element the
      // final path step addresses the *inside* of, and `key` is that step;
      // `parent`/`parentkey` point one level up (needed for string ops,
      // which must replace the whole string in its parent).
      for (const p of Array.from(c.p)) {
        parent = elem
        parentkey = key
        elem = elem[key]
        key = p

        if (parent == null) {
          throw new Error('Path invalid')
        }
      }

      if (c.na !== undefined) {
        // Number add
        if (typeof elem[key] !== 'number') {
          throw new Error('Referenced element not a number')
        }
        elem[key] += c.na
      } else if (c.si !== undefined) {
        // String insert
        if (typeof elem !== 'string') {
          throw new Error(
            `Referenced element not a string (it was ${JSON.stringify(elem)})`
          )
        }
        parent[parentkey] = elem.slice(0, key) + c.si + elem.slice(key)
      } else if (c.sd !== undefined) {
        // String delete
        if (typeof elem !== 'string') {
          throw new Error('Referenced element not a string')
        }
        if (elem.slice(key, key + c.sd.length) !== c.sd) {
          throw new Error('Deleted string does not match')
        }
        parent[parentkey] = elem.slice(0, key) + elem.slice(key + c.sd.length)
      } else if (c.li !== undefined && c.ld !== undefined) {
        // List replace
        json.checkList(elem)

        // Should check the list element matches c.ld
        elem[key] = c.li
      } else if (c.li !== undefined) {
        // List insert
        json.checkList(elem)

        elem.splice(key, 0, c.li)
      } else if (c.ld !== undefined) {
        // List delete
        json.checkList(elem)

        // Should check the list element matches c.ld here too.
        elem.splice(key, 1)
      } else if (c.lm !== undefined) {
        // List move
        json.checkList(elem)
        if (c.lm !== key) {
          const e = elem[key]
          // Remove it...
          elem.splice(key, 1)
          // And insert it back.
          elem.splice(c.lm, 0, e)
        }
      } else if (c.oi !== undefined) {
        // Object insert / replace
        json.checkObj(elem)

        // Should check that elem[key] == c.od
        elem[key] = c.oi
      } else if (c.od !== undefined) {
        // Object delete
        json.checkObj(elem)

        // Should check that elem[key] == c.od
        delete elem[key]
      } else {
        throw new Error('invalid / missing instruction in op')
      }
    }
  } catch (error) {
    // TODO: Roll back all already applied changes. Write tests before implementing this code.
    throw error
  }

  return container.data
}
+
// Checks if two paths, p1 and p2, match. When ignoreLast is set, the final
// path component is allowed to differ.
json.pathMatches = function (p1, p2, ignoreLast) {
  if (p1.length !== p2.length) {
    return false
  }
  return p1.every(
    (part, i) => part === p2[i] || (ignoreLast && i === p1.length - 1)
  )
}
+
// Append component `c` onto op `dest`, merging it into the op's last
// component when both address the same path: consecutive number-adds are
// summed, an insert immediately followed by its own delete cancels out,
// and an object delete followed by an insert becomes a replace.
json.append = function (dest, c) {
  let last
  c = clone(c)
  if (
    dest.length !== 0 &&
    json.pathMatches(c.p, (last = dest[dest.length - 1]).p)
  ) {
    if (last.na !== undefined && c.na !== undefined) {
      // Merge two adds to the same number.
      return (dest[dest.length - 1] = { p: last.p, na: last.na + c.na })
    } else if (
      last.li !== undefined &&
      c.li === undefined &&
      c.ld === last.li
    ) {
      // insert immediately followed by delete becomes a noop.
      if (last.ld !== undefined) {
        // leave the delete part of the replace
        return delete last.li
      } else {
        return dest.pop()
      }
    } else if (
      last.od !== undefined &&
      last.oi === undefined &&
      c.oi !== undefined &&
      c.od === undefined
    ) {
      // Delete followed by insert at the same key merges into a replace.
      return (last.oi = c.oi)
    } else if (c.lm !== undefined && c.p[c.p.length - 1] === c.lm) {
      // A move onto its own index is a noop.
      return null // don't do anything
    } else {
      return dest.push(c)
    }
  } else {
    return dest.push(c)
  }
}
+
// Compose two ops into one: clone op1 and append each of op2's components,
// letting json.append merge adjacent components where possible.
json.compose = function (op1, op2) {
  json.checkValidOp(op1)
  json.checkValidOp(op2)

  const composed = clone(op1)
  op2.forEach(component => json.append(composed, component))
  return composed
}
+
// Normalize an op: accept a bare component, default missing paths to the
// document root, and merge adjacent components via json.append.
json.normalize = function (op) {
  const canonical = []
  const components = isArray(op) ? op : [op]

  for (const c of components) {
    if (c.p == null) {
      c.p = []
    }
    json.append(canonical, c)
  }

  return canonical
}
+
// Deep-clone a JSON-serializable value via a stringify/parse round-trip.
// Still apparently the fastest portable deep clone when JSON support is
// available: http://jsperf.com/cloning-an-object/12
var clone = function (obj) {
  return JSON.parse(JSON.stringify(obj))
}
+
// Compare the *container* paths of p1 and p2. Both are rooted at an
// implicit 'data' key and have their final step (the operand) dropped.
// Returns -1 when p2 addresses the document root, the index of the deepest
// shared step when p2's container is a prefix of (or equal to) p1's, and
// undefined (implicitly) when the containers diverge first.
// NOTE(review): the implicit-undefined return is relied on by callers'
// `!= null` checks — do not "fix" it to an explicit value without auditing.
json.commonPath = function (p1, p2) {
  p1 = p1.slice()
  p2 = p2.slice()
  p1.unshift('data')
  p2.unshift('data')
  p1 = p1.slice(0, p1.length - 1)
  p2 = p2.slice(0, p2.length - 1)
  if (p2.length === 0) {
    return -1
  }
  let i = 0
  while (p1[i] === p2[i] && i < p1.length) {
    i++
    if (i === p2.length) {
      return i - 1
    }
  }
}
+
// transform c so it applies to a document with otherC applied.
// The transformed component(s) are appended onto `dest` (via json.append),
// which is also returned; `type` is 'left' or 'right' and breaks ties
// between conflicting concurrent components. Number-add components get a
// dummy 0 pushed onto their path ("hax") so the common-path computation
// treats the number itself as the operand; the dummy is popped again below.
json.transformComponent = function (dest, c, otherC, type) {
  let oc
  c = clone(c)
  if (c.na !== undefined) {
    c.p.push(0)
  }
  if (otherC.na !== undefined) {
    otherC.p.push(0)
  }

  const common = json.commonPath(c.p, otherC.p)
  const common2 = json.commonPath(otherC.p, c.p)

  const cplength = c.p.length
  const otherCplength = otherC.p.length

  if (c.na !== undefined) {
    c.p.pop()
  } // hax
  if (otherC.na !== undefined) {
    otherC.p.pop()
  }

  if (otherC.na) {
    // otherC adds to a number somewhere at-or-below our operand: patch the
    // data carried inside our delete (ld/od) so it matches the new state.
    if (
      common2 != null &&
      otherCplength >= cplength &&
      otherC.p[common2] === c.p[common2]
    ) {
      if (c.ld !== undefined) {
        oc = clone(otherC)
        oc.p = oc.p.slice(cplength)
        c.ld = json.apply(clone(c.ld), [oc])
      } else if (c.od !== undefined) {
        oc = clone(otherC)
        oc.p = oc.p.slice(cplength)
        c.od = json.apply(clone(c.od), [oc])
      }
    }
    json.append(dest, c)
    return dest
  }

  // otherC edits *inside* the element our delete carries: apply it to the
  // carried data so the delete stays accurate.
  if (
    common2 != null &&
    otherCplength > cplength &&
    c.p[common2] === otherC.p[common2]
  ) {
    // transform based on c
    if (c.ld !== undefined) {
      oc = clone(otherC)
      oc.p = oc.p.slice(cplength)
      c.ld = json.apply(clone(c.ld), [oc])
    } else if (c.od !== undefined) {
      oc = clone(otherC)
      oc.p = oc.p.slice(cplength)
      c.od = json.apply(clone(c.od), [oc])
    }
  }

  if (common != null) {
    let from, p, to
    const commonOperand = cplength === otherCplength
    // transform based on otherC
    if (otherC.na !== undefined) {
      // this case is handled above due to icky path hax
    } else if (otherC.si !== undefined || otherC.sd !== undefined) {
      // String op vs string op - pass through to text type
      if (c.si !== undefined || c.sd !== undefined) {
        if (!commonOperand) {
          throw new Error('must be a string?')
        }

        // Convert an op component to a text op component
        const convert = function (component) {
          const newC = { p: component.p[component.p.length - 1] }
          if (component.si) {
            newC.i = component.si
          } else {
            newC.d = component.sd
          }
          return newC
        }

        const tc1 = convert(c)
        const tc2 = convert(otherC)

        const res = []
        text._tc(res, tc1, tc2, type)
        for (const tc of Array.from(res)) {
          const jc = { p: c.p.slice(0, common) }
          jc.p.push(tc.p)
          if (tc.i != null) {
            jc.si = tc.i
          }
          if (tc.d != null) {
            jc.sd = tc.d
          }
          json.append(dest, jc)
        }
        return dest
      }
    } else if (otherC.li !== undefined && otherC.ld !== undefined) {
      if (otherC.p[common] === c.p[common]) {
        // noop
        if (!commonOperand) {
          // we're below the deleted element, so -> noop
          return dest
        } else if (c.ld !== undefined) {
          // we're trying to delete the same element, -> noop
          if (c.li !== undefined && type === 'left') {
            // we're both replacing one element with another. only one can
            // survive!
            c.ld = clone(otherC.li)
          } else {
            return dest
          }
        }
      }
    } else if (otherC.li !== undefined) {
      if (
        c.li !== undefined &&
        c.ld === undefined &&
        commonOperand &&
        c.p[common] === otherC.p[common]
      ) {
        // in li vs. li, left wins.
        if (type === 'right') {
          c.p[common]++
        }
      } else if (otherC.p[common] <= c.p[common]) {
        c.p[common]++
      }

      if (c.lm !== undefined) {
        if (commonOperand) {
          // otherC edits the same list we edit
          if (otherC.p[common] <= c.lm) {
            c.lm++
          }
        }
      }
      // changing c.from is handled above.
    } else if (otherC.ld !== undefined) {
      if (c.lm !== undefined) {
        if (commonOperand) {
          if (otherC.p[common] === c.p[common]) {
            // they deleted the thing we're trying to move
            return dest
          }
          // otherC edits the same list we edit
          p = otherC.p[common]
          from = c.p[common]
          to = c.lm
          if (p < to || (p === to && from < to)) {
            c.lm--
          }
        }
      }

      if (otherC.p[common] < c.p[common]) {
        c.p[common]--
      } else if (otherC.p[common] === c.p[common]) {
        if (otherCplength < cplength) {
          // we're below the deleted element, so -> noop
          return dest
        } else if (c.ld !== undefined) {
          if (c.li !== undefined) {
            // we're replacing, they're deleting. we become an insert.
            delete c.ld
          } else {
            // we're trying to delete the same element, -> noop
            return dest
          }
        }
      }
    } else if (otherC.lm !== undefined) {
      if (c.lm !== undefined && cplength === otherCplength) {
        // lm vs lm, here we go!
        from = c.p[common]
        to = c.lm
        const otherFrom = otherC.p[common]
        const otherTo = otherC.lm
        if (otherFrom !== otherTo) {
          // if otherFrom == otherTo, we don't need to change our op.

          // where did my thing go?
          if (from === otherFrom) {
            // they moved it! tie break.
            if (type === 'left') {
              c.p[common] = otherTo
              if (from === to) {
                // ugh
                c.lm = otherTo
              }
            } else {
              return dest
            }
          } else {
            // they moved around it
            if (from > otherFrom) {
              c.p[common]--
            }
            if (from > otherTo) {
              c.p[common]++
            } else if (from === otherTo) {
              if (otherFrom > otherTo) {
                c.p[common]++
                if (from === to) {
                  // ugh, again
                  c.lm++
                }
              }
            }

            // step 2: where am i going to put it?
            if (to > otherFrom) {
              c.lm--
            } else if (to === otherFrom) {
              if (to > from) {
                c.lm--
              }
            }
            if (to > otherTo) {
              c.lm++
            } else if (to === otherTo) {
              // if we're both moving in the same direction, tie break
              if (
                (otherTo > otherFrom && to > from) ||
                (otherTo < otherFrom && to < from)
              ) {
                if (type === 'right') {
                  c.lm++
                }
              } else {
                if (to > from) {
                  c.lm++
                } else if (to === otherFrom) {
                  c.lm--
                }
              }
            }
          }
        }
      } else if (c.li !== undefined && c.ld === undefined && commonOperand) {
        // li
        from = otherC.p[common]
        to = otherC.lm
        p = c.p[common]
        if (p > from) {
          c.p[common]--
        }
        if (p > to) {
          c.p[common]++
        }
      } else {
        // ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath
        // the lm
        //
        // i.e. things care about where their item is after the move.
        from = otherC.p[common]
        to = otherC.lm
        p = c.p[common]
        if (p === from) {
          c.p[common] = to
        } else {
          if (p > from) {
            c.p[common]--
          }
          if (p > to) {
            c.p[common]++
          } else if (p === to) {
            if (from > to) {
              c.p[common]++
            }
          }
        }
      }
    } else if (otherC.oi !== undefined && otherC.od !== undefined) {
      if (c.p[common] === otherC.p[common]) {
        if (c.oi !== undefined && commonOperand) {
          // we inserted where someone else replaced
          if (type === 'right') {
            // left wins
            return dest
          } else {
            // we win, make our op replace what they inserted
            c.od = otherC.oi
          }
        } else {
          // -> noop if the other component is deleting the same object (or any
          // parent)
          return dest
        }
      }
    } else if (otherC.oi !== undefined) {
      if (c.oi !== undefined && c.p[common] === otherC.p[common]) {
        // left wins if we try to insert at the same place
        if (type === 'left') {
          json.append(dest, { p: c.p, od: otherC.oi })
        } else {
          return dest
        }
      }
    } else if (otherC.od !== undefined) {
      if (c.p[common] === otherC.p[common]) {
        if (!commonOperand) {
          return dest
        }
        if (c.oi !== undefined) {
          delete c.od
        } else {
          return dest
        }
      }
    }
  }

  json.append(dest, c)
  return dest
}
+
// Hook the type up: in the browser the bundle's _bt helper derives the
// transform/compose machinery and the type is registered on exports.types;
// on the server the helpers module does the same and the type is the
// module's export.
if (typeof WEB !== 'undefined' && WEB !== null) {
  if (!exports.types) {
    exports.types = {}
  }

  // This is kind of awful - come up with a better way to hook this helper code up.
  exports._bt(json, json.transformComponent, json.checkValidOp, json.append)

  // [] is used to prevent closure from renaming types.text
  exports.types.json = json
} else {
  module.exports = json

  // Derive transform/transformX/compose from the primitives defined above.
  require('./helpers').bootstrapTransform(
    json,
    json.transformComponent,
    json.checkValidOp,
    json.append
  )
}
diff --git a/services/document-updater/app/js/sharejs/types/model.js b/services/document-updater/app/js/sharejs/types/model.js
new file mode 100644
index 0000000000..aebcd8d549
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/model.js
@@ -0,0 +1,883 @@
+/* eslint-disable
+ no-console,
+ no-return-assign,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS104: Avoid inline assignments
+ * DS204: Change includes calls to have a more natural evaluation order
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// The model of all the ops. Responsible for applying & transforming remote deltas
+// and managing the storage layer.
+//
+// Actual storage is handled by the database wrappers in db/*, wrapped by DocCache
+
+let Model
+const { EventEmitter } = require('events')
+
+const queue = require('./syncqueue')
+const types = require('../types')
+
+const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
+
+// This constructor creates a new Model object. There will be one model object
+// per server context.
+//
+// The model object is responsible for a lot of things:
+//
+// - It manages the interactions with the database
+// - It maintains (in memory) a set of all active documents
+// - It calls out to the OT functions when necessary
+//
+// The model is an event emitter. It emits the following events:
+//
+// create(docName, data): A document has been created with the specified name & data
+module.exports = Model = function (db, options) {
+ // db can be null if the user doesn't want persistance.
+
+ let getOps
+ if (!(this instanceof Model)) {
+ return new Model(db, options)
+ }
+
+ const model = this
+
+ if (options == null) {
+ options = {}
+ }
+
+ // This is a cache of 'live' documents.
+ //
+ // The cache is a map from docName -> {
+ // ops:[{op, meta}]
+ // snapshot
+ // type
+ // v
+ // meta
+ // eventEmitter
+ // reapTimer
+ // committedVersion: v
+ // snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant
+ // dbMeta: database specific data
+ // opQueue: syncQueue for processing ops
+ // }
+ //
+ // The ops list contains the document's last options.numCachedOps ops. (Or all
+ // of them if we're using a memory store).
+ //
+ // Documents are stored in this set so long as the document has been accessed in
+ // the last few seconds (options.reapTime) OR at least one client has the document
+ // open. I don't know if I should keep open (but not being edited) documents live -
+ // maybe if a client has a document open but the document isn't being edited, I should
+ // flush it from the cache.
+ //
+ // In any case, the API to model is designed such that if we want to change that later
+ // it should be pretty easy to do so without any external-to-the-model code changes.
+ const docs = {}
+
+ // This is a map from docName -> [callback]. It is used when a document hasn't been
+ // cached and multiple getSnapshot() / getVersion() requests come in. All requests
+ // are added to the callback list and called when db.getSnapshot() returns.
+ //
+ // callback(error, snapshot data)
+ const awaitingGetSnapshot = {}
+
+ // The time that documents which no clients have open will stay in the cache.
+ // Should be > 0.
+ if (options.reapTime == null) {
+ options.reapTime = 3000
+ }
+
+ // The number of operations the cache holds before reusing the space
+ if (options.numCachedOps == null) {
+ options.numCachedOps = 10
+ }
+
+ // This option forces documents to be reaped, even when there's no database backend.
+ // This is useful when you don't care about persistance and don't want to gradually
+ // fill memory.
+ //
+ // You might want to set reapTime to a day or something.
+ if (options.forceReaping == null) {
+ options.forceReaping = false
+ }
+
+ // Until I come up with a better strategy, we'll save a copy of the document snapshot
+ // to the database every ~20 submitted ops.
+ if (options.opsBeforeCommit == null) {
+ options.opsBeforeCommit = 20
+ }
+
+ // It takes some processing time to transform client ops. The server will punt ops back to the
+ // client to transform if they're too old.
+ if (options.maximumAge == null) {
+ options.maximumAge = 40
+ }
+
+ // **** Cache API methods
+
+ // Its important that all ops are applied in order. This helper method creates the op submission queue
+ // for a single document. This contains the logic for transforming & applying ops.
+ const makeOpQueue = (docName, doc) =>
+ queue(function (opData, callback) {
+ if (!(opData.v >= 0)) {
+ return callback('Version missing')
+ }
+ if (opData.v > doc.v) {
+ return callback('Op at future version')
+ }
+
+ // Punt the transforming work back to the client if the op is too old.
+ if (opData.v + options.maximumAge < doc.v) {
+ return callback('Op too old')
+ }
+
+ if (!opData.meta) {
+ opData.meta = {}
+ }
+ opData.meta.ts = Date.now()
+
+ // We'll need to transform the op to the current version of the document. This
+ // calls the callback immediately if opVersion == doc.v.
+ return getOps(docName, opData.v, doc.v, function (error, ops) {
+ let snapshot
+ if (error) {
+ return callback(error)
+ }
+
+ if (doc.v - opData.v !== ops.length) {
+ // This should never happen. It indicates that we didn't get all the ops we
+ // asked for. Its important that the submitted op is correctly transformed.
+ console.error(
+ `Could not get old ops in model for document ${docName}`
+ )
+ console.error(
+ `Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops`
+ )
+ return callback('Internal error')
+ }
+
+ if (ops.length > 0) {
+ try {
+ // If there's enough ops, it might be worth spinning this out into a webworker thread.
+ for (const oldOp of Array.from(ops)) {
+ // Dup detection works by sending the id(s) the op has been submitted with previously.
+ // If the id matches, we reject it. The client can also detect the op has been submitted
+ // already if it sees its own previous id in the ops it sees when it does catchup.
+ if (
+ oldOp.meta.source &&
+ opData.dupIfSource &&
+ Array.from(opData.dupIfSource).includes(oldOp.meta.source)
+ ) {
+ return callback('Op already submitted')
+ }
+
+ opData.op = doc.type.transform(opData.op, oldOp.op, 'left')
+ opData.v++
+ }
+ } catch (error1) {
+ error = error1
+ console.error(error.stack)
+ return callback(error.message)
+ }
+ }
+
+ try {
+ snapshot = doc.type.apply(doc.snapshot, opData.op)
+ } catch (error2) {
+ error = error2
+ console.error(error.stack)
+ return callback(error.message)
+ }
+
+ // The op data should be at the current version, and the new document data should be at
+ // the next version.
+ //
+ // This should never happen in practice, but its a nice little check to make sure everything
+ // is hunky-dory.
+ if (opData.v !== doc.v) {
+ // This should never happen.
+ console.error(
+ 'Version mismatch detected in model. File a ticket - this is a bug.'
+ )
+ console.error(`Expecting ${opData.v} == ${doc.v}`)
+ return callback('Internal error')
+ }
+
+ // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta}
+ const writeOp =
+ (db != null ? db.writeOp : undefined) ||
+ ((docName, newOpData, callback) => callback())
+
+ return writeOp(docName, opData, function (error) {
+ if (error) {
+ // The user should probably know about this.
+ console.warn(`Error writing ops to database: ${error}`)
+ return callback(error)
+ }
+
+ __guardMethod__(options.stats, 'writeOp', o => o.writeOp())
+
+ // This is needed when we emit the 'change' event, below.
+ const oldSnapshot = doc.snapshot
+
+ // All the heavy lifting is now done. Finally, we'll update the cache with the new data
+ // and (maybe!) save a new document snapshot to the database.
+
+ doc.v = opData.v + 1
+ doc.snapshot = snapshot
+
+ doc.ops.push(opData)
+ if (db && doc.ops.length > options.numCachedOps) {
+ doc.ops.shift()
+ }
+
+ model.emit('applyOp', docName, opData, snapshot, oldSnapshot)
+ doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot)
+
+ // The callback is called with the version of the document at which the op was applied.
+ // This is the op.v after transformation, and its doc.v - 1.
+ callback(null, opData.v)
+
+ // I need a decent strategy here for deciding whether or not to save the snapshot.
+ //
+ // The 'right' strategy looks something like "Store the snapshot whenever the snapshot
+ // is smaller than the accumulated op data". For now, I'll just store it every 20
+ // ops or something. (Configurable with doc.committedVersion)
+ if (
+ !doc.snapshotWriteLock &&
+ doc.committedVersion + options.opsBeforeCommit <= doc.v
+ ) {
+ return tryWriteSnapshot(docName, function (error) {
+ if (error) {
+ return console.warn(
+ `Error writing snapshot ${error}. This is nonfatal`
+ )
+ }
+ })
+ }
+ })
+ })
+ })
+
+ // Add the data for the given docName to the cache. The named document shouldn't already
+ // exist in the doc set.
+ //
+ // Returns the new doc.
  const add = function (docName, error, data, committedVersion, ops, dbMeta) {
    let callback, doc
    // Claim every callback queued by load() for this doc; whatever happens
    // next, the pending load attempt is now finished.
    const callbacks = awaitingGetSnapshot[docName]
    delete awaitingGetSnapshot[docName]

    if (error) {
      // Load failed: fan the error out to all waiting callers. No doc is cached.
      if (callbacks) {
        for (callback of Array.from(callbacks)) {
          callback(error)
        }
      }
    } else {
      doc = docs[docName] = {
        snapshot: data.snapshot,
        v: data.v,
        type: data.type,
        meta: data.meta,

        // Cache of ops
        ops: ops || [],

        eventEmitter: new EventEmitter(),

        // Timer before the document will be invalidated from the cache (if the document has no
        // listeners)
        reapTimer: null,

        // Version of the snapshot thats in the database
        committedVersion: committedVersion != null ? committedVersion : data.v,
        snapshotWriteLock: false,
        dbMeta,
      }

      doc.opQueue = makeOpQueue(docName, doc)

      // Arm the eviction timer and announce the doc *before* releasing the
      // waiting callers, so 'add' listeners observe the doc first.
      refreshReapingTimeout(docName)
      model.emit('add', docName, data)
      if (callbacks) {
        for (callback of Array.from(callbacks)) {
          callback(null, doc)
        }
      }
    }

    return doc
  }
+
+ // This is a little helper wrapper around db.getOps. It does two things:
+ //
+ // - If there's no database set, it returns an error to the callback
+ // - It adds version numbers to each op returned from the database
+ // (These can be inferred from context so the DB doesn't store them, but its useful to have them).
+ const getOpsInternal = function (docName, start, end, callback) {
+ if (!db) {
+ return typeof callback === 'function'
+ ? callback('Document does not exist')
+ : undefined
+ }
+
+ return db.getOps(docName, start, end, function (error, ops) {
+ if (error) {
+ return typeof callback === 'function' ? callback(error) : undefined
+ }
+
+ let v = start
+ for (const op of Array.from(ops)) {
+ op.v = v++
+ }
+
+ return typeof callback === 'function' ? callback(null, ops) : undefined
+ })
+ }
+
+ // Load the named document into the cache. This function is re-entrant.
+ //
+ // The callback is called with (error, doc)
+ const load = function (docName, callback) {
+ if (docs[docName]) {
+ // The document is already loaded. Return immediately.
+ __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot'))
+ return callback(null, docs[docName])
+ }
+
+ // We're a memory store. If we don't have it, nobody does.
+ if (!db) {
+ return callback('Document does not exist')
+ }
+
+ const callbacks = awaitingGetSnapshot[docName]
+
+ // The document is being loaded already. Add ourselves as a callback.
+ if (callbacks) {
+ return callbacks.push(callback)
+ }
+
+ __guardMethod__(options.stats, 'cacheMiss', o1 =>
+ o1.cacheMiss('getSnapshot')
+ )
+
+ // The document isn't loaded and isn't being loaded. Load it.
+ awaitingGetSnapshot[docName] = [callback]
+ return db.getSnapshot(docName, function (error, data, dbMeta) {
+ if (error) {
+ return add(docName, error)
+ }
+
+ const type = types[data.type]
+ if (!type) {
+ console.warn(`Type '${data.type}' missing`)
+ return callback('Type not found')
+ }
+ data.type = type
+
+ const committedVersion = data.v
+
+ // The server can close without saving the most recent document snapshot.
+ // In this case, there are extra ops which need to be applied before
+ // returning the snapshot.
+ return getOpsInternal(docName, data.v, null, function (error, ops) {
+ if (error) {
+ return callback(error)
+ }
+
+ if (ops.length > 0) {
+ console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`)
+
+ try {
+ for (const op of Array.from(ops)) {
+ data.snapshot = type.apply(data.snapshot, op.op)
+ data.v++
+ }
+ } catch (e) {
+ // This should never happen - it indicates that whats in the
+ // database is invalid.
+ console.error(`Op data invalid for ${docName}: ${e.stack}`)
+ return callback('Op data invalid')
+ }
+ }
+
+ model.emit('load', docName, data)
+ return add(docName, error, data, committedVersion, ops, dbMeta)
+ })
+ })
+ }
+
+ // This makes sure the cache contains a document. If the doc cache doesn't contain
+ // a document, it is loaded from the database and stored.
+ //
+ // Documents are stored so long as either:
+ // - They have been accessed within the past #{PERIOD}
+ // - At least one client has the document open
+ var refreshReapingTimeout = function (docName) {
+ const doc = docs[docName]
+ if (!doc) {
+ return
+ }
+
+ // I want to let the clients list be updated before this is called.
+ return process.nextTick(function () {
+ // This is an awkward way to find out the number of clients on a document. If this
+ // causes performance issues, add a numClients field to the document.
+ //
+ // The first check is because its possible that between refreshReapingTimeout being called and this
+ // event being fired, someone called delete() on the document and hence the doc is something else now.
+ if (
+ doc === docs[docName] &&
+ doc.eventEmitter.listeners('op').length === 0 &&
+ (db || options.forceReaping) &&
+ doc.opQueue.busy === false
+ ) {
+ let reapTimer
+ clearTimeout(doc.reapTimer)
+ return (doc.reapTimer = reapTimer =
+ setTimeout(
+ () =>
+ tryWriteSnapshot(docName, function () {
+ // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're
+ // in the middle of applying an operation, don't reap.
+ if (
+ docs[docName].reapTimer === reapTimer &&
+ doc.opQueue.busy === false
+ ) {
+ return delete docs[docName]
+ }
+ }),
+ options.reapTime
+ ))
+ }
+ })
+ }
+
+ var tryWriteSnapshot = function (docName, callback) {
+ if (!db) {
+ return typeof callback === 'function' ? callback() : undefined
+ }
+
+ const doc = docs[docName]
+
+ // The doc is closed
+ if (!doc) {
+ return typeof callback === 'function' ? callback() : undefined
+ }
+
+ // The document is already saved.
+ if (doc.committedVersion === doc.v) {
+ return typeof callback === 'function' ? callback() : undefined
+ }
+
+ if (doc.snapshotWriteLock) {
+ return typeof callback === 'function'
+ ? callback('Another snapshot write is in progress')
+ : undefined
+ }
+
+ doc.snapshotWriteLock = true
+
+ __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot())
+
+ const writeSnapshot =
+ (db != null ? db.writeSnapshot : undefined) ||
+ ((docName, docData, dbMeta, callback) => callback())
+
+ const data = {
+ v: doc.v,
+ meta: doc.meta,
+ snapshot: doc.snapshot,
+ // The database doesn't know about object types.
+ type: doc.type.name,
+ }
+
+ // Commit snapshot.
+ return writeSnapshot(docName, data, doc.dbMeta, function (error, dbMeta) {
+ doc.snapshotWriteLock = false
+
+ // We have to use data.v here because the version in the doc could
+ // have been updated between the call to writeSnapshot() and now.
+ doc.committedVersion = data.v
+ doc.dbMeta = dbMeta
+
+ return typeof callback === 'function' ? callback(error) : undefined
+ })
+ }
+
+ // *** Model interface methods
+
+ // Create a new document.
+ //
+ // data should be {snapshot, type, [meta]}. The version of a new document is 0.
+ this.create = function (docName, type, meta, callback) {
+ if (typeof meta === 'function') {
+ ;[meta, callback] = Array.from([{}, meta])
+ }
+
+ if (docName.match(/\//)) {
+ return typeof callback === 'function'
+ ? callback('Invalid document name')
+ : undefined
+ }
+ if (docs[docName]) {
+ return typeof callback === 'function'
+ ? callback('Document already exists')
+ : undefined
+ }
+
+ if (typeof type === 'string') {
+ type = types[type]
+ }
+ if (!type) {
+ return typeof callback === 'function'
+ ? callback('Type not found')
+ : undefined
+ }
+
+ const data = {
+ snapshot: type.create(),
+ type: type.name,
+ meta: meta || {},
+ v: 0,
+ }
+
+ const done = function (error, dbMeta) {
+ // dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something.
+ if (error) {
+ return typeof callback === 'function' ? callback(error) : undefined
+ }
+
+ // From here on we'll store the object version of the type name.
+ data.type = type
+ add(docName, null, data, 0, [], dbMeta)
+ model.emit('create', docName, data)
+ return typeof callback === 'function' ? callback() : undefined
+ }
+
+ if (db) {
+ return db.create(docName, data, done)
+ } else {
+ return done()
+ }
+ }
+
+ // Perminantly deletes the specified document.
+ // If listeners are attached, they are removed.
+ //
+ // The callback is called with (error) if there was an error. If error is null / undefined, the
+ // document was deleted.
+ //
+ // WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the
+ // deletion. Subsequent op submissions will fail).
+ this.delete = function (docName, callback) {
+ const doc = docs[docName]
+
+ if (doc) {
+ clearTimeout(doc.reapTimer)
+ delete docs[docName]
+ }
+
+ const done = function (error) {
+ if (!error) {
+ model.emit('delete', docName)
+ }
+ return typeof callback === 'function' ? callback(error) : undefined
+ }
+
+ if (db) {
+ return db.delete(docName, doc != null ? doc.dbMeta : undefined, done)
+ } else {
+ return done(!doc ? 'Document does not exist' : undefined)
+ }
+ }
+
+ // This gets all operations from [start...end]. (That is, its not inclusive.)
+ //
+ // end can be null. This means 'get me all ops from start'.
+ //
+ // Each op returned is in the form {op:o, meta:m, v:version}.
+ //
+ // Callback is called with (error, [ops])
+ //
+ // If the document does not exist, getOps doesn't necessarily return an error. This is because
+ // its awkward to figure out whether or not the document exists for things
+ // like the redis database backend. I guess its a bit gross having this inconsistant
+ // with the other DB calls, but its certainly convenient.
+ //
+ // Use getVersion() to determine if a document actually exists, if thats what you're
+ // after.
+ this.getOps = getOps = function (docName, start, end, callback) {
+ // getOps will only use the op cache if its there. It won't fill the op cache in.
+ if (!(start >= 0)) {
+ throw new Error('start must be 0+')
+ }
+
+ if (typeof end === 'function') {
+ ;[end, callback] = Array.from([null, end])
+ }
+
+ const ops = docs[docName] != null ? docs[docName].ops : undefined
+
+ if (ops) {
+ const version = docs[docName].v
+
+ // Ops contains an array of ops. The last op in the list is the last op applied
+ if (end == null) {
+ end = version
+ }
+ start = Math.min(start, end)
+
+ if (start === end) {
+ return callback(null, [])
+ }
+
+ // Base is the version number of the oldest op we have cached
+ const base = version - ops.length
+
+ // If the database is null, we'll trim to the ops we do have and hope thats enough.
+ if (start >= base || db === null) {
+ refreshReapingTimeout(docName)
+ if (options.stats != null) {
+ options.stats.cacheHit('getOps')
+ }
+
+ return callback(null, ops.slice(start - base, end - base))
+ }
+ }
+
+ if (options.stats != null) {
+ options.stats.cacheMiss('getOps')
+ }
+
+ return getOpsInternal(docName, start, end, callback)
+ }
+
+ // Gets the snapshot data for the specified document.
+ // getSnapshot(docName, callback)
+ // Callback is called with (error, {v: , type: , snapshot: , meta: })
+ this.getSnapshot = (docName, callback) =>
+ load(docName, (error, doc) =>
+ callback(
+ error,
+ doc
+ ? { v: doc.v, type: doc.type, snapshot: doc.snapshot, meta: doc.meta }
+ : undefined
+ )
+ )
+
+ // Gets the latest version # of the document.
+ // getVersion(docName, callback)
+ // callback is called with (error, version).
+ this.getVersion = (docName, callback) =>
+ load(docName, (error, doc) =>
+ callback(error, doc != null ? doc.v : undefined)
+ )
+
+ // Apply an op to the specified document.
+ // The callback is passed (error, applied version #)
+ // opData = {op:op, v:v, meta:metadata}
+ //
+ // Ops are queued before being applied so that the following code applies op C before op B:
+ // model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB
+ // model.applyOp 'doc', OPC
  this.applyOp = (
    docName,
    opData,
    callback // All the logic for this is in makeOpQueue, above.
  ) =>
    load(docName, function (error, doc) {
      if (error) {
        return callback(error)
      }

      // Queue on the next tick so that ops submitted from inside another op's
      // callback keep the intended ordering (see the queueing note above).
      return process.nextTick(() =>
        doc.opQueue(opData, function (error, newVersion) {
          // Touch the doc so it isn't reaped while it's actively being edited.
          refreshReapingTimeout(docName)
          return typeof callback === 'function'
            ? callback(error, newVersion)
            : undefined
        })
      )
    })
+
+ // TODO: store (some) metadata in DB
+ // TODO: op and meta should be combineable in the op that gets sent
+ this.applyMetaOp = function (docName, metaOpData, callback) {
+ const { path, value } = metaOpData.meta
+
+ if (!isArray(path)) {
+ return typeof callback === 'function'
+ ? callback('path should be an array')
+ : undefined
+ }
+
+ return load(docName, function (error, doc) {
+ if (error != null) {
+ return typeof callback === 'function' ? callback(error) : undefined
+ } else {
+ let applied = false
+ switch (path[0]) {
+ case 'shout':
+ doc.eventEmitter.emit('op', metaOpData)
+ applied = true
+ break
+ }
+
+ if (applied) {
+ model.emit('applyMetaOp', docName, path, value)
+ }
+ return typeof callback === 'function'
+ ? callback(null, doc.v)
+ : undefined
+ }
+ })
+ }
+
+ // Listen to all ops from the specified version. If version is in the past, all
+ // ops since that version are sent immediately to the listener.
+ //
+ // The callback is called once the listener is attached, but before any ops have been passed
+ // to the listener.
+ //
+ // This will _not_ edit the document metadata.
+ //
+ // If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour
+ // might change in a future version.
+ //
+ // version is the document version at which the document is opened. It can be left out if you want to open
+ // the document at the most recent version.
+ //
+ // listener is called with (opData) each time an op is applied.
+ //
+ // callback(error, openedVersion)
  this.listen = function (docName, version, listener, callback) {
    // version is optional; shift arguments when it is omitted.
    if (typeof version === 'function') {
      ;[version, listener, callback] = Array.from([null, version, listener])
    }

    return load(docName, function (error, doc) {
      if (error) {
        return typeof callback === 'function' ? callback(error) : undefined
      }

      // A document with listeners must not be reaped from the cache.
      clearTimeout(doc.reapTimer)

      if (version != null) {
        // Opening at a past version: replay the missed ops to the listener.
        return getOps(docName, version, null, function (error, data) {
          if (error) {
            return typeof callback === 'function' ? callback(error) : undefined
          }

          // Attach the listener and ack the caller before delivering the
          // catchup ops.
          doc.eventEmitter.on('op', listener)
          if (typeof callback === 'function') {
            callback(null, version)
          }
          return (() => {
            const result = []
            for (const op of Array.from(data)) {
              var needle
              listener(op)

              // The listener may well remove itself during the catchup phase. If this happens, break early.
              // This is done in a quite inefficient way. (O(n) where n = #listeners on doc)
              if (
                ((needle = listener),
                !Array.from(doc.eventEmitter.listeners('op')).includes(needle))
              ) {
                break
              } else {
                result.push(undefined)
              }
            }
            return result
          })()
        })
      } else {
        // Version is null / undefined. Just add the listener.
        doc.eventEmitter.on('op', listener)
        return typeof callback === 'function'
          ? callback(null, doc.v)
          : undefined
      }
    })
  }
+
+ // Remove a listener for a particular document.
+ //
+ // removeListener(docName, listener)
+ //
+ // This is synchronous.
+ this.removeListener = function (docName, listener) {
+ // The document should already be loaded.
+ const doc = docs[docName]
+ if (!doc) {
+ throw new Error('removeListener called but document not loaded')
+ }
+
+ doc.eventEmitter.removeListener('op', listener)
+ return refreshReapingTimeout(docName)
+ }
+
+ // Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed -
+ // sharejs will happily replay uncommitted ops when documents are re-opened anyway.
  this.flush = function (callback) {
    if (!db) {
      return typeof callback === 'function' ? callback() : undefined
    }

    // Count of snapshot writes still outstanding.
    let pendingWrites = 0

    for (const docName in docs) {
      const doc = docs[docName]
      if (doc.committedVersion < doc.v) {
        pendingWrites++
        // I'm hoping writeSnapshot will always happen in another thread.
        // The nextTick also keeps the decrement from running until the loop
        // has finished counting, so callback can't fire prematurely.
        tryWriteSnapshot(docName, () =>
          process.nextTick(function () {
            pendingWrites--
            if (pendingWrites === 0) {
              return typeof callback === 'function' ? callback() : undefined
            }
          })
        )
      }
    }

    // If nothing was queued, terminate immediately.
    if (pendingWrites === 0) {
      return typeof callback === 'function' ? callback() : undefined
    }
  }
+
+ // Close the database connection. This is needed so nodejs can shut down cleanly.
+ this.closeDb = function () {
+ __guardMethod__(db, 'close', o => o.close())
+ return (db = null)
+ }
+}
+
// Model inherits from EventEmitter.
// NOTE(review): assigning an EventEmitter *instance* as the prototype means
// any emitter state initialized on that instance is shared by all Models
// until each one lazily creates its own; Object.create(EventEmitter.prototype)
// is the conventional form — confirm before changing.
Model.prototype = new EventEmitter()
+
// Invoke `transform(obj, methodName)` only when `obj` is non-null and actually
// implements the named method; otherwise yield undefined. Used to call
// optional hooks (e.g. options.stats) safely.
function __guardMethod__(obj, methodName, transform) {
  const callable =
    obj !== null &&
    typeof obj !== 'undefined' &&
    typeof obj[methodName] === 'function'
  return callable ? transform(obj, methodName) : undefined
}
diff --git a/services/document-updater/app/js/sharejs/types/simple.js b/services/document-updater/app/js/sharejs/types/simple.js
new file mode 100644
index 0000000000..41f7eed285
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/simple.js
@@ -0,0 +1,54 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// This is a really simple OT type. Its not compiled with the web client, but it could be.
+//
+// Its mostly included for demonstration purposes and its used in a lot of unit tests.
+//
+// This defines a really simple text OT type which only allows inserts. (No deletes).
+//
+// Ops look like:
+// {position:#, text:"asdf"}
+//
+// Document snapshots look like:
+// {str:string}
+
+module.exports = {
+ // The name of the OT type. The type is stored in types[type.name]. The name can be
+ // used in place of the actual type in all the API methods.
+ name: 'simple',
+
+ // Create a new document snapshot
+ create() {
+ return { str: '' }
+ },
+
+ // Apply the given op to the document snapshot. Returns the new snapshot.
+ //
+ // The original snapshot should not be modified.
+ apply(snapshot, op) {
+ if (!(op.position >= 0 && op.position <= snapshot.str.length)) {
+ throw new Error('Invalid position')
+ }
+
+ let { str } = snapshot
+ str = str.slice(0, op.position) + op.text + str.slice(op.position)
+ return { str }
+ },
+
+ // transform op1 by op2. Return transformed version of op1.
+ // sym describes the symmetry of the op. Its 'left' or 'right' depending on whether the
+ // op being transformed comes from the client or the server.
+ transform(op1, op2, sym) {
+ let pos = op1.position
+ if (op2.position < pos || (op2.position === pos && sym === 'left')) {
+ pos += op2.text.length
+ }
+
+ return { position: pos, text: op1.text }
+ },
+}
diff --git a/services/document-updater/app/js/sharejs/types/syncqueue.js b/services/document-updater/app/js/sharejs/types/syncqueue.js
new file mode 100644
index 0000000000..7b83c5b436
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/syncqueue.js
@@ -0,0 +1,60 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// A synchronous processing queue. The queue calls process on the arguments,
+// ensuring that process() is only executing once at a time.
+//
+// process(data, callback) _MUST_ eventually call its callback.
+//
+// Example:
+//
+// queue = require 'syncqueue'
+//
+// fn = queue (data, callback) ->
+// asyncthing data, ->
+// callback(321)
+//
+// fn(1)
+// fn(2)
+// fn(3, (result) -> console.log(result))
+//
+// ^--- async thing will only be running once at any time.
+
module.exports = function (process) {
  if (typeof process !== 'function') {
    throw new Error('process is not a function')
  }
  // Pending [data, callback] pairs waiting for process() to become free.
  const queue = []

  // Public entry point: queue the data and kick the pump.
  const enqueue = function (data, callback) {
    queue.push([data, callback])
    return flush()
  }

  // True while a process() call is in flight; exposed for callers to inspect.
  enqueue.busy = false

  // Pump the queue: start process() on the next item unless one is already
  // running. Re-invoked from the completion handler below, so items run
  // strictly one at a time.
  var flush = function () {
    if (enqueue.busy || queue.length === 0) {
      return
    }

    enqueue.busy = true
    const [data, callback] = Array.from(queue.shift())
    return process(data, function (...result) {
      // TODO: Make this not use varargs - varargs are really slow.
      enqueue.busy = false
      // This is called after busy = false so a user can check if enqueue.busy is set in the callback.
      if (callback) {
        callback.apply(null, result)
      }
      return flush()
    })
  }

  return enqueue
}
diff --git a/services/document-updater/app/js/sharejs/types/text-api.js b/services/document-updater/app/js/sharejs/types/text-api.js
new file mode 100644
index 0000000000..aa2beef446
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/text-api.js
@@ -0,0 +1,52 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// Text document API for text
+
// Pick up the underlying 'text' OT type: from the registered types in the
// browser (WEB) build, or via require on the server. Previously the WEB
// branch left `text` undefined, so attaching `text.api` below would throw;
// this mirrors text-composable-api.js / text-tp2-api.js.
let text
if (typeof WEB !== 'undefined' && WEB !== null) {
  text = exports.types.text
} else {
  text = require('./text')
}
+
text.api = {
  // Mixed into documents that use the plain 'text' OT type.
  provides: { text: true },

  // The number of characters in the string
  getLength() {
    return this.snapshot.length
  },

  // Get the text contents of a document
  getText() {
    return this.snapshot
  },

  // Insert `text` at position pos; submits the op and returns it.
  insert(pos, text, callback) {
    const op = [{ p: pos, i: text }]

    this.submitOp(op, callback)
    return op
  },

  // Delete `length` characters at position pos; submits the op and returns it.
  // The deleted text is read out of the current snapshot.
  del(pos, length, callback) {
    const op = [{ p: pos, d: this.snapshot.slice(pos, pos + length) }]

    this.submitOp(op, callback)
    return op
  },

  // Re-emit remote op components as 'insert'/'delete' events. The arrow
  // function keeps `this` bound to the document the handler runs on.
  _register() {
    return this.on('remoteop', function (op) {
      return Array.from(op).map(component =>
        component.i !== undefined
          ? this.emit('insert', component.p, component.i)
          : this.emit('delete', component.p, component.d)
      )
    })
  },
}
diff --git a/services/document-updater/app/js/sharejs/types/text-composable-api.js b/services/document-updater/app/js/sharejs/types/text-composable-api.js
new file mode 100644
index 0000000000..122e119ae4
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/text-composable-api.js
@@ -0,0 +1,76 @@
+/* eslint-disable
+ no-undef,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// Text document API for text
+
// Pick up the underlying 'text-composable' OT type: from the registered types
// in the browser (WEB) build, or via require on the server.
let type
if (typeof WEB !== 'undefined' && WEB !== null) {
  type = exports.types['text-composable']
} else {
  type = require('./text-composable')
}
+
type.api = {
  // Mixed into documents that use the 'text-composable' OT type.
  provides: { text: true },

  // The number of characters in the string
  getLength() {
    return this.snapshot.length
  },

  // Get the text contents of a document
  getText() {
    return this.snapshot
  },

  // Insert `text` at pos: skip pos chars, insert, skip the remainder.
  insert(pos, text, callback) {
    const op = type.normalize([pos, { i: text }, this.snapshot.length - pos])

    this.submitOp(op, callback)
    return op
  },

  // Delete `length` chars at pos, naming the removed text explicitly (the
  // type verifies deletes against the document).
  del(pos, length, callback) {
    const op = type.normalize([
      pos,
      { d: this.snapshot.slice(pos, pos + length) },
      this.snapshot.length - pos - length,
    ])

    this.submitOp(op, callback)
    return op
  },

  // Re-emit remote op components as 'insert'/'delete' events, tracking the
  // document position while walking the op.
  _register() {
    return this.on('remoteop', function (op) {
      let pos = 0
      return (() => {
        const result = []
        for (const component of Array.from(op)) {
          if (typeof component === 'number') {
            result.push((pos += component))
          } else if (component.i !== undefined) {
            this.emit('insert', pos, component.i)
            result.push((pos += component.i.length))
          } else {
            // delete
            // We don't increment pos, because the position
            // specified is after the delete has happened.
            result.push(this.emit('delete', pos, component.d))
          }
        }
        return result
      })()
    })
  },
}
diff --git a/services/document-updater/app/js/sharejs/types/text-composable.js b/services/document-updater/app/js/sharejs/types/text-composable.js
new file mode 100644
index 0000000000..ce0f3ac226
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/text-composable.js
@@ -0,0 +1,398 @@
+/* eslint-disable
+ no-cond-assign,
+ no-return-assign,
+ no-undef,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// An alternate composable implementation for text. This is much closer
+// to the implementation used by google wave.
+//
+// Ops are lists of components which iterate over the whole document.
+// Components are either:
+// A number N: Skip N characters in the original document
+// {i:'str'}: Insert 'str' at the current position in the document
+// {d:'str'}: Delete 'str', which appears at the current position in the document
+//
+// Eg: [3, {i:'hi'}, 5, {d:'internet'}]
+//
+// Snapshots are strings.
+
// Forward declaration; assigned below alongside moduleExport._makeAppend.
let makeAppend
// Debug helpers left over from development; both are deliberately no-ops.
const p = function () {} // require('util').debug
const i = function () {} // require('util').inspect

// In the browser (WEB) build the type is collected into a plain object and
// registered on window.ot.types at the bottom of this file; on the server it
// is exported directly.
const moduleExport =
  typeof WEB !== 'undefined' && WEB !== null ? {} : module.exports

moduleExport.name = 'text-composable'

moduleExport.create = () => ''
+
+// -------- Utility methods
+
// Validate an op: it must be an array whose components are either positive
// skip numbers or objects carrying a non-empty insert ({i: 'str'}) or delete
// ({d: 'str'}). Adjacent skips must already be merged. Returns the list of
// validated components; throws on the first invalid one.
const checkOp = function (op) {
  if (!Array.isArray(op)) {
    throw new Error('Op must be an array of components')
  }
  let previous = null
  return Array.from(op).map(function (c) {
    if (typeof c === 'object') {
      const hasInsert = c.i != null && c.i.length > 0
      const hasDelete = c.d != null && c.d.length > 0
      if (!hasInsert && !hasDelete) {
        throw new Error(`Invalid op component: ${i(c)}`)
      }
    } else {
      if (typeof c !== 'number') {
        throw new Error('Op components must be objects or numbers')
      }
      if (!(c > 0)) {
        throw new Error('Skip components must be a positive number')
      }
      if (typeof previous === 'number') {
        throw new Error('Adjacent skip components should be added')
      }
    }

    previous = c
    return c
  })
}
+
+// Makes a function for appending components to a given op.
+// Exported for the randomOpGenerator.
moduleExport._makeAppend = makeAppend = op =>
  function (component) {
    // Drop no-op components (zero skips, empty inserts/deletes); the empty
    // first branch is intentional.
    if (component === 0 || component.i === '' || component.d === '') {
    } else if (op.length === 0) {
      return op.push(component)
    } else if (
      typeof component === 'number' &&
      typeof op[op.length - 1] === 'number'
    ) {
      // Merge adjacent skips.
      return (op[op.length - 1] += component)
    } else if (component.i != null && op[op.length - 1].i != null) {
      // Merge adjacent inserts.
      return (op[op.length - 1].i += component.i)
    } else if (component.d != null && op[op.length - 1].d != null) {
      // Merge adjacent deletes.
      return (op[op.length - 1].d += component.d)
    } else {
      return op.push(component)
    }
  }
+
+// checkOp op
+
+// Makes 2 functions for taking components from the start of an op, and for peeking
+// at the next op that could be taken.
// Makes 2 functions for taking components from the start of an op, and for
// peeking at the next component that could be taken.
const makeTake = function (op) {
  // Index of the next component to consume, and how far into that component
  // we've already consumed.
  let idx = 0
  let offset = 0

  // Take up to length n from the front of op. If n is null, take the next
  // whole component. If indivisableField is 'd', delete components are never
  // split; if 'i', insert components are never split.
  const take = function (n, indivisableField) {
    if (idx === op.length) {
      return null
    }

    const current = op[idx]

    if (typeof current === 'number') {
      const remaining = current - offset
      if (n == null || remaining <= n) {
        // Consume the rest of this skip.
        idx += 1
        offset = 0
        return remaining
      }
      // Consume only n of it.
      offset += n
      return n
    }

    // Take from the string component.
    const field = current.i ? 'i' : 'd'
    const takeAll =
      n == null ||
      current[field].length - offset <= n ||
      field === indivisableField
    const part = {}
    if (takeAll) {
      part[field] = current[field].slice(offset)
      idx += 1
      offset = 0
    } else {
      part[field] = current[field].slice(offset, offset + n)
      offset += n
    }
    return part
  }

  const peekType = () => op[idx]

  return [take, peekType]
}
+
+// Find and return the length of an op component
// Find and return the length of an op component: a skip is its own length,
// inserts and deletes contribute the length of their text.
const componentLength = function (component) {
  if (typeof component === 'number') {
    return component
  }
  const text = component.i != null ? component.i : component.d
  return text.length
}
+
+// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate
+// adjacent inserts and deletes.
// Normalize an op: strip empty skips/inserts/deletes and concatenate adjacent
// components of the same kind.
moduleExport.normalize = function (op) {
  const normalized = []
  const push = makeAppend(normalized)
  Array.from(op).forEach(component => push(component))
  return normalized
}
+
+// Apply the op to the string. Returns the new string.
// Apply the op to the string. Returns the new string.
//
// Throws if the op doesn't traverse exactly the whole document or a delete
// component doesn't match the text it claims to remove.
moduleExport.apply = function (str, op) {
  p(`Applying ${i(op)} to '${str}'`)
  if (typeof str !== 'string') {
    throw new Error('Snapshot should be a string')
  }
  checkOp(op)

  // (Removed an unused `const pos = 0` left behind by decaffeinate.)
  const newDoc = []

  for (const component of Array.from(op)) {
    if (typeof component === 'number') {
      // Skip: copy `component` chars of the original document through.
      if (component > str.length) {
        throw new Error('The op is too long for this document')
      }
      newDoc.push(str.slice(0, component))
      str = str.slice(component)
    } else if (component.i != null) {
      // Insert: new text, consumes nothing from the original document.
      newDoc.push(component.i)
    } else {
      // Delete: the removed text must match the document.
      if (component.d !== str.slice(0, component.d.length)) {
        throw new Error(
          `The deleted text '${
            component.d
          }' doesn't match the next characters in the document '${str.slice(
            0,
            component.d.length
          )}'`
        )
      }
      str = str.slice(component.d.length)
    }
  }

  if (str !== '') {
    throw new Error("The applied op doesn't traverse the entire document")
  }

  return newDoc.join('')
}
+
+// transform op1 by op2. Return transformed version of op1.
+// op1 and op2 are unchanged by transform.
// transform op1 by op2. Return transformed version of op1.
// op1 and op2 are unchanged by transform.
//
// side must be 'left' or 'right'; it breaks ties between inserts at the same
// position (the left op's insert goes first).
moduleExport.transform = function (op, otherOp, side) {
  // Declared explicitly — the loops below previously assigned `component`
  // as an implicit global (masked by the file's eslint-disable).
  let component

  if (side !== 'left' && side !== 'right') {
    throw new Error(`side (${side} must be 'left' or 'right'`)
  }

  checkOp(op)
  checkOp(otherOp)
  const newOp = []

  const append = makeAppend(newOp)
  const [take, peek] = Array.from(makeTake(op))

  for (component of Array.from(otherOp)) {
    var chunk, length
    if (typeof component === 'number') {
      // Skip
      length = component
      while (length > 0) {
        chunk = take(length, 'i')
        if (chunk === null) {
          throw new Error(
            'The op traverses more elements than the document has'
          )
        }

        append(chunk)
        // Inserts in op don't consume any of the skip.
        if (typeof chunk !== 'object' || chunk.i == null) {
          length -= componentLength(chunk)
        }
      }
    } else if (component.i != null) {
      // Insert
      if (side === 'left') {
        // The left insert should go first.
        const o = peek()
        if (o != null ? o.i : undefined) {
          append(take())
        }
      }

      // Otherwise, skip the inserted text.
      append(component.i.length)
    } else {
      // Delete.
      // assert.ok component.d
      ;({ length } = component.d)
      while (length > 0) {
        chunk = take(length, 'i')
        if (chunk === null) {
          throw new Error(
            'The op traverses more elements than the document has'
          )
        }

        if (typeof chunk === 'number') {
          length -= chunk
        } else if (chunk.i != null) {
          append(chunk)
        } else {
          // assert.ok chunk.d
          // The delete is unnecessary now.
          length -= chunk.d.length
        }
      }
    }
  }

  // Append extras from op1
  while ((component = take())) {
    if ((component != null ? component.i : undefined) == null) {
      throw new Error(`Remaining fragments in the op: ${i(component)}`)
    }
    append(component)
  }

  return newOp
}
+
+// Compose 2 ops into 1 op.
// Compose 2 ops into 1 op: applying the result equals applying op1 then op2.
moduleExport.compose = function (op1, op2) {
  p(`COMPOSE ${i(op1)} + ${i(op2)}`)
  checkOp(op1)
  checkOp(op2)

  // Declared explicitly — the loops below previously assigned `component`
  // as an implicit global (masked by the file's eslint-disable).
  let component

  const result = []

  const append = makeAppend(result)
  const [take] = Array.from(makeTake(op1))

  for (component of Array.from(op2)) {
    var chunk, length
    if (typeof component === 'number') {
      // Skip
      length = component
      while (length > 0) {
        chunk = take(length, 'd')
        if (chunk === null) {
          throw new Error(
            'The op traverses more elements than the document has'
          )
        }

        append(chunk)
        // Deletes in op1 don't consume any of op2's skip.
        if (typeof chunk !== 'object' || chunk.d == null) {
          length -= componentLength(chunk)
        }
      }
    } else if (component.i != null) {
      // Insert
      append({ i: component.i })
    } else {
      // Delete
      let offset = 0
      while (offset < component.d.length) {
        chunk = take(component.d.length - offset, 'd')
        if (chunk === null) {
          throw new Error(
            'The op traverses more elements than the document has'
          )
        }

        // If its delete, append it. If its skip, drop it and decrease length. If its insert, check the strings match, drop it and decrease length.
        if (typeof chunk === 'number') {
          append({ d: component.d.slice(offset, offset + chunk) })
          offset += chunk
        } else if (chunk.i != null) {
          if (component.d.slice(offset, offset + chunk.i.length) !== chunk.i) {
            throw new Error("The deleted text doesn't match the inserted text")
          }
          offset += chunk.i.length
          // The ops cancel each other out.
        } else {
          // Delete
          append(chunk)
        }
      }
    }
  }

  // Append extras from op1
  while ((component = take())) {
    if ((component != null ? component.d : undefined) == null) {
      throw new Error(`Trailing stuff in op1 ${i(component)}`)
    }
    append(component)
  }

  return result
}
+
// Invert a single component: skips invert to themselves, inserts become
// deletes of the same text, and vice versa.
const invertComponent = function (c) {
  if (typeof c === 'number') {
    return c
  }
  return c.i != null ? { d: c.i } : { i: c.d }
}
+
+// Invert an op
// Invert an op, producing an op that exactly undoes it.
moduleExport.invert = function (op) {
  const inverted = []
  const append = makeAppend(inverted)
  Array.from(op).forEach(component => append(invertComponent(component)))
  return inverted
}
+
// In the browser build, register this type on the global window.ot.types
// registry so it can be found without a module system.
if (typeof window !== 'undefined' && window !== null) {
  window.ot = window.ot || {}
  window.ot.types = window.ot.types || {}
  window.ot.types.text = moduleExport
}
diff --git a/services/document-updater/app/js/sharejs/types/text-tp2-api.js b/services/document-updater/app/js/sharejs/types/text-tp2-api.js
new file mode 100644
index 0000000000..8ffbd60c50
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/text-tp2-api.js
@@ -0,0 +1,133 @@
+/* eslint-disable
+ no-undef,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// Text document API for text-tp2
+
+// Resolve the underlying text-tp2 OT type: from the shared browser
+// namespace when built for the web, otherwise via require.
+let type
+if (typeof WEB !== 'undefined' && WEB !== null) {
+  type = exports.types['text-tp2']
+} else {
+  type = require('./text-tp2')
+}
+
+// Private helpers exposed by the type for walking snapshots / building ops.
+const { _takeDoc: takeDoc, _append: append } = type
+
+// Append skip components to `op`, covering up to `maxlength` visible
+// characters starting at document position `pos` (pos is mutated).
+// Tombstone runs are taken whole and do not count against maxlength.
+// With maxlength undefined, skips to the end of the document.
+const appendSkipChars = (op, doc, pos, maxlength) =>
+  (() => {
+    const result = []
+    while (
+      (maxlength === undefined || maxlength > 0) &&
+      pos.index < doc.data.length
+    ) {
+      const part = takeDoc(doc, pos, maxlength, true)
+      if (maxlength !== undefined && typeof part === 'string') {
+        maxlength -= part.length
+      }
+      result.push(append(op, part.length || part))
+    }
+    return result
+  })()
+
+// Plain-text document API over the text-tp2 type: exposes a character-based
+// view of the snapshot (ignoring tombstones) for the ShareJS doc API.
+type.api = {
+  provides: { text: true },
+
+  // The number of characters in the string
+  getLength() {
+    return this.snapshot.charLength
+  },
+
+  // Flatten a document into a string
+  getText() {
+    const strings = Array.from(this.snapshot.data).filter(
+      elem => typeof elem === 'string'
+    )
+    return strings.join('')
+  },
+
+  // Insert `text` at character position `pos` (default 0) and submit the
+  // resulting op. Returns the op.
+  insert(pos, text, callback) {
+    if (pos === undefined) {
+      pos = 0
+    }
+
+    const op = []
+    const docPos = { index: 0, offset: 0 }
+
+    // Skip to the insertion point, insert, then skip over the remainder so
+    // the op spans the whole snapshot.
+    appendSkipChars(op, this.snapshot, docPos, pos)
+    append(op, { i: text })
+    appendSkipChars(op, this.snapshot, docPos)
+
+    this.submitOp(op, callback)
+    return op
+  },
+
+  // Delete `length` characters starting at character position `pos` and
+  // submit the resulting op. Returns the op.
+  del(pos, length, callback) {
+    const op = []
+    const docPos = { index: 0, offset: 0 }
+
+    appendSkipChars(op, this.snapshot, docPos, pos)
+
+    while (length > 0) {
+      const part = takeDoc(this.snapshot, docPos, length, true)
+      if (typeof part === 'string') {
+        append(op, { d: part.length })
+        length -= part.length
+      } else {
+        // A tombstone run inside the range: skip over it without
+        // consuming `length`.
+        append(op, part)
+      }
+    }
+
+    appendSkipChars(op, this.snapshot, docPos)
+
+    this.submitOp(op, callback)
+    return op
+  },
+
+  _register() {
+    // Interpret received ops + generate more detailed events for them
+    return this.on('remoteop', function (op, snapshot) {
+      // textPos tracks the visible-character position; docPos tracks the
+      // snapshot position (which includes tombstones).
+      let textPos = 0
+      const docPos = { index: 0, offset: 0 }
+
+      for (const component of Array.from(op)) {
+        var part, remainder
+        if (typeof component === 'number') {
+          // Skip
+          remainder = component
+          while (remainder > 0) {
+            part = takeDoc(snapshot, docPos, remainder)
+            if (typeof part === 'string') {
+              textPos += part.length
+            }
+            remainder -= part.length || part
+          }
+        } else if (component.i !== undefined) {
+          // Insert
+          // Only string inserts are visible text; tombstone inserts
+          // ({i: N}) produce no event.
+          if (typeof component.i === 'string') {
+            this.emit('insert', textPos, component.i)
+            textPos += component.i.length
+          }
+        } else {
+          // Delete
+          remainder = component.d
+          while (remainder > 0) {
+            part = takeDoc(snapshot, docPos, remainder)
+            if (typeof part === 'string') {
+              this.emit('delete', textPos, part)
+            }
+            remainder -= part.length || part
+          }
+        }
+      }
+    })
+  },
+}
diff --git a/services/document-updater/app/js/sharejs/types/text-tp2.js b/services/document-updater/app/js/sharejs/types/text-tp2.js
new file mode 100644
index 0000000000..67a70439c7
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/text-tp2.js
@@ -0,0 +1,497 @@
+/* eslint-disable
+ no-cond-assign,
+ no-return-assign,
+ no-undef,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// A TP2 implementation of text, following this spec:
+// http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README
+//
+// A document is made up of a string and a set of tombstones inserted throughout
+// the string. For example, 'some ', (2 tombstones), 'string'.
+//
+// This is encoded in a document as: {s:'some string', t:[5, -2, 6]}
+//
+// Ops are lists of components which iterate over the whole document.
+// Components are either:
+// N: Skip N characters in the original document
+// {i:'str'}: Insert 'str' at the current position in the document
+// {i:N}: Insert N tombstones at the current position in the document
+// {d:N}: Delete (tombstone) N characters at the current position in the document
+//
+// Eg: [3, {i:'hi'}, 5, {d:8}]
+//
+// Snapshots are lists with characters and tombstones. Characters are stored in strings
+// and adjacent tombstones are flattened into numbers.
+//
+// Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters)
+// would be represented by a document snapshot of ['Hello ', 5, 'world']
+
+let append, appendDoc, takeDoc
+// The text-tp2 OT type. Snapshots cache both the visible character count
+// (charLength) and the total length including tombstones (totalLength).
+var type = {
+  name: 'text-tp2',
+  tp2: true,
+  create() {
+    return { charLength: 0, totalLength: 0, positionCache: [], data: [] }
+  },
+  serialize(doc) {
+    if (!doc.data) {
+      throw new Error('invalid doc snapshot')
+    }
+    return doc.data
+  },
+  deserialize(data) {
+    // Rebuild the cached lengths from the raw data list: strings carry
+    // visible characters, numbers are tombstone runs.
+    const doc = type.create()
+    doc.data = data
+
+    for (const component of Array.from(data)) {
+      if (typeof component === 'string') {
+        doc.charLength += component.length
+        doc.totalLength += component.length
+      } else {
+        doc.totalLength += component
+      }
+    }
+
+    return doc
+  },
+}
+
+// Validate an op. Each component must be a positive skip number, an insert
+// {i: non-empty string | positive number (tombstones)}, or a delete
+// {d: positive number}; adjacent skips must already have been combined.
+const checkOp = function (op) {
+  if (!Array.isArray(op)) {
+    throw new Error('Op must be an array of components')
+  }
+  let last = null
+  return (() => {
+    const result = []
+    for (const c of Array.from(op)) {
+      if (typeof c === 'object') {
+        if (c.i !== undefined) {
+          if (
+            (typeof c.i !== 'string' || !(c.i.length > 0)) &&
+            (typeof c.i !== 'number' || !(c.i > 0))
+          ) {
+            throw new Error('Inserts must insert a string or a +ive number')
+          }
+        } else if (c.d !== undefined) {
+          if (typeof c.d !== 'number' || !(c.d > 0)) {
+            throw new Error('Deletes must be a +ive number')
+          }
+        } else {
+          throw new Error('Operation component must define .i or .d')
+        }
+      } else {
+        if (typeof c !== 'number') {
+          throw new Error('Op components must be objects or numbers')
+        }
+        if (!(c > 0)) {
+          throw new Error('Skip components must be a positive number')
+        }
+        if (typeof last === 'number') {
+          throw new Error('Adjacent skip components should be combined')
+        }
+      }
+
+      result.push((last = c))
+    }
+    return result
+  })()
+}
+
+// Take the next part from the specified position in a document snapshot.
+// position = {index, offset}. It will be updated.
+// Returns a string (visible characters) or a number (tombstone run). At
+// most `maxlength` items are returned, except that a tombstone run is
+// returned whole when tombsIndivisible is set.
+type._takeDoc = takeDoc = function (
+  doc,
+  position,
+  maxlength,
+  tombsIndivisible
+) {
+  if (position.index >= doc.data.length) {
+    throw new Error('Operation goes past the end of the document')
+  }
+
+  const part = doc.data[position.index]
+  // peel off data[0]
+  const result =
+    typeof part === 'string'
+      ? maxlength !== undefined
+        ? part.slice(position.offset, position.offset + maxlength)
+        : part.slice(position.offset)
+      : maxlength === undefined || tombsIndivisible
+      ? part - position.offset
+      : Math.min(maxlength, part - position.offset)
+
+  // Length of the taken part, whether string or tombstone run.
+  const resultLen = result.length || result
+
+  // Advance within the current part, or move on to the next one when it
+  // has been fully consumed.
+  if ((part.length || part) - position.offset > resultLen) {
+    position.offset += resultLen
+  } else {
+    position.index++
+    position.offset = 0
+  }
+
+  return result
+}
+
+// Append a part to the end of a document
+// Keeps charLength/totalLength up to date and merges the part into the
+// trailing element when both are strings or both are tombstone runs.
+type._appendDoc = appendDoc = function (doc, p) {
+  // Empty parts are no-ops.
+  if (p === 0 || p === '') {
+    return
+  }
+
+  if (typeof p === 'string') {
+    doc.charLength += p.length
+    doc.totalLength += p.length
+  } else {
+    doc.totalLength += p
+  }
+
+  const { data } = doc
+  if (data.length === 0) {
+    data.push(p)
+  } else if (typeof data[data.length - 1] === typeof p) {
+    data[data.length - 1] += p
+  } else {
+    data.push(p)
+  }
+}
+
+// Apply the op to the document. The document is not modified in the process.
+// Returns a freshly built snapshot.
+type.apply = function (doc, op) {
+  if (
+    doc.totalLength === undefined ||
+    doc.charLength === undefined ||
+    doc.data.length === undefined
+  ) {
+    throw new Error('Snapshot is invalid')
+  }
+
+  checkOp(op)
+
+  const newDoc = type.create()
+  const position = { index: 0, offset: 0 }
+
+  for (const component of Array.from(op)) {
+    var part, remainder
+    if (typeof component === 'number') {
+      // Skip: copy `component` items straight from the old document.
+      remainder = component
+      while (remainder > 0) {
+        part = takeDoc(doc, position, remainder)
+
+        appendDoc(newDoc, part)
+        remainder -= part.length || part
+      }
+    } else if (component.i !== undefined) {
+      // Insert the new text (or tombstone run) without consuming input.
+      appendDoc(newDoc, component.i)
+    } else if (component.d !== undefined) {
+      // Delete: consume the deleted items, then append an equal-length
+      // tombstone run in their place.
+      remainder = component.d
+      while (remainder > 0) {
+        part = takeDoc(doc, position, remainder)
+        remainder -= part.length || part
+      }
+      appendDoc(newDoc, component.d)
+    }
+  }
+
+  return newDoc
+}
+
+// Append an op component to the end of the specified op.
+// Exported for the randomOpGenerator.
+// Empty components are dropped; the component is merged into the last one
+// when both are skips, both are inserts of the same kind, or both deletes.
+type._append = append = function (op, component) {
+  if (
+    component === 0 ||
+    component.i === '' ||
+    component.i === 0 ||
+    component.d === 0
+  ) {
+    // No-op component: deliberately dropped.
+  } else if (op.length === 0) {
+    return op.push(component)
+  } else {
+    const last = op[op.length - 1]
+    if (typeof component === 'number' && typeof last === 'number') {
+      return (op[op.length - 1] += component)
+    } else if (
+      component.i !== undefined &&
+      last.i != null &&
+      typeof last.i === typeof component.i
+    ) {
+      return (last.i += component.i)
+    } else if (component.d !== undefined && last.d != null) {
+      return (last.d += component.d)
+    } else {
+      return op.push(component)
+    }
+  }
+}
+
+// Makes 2 functions for taking components from the start of an op, and for peeking
+// at the next op that could be taken.
+const makeTake = function (op) {
+  // The index of the next component to take
+  let index = 0
+  // The offset into the component
+  let offset = 0
+
+  // Take up to length maxlength from the op. If maxlength is not defined, there is no max.
+  // If insertsIndivisible is true, inserts (& insert tombstones) won't be separated.
+  //
+  // Returns null when op is fully consumed.
+  const take = function (maxlength, insertsIndivisible) {
+    let current
+    if (index === op.length) {
+      return null
+    }
+
+    const e = op[index]
+    // Numeric components (skips, tombstone inserts {i: N} and deletes
+    // {d: N}) are handled here; string inserts fall through to the else.
+    if (
+      typeof (current = e) === 'number' ||
+      typeof (current = e.i) === 'number' ||
+      (current = e.d) !== undefined
+    ) {
+      let c
+      if (
+        maxlength == null ||
+        current - offset <= maxlength ||
+        (insertsIndivisible && e.i !== undefined)
+      ) {
+        // Return the rest of the current element.
+        c = current - offset
+        ++index
+        offset = 0
+      } else {
+        offset += maxlength
+        c = maxlength
+      }
+      // Re-wrap the taken count in the same kind of component as `e`.
+      if (e.i !== undefined) {
+        return { i: c }
+      } else if (e.d !== undefined) {
+        return { d: c }
+      } else {
+        return c
+      }
+    } else {
+      // Take from the inserted string
+      let result
+      if (
+        maxlength == null ||
+        e.i.length - offset <= maxlength ||
+        insertsIndivisible
+      ) {
+        result = { i: e.i.slice(offset) }
+        ++index
+        offset = 0
+      } else {
+        result = { i: e.i.slice(offset, offset + maxlength) }
+        offset += maxlength
+      }
+      return result
+    }
+  }
+
+  // Peek at the next (possibly partially consumed) component.
+  const peekType = () => op[index]
+
+  return [take, peekType]
+}
+
+// Find and return the length of an op component
+const componentLength = function (component) {
+  if (typeof component === 'number') {
+    return component
+  } else if (typeof component.i === 'string') {
+    return component.i.length
+  } else {
+    // This should work because c.d and c.i must be +ive.
+    return component.d || component.i
+  }
+}
+
+// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate
+// adjacent inserts and deletes.
+type.normalize = function (op) {
+  // append() already drops no-op components and merges adjacent ones, so
+  // re-appending every component into a fresh op normalizes it.
+  const newOp = []
+  for (const component of Array.from(op)) {
+    append(newOp, component)
+  }
+  return newOp
+}
+
+// This is a helper method to transform and prune. goForwards is true for transform, false for prune.
+const transformer = function (op, otherOp, goForwards, side) {
+  let component
+  checkOp(op)
+  checkOp(otherOp)
+  const newOp = []
+
+  // Incrementally consume `op` while walking `otherOp`.
+  const [take, peek] = Array.from(makeTake(op))
+
+  for (component of Array.from(otherOp)) {
+    var chunk
+    let length = componentLength(component)
+
+    if (component.i !== undefined) {
+      // Insert text or tombs
+      if (goForwards) {
+        // transform - insert skips over inserted parts
+        if (side === 'left') {
+          // The left insert should go first.
+          while (__guard__(peek(), x => x.i) !== undefined) {
+            append(newOp, take())
+          }
+        }
+
+        // In any case, skip the inserted text.
+        append(newOp, length)
+      } else {
+        // Prune. Remove skips for inserts.
+        while (length > 0) {
+          chunk = take(length, true)
+
+          if (chunk === null) {
+            throw new Error('The transformed op is invalid')
+          }
+          if (chunk.d !== undefined) {
+            throw new Error(
+              'The transformed op deletes locally inserted characters - it cannot be purged of the insert.'
+            )
+          }
+
+          if (typeof chunk === 'number') {
+            // Drop the skip that covered the insert being pruned away.
+            length -= chunk
+          } else {
+            append(newOp, chunk)
+          }
+        }
+      }
+    } else {
+      // Skip or delete
+      while (length > 0) {
+        chunk = take(length, true)
+        if (chunk === null) {
+          throw new Error(
+            'The op traverses more elements than the document has'
+          )
+        }
+
+        append(newOp, chunk)
+        // Inserts taken from `op` don't consume any of this component.
+        if (!chunk.i) {
+          length -= componentLength(chunk)
+        }
+      }
+    }
+  }
+
+  // Append extras from op1
+  // Anything left in `op` must be an insert, which simply comes after
+  // everything in `otherOp`.
+  while ((component = take())) {
+    if (component.i === undefined) {
+      throw new Error(`Remaining fragments in the op: ${component}`)
+    }
+    append(newOp, component)
+  }
+
+  return newOp
+}
+
+// transform op1 by op2. Return transformed version of op1.
+// op1 and op2 are unchanged by transform.
+// side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op.
+type.transform = function (op, otherOp, side) {
+  if (side !== 'left' && side !== 'right') {
+    throw new Error(`side (${side}) should be 'left' or 'right'`)
+  }
+  return transformer(op, otherOp, true, side)
+}
+
+// Prune is the inverse of transform.
+type.prune = (op, otherOp) => transformer(op, otherOp, false)
+
+// Compose 2 ops into 1 op.
+type.compose = function (op1, op2) {
+  let component
+  // Composing with a null/undefined op1 is the identity.
+  if (op1 === null || op1 === undefined) {
+    return op2
+  }
+
+  checkOp(op1)
+  checkOp(op2)
+
+  const result = []
+
+  const [take, _] = Array.from(makeTake(op1))
+
+  for (component of Array.from(op2)) {
+    var chunk, length
+    if (typeof component === 'number') {
+      // Skip
+      // Just copy from op1.
+      length = component
+      while (length > 0) {
+        chunk = take(length)
+        if (chunk === null) {
+          throw new Error(
+            'The op traverses more elements than the document has'
+          )
+        }
+
+        append(result, chunk)
+        length -= componentLength(chunk)
+      }
+    } else if (component.i !== undefined) {
+      // Insert
+      append(result, { i: component.i })
+    } else {
+      // Delete
+      length = component.d
+      while (length > 0) {
+        chunk = take(length)
+        if (chunk === null) {
+          throw new Error(
+            'The op traverses more elements than the document has'
+          )
+        }
+
+        // Deleting content op1 inserted leaves a tombstone insert of the
+        // same length; anything else becomes a delete.
+        const chunkLength = componentLength(chunk)
+        if (chunk.i !== undefined) {
+          append(result, { i: chunkLength })
+        } else {
+          append(result, { d: chunkLength })
+        }
+
+        length -= chunkLength
+      }
+    }
+  }
+
+  // Append extras from op1
+  // Only inserts may remain once op2 is exhausted.
+  while ((component = take())) {
+    if (component.i === undefined) {
+      throw new Error(`Remaining fragments in op1: ${component}`)
+    }
+    append(result, component)
+  }
+
+  return result
+}
+
+// Register the type: on the shared web namespace in the browser, as a
+// CommonJS module otherwise.
+if (typeof WEB !== 'undefined' && WEB !== null) {
+  exports.types['text-tp2'] = type
+} else {
+  module.exports = type
+}
+
+// decaffeinate helper: apply `transform` to `value` only when value is
+// neither null nor undefined.
+function __guard__(value, transform) {
+  return typeof value !== 'undefined' && value !== null
+    ? transform(value)
+    : undefined
+}
diff --git a/services/document-updater/app/js/sharejs/types/text.js b/services/document-updater/app/js/sharejs/types/text.js
new file mode 100644
index 0000000000..bbbe36e0f6
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/text.js
@@ -0,0 +1,390 @@
+/* eslint-disable
+ camelcase,
+ no-return-assign,
+ no-undef,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// A simple text implementation
+//
+// Operations are lists of components.
+// Each component either inserts or deletes at a specified position in the document.
+//
+// Components are either:
+// {i:'str', p:100}: Insert 'str' at position 100 in the document
+// {d:'str', p:100}: Delete 'str' at position 100 in the document
+//
+// Components in an operation are executed sequentially, so the position of components
+// assumes previous components have already executed.
+//
+// Eg: This op:
+// [{i:'abc', p:0}]
+// is equivalent to this op:
+// [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}]
+
+// NOTE: The global scope here is shared with other sharejs files when built with closure.
+// Be careful what ends up in your namespace.
+
+let append, transformComponent
+const text = {}
+
+text.name = 'text'
+
+// A snapshot is just a string; a new document is empty.
+text.create = () => ''
+
+// Insert s2 into s1 at position pos.
+const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos)
+
+// Validate a single component: it needs a numeric, non-negative position
+// and one of i (insert), d (delete) or c (comment) as a string.
+const checkValidComponent = function (c) {
+  if (typeof c.p !== 'number') {
+    throw new Error('component missing position field')
+  }
+
+  // XOR chain: exactly one of i/d/c should be a string.
+  const i_type = typeof c.i
+  const d_type = typeof c.d
+  const c_type = typeof c.c
+  if (
+    !((i_type === 'string') ^ (d_type === 'string') ^ (c_type === 'string'))
+  ) {
+    throw new Error('component needs an i, d or c field')
+  }
+
+  if (!(c.p >= 0)) {
+    throw new Error('position cannot be negative')
+  }
+}
+
+// Validate every component of an op.
+const checkValidOp = function (op) {
+  for (const c of Array.from(op)) {
+    checkValidComponent(c)
+  }
+  return true
+}
+
+// Apply an op to a snapshot string and return the new snapshot.
+// Delete and comment components are verified against the actual text at
+// their position before being applied.
+text.apply = function (snapshot, op) {
+  checkValidOp(op)
+  for (const component of Array.from(op)) {
+    if (component.i != null) {
+      snapshot = strInject(snapshot, component.p, component.i)
+    } else if (component.d != null) {
+      const deleted = snapshot.slice(
+        component.p,
+        component.p + component.d.length
+      )
+      if (component.d !== deleted) {
+        throw new Error(
+          `Delete component '${component.d}' does not match deleted text '${deleted}'`
+        )
+      }
+      snapshot =
+        snapshot.slice(0, component.p) +
+        snapshot.slice(component.p + component.d.length)
+    } else if (component.c != null) {
+      // Comments don't modify the text; just check that it matches.
+      const comment = snapshot.slice(
+        component.p,
+        component.p + component.c.length
+      )
+      if (component.c !== comment) {
+        throw new Error(
+          `Comment component '${component.c}' does not match commented text '${comment}'`
+        )
+      }
+    } else {
+      throw new Error('Unknown op type')
+    }
+  }
+  return snapshot
+}
+
+// Exported for use by the random op generator.
+//
+// For simplicity, this version of append does not compress adjacent inserts and deletes of
+// the same text. It would be nice to change that at some stage.
+text._append = append = function (newOp, c) {
+  // Empty inserts/deletes are no-ops.
+  if (c.i === '' || c.d === '') {
+    return
+  }
+  if (newOp.length === 0) {
+    return newOp.push(c)
+  } else {
+    const last = newOp[newOp.length - 1]
+
+    // Compose the insert into the previous insert if possible
+    if (
+      last.i != null &&
+      c.i != null &&
+      last.p <= c.p &&
+      c.p <= last.p + last.i.length
+    ) {
+      return (newOp[newOp.length - 1] = {
+        i: strInject(last.i, c.p - last.p, c.i),
+        p: last.p,
+      })
+    } else if (
+      // Likewise merge adjacent/overlapping deletes into one component.
+      last.d != null &&
+      c.d != null &&
+      c.p <= last.p &&
+      last.p <= c.p + c.d.length
+    ) {
+      return (newOp[newOp.length - 1] = {
+        d: strInject(c.d, last.p - c.p, last.d),
+        p: c.p,
+      })
+    } else {
+      return newOp.push(c)
+    }
+  }
+}
+
+// Compose op1 and op2: append op2's components onto a copy of op1,
+// merging where possible.
+text.compose = function (op1, op2) {
+  checkValidOp(op1)
+  checkValidOp(op2)
+
+  const newOp = op1.slice()
+  for (const c of Array.from(op2)) {
+    append(newOp, c)
+  }
+
+  return newOp
+}
+
+// Attempt to compress the op components together 'as much as possible'.
+// This implementation preserves order and preserves create/delete pairs.
+text.compress = op => text.compose([], op)
+
+// Normalize an op: accept a bare single component, default missing
+// positions to 0, and re-append everything so components merge.
+text.normalize = function (op) {
+  const newOp = []
+
+  // Normalize should allow ops which are a single (unwrapped) component:
+  // {i:'asdf', p:23}.
+  // There's no good way to test if something is an array:
+  // http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/
+  // so this is probably the least bad solution.
+  if (op.i != null || op.p != null) {
+    op = [op]
+  }
+
+  for (const c of Array.from(op)) {
+    if (c.p == null) {
+      c.p = 0
+    }
+    append(newOp, c)
+  }
+
+  return newOp
+}
+
+// This helper method transforms a position by an op component.
+//
+// If c is an insert, insertAfter specifies whether the transform
+// is pushed after the insert (true) or before it (false).
+//
+// insertAfter is optional for deletes.
+const transformPosition = function (pos, c, insertAfter) {
+  if (c.i != null) {
+    if (c.p < pos || (c.p === pos && insertAfter)) {
+      return pos + c.i.length
+    } else {
+      return pos
+    }
+  } else if (c.d != null) {
+    // I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length))
+    // but I think its harder to read that way, and it compiles using ternary operators anyway
+    // so its no slower written like this.
+    if (pos <= c.p) {
+      return pos
+    } else if (pos <= c.p + c.d.length) {
+      // The position was inside the deleted range: clamp to its start.
+      return c.p
+    } else {
+      return pos - c.d.length
+    }
+  } else if (c.c != null) {
+    // Comments don't move positions.
+    return pos
+  } else {
+    throw new Error('unknown op type')
+  }
+}
+
+// Helper method to transform a cursor position as a result of an op.
+//
+// Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position
+// is pushed after an insert (true) or before it (false).
+text.transformCursor = function (position, op, side) {
+  const insertAfter = side === 'right'
+  for (const c of Array.from(op)) {
+    position = transformPosition(position, c, insertAfter)
+  }
+  return position
+}
+
+// Transform an op component by another op component. Asymmetric.
+// The result will be appended to destination.
+// dest is mutated and returned.
+//
+// exported for use in JSON type
+text._tc = transformComponent = function (dest, c, otherC, side) {
+  let cIntersect, intersectEnd, intersectStart, newC, otherIntersect
+  checkValidOp([c])
+  checkValidOp([otherC])
+
+  if (c.i != null) {
+    // Insert: just shift the position across otherC; on a positional tie
+    // the 'right' side is pushed after the other op's insert.
+    append(dest, {
+      i: c.i,
+      p: transformPosition(c.p, otherC, side === 'right'),
+    })
+  } else if (c.d != null) {
+    // Delete
+    if (otherC.i != null) {
+      // delete vs insert
+      // If the other insert lands inside our deleted range, split the
+      // delete around it.
+      let s = c.d
+      if (c.p < otherC.p) {
+        append(dest, { d: s.slice(0, otherC.p - c.p), p: c.p })
+        s = s.slice(otherC.p - c.p)
+      }
+      if (s !== '') {
+        append(dest, { d: s, p: c.p + otherC.i.length })
+      }
+    } else if (otherC.d != null) {
+      // Delete vs delete
+      if (c.p >= otherC.p + otherC.d.length) {
+        append(dest, { d: c.d, p: c.p - otherC.d.length })
+      } else if (c.p + c.d.length <= otherC.p) {
+        append(dest, c)
+      } else {
+        // They overlap somewhere.
+        // Keep only the parts of our delete that the other delete didn't
+        // already remove.
+        newC = { d: '', p: c.p }
+        if (c.p < otherC.p) {
+          newC.d = c.d.slice(0, otherC.p - c.p)
+        }
+        if (c.p + c.d.length > otherC.p + otherC.d.length) {
+          newC.d += c.d.slice(otherC.p + otherC.d.length - c.p)
+        }
+
+        // This is entirely optional - just for a check that the deleted
+        // text in the two ops matches
+        intersectStart = Math.max(c.p, otherC.p)
+        intersectEnd = Math.min(c.p + c.d.length, otherC.p + otherC.d.length)
+        cIntersect = c.d.slice(intersectStart - c.p, intersectEnd - c.p)
+        otherIntersect = otherC.d.slice(
+          intersectStart - otherC.p,
+          intersectEnd - otherC.p
+        )
+        if (cIntersect !== otherIntersect) {
+          throw new Error(
+            'Delete ops delete different text in the same region of the document'
+          )
+        }
+
+        if (newC.d !== '') {
+          // This could be rewritten similarly to insert v delete, above.
+          newC.p = transformPosition(newC.p, otherC)
+          append(dest, newC)
+        }
+      }
+    } else if (otherC.c != null) {
+      // Comments don't affect deletes.
+      append(dest, c)
+    } else {
+      throw new Error('unknown op type')
+    }
+  } else if (c.c != null) {
+    // Comment (a commented range; carries an id in .t)
+    if (otherC.i != null) {
+      if (c.p < otherC.p && otherC.p < c.p + c.c.length) {
+        // The insert lands inside the commented range: splice the inserted
+        // text into the comment's copy of the text.
+        const offset = otherC.p - c.p
+        // decaffeinate artifact: `+(offset - 1) + 1 || undefined` is just
+        // `offset` here, since offset > 0 in this branch.
+        const new_c =
+          c.c.slice(0, +(offset - 1) + 1 || undefined) +
+          otherC.i +
+          c.c.slice(offset)
+        append(dest, { c: new_c, p: c.p, t: c.t })
+      } else {
+        append(dest, {
+          c: c.c,
+          p: transformPosition(c.p, otherC, true),
+          t: c.t,
+        })
+      }
+    } else if (otherC.d != null) {
+      if (c.p >= otherC.p + otherC.d.length) {
+        append(dest, { c: c.c, p: c.p - otherC.d.length, t: c.t })
+      } else if (c.p + c.c.length <= otherC.p) {
+        append(dest, c)
+      } else {
+        // Delete overlaps comment
+        // They overlap somewhere.
+        newC = { c: '', p: c.p, t: c.t }
+        if (c.p < otherC.p) {
+          newC.c = c.c.slice(0, otherC.p - c.p)
+        }
+        if (c.p + c.c.length > otherC.p + otherC.d.length) {
+          newC.c += c.c.slice(otherC.p + otherC.d.length - c.p)
+        }
+
+        // This is entirely optional - just for a check that the deleted
+        // text in the two ops matches
+        intersectStart = Math.max(c.p, otherC.p)
+        intersectEnd = Math.min(c.p + c.c.length, otherC.p + otherC.d.length)
+        cIntersect = c.c.slice(intersectStart - c.p, intersectEnd - c.p)
+        otherIntersect = otherC.d.slice(
+          intersectStart - otherC.p,
+          intersectEnd - otherC.p
+        )
+        if (cIntersect !== otherIntersect) {
+          throw new Error(
+            'Delete ops delete different text in the same region of the document'
+          )
+        }
+
+        newC.p = transformPosition(newC.p, otherC)
+        append(dest, newC)
+      }
+    } else if (otherC.c != null) {
+      append(dest, c)
+    } else {
+      throw new Error('unknown op type')
+    }
+  }
+
+  return dest
+}
+
+// Invert a single component: an insert becomes a delete of the same text
+// at the same position, and vice versa.
+const invertComponent = function (c) {
+  if (c.i != null) {
+    return { d: c.i, p: c.p }
+  } else {
+    return { i: c.d, p: c.p }
+  }
+}
+
+// No need to use append for invert, because the components won't be able to
+// cancel with one another.
+text.invert = op =>
+  Array.from(op.slice().reverse()).map(c => invertComponent(c))
+
+// Register the type. In the browser the transform boilerplate is wired up
+// here; in node it comes from ./helpers.
+if (typeof WEB !== 'undefined' && WEB !== null) {
+  if (!exports.types) {
+    exports.types = {}
+  }
+
+  // This is kind of awful - come up with a better way to hook this helper code up.
+  bootstrapTransform(text, transformComponent, checkValidOp, append)
+
+  // [] is used to prevent closure from renaming types.text
+  exports.types.text = text
+} else {
+  module.exports = text
+
+  // The text type really shouldn't need this - it should be possible to define
+  // an efficient transform function by making a sort of transform map and passing each
+  // op component through it.
+  require('./helpers').bootstrapTransform(
+    text,
+    transformComponent,
+    checkValidOp,
+    append
+  )
+}
diff --git a/services/document-updater/app/js/sharejs/types/web-prelude.js b/services/document-updater/app/js/sharejs/types/web-prelude.js
new file mode 100644
index 0000000000..a4c3a0f22e
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/web-prelude.js
@@ -0,0 +1,14 @@
+/* eslint-disable
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+// This is included at the top of each compiled type file for the web.
+
+/**
+ @const
+ @type {boolean}
+*/
+const WEB = true
+
+// The compiled type files attach themselves to the shared window.sharejs
+// namespace via this `exports` binding.
+const exports = window.sharejs
diff --git a/services/document-updater/app/js/sharejs/web-prelude.js b/services/document-updater/app/js/sharejs/web-prelude.js
new file mode 100644
index 0000000000..a4c3a0f22e
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/web-prelude.js
@@ -0,0 +1,14 @@
+/* eslint-disable
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+// This is included at the top of each compiled type file for the web.
+
+/**
+ @const
+ @type {boolean}
+*/
+const WEB = true
+
+// The compiled type files attach themselves to the shared window.sharejs
+// namespace via this `exports` binding.
+const exports = window.sharejs
diff --git a/services/document-updater/benchmarks/multi_vs_mget_mset.rb b/services/document-updater/benchmarks/multi_vs_mget_mset.rb
new file mode 100644
index 0000000000..ea953cda14
--- /dev/null
+++ b/services/document-updater/benchmarks/multi_vs_mget_mset.rb
@@ -0,0 +1,188 @@
+require "benchmark"
+require "redis"
+
+# Iteration count (first CLI argument, default 1) and the doc id (last CLI
+# argument) used to build the redis keys.
+N = (ARGV.first || 1).to_i
+DOC_ID = (ARGV.last || "606072b20bb4d3109fb5b122")
+
+@r = Redis.new
+
+
+# Read all ten doc-level keys with one GET per key.
+def get
+  @r.get("doclines:{#{DOC_ID}}")
+  @r.get("DocVersion:{#{DOC_ID}}")
+  @r.get("DocHash:{#{DOC_ID}}")
+  @r.get("ProjectId:{#{DOC_ID}}")
+  @r.get("Ranges:{#{DOC_ID}}")
+  @r.get("Pathname:{#{DOC_ID}}")
+  @r.get("ProjectHistoryId:{#{DOC_ID}}")
+  @r.get("UnflushedTime:{#{DOC_ID}}")
+  @r.get("lastUpdatedAt:{#{DOC_ID}}")
+  @r.get("lastUpdatedBy:{#{DOC_ID}}")
+end
+
+# Read all ten doc-level keys in a single MGET round trip.
+def mget
+  @r.mget(
+    "doclines:{#{DOC_ID}}",
+    "DocVersion:{#{DOC_ID}}",
+    "DocHash:{#{DOC_ID}}",
+    "ProjectId:{#{DOC_ID}}",
+    "Ranges:{#{DOC_ID}}",
+    "Pathname:{#{DOC_ID}}",
+    "ProjectHistoryId:{#{DOC_ID}}",
+    "UnflushedTime:{#{DOC_ID}}",
+    "lastUpdatedAt:{#{DOC_ID}}",
+    "lastUpdatedBy:{#{DOC_ID}}",
+  )
+end
+
+# Write all ten doc-level keys with one SET per key.
+def set
+  @r.set("doclines:{#{DOC_ID}}", "[\"@book{adams1995hitchhiker,\",\" title={The Hitchhiker's Guide to the Galaxy},\",\" author={Adams, D.},\",\" isbn={9781417642595},\",\" url={http://books.google.com/books?id=W-xMPgAACAAJ},\",\" year={1995},\",\" publisher={San Val}\",\"}\",\"\"]")
+  @r.set("DocVersion:{#{DOC_ID}}", "0")
+  @r.set("DocHash:{#{DOC_ID}}", "0075bb0629c6c13d0d68918443648bbfe7d98869")
+  @r.set("ProjectId:{#{DOC_ID}}", "606072b20bb4d3109fb5b11e")
+  @r.set("Ranges:{#{DOC_ID}}", "")
+  @r.set("Pathname:{#{DOC_ID}}", "/references.bib")
+  @r.set("ProjectHistoryId:{#{DOC_ID}}", "")
+  @r.set("UnflushedTime:{#{DOC_ID}}", "")
+  @r.set("lastUpdatedAt:{#{DOC_ID}}", "")
+  @r.set("lastUpdatedBy:{#{DOC_ID}}", "")
+end
+
+# Write all ten doc-level keys in a single MSET round trip.
+def mset
+  @r.mset(
+    "doclines:{#{DOC_ID}}", "[\"@book{adams1995hitchhiker,\",\" title={The Hitchhiker's Guide to the Galaxy},\",\" author={Adams, D.},\",\" isbn={9781417642595},\",\" url={http://books.google.com/books?id=W-xMPgAACAAJ},\",\" year={1995},\",\" publisher={San Val}\",\"}\",\"\"]",
+    "DocVersion:{#{DOC_ID}}", "0",
+    "DocHash:{#{DOC_ID}}", "0075bb0629c6c13d0d68918443648bbfe7d98869",
+    "ProjectId:{#{DOC_ID}}", "606072b20bb4d3109fb5b11e",
+    "Ranges:{#{DOC_ID}}", "",
+    "Pathname:{#{DOC_ID}}", "/references.bib",
+    "ProjectHistoryId:{#{DOC_ID}}", "",
+    "UnflushedTime:{#{DOC_ID}}", "",
+    "lastUpdatedAt:{#{DOC_ID}}", "",
+    "lastUpdatedBy:{#{DOC_ID}}", "",
+  )
+end
+
+
+# Time N iterations of the per-key GETs wrapped in a MULTI transaction.
+def benchmark_multi_get(benchmark, i)
+  benchmark.report("#{i}: multi get") do
+    N.times do
+      @r.multi do
+        get
+      end
+    end
+  end
+end
+
+# Time N iterations of the single-command MGET variant.
+def benchmark_mget(benchmark, i)
+  benchmark.report("#{i}: mget") do
+    N.times do
+      mget
+    end
+  end
+end
+
+# Time N iterations of the per-key SETs wrapped in a MULTI transaction.
+def benchmark_multi_set(benchmark, i)
+  benchmark.report("#{i}: multi set") do
+    N.times do
+      @r.multi do
+        set
+      end
+    end
+  end
+end
+
+# Time N iterations of the single-command MSET variant.
+def benchmark_mset(benchmark, i)
+  benchmark.report("#{i}: mset") do
+    N.times do
+      mset
+    end
+  end
+end
+
+
+# init
+# Seed the keys once so the GET benchmarks read real values.
+set
+
+# bmbm runs a rehearsal pass and then a measured pass of each report.
+Benchmark.bmbm do |benchmark|
+  3.times do |i|
+    benchmark_multi_get(benchmark, i)
+    benchmark_mget(benchmark, i)
+    benchmark_multi_set(benchmark, i)
+    benchmark_mset(benchmark, i)
+  end
+end
+
+
+
+=begin
+# Results
+
+I could not max out the redis-server process with this benchmark.
+The ruby process hit 100% of a modern i7 CPU thread and the redis-server process
+ barely hit 50% of a CPU thread.
+
+Based on the timings below, mget is about 3 times faster and mset about 4 times
+ faster than multiple get/set commands in a multi.
+=end
+
+=begin
+$ redis-server --version
+Redis server v=5.0.7 sha=00000000:0 malloc=jemalloc-5.2.1 bits=64 build=636cde3b5c7a3923
+$ ruby multi_vs_mget_mset.rb 100000
+Rehearsal ------------------------------------------------
+0: multi get 12.132423 4.246689 16.379112 ( 16.420069)
+0: mget 4.499457 0.947556 5.447013 ( 6.274883)
+0: multi set 12.685936 4.495241 17.181177 ( 17.225984)
+0: mset 2.543401 0.913448 3.456849 ( 4.554799)
+1: multi get 13.397207 4.581881 17.979088 ( 18.027755)
+1: mget 4.551287 1.160531 5.711818 ( 6.579168)
+1: multi set 13.018957 4.927175 17.946132 ( 17.987502)
+1: mset 2.561096 1.048416 3.609512 ( 4.780087)
+2: multi get 13.224422 5.014475 18.238897 ( 18.284152)
+2: mget 4.664434 1.051083 5.715517 ( 6.592088)
+2: multi set 12.972284 4.600422 17.572706 ( 17.613185)
+2: mset 2.621344 0.984123 3.605467 ( 4.766855)
+------------------------------------- total: 132.843288sec
+
+ user system total real
+0: multi get 13.341552 4.900892 18.242444 ( 18.289912)
+0: mget 5.056534 0.960954 6.017488 ( 6.971189)
+0: multi set 12.989880 4.823793 17.813673 ( 17.858393)
+0: mset 2.543434 1.025352 3.568786 ( 4.723040)
+1: multi get 13.059379 4.674345 17.733724 ( 17.777859)
+1: mget 4.698754 0.915637 5.614391 ( 6.489614)
+1: multi set 12.608293 4.729163 17.337456 ( 17.372993)
+1: mset 2.645290 0.940584 3.585874 ( 4.744134)
+2: multi get 13.678224 4.732373 18.410597 ( 18.457525)
+2: mget 4.716749 1.072064 5.788813 ( 6.697683)
+2: multi set 13.058710 4.889801 17.948511 ( 17.988742)
+2: mset 2.311854 0.989166 3.301020 ( 4.346467)
+=end
+
+=begin
+# multi get/set run at about O(65'000) operations per second
+$ redis-cli info | grep 'instantaneous_ops_per_sec'
+instantaneous_ops_per_sec:65557
+
+# mget runs at about O(15'000) operations per second
+$ redis-cli info | grep 'instantaneous_ops_per_sec'
+instantaneous_ops_per_sec:14580
+
+# mset runs at about O(20'000) operations per second
+$ redis-cli info | grep 'instantaneous_ops_per_sec'
+instantaneous_ops_per_sec:20792
+
+These numbers are pretty reasonable:
+multi: 100'000 * 12 ops / 18s = 66'666 ops/s
+mget : 100'000 * 1 ops / 7s = 14'285 ops/s
+mset : 100'000 * 1 ops / 5s = 20'000 ops/s
+
+
+
+Bonus: Running three benchmarks in parallel on different keys.
+multi get: O(125'000) ops/s and 80% CPU load of redis-server
+multi set: O(130'000) ops/s and 90% CPU load of redis-server
+mget : O( 30'000) ops/s and 70% CPU load of redis-server
+mset : O( 40'000) ops/s and 90% CPU load of redis-server
+=end
diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt
new file mode 100644
index 0000000000..339a3c847f
--- /dev/null
+++ b/services/document-updater/buildscript.txt
@@ -0,0 +1,8 @@
+document-updater
+--dependencies=mongo,redis
+--docker-repos=gcr.io/overleaf-ops
+--env-add=
+--env-pass-through=
+--node-version=12.22.3
+--public-repo=True
+--script-version=3.11.0
diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js
new file mode 100755
index 0000000000..d98eb9e265
--- /dev/null
+++ b/services/document-updater/config/settings.defaults.js
@@ -0,0 +1,195 @@
+module.exports = { // settings for the document-updater service
+ internal: {
+ documentupdater: {
+ host: process.env.LISTEN_ADDRESS || 'localhost', // address the HTTP server binds to
+ port: 3003, // port the HTTP server listens on
+ },
+ },
+
+ apis: { // upstream services this service talks to
+ web: {
+ url: `http://${
+ process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
+ }:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
+ user: process.env.WEB_API_USER || 'sharelatex', // HTTP basic-auth user for the web API
+ pass: process.env.WEB_API_PASSWORD || 'password', // NOTE(review): default credentials — override in production
+ },
+ trackchanges: {
+ url: `http://${process.env.TRACK_CHANGES_HOST || 'localhost'}:3015`,
+ },
+ project_history: {
+ enabled: true,
+ url: `http://${process.env.PROJECT_HISTORY_HOST || 'localhost'}:3054`,
+ },
+ },
+
+ redis: { // separate logical redis connections; may all point at the same server
+ pubsub: {
+ host:
+ process.env.PUBSUB_REDIS_HOST || process.env.REDIS_HOST || 'localhost',
+ port: process.env.PUBSUB_REDIS_PORT || process.env.REDIS_PORT || '6379',
+ password:
+ process.env.PUBSUB_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
+ maxRetriesPerRequest: parseInt( // ioredis per-command retry limit
+ process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+ ),
+ },
+
+ history: {
+ port: process.env.HISTORY_REDIS_PORT || process.env.REDIS_PORT || '6379',
+ host:
+ process.env.HISTORY_REDIS_HOST || process.env.REDIS_HOST || 'localhost',
+ password:
+ process.env.HISTORY_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
+ maxRetriesPerRequest: parseInt(
+ process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+ ),
+ key_schema: { // key-name builders; {…} in key names is a redis-cluster hash tag
+ uncompressedHistoryOps({ doc_id: docId }) {
+ return `UncompressedHistoryOps:{${docId}}`
+ },
+ docsWithHistoryOps({ project_id: projectId }) {
+ return `DocsWithHistoryOps:{${projectId}}`
+ },
+ },
+ },
+
+ project_history: {
+ port:
+ process.env.NEW_HISTORY_REDIS_PORT || process.env.REDIS_PORT || '6379',
+ host:
+ process.env.NEW_HISTORY_REDIS_HOST ||
+ process.env.REDIS_HOST ||
+ 'localhost',
+ password:
+ process.env.NEW_HISTORY_REDIS_PASSWORD ||
+ process.env.REDIS_PASSWORD ||
+ '',
+ maxRetriesPerRequest: parseInt(
+ process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+ ),
+ key_schema: {
+ projectHistoryOps({ project_id: projectId }) {
+ return `ProjectHistory:Ops:{${projectId}}`
+ },
+ projectHistoryFirstOpTimestamp({ project_id: projectId }) {
+ return `ProjectHistory:FirstOpTimestamp:{${projectId}}`
+ },
+ },
+ },
+
+ lock: {
+ port: process.env.LOCK_REDIS_PORT || process.env.REDIS_PORT || '6379',
+ host:
+ process.env.LOCK_REDIS_HOST || process.env.REDIS_HOST || 'localhost',
+ password:
+ process.env.LOCK_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
+ maxRetriesPerRequest: parseInt(
+ process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+ ),
+ key_schema: {
+ blockingKey({ doc_id: docId }) { // NOTE(review): duplicated in documentupdater.key_schema below — confirm both are used
+ return `Blocking:{${docId}}`
+ },
+ },
+ },
+
+ documentupdater: {
+ port:
+ process.env.DOC_UPDATER_REDIS_PORT || process.env.REDIS_PORT || '6379',
+ host:
+ process.env.DOC_UPDATER_REDIS_HOST ||
+ process.env.REDIS_HOST ||
+ 'localhost',
+ password:
+ process.env.DOC_UPDATER_REDIS_PASSWORD ||
+ process.env.REDIS_PASSWORD ||
+ '',
+ maxRetriesPerRequest: parseInt(
+ process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20'
+ ),
+ key_schema: { // one key builder per piece of per-doc / per-project state
+ blockingKey({ doc_id: docId }) {
+ return `Blocking:{${docId}}`
+ },
+ docLines({ doc_id: docId }) {
+ return `doclines:{${docId}}`
+ },
+ docOps({ doc_id: docId }) {
+ return `DocOps:{${docId}}`
+ },
+ docVersion({ doc_id: docId }) {
+ return `DocVersion:{${docId}}`
+ },
+ docHash({ doc_id: docId }) {
+ return `DocHash:{${docId}}`
+ },
+ projectKey({ doc_id: docId }) {
+ return `ProjectId:{${docId}}`
+ },
+ docsInProject({ project_id: projectId }) {
+ return `DocsIn:{${projectId}}`
+ },
+ ranges({ doc_id: docId }) {
+ return `Ranges:{${docId}}`
+ },
+ unflushedTime({ doc_id: docId }) {
+ return `UnflushedTime:{${docId}}`
+ },
+ pathname({ doc_id: docId }) {
+ return `Pathname:{${docId}}`
+ },
+ projectHistoryId({ doc_id: docId }) {
+ return `ProjectHistoryId:{${docId}}`
+ },
+ projectHistoryType({ doc_id: docId }) {
+ return `ProjectHistoryType:{${docId}}`
+ },
+ projectState({ project_id: projectId }) {
+ return `ProjectState:{${projectId}}`
+ },
+ pendingUpdates({ doc_id: docId }) {
+ return `PendingUpdates:{${docId}}`
+ },
+ lastUpdatedBy({ doc_id: docId }) {
+ return `lastUpdatedBy:{${docId}}`
+ },
+ lastUpdatedAt({ doc_id: docId }) {
+ return `lastUpdatedAt:{${docId}}`
+ },
+ flushAndDeleteQueue() { // single shared queue key — not per-doc/per-project
+ return 'DocUpdaterFlushAndDeleteQueue'
+ },
+ },
+ },
+ },
+
+ max_doc_length: 2 * 1024 * 1024, // 2mb
+ maxJsonRequestSize:
+ parseInt(process.env.MAX_JSON_REQUEST_SIZE, 10) || 8 * 1024 * 1024,
+
+ dispatcherCount: parseInt(process.env.DISPATCHER_COUNT || 10, 10), // presumably the number of update-dispatch workers — confirm in app code
+
+ mongo: {
+ options: {
+ useUnifiedTopology:
+ (process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true',
+ },
+ url:
+ process.env.MONGO_CONNECTION_STRING ||
+ `mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`,
+ },
+
+ sentry: {
+ dsn: process.env.SENTRY_DSN,
+ },
+
+ publishOnIndividualChannels:
+ process.env.PUBLISH_ON_INDIVIDUAL_CHANNELS || false, // NOTE(review): any non-empty env value, including 'false', is truthy
+
+ continuousBackgroundFlush: process.env.CONTINUOUS_BACKGROUND_FLUSH || false, // NOTE(review): same string-truthiness caveat as publishOnIndividualChannels
+
+ smoothingOffset: process.env.SMOOTHING_OFFSET || 1000, // milliseconds
+
+ disableDoubleFlush: process.env.DISABLE_DOUBLE_FLUSH || false, // don't flush track-changes for projects using project-history
+}
diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml
new file mode 100644
index 0000000000..1a25b3bf15
--- /dev/null
+++ b/services/document-updater/docker-compose.ci.yml
@@ -0,0 +1,57 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/sharelatex/sharelatex-dev-environment
+
+version: "2.3"
+
+services:
+ test_unit:
+ image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+ user: node
+ command: npm run test:unit:_run
+ environment:
+ NODE_ENV: test
+ NODE_OPTIONS: "--unhandled-rejections=strict"
+
+
+ test_acceptance:
+ build: .
+ image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+ environment:
+ ELASTIC_SEARCH_DSN: es:9200
+ REDIS_HOST: redis
+ QUEUES_REDIS_HOST: redis
+ MONGO_HOST: mongo
+ POSTGRES_HOST: postgres
+ MOCHA_GREP: ${MOCHA_GREP}
+ NODE_ENV: test
+ NODE_OPTIONS: "--unhandled-rejections=strict"
+ depends_on:
+ mongo:
+ condition: service_healthy
+ redis:
+ condition: service_healthy
+ user: node
+ command: npm run test:acceptance:_run
+
+
+ tar:
+ build: .
+ image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+ volumes:
+ - ./:/tmp/build/
+ command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
+ user: root
+ redis:
+ image: redis
+ healthcheck:
+ test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ]
+ interval: 1s
+ retries: 20
+
+ mongo:
+ image: mongo:4.0
+ healthcheck:
+ test: "mongo --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'"
+ interval: 1s
+ retries: 20
diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml
new file mode 100644
index 0000000000..e3c720c9cd
--- /dev/null
+++ b/services/document-updater/docker-compose.yml
@@ -0,0 +1,56 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/sharelatex/sharelatex-dev-environment
+
+version: "2.3"
+
+services:
+ test_unit:
+ image: node:12.22.3
+ volumes:
+ - .:/app
+ working_dir: /app
+ environment:
+ MOCHA_GREP: ${MOCHA_GREP}
+ NODE_ENV: test
+ NODE_OPTIONS: "--unhandled-rejections=strict"
+ command: npm run --silent test:unit
+ user: node
+
+ test_acceptance:
+ image: node:12.22.3
+ volumes:
+ - .:/app
+ working_dir: /app
+ environment:
+ ELASTIC_SEARCH_DSN: es:9200
+ REDIS_HOST: redis
+ QUEUES_REDIS_HOST: redis
+ MONGO_HOST: mongo
+ POSTGRES_HOST: postgres
+ MOCHA_GREP: ${MOCHA_GREP}
+ LOG_LEVEL: ERROR
+ NODE_ENV: test
+ NODE_OPTIONS: "--unhandled-rejections=strict"
+ user: node
+ depends_on:
+ mongo:
+ condition: service_healthy
+ redis:
+ condition: service_healthy
+ command: npm run --silent test:acceptance
+
+ redis:
+ image: redis
+ healthcheck:
+ test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ]
+ interval: 1s
+ retries: 20
+
+ mongo:
+ image: mongo:4.0
+ healthcheck:
+ test: "mongo --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'"
+ interval: 1s
+ retries: 20
+
diff --git a/services/document-updater/expire_docops.js b/services/document-updater/expire_docops.js
new file mode 100644
index 0000000000..c3b2b80706
--- /dev/null
+++ b/services/document-updater/expire_docops.js
@@ -0,0 +1,65 @@
+const Settings = require('@overleaf/settings')
+const rclient = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.documentupdater
+)
+let keys = Settings.redis.documentupdater.key_schema
+const async = require('async')
+const RedisManager = require('./app/js/RedisManager')
+
+const getKeysFromNode = function (node, pattern, callback) {
+ let cursor = 0 // redis iterator
+ const keySet = {} // use hash to avoid duplicate results
+ // scan over all keys looking for pattern
+ const doIteration = () =>
+ node.scan(cursor, 'MATCH', pattern, 'COUNT', 1000, function (error, reply) {
+ if (error) {
+ return callback(error)
+ }
+ ;[cursor, keys] = reply
+ console.log('SCAN', keys.length)
+ for (const key of keys) {
+ keySet[key] = true
+ }
+ if (cursor === '0') {
+ // note redis returns string result not numeric
+ return callback(null, Object.keys(keySet))
+ } else {
+ return doIteration()
+ }
+ })
+ return doIteration()
+}
+
+const getKeys = function (pattern, callback) {
+ const nodes = (typeof rclient.nodes === 'function'
+ ? rclient.nodes('master')
+ : undefined) || [rclient]
+ console.log('GOT NODES', nodes.length)
+ const doKeyLookupForNode = (node, cb) => getKeysFromNode(node, pattern, cb)
+ return async.concatSeries(nodes, doKeyLookupForNode, callback)
+}
+
+const expireDocOps = callback =>
+ // eslint-disable-next-line handle-callback-err
+ getKeys(keys.docOps({ doc_id: '*' }), (error, keys) =>
+ async.mapSeries(
+ keys,
+ function (key, cb) {
+ console.log(`EXPIRE ${key} ${RedisManager.DOC_OPS_TTL}`)
+ return rclient.expire(key, RedisManager.DOC_OPS_TTL, cb)
+ },
+ callback
+ )
+ )
+
+setTimeout(
+ () =>
+ // Give redis a chance to connect
+ expireDocOps(function (error) {
+ if (error) {
+ throw error
+ }
+ return process.exit()
+ }),
+ 1000
+)
diff --git a/services/document-updater/nodemon.json b/services/document-updater/nodemon.json
new file mode 100644
index 0000000000..e3e8817d90
--- /dev/null
+++ b/services/document-updater/nodemon.json
@@ -0,0 +1,17 @@
+{
+ "ignore": [
+ ".git",
+ "node_modules/"
+ ],
+ "verbose": true,
+ "legacyWatch": true,
+ "execMap": {
+ "js": "npm run start"
+ },
+ "watch": [
+ "app/js/",
+ "app.js",
+ "config/"
+ ],
+ "ext": "js"
+}
diff --git a/services/document-updater/package-lock.json b/services/document-updater/package-lock.json
new file mode 100644
index 0000000000..da92540020
--- /dev/null
+++ b/services/document-updater/package-lock.json
@@ -0,0 +1,5043 @@
+{
+ "name": "document-updater-sharelatex",
+ "version": "0.1.4",
+ "lockfileVersion": 1,
+ "requires": true,
+ "dependencies": {
+ "@babel/code-frame": {
+ "version": "7.12.11",
+ "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz",
+ "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==",
+ "dev": true,
+ "requires": {
+ "@babel/highlight": "^7.10.4"
+ }
+ },
+ "@babel/helper-validator-identifier": {
+ "version": "7.14.5",
+ "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.5.tgz",
+ "integrity": "sha512-5lsetuxCLilmVGyiLEfoHBRX8UCFD+1m2x3Rj97WrW3V7H3u4RWRXA4evMjImCsin2J2YT0QaVDGf+z8ondbAg==",
+ "dev": true
+ },
+ "@babel/highlight": {
+ "version": "7.14.5",
+ "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz",
+ "integrity": "sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==",
+ "dev": true,
+ "requires": {
+ "@babel/helper-validator-identifier": "^7.14.5",
+ "chalk": "^2.0.0",
+ "js-tokens": "^4.0.0"
+ }
+ },
+ "@eslint/eslintrc": {
+ "version": "0.4.2",
+ "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.2.tgz",
+ "integrity": "sha512-8nmGq/4ycLpIwzvhI4tNDmQztZ8sp+hI7cyG8i1nQDhkAbRzHpXPidRAHlNvCZQpJTKw5ItIpMw9RSToGF00mg==",
+ "dev": true,
+ "requires": {
+ "ajv": "^6.12.4",
+ "debug": "^4.1.1",
+ "espree": "^7.3.0",
+ "globals": "^13.9.0",
+ "ignore": "^4.0.6",
+ "import-fresh": "^3.2.1",
+ "js-yaml": "^3.13.1",
+ "minimatch": "^3.0.4",
+ "strip-json-comments": "^3.1.1"
+ },
+ "dependencies": {
+ "ajv": {
+ "version": "6.12.6",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
+ "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
+ "dev": true,
+ "requires": {
+ "fast-deep-equal": "^3.1.1",
+ "fast-json-stable-stringify": "^2.0.0",
+ "json-schema-traverse": "^0.4.1",
+ "uri-js": "^4.2.2"
+ }
+ },
+ "debug": {
+ "version": "4.3.2",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz",
+ "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==",
+ "dev": true,
+ "requires": {
+ "ms": "2.1.2"
+ }
+ },
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
+ "dev": true
+ }
+ }
+ },
+ "@google-cloud/common": {
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.6.0.tgz",
+ "integrity": "sha512-aHIFTqJZmeTNO9md8XxV+ywuvXF3xBm5WNmgWeeCK+XN5X+kGW0WEX94wGwj+/MdOnrVf4dL2RvSIt9J5yJG6Q==",
+ "requires": {
+ "@google-cloud/projectify": "^2.0.0",
+ "@google-cloud/promisify": "^2.0.0",
+ "arrify": "^2.0.1",
+ "duplexify": "^4.1.1",
+ "ent": "^2.2.0",
+ "extend": "^3.0.2",
+ "google-auth-library": "^7.0.2",
+ "retry-request": "^4.1.1",
+ "teeny-request": "^7.0.0"
+ }
+ },
+ "@google-cloud/debug-agent": {
+ "version": "5.1.3",
+ "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-5.1.3.tgz",
+ "integrity": "sha512-WbzeEz4MvPlM7DX2QBsPcWgF62u7LSQv/oMYPl0L+TddTebqjDKiVXwxpzWk61NIfcKiet3dyCbPIt3N5o8XPQ==",
+ "requires": {
+ "@google-cloud/common": "^3.0.0",
+ "acorn": "^8.0.0",
+ "coffeescript": "^2.0.0",
+ "console-log-level": "^1.4.0",
+ "extend": "^3.0.2",
+ "findit2": "^2.2.3",
+ "gcp-metadata": "^4.0.0",
+ "p-limit": "^3.0.1",
+ "semver": "^7.0.0",
+ "source-map": "^0.6.1",
+ "split": "^1.0.0"
+ },
+ "dependencies": {
+ "semver": {
+ "version": "7.3.5",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
+ "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
+ "requires": {
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "uuid": {
+ "version": "8.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+ "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
+ }
+ }
+ },
+ "@google-cloud/logging": {
+ "version": "9.2.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-9.2.0.tgz",
+ "integrity": "sha512-eQRDKPq9Pq0pbDeo2/OaVrPRX+TDqaxZ7JagDAJx20dqxVwBtaA1rBUcCEXCAFrMZ2cUYhj3sDVuzqNwSObF2Q==",
+ "requires": {
+ "@google-cloud/common": "^3.4.1",
+ "@google-cloud/paginator": "^3.0.0",
+ "@google-cloud/projectify": "^2.0.0",
+ "@google-cloud/promisify": "^2.0.0",
+ "@opencensus/propagation-stackdriver": "0.0.22",
+ "arrify": "^2.0.1",
+ "dot-prop": "^6.0.0",
+ "eventid": "^1.0.0",
+ "extend": "^3.0.2",
+ "gcp-metadata": "^4.0.0",
+ "google-auth-library": "^7.0.0",
+ "google-gax": "^2.9.2",
+ "on-finished": "^2.3.0",
+ "pumpify": "^2.0.1",
+ "snakecase-keys": "^3.1.2",
+ "stream-events": "^1.0.5",
+ "through2": "^4.0.0"
+ },
+ "dependencies": {
+ "through2": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz",
+ "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==",
+ "requires": {
+ "readable-stream": "3"
+ }
+ }
+ }
+ },
+ "@google-cloud/logging-bunyan": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-3.0.2.tgz",
+ "integrity": "sha512-7BmXGZLYsnDs5UT9qvb0/rA0i2BbD3AyKqwXl/hP0pDGboCg0GE8viVmwzmY8f/cUzRZHAxOgV0bTENeTd6KEA==",
+ "requires": {
+ "@google-cloud/logging": "^9.0.0",
+ "google-auth-library": "^7.0.0"
+ }
+ },
+ "@google-cloud/paginator": {
+ "version": "3.0.5",
+ "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.5.tgz",
+ "integrity": "sha512-N4Uk4BT1YuskfRhKXBs0n9Lg2YTROZc6IMpkO/8DIHODtm5s3xY8K5vVBo23v/2XulY3azwITQlYWgT4GdLsUw==",
+ "requires": {
+ "arrify": "^2.0.0",
+ "extend": "^3.0.2"
+ }
+ },
+ "@google-cloud/profiler": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-4.1.1.tgz",
+ "integrity": "sha512-qk08aDxTaLnu+NoNEh5Jh+Fs5iR8lRLMr5Mb3YJDoZw72jHJI4f5N5F2JWt1xRc9D6da4gA6stBUJrbfbubvGQ==",
+ "requires": {
+ "@google-cloud/common": "^3.0.0",
+ "@types/console-log-level": "^1.4.0",
+ "@types/semver": "^7.0.0",
+ "console-log-level": "^1.4.0",
+ "delay": "^5.0.0",
+ "extend": "^3.0.2",
+ "gcp-metadata": "^4.0.0",
+ "parse-duration": "^1.0.0",
+ "pprof": "3.0.0",
+ "pretty-ms": "^7.0.0",
+ "protobufjs": "~6.10.0",
+ "semver": "^7.0.0",
+ "teeny-request": "^7.0.0"
+ },
+ "dependencies": {
+ "@types/node": {
+ "version": "13.13.48",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz",
+ "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ=="
+ },
+ "semver": {
+ "version": "7.3.5",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
+ "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
+ "requires": {
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "uuid": {
+ "version": "8.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+ "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
+ }
+ }
+ },
+ "@google-cloud/projectify": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz",
+ "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ=="
+ },
+ "@google-cloud/promisify": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz",
+ "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw=="
+ },
+ "@google-cloud/trace-agent": {
+ "version": "5.1.3",
+ "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-5.1.3.tgz",
+ "integrity": "sha512-f+5DX7n6QpDlHA+4kr81z69SLAdrlvd9T8skqCMgnYvtXx14AwzXZyzEDf3jppOYzYoqPPJv8XYiyYHHmYD0BA==",
+ "requires": {
+ "@google-cloud/common": "^3.0.0",
+ "@opencensus/propagation-stackdriver": "0.0.22",
+ "builtin-modules": "^3.0.0",
+ "console-log-level": "^1.4.0",
+ "continuation-local-storage": "^3.2.1",
+ "extend": "^3.0.2",
+ "gcp-metadata": "^4.0.0",
+ "google-auth-library": "^7.0.0",
+ "hex2dec": "^1.0.1",
+ "is": "^3.2.0",
+ "methods": "^1.1.1",
+ "require-in-the-middle": "^5.0.0",
+ "semver": "^7.0.0",
+ "shimmer": "^1.2.0",
+ "source-map-support": "^0.5.16",
+ "uuid": "^8.0.0"
+ },
+ "dependencies": {
+ "semver": {
+ "version": "7.3.5",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
+ "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
+ "requires": {
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "uuid": {
+ "version": "8.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+ "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
+ }
+ }
+ },
+ "@grpc/grpc-js": {
+ "version": "1.2.12",
+ "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.2.12.tgz",
+ "integrity": "sha512-+gPCklP1eqIgrNPyzddYQdt9+GvZqPlLpIjIo+TveE+gbtp74VV1A2ju8ExeO8ma8f7MbpaGZx/KJPYVWL9eDw==",
+ "requires": {
+ "@types/node": ">=12.12.47",
+ "google-auth-library": "^6.1.1",
+ "semver": "^6.2.0"
+ },
+ "dependencies": {
+ "google-auth-library": {
+ "version": "6.1.6",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.6.tgz",
+ "integrity": "sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ==",
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^4.0.0",
+ "gcp-metadata": "^4.2.0",
+ "gtoken": "^5.0.4",
+ "jws": "^4.0.0",
+ "lru-cache": "^6.0.0"
+ }
+ }
+ }
+ },
+ "@grpc/proto-loader": {
+ "version": "0.5.6",
+ "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.6.tgz",
+ "integrity": "sha512-DT14xgw3PSzPxwS13auTEwxhMMOoz33DPUKNtmYK/QYbBSpLXJy78FGGs5yVoxVobEqPm4iW9MOIoz0A3bLTRQ==",
+ "requires": {
+ "lodash.camelcase": "^4.3.0",
+ "protobufjs": "^6.8.6"
+ }
+ },
+ "@humanwhocodes/config-array": {
+ "version": "0.5.0",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz",
+ "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==",
+ "dev": true,
+ "requires": {
+ "@humanwhocodes/object-schema": "^1.2.0",
+ "debug": "^4.1.1",
+ "minimatch": "^3.0.4"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "4.3.2",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz",
+ "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==",
+ "dev": true,
+ "requires": {
+ "ms": "2.1.2"
+ }
+ },
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
+ "dev": true
+ }
+ }
+ },
+ "@humanwhocodes/object-schema": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz",
+ "integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==",
+ "dev": true
+ },
+ "@opencensus/core": {
+ "version": "0.0.22",
+ "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz",
+ "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==",
+ "requires": {
+ "continuation-local-storage": "^3.2.1",
+ "log-driver": "^1.2.7",
+ "semver": "^7.0.0",
+ "shimmer": "^1.2.0",
+ "uuid": "^8.0.0"
+ },
+ "dependencies": {
+ "semver": {
+ "version": "7.3.5",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
+ "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
+ "requires": {
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "uuid": {
+ "version": "8.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+ "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
+ }
+ }
+ },
+ "@opencensus/propagation-stackdriver": {
+ "version": "0.0.22",
+ "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz",
+ "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==",
+ "requires": {
+ "@opencensus/core": "^0.0.22",
+ "hex2dec": "^1.0.1",
+ "uuid": "^8.0.0"
+ },
+ "dependencies": {
+ "uuid": {
+ "version": "8.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+ "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
+ }
+ }
+ },
+ "@overleaf/metrics": {
+ "version": "3.5.1",
+ "resolved": "https://registry.npmjs.org/@overleaf/metrics/-/metrics-3.5.1.tgz",
+ "integrity": "sha512-RLHxkMF7Y3725L3QwXo9cIn2gGobsMYUGuxKxg7PVMrPTMsomHEMeG7StOxCO7ML1Z/BwB/9nsVYNrsRdAJtKg==",
+ "requires": {
+ "@google-cloud/debug-agent": "^5.1.2",
+ "@google-cloud/profiler": "^4.0.3",
+ "@google-cloud/trace-agent": "^5.1.1",
+ "compression": "^1.7.4",
+ "prom-client": "^11.1.3",
+ "underscore": "~1.6.0",
+ "yn": "^3.1.1"
+ }
+ },
+ "@overleaf/o-error": {
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-3.3.1.tgz",
+ "integrity": "sha512-1FRBYZO0lbJ0U+FRGZVS8ou6RhEw3e2B86WW/NbtBw554g0h5iC8ESf+juIfPMU/WDf/JDIFbg3eB/LnP2RSow==",
+ "requires": {
+ "core-js": "^3.8.3"
+ }
+ },
+ "@overleaf/redis-wrapper": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/@overleaf/redis-wrapper/-/redis-wrapper-2.0.1.tgz",
+ "integrity": "sha512-1TwCbEKJFz2yUhmwy2hQzy04NBhnseT371X2AU2szkNJ8Ip1C1HwJt1UAK/7Nh+hY7kFfH7Qpk+bZUF9f/rUMQ==",
+ "requires": {
+ "ioredis": "~4.27.1"
+ }
+ },
+ "@overleaf/settings": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/@overleaf/settings/-/settings-2.1.1.tgz",
+ "integrity": "sha512-vcJwqCGFKmQxTP/syUqCeMaSRjHmBcQgKOACR9He2uJcErg2GZPa1go+nGvszMbkElM4HfRKm/MfxvqHhoN4TQ=="
+ },
+ "@protobufjs/aspromise": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
+ "integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78="
+ },
+ "@protobufjs/base64": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz",
+ "integrity": "sha1-TIVzDlm5ofHzSQR9vyQpYDS7JzU="
+ },
+ "@protobufjs/codegen": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz",
+ "integrity": "sha1-fvN/DQEPsCitGtWXIuUG2SYoFcs="
+ },
+ "@protobufjs/eventemitter": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz",
+ "integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A="
+ },
+ "@protobufjs/fetch": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz",
+ "integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=",
+ "requires": {
+ "@protobufjs/aspromise": "^1.1.1",
+ "@protobufjs/inquire": "^1.1.0"
+ }
+ },
+ "@protobufjs/float": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz",
+ "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E="
+ },
+ "@protobufjs/inquire": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz",
+ "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik="
+ },
+ "@protobufjs/path": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz",
+ "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0="
+ },
+ "@protobufjs/pool": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz",
+ "integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q="
+ },
+ "@protobufjs/utf8": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
+ "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA="
+ },
+ "@sinonjs/commons": {
+ "version": "1.7.2",
+ "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.7.2.tgz",
+ "integrity": "sha512-+DUO6pnp3udV/v2VfUWgaY5BIE1IfT7lLfeDzPVeMT1XKkaAp9LgSI9x5RtrFQoZ9Oi0PgXQQHPaoKu7dCjVxw==",
+ "dev": true,
+ "requires": {
+ "type-detect": "4.0.8"
+ }
+ },
+ "@sinonjs/fake-timers": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz",
+ "integrity": "sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA==",
+ "dev": true,
+ "requires": {
+ "@sinonjs/commons": "^1.7.0"
+ }
+ },
+ "@sinonjs/formatio": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-5.0.1.tgz",
+ "integrity": "sha512-KaiQ5pBf1MpS09MuA0kp6KBQt2JUOQycqVG1NZXvzeaXe5LGFqAKueIS0bw4w0P9r7KuBSVdUk5QjXsUdu2CxQ==",
+ "dev": true,
+ "requires": {
+ "@sinonjs/commons": "^1",
+ "@sinonjs/samsam": "^5.0.2"
+ }
+ },
+ "@sinonjs/samsam": {
+ "version": "5.0.3",
+ "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-5.0.3.tgz",
+ "integrity": "sha512-QucHkc2uMJ0pFGjJUDP3F9dq5dx8QIaqISl9QgwLOh6P9yv877uONPGXh/OH/0zmM3tW1JjuJltAZV2l7zU+uQ==",
+ "dev": true,
+ "requires": {
+ "@sinonjs/commons": "^1.6.0",
+ "lodash.get": "^4.4.2",
+ "type-detect": "^4.0.8"
+ }
+ },
+ "@sinonjs/text-encoding": {
+ "version": "0.7.1",
+ "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz",
+ "integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==",
+ "dev": true
+ },
+ "@tootallnate/once": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz",
+ "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw=="
+ },
+ "@types/console-log-level": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz",
+ "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ=="
+ },
+ "@types/long": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz",
+ "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w=="
+ },
+ "@types/node": {
+ "version": "14.14.37",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.37.tgz",
+ "integrity": "sha512-XYmBiy+ohOR4Lh5jE379fV2IU+6Jn4g5qASinhitfyO71b/sCo6MKsMLF5tc7Zf2CE8hViVQyYSobJNke8OvUw=="
+ },
+ "@types/semver": {
+ "version": "7.3.4",
+ "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz",
+ "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ=="
+ },
+ "@ungap/promise-all-settled": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz",
+ "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==",
+ "dev": true
+ },
+ "abbrev": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
+ "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q=="
+ },
+ "abort-controller": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
+ "integrity": "sha1-6vVNU7YrrkE46AnKIlyEOabvs5I=",
+ "requires": {
+ "event-target-shim": "^5.0.0"
+ }
+ },
+ "accepts": {
+ "version": "1.3.7",
+ "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz",
+ "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==",
+ "requires": {
+ "mime-types": "~2.1.24",
+ "negotiator": "0.6.2"
+ }
+ },
+ "acorn": {
+ "version": "8.1.0",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.1.0.tgz",
+ "integrity": "sha512-LWCF/Wn0nfHOmJ9rzQApGnxnvgfROzGilS8936rqN/lfcYkY9MYZzdMqN+2NJ4SlTc+m5HiSa+kNfDtI64dwUA=="
+ },
+ "acorn-jsx": {
+ "version": "5.3.2",
+ "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
+ "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
+ "dev": true
+ },
+ "agent-base": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.1.tgz",
+ "integrity": "sha512-01q25QQDwLSsyfhrKbn8yuur+JNw0H+0Y4JiGIKd3z9aYk/w/2kxD/Upc+t2ZBBSUNff50VjPsSW2YxM8QYKVg==",
+ "requires": {
+ "debug": "4"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz",
+ "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==",
+ "requires": {
+ "ms": "2.1.2"
+ }
+ },
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
+ }
+ }
+ },
+ "ajv": {
+ "version": "6.11.0",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.11.0.tgz",
+ "integrity": "sha512-nCprB/0syFYy9fVYU1ox1l2KN8S9I+tziH8D4zdZuLT3N6RMlGSGt5FSTpAiHB/Whv8Qs1cWHma1aMKZyaHRKA==",
+ "requires": {
+ "fast-deep-equal": "^3.1.1",
+ "fast-json-stable-stringify": "^2.0.0",
+ "json-schema-traverse": "^0.4.1",
+ "uri-js": "^4.2.2"
+ }
+ },
+ "ansi-colors": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz",
+ "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==",
+ "dev": true
+ },
+ "ansi-regex": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz",
+ "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==",
+ "dev": true
+ },
+ "ansi-styles": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
+ "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
+ "dev": true,
+ "requires": {
+ "color-convert": "^1.9.0"
+ }
+ },
+ "anymatch": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz",
+ "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==",
+ "dev": true,
+ "requires": {
+ "normalize-path": "^3.0.0",
+ "picomatch": "^2.0.4"
+ }
+ },
+ "aproba": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz",
+ "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw=="
+ },
+ "are-we-there-yet": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz",
+ "integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==",
+ "requires": {
+ "delegates": "^1.0.0",
+ "readable-stream": "^2.0.6"
+ },
+ "dependencies": {
+ "readable-stream": {
+ "version": "2.3.7",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
+ "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
+ "requires": {
+ "core-util-is": "~1.0.0",
+ "inherits": "~2.0.3",
+ "isarray": "~1.0.0",
+ "process-nextick-args": "~2.0.0",
+ "safe-buffer": "~5.1.1",
+ "string_decoder": "~1.1.1",
+ "util-deprecate": "~1.0.1"
+ }
+ }
+ }
+ },
+ "argparse": {
+ "version": "1.0.10",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
+ "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
+ "dev": true,
+ "requires": {
+ "sprintf-js": "~1.0.2"
+ }
+ },
+ "array-flatten": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
+ "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI="
+ },
+ "array-includes": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.3.tgz",
+ "integrity": "sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2",
+ "define-properties": "^1.1.3",
+ "es-abstract": "^1.18.0-next.2",
+ "get-intrinsic": "^1.1.1",
+ "is-string": "^1.0.5"
+ }
+ },
+ "array.prototype.flat": {
+ "version": "1.2.4",
+ "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz",
+ "integrity": "sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.0",
+ "define-properties": "^1.1.3",
+ "es-abstract": "^1.18.0-next.1"
+ },
+ "dependencies": {
+ "has-symbols": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz",
+ "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw=="
+ },
+ "is-callable": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz",
+ "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ=="
+ },
+ "is-string": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.6.tgz",
+ "integrity": "sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w=="
+ }
+ }
+ },
+ "arrify": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
+ "integrity": "sha1-yWVekzHgq81YjSp8rX6ZVvZnAfo="
+ },
+ "asn1": {
+ "version": "0.2.4",
+ "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz",
+ "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==",
+ "requires": {
+ "safer-buffer": "~2.1.0"
+ }
+ },
+ "assert-plus": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
+ "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw=="
+ },
+ "assertion-error": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz",
+ "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==",
+ "dev": true
+ },
+ "astral-regex": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz",
+ "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==",
+ "dev": true
+ },
+ "async": {
+ "version": "2.6.3",
+ "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz",
+ "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==",
+ "requires": {
+ "lodash": "^4.17.14"
+ }
+ },
+ "async-listener": {
+ "version": "0.6.10",
+ "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz",
+ "integrity": "sha1-p8l6vlcLpgLXgic8DeYKUePhfLw=",
+ "requires": {
+ "semver": "^5.3.0",
+ "shimmer": "^1.1.0"
+ },
+ "dependencies": {
+ "semver": {
+ "version": "5.7.1",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
+ "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ=="
+ }
+ }
+ },
+ "asynckit": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
+ "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
+ },
+ "aws-sign2": {
+ "version": "0.7.0",
+ "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
+ "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg="
+ },
+ "aws4": {
+ "version": "1.9.1",
+ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.9.1.tgz",
+ "integrity": "sha512-wMHVg2EOHaMRxbzgFJ9gtjOOCrI80OHLG14rxi28XwOW8ux6IiEbRCGGGqCtdAIg4FQCbW20k9RsT4y3gJlFug=="
+ },
+ "balanced-match": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
+ "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
+ },
+ "base64-js": {
+ "version": "1.3.1",
+ "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz",
+ "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g=="
+ },
+ "bcrypt-pbkdf": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
+ "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==",
+ "requires": {
+ "tweetnacl": "^0.14.3"
+ }
+ },
+ "bignumber.js": {
+ "version": "9.0.1",
+ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz",
+ "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA=="
+ },
+ "binary-extensions": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
+ "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
+ "dev": true
+ },
+ "bindings": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz",
+ "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==",
+ "requires": {
+ "file-uri-to-path": "1.0.0"
+ }
+ },
+ "bintrees": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz",
+ "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ="
+ },
+ "bl": {
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz",
+ "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==",
+ "requires": {
+ "readable-stream": "^2.3.5",
+ "safe-buffer": "^5.1.1"
+ },
+ "dependencies": {
+ "readable-stream": {
+ "version": "2.3.7",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
+ "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
+ "requires": {
+ "core-util-is": "~1.0.0",
+ "inherits": "~2.0.3",
+ "isarray": "~1.0.0",
+ "process-nextick-args": "~2.0.0",
+ "safe-buffer": "~5.1.1",
+ "string_decoder": "~1.1.1",
+ "util-deprecate": "~1.0.1"
+ }
+ }
+ }
+ },
+ "body-parser": {
+ "version": "1.19.0",
+ "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz",
+ "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==",
+ "requires": {
+ "bytes": "3.1.0",
+ "content-type": "~1.0.4",
+ "debug": "2.6.9",
+ "depd": "~1.1.2",
+ "http-errors": "1.7.2",
+ "iconv-lite": "0.4.24",
+ "on-finished": "~2.3.0",
+ "qs": "6.7.0",
+ "raw-body": "2.4.0",
+ "type-is": "~1.6.17"
+ }
+ },
+ "brace-expansion": {
+ "version": "1.1.11",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+ "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "requires": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "braces": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
+ "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+ "dev": true,
+ "requires": {
+ "fill-range": "^7.0.1"
+ }
+ },
+ "browser-stdout": {
+ "version": "1.3.1",
+ "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz",
+ "integrity": "sha1-uqVZ7hTO1zRSIputcyZGfGH6vWA=",
+ "dev": true
+ },
+ "bson": {
+ "version": "1.1.6",
+ "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.6.tgz",
+ "integrity": "sha512-EvVNVeGo4tHxwi8L6bPj3y3itEvStdwvvlojVxxbyYfoaxJ6keLgrTuKdyfEAszFK+H3olzBuafE0yoh0D1gdg=="
+ },
+ "buffer-equal-constant-time": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
+ "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk="
+ },
+ "buffer-from": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz",
+ "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A=="
+ },
+ "builtin-modules": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.2.0.tgz",
+ "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA=="
+ },
+ "bunyan": {
+ "version": "1.8.15",
+ "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.15.tgz",
+ "integrity": "sha512-0tECWShh6wUysgucJcBAoYegf3JJoZWibxdqhTm7OHPeT42qdjkZ29QCMcKwbgU1kiH+auSIasNRXMLWXafXig==",
+ "requires": {
+ "dtrace-provider": "~0.8",
+ "moment": "^2.19.3",
+ "mv": "~2",
+ "safe-json-stringify": "~1"
+ }
+ },
+ "bytes": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz",
+ "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg=="
+ },
+ "call-bind": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz",
+ "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==",
+ "dev": true,
+ "requires": {
+ "function-bind": "^1.1.1",
+ "get-intrinsic": "^1.0.2"
+ }
+ },
+ "callsites": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
+ "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
+ "dev": true
+ },
+ "camelcase": {
+ "version": "6.2.0",
+ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz",
+ "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==",
+ "dev": true
+ },
+ "caseless": {
+ "version": "0.12.0",
+ "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
+ "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw="
+ },
+ "chai": {
+ "version": "4.3.4",
+ "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.4.tgz",
+ "integrity": "sha512-yS5H68VYOCtN1cjfwumDSuzn/9c+yza4f3reKXlE5rUg7SFcCEy90gJvydNgOYtblyf4Zi6jIWRnXOgErta0KA==",
+ "dev": true,
+ "requires": {
+ "assertion-error": "^1.1.0",
+ "check-error": "^1.0.2",
+ "deep-eql": "^3.0.1",
+ "get-func-name": "^2.0.0",
+ "pathval": "^1.1.1",
+ "type-detect": "^4.0.5"
+ }
+ },
+ "chai-as-promised": {
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/chai-as-promised/-/chai-as-promised-7.1.1.tgz",
+ "integrity": "sha512-azL6xMoi+uxu6z4rhWQ1jbdUhOMhis2PvscD/xjLqNMkv3BPPp2JyyuTHOrf9BOosGpNQ11v6BKv/g57RXbiaA==",
+ "dev": true,
+ "requires": {
+ "check-error": "^1.0.2"
+ }
+ },
+ "chalk": {
+ "version": "2.4.2",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
+ "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^3.2.1",
+ "escape-string-regexp": "^1.0.5",
+ "supports-color": "^5.3.0"
+ }
+ },
+ "charenc": {
+ "version": "0.0.2",
+ "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz",
+ "integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc="
+ },
+ "check-error": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz",
+ "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=",
+ "dev": true
+ },
+ "chokidar": {
+ "version": "3.5.1",
+ "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz",
+ "integrity": "sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==",
+ "dev": true,
+ "requires": {
+ "anymatch": "~3.1.1",
+ "braces": "~3.0.2",
+ "glob-parent": "~5.1.0",
+ "is-binary-path": "~2.1.0",
+ "is-glob": "~4.0.1",
+ "normalize-path": "~3.0.0",
+ "readdirp": "~3.5.0"
+ }
+ },
+ "chownr": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
+ "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="
+ },
+ "cliui": {
+ "version": "7.0.4",
+ "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
+ "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
+ "dev": true,
+ "requires": {
+ "string-width": "^4.2.0",
+ "strip-ansi": "^6.0.0",
+ "wrap-ansi": "^7.0.0"
+ }
+ },
+ "cluster-key-slot": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz",
+ "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw=="
+ },
+ "code-point-at": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz",
+ "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c="
+ },
+ "coffeescript": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz",
+ "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ=="
+ },
+ "color-convert": {
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
+ "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
+ "dev": true,
+ "requires": {
+ "color-name": "1.1.3"
+ }
+ },
+ "color-name": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
+ "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=",
+ "dev": true
+ },
+ "combined-stream": {
+ "version": "1.0.8",
+ "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
+ "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
+ "requires": {
+ "delayed-stream": "~1.0.0"
+ }
+ },
+ "compressible": {
+ "version": "2.0.18",
+ "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz",
+ "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==",
+ "requires": {
+ "mime-db": ">= 1.43.0 < 2"
+ }
+ },
+ "compression": {
+ "version": "1.7.4",
+ "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz",
+ "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==",
+ "requires": {
+ "accepts": "~1.3.5",
+ "bytes": "3.0.0",
+ "compressible": "~2.0.16",
+ "debug": "2.6.9",
+ "on-headers": "~1.0.2",
+ "safe-buffer": "5.1.2",
+ "vary": "~1.1.2"
+ },
+ "dependencies": {
+ "bytes": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz",
+ "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg="
+ }
+ }
+ },
+ "concat-map": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
+ },
+ "console-control-strings": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
+ "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4="
+ },
+ "console-log-level": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz",
+ "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ=="
+ },
+ "content-disposition": {
+ "version": "0.5.3",
+ "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz",
+ "integrity": "sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==",
+ "requires": {
+ "safe-buffer": "5.1.2"
+ }
+ },
+ "content-type": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz",
+ "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA=="
+ },
+ "continuation-local-storage": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz",
+ "integrity": "sha1-EfYT906RT+mzTJKtLSj+auHbf/s=",
+ "requires": {
+ "async-listener": "^0.6.0",
+ "emitter-listener": "^1.1.1"
+ }
+ },
+ "cookie": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz",
+ "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg=="
+ },
+ "cookie-signature": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
+ "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw="
+ },
+ "core-js": {
+ "version": "3.11.1",
+ "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.11.1.tgz",
+ "integrity": "sha512-k93Isqg7e4txZWMGNYwevZL9MiogLk8pd1PtwrmFmi8IBq4GXqUaVW/a33Llt6amSI36uSjd0GWwc9pTT9ALlQ=="
+ },
+ "core-util-is": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
+ "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac="
+ },
+ "cross-spawn": {
+ "version": "7.0.3",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
+ "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+ "dev": true,
+ "requires": {
+ "path-key": "^3.1.0",
+ "shebang-command": "^2.0.0",
+ "which": "^2.0.1"
+ }
+ },
+ "crypt": {
+ "version": "0.0.2",
+ "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz",
+ "integrity": "sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs="
+ },
+ "d64": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz",
+ "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA="
+ },
+ "dashdash": {
+ "version": "1.14.1",
+ "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
+ "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=",
+ "requires": {
+ "assert-plus": "^1.0.0"
+ }
+ },
+ "debug": {
+ "version": "2.6.9",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+ "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+ "requires": {
+ "ms": "2.0.0"
+ }
+ },
+ "decamelize": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz",
+ "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==",
+ "dev": true
+ },
+ "deep-eql": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz",
+ "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==",
+ "dev": true,
+ "requires": {
+ "type-detect": "^4.0.0"
+ }
+ },
+ "deep-extend": {
+ "version": "0.6.0",
+ "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz",
+ "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA=="
+ },
+ "deep-is": {
+ "version": "0.1.3",
+ "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz",
+ "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=",
+ "dev": true
+ },
+ "define-properties": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz",
+ "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==",
+ "dev": true,
+ "requires": {
+ "object-keys": "^1.0.12"
+ }
+ },
+ "delay": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz",
+ "integrity": "sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw=="
+ },
+ "delayed-stream": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
+ "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="
+ },
+ "delegates": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
+ "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o="
+ },
+ "denque": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.0.tgz",
+ "integrity": "sha512-CYiCSgIF1p6EUByQPlGkKnP1M9g0ZV3qMIrqMqZqdwazygIA/YP2vrbcyl1h/WppKJTdl1F85cXIle+394iDAQ=="
+ },
+ "depd": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
+ "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak="
+ },
+ "destroy": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz",
+ "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA="
+ },
+ "detect-libc": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
+ "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups="
+ },
+ "diff": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz",
+ "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==",
+ "dev": true
+ },
+ "diff-match-patch": {
+ "version": "https://github.com/overleaf/diff-match-patch/archive/89805f9c671a77a263fc53461acd62aa7498f688.tar.gz",
+ "integrity": "sha512-rX+9ry8tosctHzJfYG9Vjpof6wTYYA/oFHnzpv6O1vkUd+5dTc9LpZCTUv+FK8i4grpITxY8BYSk8A3u4anwJQ=="
+ },
+ "doctrine": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
+ "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
+ "dev": true,
+ "requires": {
+ "esutils": "^2.0.2"
+ }
+ },
+ "dot-prop": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-6.0.1.tgz",
+ "integrity": "sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==",
+ "requires": {
+ "is-obj": "^2.0.0"
+ }
+ },
+ "dtrace-provider": {
+ "version": "0.8.8",
+ "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.8.tgz",
+ "integrity": "sha512-b7Z7cNtHPhH9EJhNNbbeqTcXB8LGFFZhq1PGgEvpeHlzd36bhbdTWoE/Ba/YguqpBSlAPKnARWhVlhunCMwfxg==",
+ "optional": true,
+ "requires": {
+ "nan": "^2.14.0"
+ }
+ },
+ "duplexify": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz",
+ "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==",
+ "requires": {
+ "end-of-stream": "^1.4.1",
+ "inherits": "^2.0.3",
+ "readable-stream": "^3.1.1",
+ "stream-shift": "^1.0.0"
+ }
+ },
+ "ecc-jsbn": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
+ "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==",
+ "requires": {
+ "jsbn": "~0.1.0",
+ "safer-buffer": "^2.1.0"
+ }
+ },
+ "ecdsa-sig-formatter": {
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz",
+ "integrity": "sha1-rg8PothQRe8UqBfao86azQSJ5b8=",
+ "requires": {
+ "safe-buffer": "^5.0.1"
+ }
+ },
+ "ee-first": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
+ "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="
+ },
+ "emitter-listener": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz",
+ "integrity": "sha1-VrFA6PaZI3Wz18ssqxzHQy2WMug=",
+ "requires": {
+ "shimmer": "^1.2.0"
+ }
+ },
+ "emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true
+ },
+ "encodeurl": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
+ "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k="
+ },
+ "end-of-stream": {
+ "version": "1.4.4",
+ "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
+ "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
+ "requires": {
+ "once": "^1.4.0"
+ }
+ },
+ "enquirer": {
+ "version": "2.3.6",
+ "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz",
+ "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==",
+ "dev": true,
+ "requires": {
+ "ansi-colors": "^4.1.1"
+ }
+ },
+ "ent": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz",
+ "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0="
+ },
+ "error-ex": {
+ "version": "1.3.2",
+ "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz",
+ "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==",
+ "dev": true,
+ "requires": {
+ "is-arrayish": "^0.2.1"
+ }
+ },
+ "es-abstract": {
+ "version": "1.18.3",
+ "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.3.tgz",
+ "integrity": "sha512-nQIr12dxV7SSxE6r6f1l3DtAeEYdsGpps13dR0TwJg1S8gyp4ZPgy3FZcHBgbiQqnoqSTb+oC+kO4UQ0C/J8vw==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2",
+ "es-to-primitive": "^1.2.1",
+ "function-bind": "^1.1.1",
+ "get-intrinsic": "^1.1.1",
+ "has": "^1.0.3",
+ "is-negative-zero": "^2.0.1",
+ "is-regex": "^1.1.3",
+ "object-inspect": "^1.10.3",
+ "object-keys": "^1.1.1",
+ "object.assign": "^4.1.2",
+ "string.prototype.trimend": "^1.0.4",
+ "string.prototype.trimstart": "^1.0.4",
+ "unbox-primitive": "^1.0.1"
+ }
+ },
+ "es-to-primitive": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz",
+ "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==",
+ "dev": true,
+ "requires": {
+ "is-callable": "^1.1.4",
+ "is-date-object": "^1.0.1",
+ "is-symbol": "^1.0.2"
+ }
+ },
+ "escalade": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
+ "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
+ "dev": true
+ },
+ "escape-html": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
+ "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg="
+ },
+ "escape-string-regexp": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
+ "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=",
+ "dev": true
+ },
+ "eslint": {
+ "version": "7.30.0",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.30.0.tgz",
+ "integrity": "sha512-VLqz80i3as3NdloY44BQSJpFw534L9Oh+6zJOUaViV4JPd+DaHwutqP7tcpkW3YiXbK6s05RZl7yl7cQn+lijg==",
+ "dev": true,
+ "requires": {
+ "@babel/code-frame": "7.12.11",
+ "@eslint/eslintrc": "^0.4.2",
+ "@humanwhocodes/config-array": "^0.5.0",
+ "ajv": "^6.10.0",
+ "chalk": "^4.0.0",
+ "cross-spawn": "^7.0.2",
+ "debug": "^4.0.1",
+ "doctrine": "^3.0.0",
+ "enquirer": "^2.3.5",
+ "escape-string-regexp": "^4.0.0",
+ "eslint-scope": "^5.1.1",
+ "eslint-utils": "^2.1.0",
+ "eslint-visitor-keys": "^2.0.0",
+ "espree": "^7.3.1",
+ "esquery": "^1.4.0",
+ "esutils": "^2.0.2",
+ "fast-deep-equal": "^3.1.3",
+ "file-entry-cache": "^6.0.1",
+ "functional-red-black-tree": "^1.0.1",
+ "glob-parent": "^5.1.2",
+ "globals": "^13.6.0",
+ "ignore": "^4.0.6",
+ "import-fresh": "^3.0.0",
+ "imurmurhash": "^0.1.4",
+ "is-glob": "^4.0.0",
+ "js-yaml": "^3.13.1",
+ "json-stable-stringify-without-jsonify": "^1.0.1",
+ "levn": "^0.4.1",
+ "lodash.merge": "^4.6.2",
+ "minimatch": "^3.0.4",
+ "natural-compare": "^1.4.0",
+ "optionator": "^0.9.1",
+ "progress": "^2.0.0",
+ "regexpp": "^3.1.0",
+ "semver": "^7.2.1",
+ "strip-ansi": "^6.0.0",
+ "strip-json-comments": "^3.1.0",
+ "table": "^6.0.9",
+ "text-table": "^0.2.0",
+ "v8-compile-cache": "^2.0.3"
+ },
+ "dependencies": {
+ "chalk": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz",
+ "integrity": "sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^4.1.0",
+ "supports-color": "^7.1.0"
+ },
+ "dependencies": {
+ "ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "requires": {
+ "color-convert": "^2.0.1"
+ }
+ },
+ "supports-color": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+ "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^4.0.0"
+ }
+ }
+ }
+ },
+ "color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dev": true,
+ "requires": {
+ "color-name": "~1.1.4"
+ }
+ },
+ "color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "dev": true
+ },
+ "debug": {
+ "version": "4.3.2",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz",
+ "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==",
+ "dev": true,
+ "requires": {
+ "ms": "2.1.2"
+ }
+ },
+ "escape-string-regexp": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
+ "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
+ "dev": true
+ },
+ "eslint-visitor-keys": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz",
+ "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==",
+ "dev": true
+ },
+ "estraverse": {
+ "version": "5.2.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz",
+ "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ=="
+ },
+ "fast-deep-equal": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
+ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
+ "dev": true
+ },
+ "glob": {
+ "version": "7.1.7",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz",
+ "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==",
+ "requires": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.0.4",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ }
+ },
+ "glob-parent": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
+ "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
+ "dev": true,
+ "requires": {
+ "is-glob": "^4.0.1"
+ }
+ },
+ "has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true
+ },
+ "is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
+ },
+ "json-schema-traverse": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
+ "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="
+ },
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
+ "dev": true
+ },
+ "rimraf": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+ "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
+ "requires": {
+ "glob": "^7.1.3"
+ }
+ },
+ "semver": {
+ "version": "7.3.5",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
+ "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
+ "dev": true,
+ "requires": {
+ "lru-cache": "^6.0.0"
+ }
+ }
+ }
+ },
+ "eslint-config-prettier": {
+ "version": "8.3.0",
+ "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.3.0.tgz",
+ "integrity": "sha512-BgZuLUSeKzvlL/VUjx/Yb787VQ26RU3gGjA3iiFvdsp/2bMfVIWUVP7tjxtjS0e+HP409cPlPvNkQloz8C91ew==",
+ "dev": true
+ },
+ "eslint-config-standard": {
+ "version": "16.0.3",
+ "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-16.0.3.tgz",
+ "integrity": "sha512-x4fmJL5hGqNJKGHSjnLdgA6U6h1YW/G2dW9fA+cyVur4SK6lyue8+UgNKWlZtUDTXvgKDD/Oa3GQjmB5kjtVvg==",
+ "dev": true
+ },
+ "eslint-import-resolver-node": {
+ "version": "0.3.4",
+ "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.4.tgz",
+ "integrity": "sha512-ogtf+5AB/O+nM6DIeBUNr2fuT7ot9Qg/1harBfBtaP13ekEWFQEEMP94BCB7zaNW3gyY+8SHYF00rnqYwXKWOA==",
+ "dev": true,
+ "requires": {
+ "debug": "^2.6.9",
+ "resolve": "^1.13.1"
+ }
+ },
+ "eslint-module-utils": {
+ "version": "2.6.1",
+ "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.1.tgz",
+ "integrity": "sha512-ZXI9B8cxAJIH4nfkhTwcRTEAnrVfobYqwjWy/QMCZ8rHkZHFjf9yO4BzpiF9kCSfNlMG54eKigISHpX0+AaT4A==",
+ "dev": true,
+ "requires": {
+ "debug": "^3.2.7",
+ "pkg-dir": "^2.0.0"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "3.2.7",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
+ "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
+ "dev": true,
+ "requires": {
+ "ms": "^2.1.1"
+ }
+ },
+ "ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
+ "dev": true
+ }
+ }
+ },
+ "eslint-plugin-chai-expect": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-chai-expect/-/eslint-plugin-chai-expect-2.2.0.tgz",
+ "integrity": "sha512-ExTJKhgeYMfY8wDj3UiZmgpMKJOUHGNHmWMlxT49JUDB1vTnw0sSNfXJSxnX+LcebyBD/gudXzjzD136WqPJrQ==",
+ "dev": true
+ },
+ "eslint-plugin-chai-friendly": {
+ "version": "0.6.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.6.0.tgz",
+ "integrity": "sha512-Uvvv1gkbRGp/qfN15B0kQyQWg+oFA8buDSqrwmW3egNSk/FpqH2MjQqKOuKwmEL6w4QIQrIjDp+gg6kGGmD3oQ==",
+ "dev": true
+ },
+ "eslint-plugin-es": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.0.tgz",
+ "integrity": "sha512-6/Jb/J/ZvSebydwbBJO1R9E5ky7YeElfK56Veh7e4QGFHCXoIXGH9HhVz+ibJLM3XJ1XjP+T7rKBLUa/Y7eIng==",
+ "dev": true,
+ "requires": {
+ "eslint-utils": "^2.0.0",
+ "regexpp": "^3.0.0"
+ }
+ },
+ "eslint-plugin-import": {
+ "version": "2.23.4",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.23.4.tgz",
+ "integrity": "sha512-6/wP8zZRsnQFiR3iaPFgh5ImVRM1WN5NUWfTIRqwOdeiGJlBcSk82o1FEVq8yXmy4lkIzTo7YhHCIxlU/2HyEQ==",
+ "dev": true,
+ "requires": {
+ "array-includes": "^3.1.3",
+ "array.prototype.flat": "^1.2.4",
+ "debug": "^2.6.9",
+ "doctrine": "^2.1.0",
+ "eslint-import-resolver-node": "^0.3.4",
+ "eslint-module-utils": "^2.6.1",
+ "find-up": "^2.0.0",
+ "has": "^1.0.3",
+ "is-core-module": "^2.4.0",
+ "minimatch": "^3.0.4",
+ "object.values": "^1.1.3",
+ "pkg-up": "^2.0.0",
+ "read-pkg-up": "^3.0.0",
+ "resolve": "^1.20.0",
+ "tsconfig-paths": "^3.9.0"
+ },
+ "dependencies": {
+ "doctrine": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz",
+ "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==",
+ "dev": true,
+ "requires": {
+ "esutils": "^2.0.2"
+ }
+ },
+ "has-symbols": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz",
+ "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw=="
+ },
+ "is-callable": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz",
+ "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ=="
+ },
+ "p-limit": {
+ "version": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz",
+ "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==",
+ "requires": {
+ "p-try": "^1.0.0"
+ }
+ },
+ "resolve": {
+ "version": "1.20.0",
+ "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz",
+ "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==",
+ "dev": true,
+ "requires": {
+ "is-core-module": "^2.2.0",
+ "path-parse": "^1.0.6"
+ }
+ }
+ }
+ },
+ "eslint-plugin-mocha": {
+ "version": "8.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-8.2.0.tgz",
+ "integrity": "sha512-8oOR47Ejt+YJPNQzedbiklDqS1zurEaNrxXpRs+Uk4DMDPVmKNagShFeUaYsfvWP55AhI+P1non5QZAHV6K78A==",
+ "dev": true,
+ "requires": {
+ "eslint-utils": "^2.1.0",
+ "ramda": "^0.27.1"
+ }
+ },
+ "eslint-plugin-node": {
+ "version": "11.1.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz",
+ "integrity": "sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==",
+ "dev": true,
+ "requires": {
+ "eslint-plugin-es": "^3.0.0",
+ "eslint-utils": "^2.0.0",
+ "ignore": "^5.1.1",
+ "minimatch": "^3.0.4",
+ "resolve": "^1.10.1",
+ "semver": "^6.1.0"
+ },
+ "dependencies": {
+ "ignore": {
+ "version": "5.1.4",
+ "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz",
+ "integrity": "sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==",
+ "dev": true
+ }
+ }
+ },
+ "eslint-plugin-prettier": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-3.1.3.tgz",
+ "integrity": "sha512-+HG5jmu/dN3ZV3T6eCD7a4BlAySdN7mLIbJYo0z1cFQuI+r2DiTJEFeF68ots93PsnrMxbzIZ2S/ieX+mkrBeQ==",
+ "dev": true,
+ "requires": {
+ "prettier-linter-helpers": "^1.0.0"
+ }
+ },
+ "eslint-plugin-promise": {
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-4.2.1.tgz",
+ "integrity": "sha512-VoM09vT7bfA7D+upt+FjeBO5eHIJQBUWki1aPvB+vbNiHS3+oGIJGIeyBtKQTME6UPXXy3vV07OL1tHd3ANuDw==",
+ "dev": true
+ },
+ "eslint-scope": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz",
+ "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==",
+ "dev": true,
+ "requires": {
+ "esrecurse": "^4.3.0",
+ "estraverse": "^4.1.1"
+ }
+ },
+ "eslint-utils": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz",
+ "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==",
+ "dev": true,
+ "requires": {
+ "eslint-visitor-keys": "^1.1.0"
+ }
+ },
+ "eslint-visitor-keys": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz",
+ "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==",
+ "dev": true
+ },
+ "espree": {
+ "version": "7.3.1",
+ "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz",
+ "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==",
+ "dev": true,
+ "requires": {
+ "acorn": "^7.4.0",
+ "acorn-jsx": "^5.3.1",
+ "eslint-visitor-keys": "^1.3.0"
+ },
+ "dependencies": {
+ "acorn": {
+ "version": "7.4.1",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz",
+ "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==",
+ "dev": true
+ },
+ "eslint-visitor-keys": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz",
+ "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==",
+ "dev": true
+ }
+ }
+ },
+ "esprima": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
+ "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
+ "dev": true
+ },
+ "esquery": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz",
+ "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==",
+ "dev": true
+ },
+ "esrecurse": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
+ "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
+ "dev": true,
+ "requires": {
+ "estraverse": "^5.2.0"
+ },
+ "dependencies": {
+ "estraverse": {
+ "version": "5.2.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz",
+ "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==",
+ "dev": true
+ }
+ }
+ },
+ "estraverse": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz",
+ "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
+ "dev": true
+ },
+ "esutils": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
+ "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
+ "dev": true
+ },
+ "etag": {
+ "version": "1.8.1",
+ "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
+ "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc="
+ },
+ "event-target-shim": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
+ "integrity": "sha1-XU0+vflYPWOlMzzi3rdICrKwV4k="
+ },
+ "eventid": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/eventid/-/eventid-1.0.0.tgz",
+ "integrity": "sha512-4upSDsvpxhWPsmw4fsJCp0zj8S7I0qh1lCDTmZXP8V3TtryQKDI8CgQPN+e5JakbWwzaAX3lrdp2b3KSoMSUpw==",
+ "requires": {
+ "d64": "^1.0.0",
+ "uuid": "^3.0.1"
+ }
+ },
+ "express": {
+ "version": "4.17.1",
+ "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz",
+ "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==",
+ "requires": {
+ "accepts": "~1.3.7",
+ "array-flatten": "1.1.1",
+ "body-parser": "1.19.0",
+ "content-disposition": "0.5.3",
+ "content-type": "~1.0.4",
+ "cookie": "0.4.0",
+ "cookie-signature": "1.0.6",
+ "debug": "2.6.9",
+ "depd": "~1.1.2",
+ "encodeurl": "~1.0.2",
+ "escape-html": "~1.0.3",
+ "etag": "~1.8.1",
+ "finalhandler": "~1.1.2",
+ "fresh": "0.5.2",
+ "merge-descriptors": "1.0.1",
+ "methods": "~1.1.2",
+ "on-finished": "~2.3.0",
+ "parseurl": "~1.3.3",
+ "path-to-regexp": "0.1.7",
+ "proxy-addr": "~2.0.5",
+ "qs": "6.7.0",
+ "range-parser": "~1.2.1",
+ "safe-buffer": "5.1.2",
+ "send": "0.17.1",
+ "serve-static": "1.14.1",
+ "setprototypeof": "1.1.1",
+ "statuses": "~1.5.0",
+ "type-is": "~1.6.18",
+ "utils-merge": "1.0.1",
+ "vary": "~1.1.2"
+ }
+ },
+ "extend": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
+ "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
+ },
+ "extsprintf": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
+ "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU="
+ },
+ "fast-deep-equal": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz",
+ "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA=="
+ },
+ "fast-diff": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz",
+ "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==",
+ "dev": true
+ },
+ "fast-json-stable-stringify": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
+ "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
+ },
+ "fast-levenshtein": {
+ "version": "2.0.6",
+ "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
+ "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=",
+ "dev": true
+ },
+ "fast-text-encoding": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz",
+ "integrity": "sha1-PlzoKTQJz6pxd6cbnKhOGx5vJe8="
+ },
+ "file-entry-cache": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
+ "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==",
+ "dev": true,
+ "requires": {
+ "flat-cache": "^3.0.4"
+ }
+ },
+ "file-uri-to-path": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz",
+ "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw=="
+ },
+ "fill-range": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
+ "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+ "dev": true,
+ "requires": {
+ "to-regex-range": "^5.0.1"
+ }
+ },
+ "finalhandler": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz",
+ "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==",
+ "requires": {
+ "debug": "2.6.9",
+ "encodeurl": "~1.0.2",
+ "escape-html": "~1.0.3",
+ "on-finished": "~2.3.0",
+ "parseurl": "~1.3.3",
+ "statuses": "~1.5.0",
+ "unpipe": "~1.0.0"
+ }
+ },
+ "find-up": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz",
+ "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=",
+ "dev": true,
+ "requires": {
+ "locate-path": "^2.0.0"
+ }
+ },
+ "findit2": {
+ "version": "2.2.3",
+ "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz",
+ "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY="
+ },
+ "flat": {
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz",
+ "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==",
+ "dev": true
+ },
+ "flat-cache": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz",
+ "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==",
+ "dev": true,
+ "requires": {
+ "flatted": "^3.1.0"
+ }
+ },
+ "flatted": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.1.tgz",
+ "integrity": "sha512-OMQjaErSFHmHqZe+PSidH5n8j3O0F2DdnVh8JB4j4eUQ2k6KvB0qGfrKIhapvez5JerBbmWkaLYUYWISaESoXg==",
+ "dev": true
+ },
+ "forever-agent": {
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
+ "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw=="
+ },
+ "form-data": {
+ "version": "2.3.3",
+ "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
+ "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
+ "requires": {
+ "asynckit": "^0.4.0",
+ "combined-stream": "^1.0.6",
+ "mime-types": "^2.1.12"
+ }
+ },
+ "forwarded": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz",
+ "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ="
+ },
+ "fresh": {
+ "version": "0.5.2",
+ "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
+ "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac="
+ },
+ "fs-minipass": {
+ "version": "1.2.7",
+ "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz",
+ "integrity": "sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==",
+ "requires": {
+ "minipass": "^2.6.0"
+ }
+ },
+ "fs.realpath": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+ "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
+ },
+ "function-bind": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
+ "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
+ "dev": true
+ },
+ "functional-red-black-tree": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz",
+ "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=",
+ "dev": true
+ },
+ "gauge": {
+ "version": "2.7.4",
+ "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz",
+ "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=",
+ "requires": {
+ "aproba": "^1.0.3",
+ "console-control-strings": "^1.0.0",
+ "has-unicode": "^2.0.0",
+ "object-assign": "^4.1.0",
+ "signal-exit": "^3.0.0",
+ "string-width": "^1.0.1",
+ "strip-ansi": "^3.0.1",
+ "wide-align": "^1.1.0"
+ },
+ "dependencies": {
+ "ansi-regex": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
+ "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8="
+ },
+ "is-fullwidth-code-point": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz",
+ "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=",
+ "requires": {
+ "number-is-nan": "^1.0.0"
+ }
+ },
+ "string-width": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz",
+ "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=",
+ "requires": {
+ "code-point-at": "^1.0.0",
+ "is-fullwidth-code-point": "^1.0.0",
+ "strip-ansi": "^3.0.0"
+ }
+ },
+ "strip-ansi": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
+ "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=",
+ "requires": {
+ "ansi-regex": "^2.0.0"
+ }
+ }
+ }
+ },
+ "gaxios": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.2.0.tgz",
+ "integrity": "sha512-Ms7fNifGv0XVU+6eIyL9LB7RVESeML9+cMvkwGS70xyD6w2Z80wl6RiqiJ9k1KFlJCUTQqFFc8tXmPQfSKUe8g==",
+ "requires": {
+ "abort-controller": "^3.0.0",
+ "extend": "^3.0.2",
+ "https-proxy-agent": "^5.0.0",
+ "is-stream": "^2.0.0",
+ "node-fetch": "^2.3.0"
+ }
+ },
+ "gcp-metadata": {
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz",
+ "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==",
+ "requires": {
+ "gaxios": "^4.0.0",
+ "json-bigint": "^1.0.0"
+ }
+ },
+ "get-caller-file": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
+ "dev": true
+ },
+ "get-func-name": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz",
+ "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=",
+ "dev": true
+ },
+ "get-intrinsic": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz",
+ "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==",
+ "dev": true,
+ "requires": {
+ "function-bind": "^1.1.1",
+ "has": "^1.0.3",
+ "has-symbols": "^1.0.1"
+ }
+ },
+ "getpass": {
+ "version": "0.1.7",
+ "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
+ "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=",
+ "requires": {
+ "assert-plus": "^1.0.0"
+ }
+ },
+ "glob": {
+ "version": "6.0.4",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz",
+ "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=",
+ "optional": true,
+ "requires": {
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "2 || 3",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ }
+ },
+ "glob-parent": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
+ "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
+ "dev": true,
+ "requires": {
+ "is-glob": "^4.0.1"
+ }
+ },
+ "globals": {
+ "version": "13.10.0",
+ "resolved": "https://registry.npmjs.org/globals/-/globals-13.10.0.tgz",
+ "integrity": "sha512-piHC3blgLGFjvOuMmWZX60f+na1lXFDhQXBf1UYp2fXPXqvEUbOhNwi6BsQ0bQishwedgnjkwv1d9zKf+MWw3g==",
+ "dev": true,
+ "requires": {
+ "type-fest": "^0.20.2"
+ }
+ },
+ "google-auth-library": {
+ "version": "7.0.4",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.4.tgz",
+ "integrity": "sha512-o8irYyeijEiecTXeoEe8UKNEzV1X+uhR4b2oNdapDMZixypp0J+eHimGOyx5Joa3UAeokGngdtDLXtq9vDqG2Q==",
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^4.0.0",
+ "gcp-metadata": "^4.2.0",
+ "gtoken": "^5.0.4",
+ "jws": "^4.0.0",
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "google-gax": {
+ "version": "2.11.2",
+ "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-2.11.2.tgz",
+ "integrity": "sha512-PNqXv7Oi5XBMgoMWVxLZHUidfMv7cPHrDSDXqLyEd6kY6pqFnVKC8jt2T1df4JPSc2+VLPdeo6L7X9mbdQG8Xw==",
+ "requires": {
+ "@grpc/grpc-js": "~1.2.0",
+ "@grpc/proto-loader": "^0.5.1",
+ "@types/long": "^4.0.0",
+ "abort-controller": "^3.0.0",
+ "duplexify": "^4.0.0",
+ "fast-text-encoding": "^1.0.3",
+ "google-auth-library": "^7.0.2",
+ "is-stream-ended": "^0.1.4",
+ "node-fetch": "^2.6.1",
+ "protobufjs": "^6.10.2",
+ "retry-request": "^4.0.0"
+ },
+ "dependencies": {
+ "fast-text-encoding": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.3.tgz",
+ "integrity": "sha512-dtm4QZH9nZtcDt8qJiOH9fcQd1NAgi+K1O2DbE6GG1PPCK/BWfOH3idCTRQ4ImXRUOyopDEgDEnVEE7Y/2Wrig=="
+ }
+ }
+ },
+ "google-p12-pem": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz",
+ "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==",
+ "requires": {
+ "node-forge": "^0.10.0"
+ }
+ },
+ "graceful-fs": {
+ "version": "4.2.6",
+ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz",
+ "integrity": "sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==",
+ "dev": true
+ },
+ "growl": {
+ "version": "1.10.5",
+ "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz",
+ "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==",
+ "dev": true
+ },
+ "gtoken": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz",
+ "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==",
+ "requires": {
+ "gaxios": "^4.0.0",
+ "google-p12-pem": "^3.0.3",
+ "jws": "^4.0.0"
+ }
+ },
+ "har-schema": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
+ "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI="
+ },
+ "har-validator": {
+ "version": "5.1.3",
+ "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz",
+ "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==",
+ "requires": {
+ "ajv": "^6.5.5",
+ "har-schema": "^2.0.0"
+ }
+ },
+ "has": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
+ "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
+ "dev": true,
+ "requires": {
+ "function-bind": "^1.1.1"
+ }
+ },
+ "has-bigints": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz",
+ "integrity": "sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==",
+ "dev": true
+ },
+ "has-flag": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
+ "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
+ "dev": true
+ },
+ "has-symbols": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz",
+ "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==",
+ "dev": true
+ },
+ "has-unicode": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
+ "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk="
+ },
+ "he": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
+ "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
+ "dev": true
+ },
+ "hex2dec": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz",
+ "integrity": "sha1-jhzkvvNqdPfVcjw/swkMKGAHczg="
+ },
+ "hosted-git-info": {
+ "version": "2.8.9",
+ "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz",
+ "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==",
+ "dev": true
+ },
+ "http-errors": {
+ "version": "1.7.2",
+ "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz",
+ "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==",
+ "requires": {
+ "depd": "~1.1.2",
+ "inherits": "2.0.3",
+ "setprototypeof": "1.1.1",
+ "statuses": ">= 1.5.0 < 2",
+ "toidentifier": "1.0.0"
+ },
+ "dependencies": {
+ "inherits": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
+ "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
+ }
+ }
+ },
+ "http-proxy-agent": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz",
+ "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==",
+ "requires": {
+ "@tootallnate/once": "1",
+ "agent-base": "6",
+ "debug": "4"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz",
+ "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==",
+ "requires": {
+ "ms": "2.1.2"
+ }
+ },
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
+ }
+ }
+ },
+ "http-signature": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
+ "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==",
+ "requires": {
+ "assert-plus": "^1.0.0",
+ "jsprim": "^1.2.2",
+ "sshpk": "^1.7.0"
+ }
+ },
+ "https-proxy-agent": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz",
+ "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==",
+ "requires": {
+ "agent-base": "6",
+ "debug": "4"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz",
+ "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==",
+ "requires": {
+ "ms": "2.1.2"
+ }
+ },
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
+ }
+ }
+ },
+ "iconv-lite": {
+ "version": "0.4.24",
+ "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
+ "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
+ "requires": {
+ "safer-buffer": ">= 2.1.2 < 3"
+ }
+ },
+ "ignore": {
+ "version": "4.0.6",
+ "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz",
+ "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==",
+ "dev": true
+ },
+ "ignore-walk": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.3.tgz",
+ "integrity": "sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==",
+ "requires": {
+ "minimatch": "^3.0.4"
+ }
+ },
+ "import-fresh": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz",
+ "integrity": "sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==",
+ "dev": true,
+ "requires": {
+ "parent-module": "^1.0.0",
+ "resolve-from": "^4.0.0"
+ }
+ },
+ "imurmurhash": {
+ "version": "0.1.4",
+ "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
+ "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=",
+ "dev": true
+ },
+ "inflight": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+ "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
+ "requires": {
+ "once": "^1.3.0",
+ "wrappy": "1"
+ }
+ },
+ "inherits": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
+ },
+ "ini": {
+ "version": "1.3.8",
+ "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
+ "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="
+ },
+ "ioredis": {
+ "version": "4.27.1",
+ "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.27.1.tgz",
+ "integrity": "sha512-PaFNFeBbOcEYHXAdrJuy7uesJcyvzStTM1aYMchTuky+VgKqDbXhnTJHaDsjAwcTwPx8Asatx+l2DW8zZ2xlsQ==",
+ "requires": {
+ "cluster-key-slot": "^1.1.0",
+ "debug": "^4.3.1",
+ "denque": "^1.1.0",
+ "lodash.defaults": "^4.2.0",
+ "lodash.flatten": "^4.4.0",
+ "p-map": "^2.1.0",
+ "redis-commands": "1.7.0",
+ "redis-errors": "^1.2.0",
+ "redis-parser": "^3.0.0",
+ "standard-as-callback": "^2.1.0"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "4.3.1",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
+ "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
+ "requires": {
+ "ms": "2.1.2"
+ }
+ },
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
+ }
+ }
+ },
+ "ipaddr.js": {
+ "version": "1.9.1",
+ "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
+ "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="
+ },
+ "is": {
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz",
+ "integrity": "sha1-Yc/23TxBk9uUo9YlggcrROVkXXk="
+ },
+ "is-arrayish": {
+ "version": "0.2.1",
+ "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
+ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=",
+ "dev": true
+ },
+ "is-bigint": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.2.tgz",
+ "integrity": "sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA==",
+ "dev": true
+ },
+ "is-binary-path": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
+ "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
+ "dev": true,
+ "requires": {
+ "binary-extensions": "^2.0.0"
+ }
+ },
+ "is-boolean-object": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.1.tgz",
+ "integrity": "sha512-bXdQWkECBUIAcCkeH1unwJLIpZYaa5VvuygSyS/c2lf719mTKZDU5UdDRlpd01UjADgmW8RfqaP+mRaVPdr/Ng==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2"
+ }
+ },
+ "is-buffer": {
+ "version": "1.1.6",
+ "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz",
+ "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w=="
+ },
+ "is-callable": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz",
+ "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==",
+ "dev": true
+ },
+ "is-core-module": {
+ "version": "2.5.0",
+ "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.5.0.tgz",
+ "integrity": "sha512-TXCMSDsEHMEEZ6eCA8rwRDbLu55MRGmrctljsBX/2v1d9/GzqHOxW5c5oPSgrUt2vBFXebu9rGqckXGPWOlYpg==",
+ "dev": true,
+ "requires": {
+ "has": "^1.0.3"
+ }
+ },
+ "is-date-object": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz",
+ "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==",
+ "dev": true
+ },
+ "is-extglob": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
+ "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=",
+ "dev": true
+ },
+ "is-fullwidth-code-point": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz",
+ "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8="
+ },
+ "is-glob": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz",
+ "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==",
+ "dev": true,
+ "requires": {
+ "is-extglob": "^2.1.1"
+ }
+ },
+ "is-negative-zero": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz",
+ "integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==",
+ "dev": true
+ },
+ "is-number": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
+ "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
+ "dev": true
+ },
+ "is-number-object": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.5.tgz",
+ "integrity": "sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw==",
+ "dev": true
+ },
+ "is-obj": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz",
+ "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w=="
+ },
+ "is-plain-obj": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz",
+ "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==",
+ "dev": true
+ },
+ "is-regex": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz",
+ "integrity": "sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2"
+ }
+ },
+ "is-stream": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz",
+ "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw=="
+ },
+ "is-stream-ended": {
+ "version": "0.1.4",
+ "resolved": "https://registry.npmjs.org/is-stream-ended/-/is-stream-ended-0.1.4.tgz",
+ "integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw=="
+ },
+ "is-string": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz",
+ "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==",
+ "dev": true
+ },
+ "is-symbol": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz",
+ "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==",
+ "dev": true,
+ "requires": {
+ "has-symbols": "^1.0.1"
+ }
+ },
+ "is-typedarray": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
+ "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo="
+ },
+ "isarray": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
+ "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
+ },
+ "isexe": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+ "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=",
+ "dev": true
+ },
+ "isstream": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
+ "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo="
+ },
+ "js-tokens": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
+ "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
+ "dev": true
+ },
+ "js-yaml": {
+ "version": "3.13.1",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz",
+ "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==",
+ "dev": true,
+ "requires": {
+ "argparse": "^1.0.7",
+ "esprima": "^4.0.0"
+ }
+ },
+ "jsbn": {
+ "version": "0.1.1",
+ "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
+ "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM="
+ },
+ "json-bigint": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz",
+ "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==",
+ "requires": {
+ "bignumber.js": "^9.0.0"
+ }
+ },
+ "json-parse-better-errors": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz",
+ "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==",
+ "dev": true
+ },
+ "json-schema": {
+ "version": "0.2.3",
+ "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz",
+ "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM="
+ },
+ "json-schema-traverse": {
+ "version": "0.4.1",
+ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
+ "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
+ },
+ "json-stable-stringify-without-jsonify": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
+ "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=",
+ "dev": true
+ },
+ "json-stringify-safe": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
+ "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus="
+ },
+ "json5": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz",
+ "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==",
+ "dev": true,
+ "requires": {
+ "minimist": "^1.2.5"
+ }
+ },
+ "jsprim": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz",
+ "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=",
+ "requires": {
+ "assert-plus": "1.0.0",
+ "extsprintf": "1.3.0",
+ "json-schema": "0.2.3",
+ "verror": "1.10.0"
+ }
+ },
+ "just-extend": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.1.0.tgz",
+ "integrity": "sha512-ApcjaOdVTJ7y4r08xI5wIqpvwS48Q0PBG4DJROcEkH1f8MdAiNFyFxz3xoL0LWAVwjrwPYZdVHHxhRHcx/uGLA==",
+ "dev": true
+ },
+ "jwa": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz",
+ "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==",
+ "requires": {
+ "buffer-equal-constant-time": "1.0.1",
+ "ecdsa-sig-formatter": "1.0.11",
+ "safe-buffer": "^5.0.1"
+ }
+ },
+ "jws": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
+ "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
+ "requires": {
+ "jwa": "^2.0.0",
+ "safe-buffer": "^5.0.1"
+ }
+ },
+ "levn": {
+ "version": "0.4.1",
+ "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
+ "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
+ "dev": true,
+ "requires": {
+ "prelude-ls": "^1.2.1",
+ "type-check": "~0.4.0"
+ }
+ },
+ "load-json-file": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz",
+ "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=",
+ "dev": true,
+ "requires": {
+ "graceful-fs": "^4.1.2",
+ "parse-json": "^4.0.0",
+ "pify": "^3.0.0",
+ "strip-bom": "^3.0.0"
+ },
+ "dependencies": {
+ "pify": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz",
+ "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=",
+ "dev": true
+ }
+ }
+ },
+ "locate-path": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz",
+ "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=",
+ "dev": true,
+ "requires": {
+ "p-locate": "^2.0.0",
+ "path-exists": "^3.0.0"
+ }
+ },
+ "lodash": {
+ "version": "4.17.21",
+ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
+ "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
+ },
+ "lodash.camelcase": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
+ "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY="
+ },
+ "lodash.clonedeep": {
+ "version": "4.5.0",
+ "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz",
+ "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=",
+ "dev": true
+ },
+ "lodash.defaults": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
+ "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw="
+ },
+ "lodash.flatten": {
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz",
+ "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8="
+ },
+ "lodash.get": {
+ "version": "4.4.2",
+ "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
+ "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=",
+ "dev": true
+ },
+ "lodash.merge": {
+ "version": "4.6.2",
+ "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
+ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
+ "dev": true
+ },
+ "lodash.truncate": {
+ "version": "4.4.2",
+ "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz",
+ "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=",
+ "dev": true
+ },
+ "log-driver": {
+ "version": "1.2.7",
+ "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz",
+ "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg=="
+ },
+ "log-symbols": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz",
+ "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==",
+ "dev": true,
+ "requires": {
+ "chalk": "^4.0.0"
+ },
+ "dependencies": {
+ "ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "requires": {
+ "color-convert": "^2.0.1"
+ }
+ },
+ "chalk": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz",
+ "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^4.1.0",
+ "supports-color": "^7.1.0"
+ }
+ },
+ "color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dev": true,
+ "requires": {
+ "color-name": "~1.1.4"
+ }
+ },
+ "color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "dev": true
+ },
+ "has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true
+ },
+ "supports-color": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+ "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^4.0.0"
+ }
+ }
+ }
+ },
+ "logger-sharelatex": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-2.2.0.tgz",
+ "integrity": "sha512-ko+OmE25XHJJCiz1R9EgwlfM7J/5olpunUfR3WcfuqOQrcUqsdBrDA2sOytngT0ViwjCR0Fh4qZVPwEWfmrvwA==",
+ "requires": {
+ "@google-cloud/logging-bunyan": "^3.0.0",
+ "@overleaf/o-error": "^3.0.0",
+ "bunyan": "^1.8.14",
+ "node-fetch": "^2.6.0",
+ "raven": "^2.6.4",
+ "yn": "^4.0.0"
+ },
+ "dependencies": {
+ "yn": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz",
+ "integrity": "sha512-huWiiCS4TxKc4SfgmTwW1K7JmXPPAmuXWYy4j9qjQo4+27Kni8mGhAAi1cloRWmBe2EqcLgt3IGqQoRL/MtPgg=="
+ }
+ }
+ },
+ "long": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
+ "integrity": "sha1-mntxz7fTYaGU6lVSQckvdGjVvyg="
+ },
+ "lru-cache": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+ "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "requires": {
+ "yallist": "^4.0.0"
+ },
+ "dependencies": {
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
+ }
+ }
+ },
+ "map-obj": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz",
+ "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g=="
+ },
+ "md5": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz",
+ "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==",
+ "requires": {
+ "charenc": "0.0.2",
+ "crypt": "0.0.2",
+ "is-buffer": "~1.1.6"
+ }
+ },
+ "media-typer": {
+ "version": "0.3.0",
+ "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
+ "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g="
+ },
+ "memory-pager": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz",
+ "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==",
+ "optional": true
+ },
+ "merge-descriptors": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
+ "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E="
+ },
+ "methods": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
+ "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4="
+ },
+ "mime": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
+ "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="
+ },
+ "mime-db": {
+ "version": "1.43.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz",
+ "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ=="
+ },
+ "mime-types": {
+ "version": "2.1.26",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz",
+ "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==",
+ "requires": {
+ "mime-db": "1.43.0"
+ }
+ },
+ "minimatch": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
+ "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=",
+ "requires": {
+ "brace-expansion": "^1.1.7"
+ }
+ },
+ "minimist": {
+ "version": "1.2.5",
+ "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
+ "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw=="
+ },
+ "minipass": {
+ "version": "2.9.0",
+ "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz",
+ "integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==",
+ "requires": {
+ "safe-buffer": "^5.1.2",
+ "yallist": "^3.0.0"
+ }
+ },
+ "minizlib": {
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.3.3.tgz",
+ "integrity": "sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==",
+ "requires": {
+ "minipass": "^2.9.0"
+ }
+ },
+ "mkdirp": {
+ "version": "0.5.5",
+ "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz",
+ "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==",
+ "requires": {
+ "minimist": "^1.2.5"
+ }
+ },
+ "mocha": {
+ "version": "8.3.2",
+ "resolved": "https://registry.npmjs.org/mocha/-/mocha-8.3.2.tgz",
+ "integrity": "sha512-UdmISwr/5w+uXLPKspgoV7/RXZwKRTiTjJ2/AC5ZiEztIoOYdfKb19+9jNmEInzx5pBsCyJQzarAxqIGBNYJhg==",
+ "dev": true,
+ "requires": {
+ "@ungap/promise-all-settled": "1.1.2",
+ "ansi-colors": "4.1.1",
+ "browser-stdout": "1.3.1",
+ "chokidar": "3.5.1",
+ "debug": "4.3.1",
+ "diff": "5.0.0",
+ "escape-string-regexp": "4.0.0",
+ "find-up": "5.0.0",
+ "glob": "7.1.6",
+ "growl": "1.10.5",
+ "he": "1.2.0",
+ "js-yaml": "4.0.0",
+ "log-symbols": "4.0.0",
+ "minimatch": "3.0.4",
+ "ms": "2.1.3",
+ "nanoid": "3.1.20",
+ "serialize-javascript": "5.0.1",
+ "strip-json-comments": "3.1.1",
+ "supports-color": "8.1.1",
+ "which": "2.0.2",
+ "wide-align": "1.1.3",
+ "workerpool": "6.1.0",
+ "yargs": "16.2.0",
+ "yargs-parser": "20.2.4",
+ "yargs-unparser": "2.0.0"
+ },
+ "dependencies": {
+ "ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "requires": {
+ "color-convert": "^2.0.1"
+ }
+ },
+ "argparse": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
+ "dev": true
+ },
+ "color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "requires": {
+ "color-name": "~1.1.4"
+ }
+ },
+ "color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
+ },
+ "debug": {
+ "version": "4.3.1",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
+ "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
+ "dev": true,
+ "requires": {
+ "ms": "2.1.2"
+ },
+ "dependencies": {
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
+ "dev": true
+ }
+ }
+ },
+ "escape-string-regexp": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
+ "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
+ "dev": true
+ },
+ "find-up": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
+ "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
+ "dev": true,
+ "requires": {
+ "locate-path": "^6.0.0",
+ "path-exists": "^4.0.0"
+ }
+ },
+ "glob": {
+ "version": "7.1.6",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
+ "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
+ "dev": true,
+ "requires": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.0.4",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ }
+ },
+ "has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true
+ },
+ "is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
+ },
+ "js-yaml": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.0.0.tgz",
+ "integrity": "sha512-pqon0s+4ScYUvX30wxQi3PogGFAlUyH0awepWvwkj4jD4v+ova3RiYw8bmA6x2rDrEaj8i/oWKoRxpVNW+Re8Q==",
+ "dev": true,
+ "requires": {
+ "argparse": "^2.0.1"
+ }
+ },
+ "locate-path": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
+ "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
+ "dev": true,
+ "requires": {
+ "p-locate": "^5.0.0"
+ }
+ },
+ "ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
+ "dev": true
+ },
+ "p-locate": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
+ "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
+ "dev": true,
+ "requires": {
+ "p-limit": "^3.0.2"
+ }
+ },
+ "path-exists": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+ "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+ "dev": true
+ },
+ "supports-color": {
+ "version": "8.1.1",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
+ "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^4.0.0"
+ }
+ }
+ }
+ },
+ "module-details-from-path": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz",
+ "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is="
+ },
+ "moment": {
+ "version": "2.29.1",
+ "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz",
+ "integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==",
+ "optional": true
+ },
+ "mongodb": {
+ "version": "3.6.6",
+ "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.6.6.tgz",
+ "integrity": "sha512-WlirMiuV1UPbej5JeCMqE93JRfZ/ZzqE7nJTwP85XzjAF4rRSeq2bGCb1cjfoHLOF06+HxADaPGqT0g3SbVT1w==",
+ "requires": {
+ "bl": "^2.2.1",
+ "bson": "^1.1.4",
+ "denque": "^1.4.1",
+ "optional-require": "^1.0.2",
+ "safe-buffer": "^5.1.2",
+ "saslprep": "^1.0.0"
+ }
+ },
+ "ms": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
+ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
+ },
+ "mv": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz",
+ "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=",
+ "optional": true,
+ "requires": {
+ "mkdirp": "~0.5.1",
+ "ncp": "~2.0.0",
+ "rimraf": "~2.4.0"
+ }
+ },
+ "nan": {
+ "version": "2.14.0",
+ "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz",
+ "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg=="
+ },
+ "nanoid": {
+ "version": "3.1.20",
+ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz",
+ "integrity": "sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==",
+ "dev": true
+ },
+ "natural-compare": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
+ "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=",
+ "dev": true
+ },
+ "ncp": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz",
+ "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=",
+ "optional": true
+ },
+ "needle": {
+ "version": "2.6.0",
+ "resolved": "https://registry.npmjs.org/needle/-/needle-2.6.0.tgz",
+ "integrity": "sha512-KKYdza4heMsEfSWD7VPUIz3zX2XDwOyX2d+geb4vrERZMT5RMU6ujjaD+I5Yr54uZxQ2w6XRTAhHBbSCyovZBg==",
+ "requires": {
+ "debug": "^3.2.6",
+ "iconv-lite": "^0.4.4",
+ "sax": "^1.2.4"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "3.2.7",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
+ "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
+ "requires": {
+ "ms": "^2.1.1"
+ }
+ },
+ "ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
+ }
+ }
+ },
+ "negotiator": {
+ "version": "0.6.2",
+ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz",
+ "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw=="
+ },
+ "nise": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/nise/-/nise-4.0.3.tgz",
+ "integrity": "sha512-EGlhjm7/4KvmmE6B/UFsKh7eHykRl9VH+au8dduHLCyWUO/hr7+N+WtTvDUwc9zHuM1IaIJs/0lQ6Ag1jDkQSg==",
+ "dev": true,
+ "requires": {
+ "@sinonjs/commons": "^1.7.0",
+ "@sinonjs/fake-timers": "^6.0.0",
+ "@sinonjs/text-encoding": "^0.7.1",
+ "just-extend": "^4.0.2",
+ "path-to-regexp": "^1.7.0"
+ },
+ "dependencies": {
+ "isarray": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
+ "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=",
+ "dev": true
+ },
+ "path-to-regexp": {
+ "version": "1.8.0",
+ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz",
+ "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==",
+ "dev": true,
+ "requires": {
+ "isarray": "0.0.1"
+ }
+ }
+ }
+ },
+ "node-fetch": {
+ "version": "2.6.1",
+ "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz",
+ "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw=="
+ },
+ "node-forge": {
+ "version": "0.10.0",
+ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz",
+ "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA=="
+ },
+ "node-pre-gyp": {
+ "version": "0.16.0",
+ "resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.16.0.tgz",
+ "integrity": "sha512-4efGA+X/YXAHLi1hN8KaPrILULaUn2nWecFrn1k2I+99HpoyvcOGEbtcOxpDiUwPF2ZANMJDh32qwOUPenuR1g==",
+ "requires": {
+ "detect-libc": "^1.0.2",
+ "mkdirp": "^0.5.3",
+ "needle": "^2.5.0",
+ "nopt": "^4.0.1",
+ "npm-packlist": "^1.1.6",
+ "npmlog": "^4.0.2",
+ "rc": "^1.2.7",
+ "rimraf": "^2.6.1",
+ "semver": "^5.3.0",
+ "tar": "^4.4.2"
+ },
+ "dependencies": {
+ "glob": {
+ "version": "7.1.6",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
+ "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
+ "requires": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.0.4",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ }
+ },
+ "rimraf": {
+ "version": "2.7.1",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
+ "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
+ "requires": {
+ "glob": "^7.1.3"
+ }
+ },
+ "semver": {
+ "version": "5.7.1",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
+ "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ=="
+ }
+ }
+ },
+ "nopt": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.3.tgz",
+ "integrity": "sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==",
+ "requires": {
+ "abbrev": "1",
+ "osenv": "^0.1.4"
+ }
+ },
+ "normalize-package-data": {
+ "version": "2.5.0",
+ "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz",
+ "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==",
+ "dev": true,
+ "requires": {
+ "hosted-git-info": "^2.1.4",
+ "resolve": "^1.10.0",
+ "semver": "2 || 3 || 4 || 5",
+ "validate-npm-package-license": "^3.0.1"
+ },
+ "dependencies": {
+ "semver": {
+ "version": "5.7.1",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
+ "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
+ "dev": true
+ }
+ }
+ },
+ "normalize-path": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
+ "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
+ "dev": true
+ },
+ "npm-bundled": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz",
+ "integrity": "sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==",
+ "requires": {
+ "npm-normalize-package-bin": "^1.0.1"
+ }
+ },
+ "npm-normalize-package-bin": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz",
+ "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA=="
+ },
+ "npm-packlist": {
+ "version": "1.4.8",
+ "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.8.tgz",
+ "integrity": "sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==",
+ "requires": {
+ "ignore-walk": "^3.0.1",
+ "npm-bundled": "^1.0.1",
+ "npm-normalize-package-bin": "^1.0.1"
+ }
+ },
+ "npmlog": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz",
+ "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==",
+ "requires": {
+ "are-we-there-yet": "~1.1.2",
+ "console-control-strings": "~1.1.0",
+ "gauge": "~2.7.3",
+ "set-blocking": "~2.0.0"
+ }
+ },
+ "number-is-nan": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz",
+ "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0="
+ },
+ "oauth-sign": {
+ "version": "0.9.0",
+ "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
+ "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ=="
+ },
+ "object-assign": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
+ "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
+ },
+ "object-inspect": {
+ "version": "1.11.0",
+ "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz",
+ "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==",
+ "dev": true
+ },
+ "object-keys": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
+ "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
+ "dev": true
+ },
+ "object.assign": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz",
+ "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.0",
+ "define-properties": "^1.1.3",
+ "has-symbols": "^1.0.1",
+ "object-keys": "^1.1.1"
+ }
+ },
+ "object.values": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.4.tgz",
+ "integrity": "sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2",
+ "define-properties": "^1.1.3",
+ "es-abstract": "^1.18.2"
+ }
+ },
+ "on-finished": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz",
+ "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==",
+ "requires": {
+ "ee-first": "1.1.1"
+ }
+ },
+ "on-headers": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz",
+ "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA=="
+ },
+ "once": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+ "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
+ "requires": {
+ "wrappy": "1"
+ }
+ },
+ "optional-require": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/optional-require/-/optional-require-1.0.3.tgz",
+ "integrity": "sha512-RV2Zp2MY2aeYK5G+B/Sps8lW5NHAzE5QClbFP15j+PWmP+T9PxlJXBOOLoSAdgwFvS4t0aMR4vpedMkbHfh0nA=="
+ },
+ "optionator": {
+ "version": "0.9.1",
+ "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz",
+ "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==",
+ "dev": true,
+ "requires": {
+ "deep-is": "^0.1.3",
+ "fast-levenshtein": "^2.0.6",
+ "levn": "^0.4.1",
+ "prelude-ls": "^1.2.1",
+ "type-check": "^0.4.0",
+ "word-wrap": "^1.2.3"
+ }
+ },
+ "os-homedir": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz",
+ "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M="
+ },
+ "os-tmpdir": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
+ "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ="
+ },
+ "osenv": {
+ "version": "0.1.5",
+ "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz",
+ "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==",
+ "requires": {
+ "os-homedir": "^1.0.0",
+ "os-tmpdir": "^1.0.0"
+ }
+ },
+ "p-limit": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
+ "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
+ "requires": {
+ "yocto-queue": "^0.1.0"
+ }
+ },
+ "p-locate": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz",
+ "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=",
+ "dev": true
+ },
+ "p-map": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz",
+ "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw=="
+ },
+ "parent-module": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
+ "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
+ "dev": true,
+ "requires": {
+ "callsites": "^3.0.0"
+ }
+ },
+ "parse-duration": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-1.0.0.tgz",
+ "integrity": "sha512-X4kUkCTHU1N/kEbwK9FpUJ0UZQa90VzeczfS704frR30gljxDG0pSziws06XlK+CGRSo/1wtG1mFIdBFQTMQNw=="
+ },
+ "parse-json": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz",
+ "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=",
+ "dev": true,
+ "requires": {
+ "error-ex": "^1.3.1",
+ "json-parse-better-errors": "^1.0.1"
+ }
+ },
+ "parse-ms": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz",
+ "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA=="
+ },
+ "parseurl": {
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
+ "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="
+ },
+ "path-exists": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz",
+ "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=",
+ "dev": true
+ },
+ "path-is-absolute": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+ "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
+ },
+ "path-key": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
+ "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+ "dev": true
+ },
+ "path-parse": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz",
+ "integrity": "sha1-1i27VnlAXXLEc37FhgDp3c8G0kw="
+ },
+ "path-to-regexp": {
+ "version": "0.1.7",
+ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
+ "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w="
+ },
+ "path-type": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz",
+ "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==",
+ "dev": true,
+ "requires": {
+ "pify": "^3.0.0"
+ },
+ "dependencies": {
+ "pify": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz",
+ "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=",
+ "dev": true
+ }
+ }
+ },
+ "pathval": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz",
+ "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==",
+ "dev": true
+ },
+ "performance-now": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
+ "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns="
+ },
+ "picomatch": {
+ "version": "2.2.2",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz",
+ "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==",
+ "dev": true
+ },
+ "pify": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz",
+ "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA=="
+ },
+ "pkg-dir": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz",
+ "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=",
+ "dev": true,
+ "requires": {
+ "find-up": "^2.1.0"
+ },
+ "dependencies": {
+ "p-limit": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz",
+ "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==",
+ "requires": {
+ "p-try": "^1.0.0"
+ }
+ }
+ }
+ },
+ "pkg-up": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-2.0.0.tgz",
+ "integrity": "sha1-yBmscoBZpGHKscOImivjxJoATX8=",
+ "dev": true,
+ "requires": {
+ "find-up": "^2.1.0"
+ },
+ "dependencies": {
+ "p-limit": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz",
+ "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==",
+ "requires": {
+ "p-try": "^1.0.0"
+ }
+ }
+ }
+ },
+ "pprof": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/pprof/-/pprof-3.0.0.tgz",
+ "integrity": "sha512-uPWbAhoH/zvq1kM3/Fd/wshb4D7sLlGap8t6uCTER4aZRWqqyPYgXzpjWbT0Unn5U25pEy2VREUu27nQ9o9VPA==",
+ "requires": {
+ "bindings": "^1.2.1",
+ "delay": "^4.0.1",
+ "findit2": "^2.2.3",
+ "nan": "^2.14.0",
+ "node-pre-gyp": "^0.16.0",
+ "p-limit": "^3.0.0",
+ "pify": "^5.0.0",
+ "protobufjs": "~6.10.0",
+ "source-map": "^0.7.3",
+ "split": "^1.0.1"
+ },
+ "dependencies": {
+ "@types/node": {
+ "version": "13.13.48",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz",
+ "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ=="
+ },
+ "delay": {
+ "version": "4.4.1",
+ "resolved": "https://registry.npmjs.org/delay/-/delay-4.4.1.tgz",
+ "integrity": "sha512-aL3AhqtfhOlT/3ai6sWXeqwnw63ATNpnUiN4HL7x9q+My5QtHlO3OIkasmug9LKzpheLdmUKGRKnYXYAS7FQkQ=="
+ },
+ "source-map": {
+ "version": "0.7.3",
+ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
+ "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ=="
+ }
+ }
+ },
+ "prelude-ls": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
+ "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
+ "dev": true
+ },
+ "prettier": {
+ "version": "2.3.2",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.3.2.tgz",
+ "integrity": "sha512-lnJzDfJ66zkMy58OL5/NY5zp70S7Nz6KqcKkXYzn2tMVrNxvbqaBpg7H3qHaLxCJ5lNMsGuM8+ohS7cZrthdLQ==",
+ "dev": true
+ },
+ "prettier-linter-helpers": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz",
+ "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==",
+ "dev": true,
+ "requires": {
+ "fast-diff": "^1.1.2"
+ }
+ },
+ "pretty-ms": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz",
+ "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==",
+ "requires": {
+ "parse-ms": "^2.1.0"
+ }
+ },
+ "process-nextick-args": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
+ "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="
+ },
+ "progress": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz",
+ "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==",
+ "dev": true
+ },
+ "prom-client": {
+ "version": "11.5.3",
+ "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.3.tgz",
+ "integrity": "sha512-iz22FmTbtkyL2vt0MdDFY+kWof+S9UB/NACxSn2aJcewtw+EERsen0urSkZ2WrHseNdydsvcxCTAnPcSMZZv4Q==",
+ "requires": {
+ "tdigest": "^0.1.1"
+ }
+ },
+ "protobufjs": {
+ "version": "6.10.2",
+ "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz",
+ "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==",
+ "requires": {
+ "@protobufjs/aspromise": "^1.1.2",
+ "@protobufjs/base64": "^1.1.2",
+ "@protobufjs/codegen": "^2.0.4",
+ "@protobufjs/eventemitter": "^1.1.0",
+ "@protobufjs/fetch": "^1.1.0",
+ "@protobufjs/float": "^1.0.2",
+ "@protobufjs/inquire": "^1.1.0",
+ "@protobufjs/path": "^1.1.2",
+ "@protobufjs/pool": "^1.1.0",
+ "@protobufjs/utf8": "^1.1.0",
+ "@types/long": "^4.0.1",
+ "@types/node": "^13.7.0",
+ "long": "^4.0.0"
+ },
+ "dependencies": {
+ "@types/node": {
+ "version": "13.13.48",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.48.tgz",
+ "integrity": "sha512-z8wvSsgWQzkr4sVuMEEOvwMdOQjiRY2Y/ZW4fDfjfe3+TfQrZqFKOthBgk2RnVEmtOKrkwdZ7uTvsxTBLjKGDQ=="
+ }
+ }
+ },
+ "proxy-addr": {
+ "version": "2.0.6",
+ "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz",
+ "integrity": "sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==",
+ "requires": {
+ "forwarded": "~0.1.2",
+ "ipaddr.js": "1.9.1"
+ }
+ },
+ "psl": {
+ "version": "1.7.0",
+ "resolved": "https://registry.npmjs.org/psl/-/psl-1.7.0.tgz",
+ "integrity": "sha512-5NsSEDv8zY70ScRnOTn7bK7eanl2MvFrOrS/R6x+dBt5g1ghnj9Zv90kO8GwT8gxcu2ANyFprnFYB85IogIJOQ=="
+ },
+ "pump": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
+ "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==",
+ "requires": {
+ "end-of-stream": "^1.1.0",
+ "once": "^1.3.1"
+ }
+ },
+ "pumpify": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-2.0.1.tgz",
+ "integrity": "sha512-m7KOje7jZxrmutanlkS1daj1dS6z6BgslzOXmcSEpIlCxM3VJH7lG5QLeck/6hgF6F4crFf01UtQmNsJfweTAw==",
+ "requires": {
+ "duplexify": "^4.1.1",
+ "inherits": "^2.0.3",
+ "pump": "^3.0.0"
+ }
+ },
+ "punycode": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
+ "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
+ },
+ "qs": {
+ "version": "6.7.0",
+ "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz",
+ "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ=="
+ },
+ "ramda": {
+ "version": "0.27.1",
+ "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.27.1.tgz",
+ "integrity": "sha512-PgIdVpn5y5Yns8vqb8FzBUEYn98V3xcPgawAkkgj0YJ0qDsnHCiNmZYfOGMgOvoB0eWFLpYbhxUR3mxfDIMvpw==",
+ "dev": true
+ },
+ "randombytes": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
+ "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==",
+ "dev": true,
+ "requires": {
+ "safe-buffer": "^5.1.0"
+ }
+ },
+ "range-parser": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz",
+ "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="
+ },
+ "raven": {
+ "version": "2.6.4",
+ "resolved": "https://registry.npmjs.org/raven/-/raven-2.6.4.tgz",
+ "integrity": "sha512-6PQdfC4+DQSFncowthLf+B6Hr0JpPsFBgTVYTAOq7tCmx/kR4SXbeawtPch20+3QfUcQDoJBLjWW1ybvZ4kXTw==",
+ "requires": {
+ "cookie": "0.3.1",
+ "md5": "^2.2.1",
+ "stack-trace": "0.0.10",
+ "timed-out": "4.0.1",
+ "uuid": "3.3.2"
+ },
+ "dependencies": {
+ "cookie": {
+ "version": "0.3.1",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz",
+ "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s="
+ },
+ "stack-trace": {
+ "version": "0.0.10",
+ "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz",
+ "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA="
+ },
+ "uuid": {
+ "version": "3.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
+ "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA=="
+ }
+ }
+ },
+ "raw-body": {
+ "version": "2.4.0",
+ "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz",
+ "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==",
+ "requires": {
+ "bytes": "3.1.0",
+ "http-errors": "1.7.2",
+ "iconv-lite": "0.4.24",
+ "unpipe": "1.0.0"
+ }
+ },
+ "rc": {
+ "version": "1.2.8",
+ "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
+ "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==",
+ "requires": {
+ "deep-extend": "^0.6.0",
+ "ini": "~1.3.0",
+ "minimist": "^1.2.0",
+ "strip-json-comments": "~2.0.1"
+ },
+ "dependencies": {
+ "strip-json-comments": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
+ "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo="
+ }
+ }
+ },
+ "read-pkg": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz",
+ "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=",
+ "dev": true,
+ "requires": {
+ "load-json-file": "^4.0.0",
+ "normalize-package-data": "^2.3.2",
+ "path-type": "^3.0.0"
+ }
+ },
+ "read-pkg-up": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz",
+ "integrity": "sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=",
+ "dev": true,
+ "requires": {
+ "find-up": "^2.0.0",
+ "read-pkg": "^3.0.0"
+ },
+ "dependencies": {
+ "p-limit": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz",
+ "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==",
+ "requires": {
+ "p-try": "^1.0.0"
+ }
+ }
+ }
+ },
+ "readable-stream": {
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
+ "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
+ "requires": {
+ "inherits": "^2.0.3",
+ "string_decoder": "^1.1.1",
+ "util-deprecate": "^1.0.1"
+ }
+ },
+ "readdirp": {
+ "version": "3.5.0",
+ "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz",
+ "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==",
+ "dev": true,
+ "requires": {
+ "picomatch": "^2.2.1"
+ }
+ },
+ "redis-commands": {
+ "version": "1.7.0",
+ "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.7.0.tgz",
+ "integrity": "sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ=="
+ },
+ "redis-errors": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz",
+ "integrity": "sha1-62LSrbFeTq9GEMBK/hUpOEJQq60="
+ },
+ "redis-parser": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz",
+ "integrity": "sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ=",
+ "requires": {
+ "redis-errors": "^1.0.0"
+ }
+ },
+ "regexpp": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz",
+ "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==",
+ "dev": true
+ },
+ "request": {
+ "version": "2.88.2",
+ "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
+ "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
+ "requires": {
+ "aws-sign2": "~0.7.0",
+ "aws4": "^1.8.0",
+ "caseless": "~0.12.0",
+ "combined-stream": "~1.0.6",
+ "extend": "~3.0.2",
+ "forever-agent": "~0.6.1",
+ "form-data": "~2.3.2",
+ "har-validator": "~5.1.3",
+ "http-signature": "~1.2.0",
+ "is-typedarray": "~1.0.0",
+ "isstream": "~0.1.2",
+ "json-stringify-safe": "~5.0.1",
+ "mime-types": "~2.1.19",
+ "oauth-sign": "~0.9.0",
+ "performance-now": "^2.1.0",
+ "qs": "~6.5.2",
+ "safe-buffer": "^5.1.2",
+ "tough-cookie": "~2.5.0",
+ "tunnel-agent": "^0.6.0",
+ "uuid": "^3.3.2"
+ },
+ "dependencies": {
+ "qs": {
+ "version": "6.5.2",
+ "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz",
+ "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA=="
+ }
+ }
+ },
+ "requestretry": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-4.1.2.tgz",
+ "integrity": "sha512-N1WAp+8eOy8NfsVBChcSxNCKvPY1azOpliQ4Sby4WDe0HFEhdKywlNZeROMBQ+BI3Jpc0eNOT1KVFGREawtahA==",
+ "requires": {
+ "extend": "^3.0.2",
+ "lodash": "^4.17.15",
+ "when": "^3.7.7"
+ }
+ },
+ "require-directory": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+ "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=",
+ "dev": true
+ },
+ "require-from-string": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
+ "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
+ "dev": true
+ },
+ "require-in-the-middle": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.1.0.tgz",
+ "integrity": "sha512-M2rLKVupQfJ5lf9OvqFGIT+9iVLnTmjgbOmpil12hiSQNn5zJTKGPoIisETNjfK+09vP3rpm1zJajmErpr2sEQ==",
+ "requires": {
+ "debug": "^4.1.1",
+ "module-details-from-path": "^1.0.3",
+ "resolve": "^1.12.0"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "4.3.1",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
+ "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
+ "requires": {
+ "ms": "2.1.2"
+ }
+ },
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
+ }
+ }
+ },
+ "require-like": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz",
+ "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=",
+ "dev": true
+ },
+ "resolve": {
+ "version": "1.15.1",
+ "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz",
+ "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==",
+ "requires": {
+ "path-parse": "^1.0.6"
+ }
+ },
+ "resolve-from": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
+ "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
+ "dev": true
+ },
+ "retry-request": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.1.1.tgz",
+ "integrity": "sha512-BINDzVtLI2BDukjWmjAIRZ0oglnCAkpP2vQjM3jdLhmT62h0xnQgciPwBRDAvHqpkPT2Wo1XuUyLyn6nbGrZQQ==",
+ "requires": {
+ "debug": "^4.1.1",
+ "through2": "^3.0.1"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
+ "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==",
+ "requires": {
+ "ms": "^2.1.1"
+ }
+ },
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
+ }
+ }
+ },
+ "rimraf": {
+ "version": "2.4.5",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz",
+ "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=",
+ "optional": true,
+ "requires": {
+ "glob": "^6.0.1"
+ }
+ },
+ "safe-buffer": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
+ "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
+ },
+ "safe-json-stringify": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz",
+ "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==",
+ "optional": true
+ },
+ "safer-buffer": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
+ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
+ },
+ "sandboxed-module": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.4.tgz",
+ "integrity": "sha512-AwEPOdO8mg/wJjr876yCHP2DHqVN0MaggEXhp6IIf3bcI5cYoQl9QrrCHSrvToHjvdEiS5x4TVZRgjD2bEmNTA==",
+ "dev": true,
+ "requires": {
+ "require-like": "0.1.2",
+ "stack-trace": "0.0.9"
+ }
+ },
+ "saslprep": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz",
+ "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==",
+ "optional": true,
+ "requires": {
+ "sparse-bitfield": "^3.0.3"
+ }
+ },
+ "sax": {
+ "version": "1.2.4",
+ "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
+ "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
+ },
+ "semver": {
+ "version": "6.3.0",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+ "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw=="
+ },
+ "send": {
+ "version": "0.17.1",
+ "resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz",
+ "integrity": "sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==",
+ "requires": {
+ "debug": "2.6.9",
+ "depd": "~1.1.2",
+ "destroy": "~1.0.4",
+ "encodeurl": "~1.0.2",
+ "escape-html": "~1.0.3",
+ "etag": "~1.8.1",
+ "fresh": "0.5.2",
+ "http-errors": "~1.7.2",
+ "mime": "1.6.0",
+ "ms": "2.1.1",
+ "on-finished": "~2.3.0",
+ "range-parser": "~1.2.1",
+ "statuses": "~1.5.0"
+ },
+ "dependencies": {
+ "ms": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz",
+ "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg=="
+ }
+ }
+ },
+ "serialize-javascript": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz",
+ "integrity": "sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==",
+ "dev": true,
+ "requires": {
+ "randombytes": "^2.1.0"
+ }
+ },
+ "serve-static": {
+ "version": "1.14.1",
+ "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz",
+ "integrity": "sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==",
+ "requires": {
+ "encodeurl": "~1.0.2",
+ "escape-html": "~1.0.3",
+ "parseurl": "~1.3.3",
+ "send": "0.17.1"
+ }
+ },
+ "set-blocking": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
+ "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc="
+ },
+ "setprototypeof": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz",
+ "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw=="
+ },
+ "shebang-command": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+ "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+ "dev": true,
+ "requires": {
+ "shebang-regex": "^3.0.0"
+ }
+ },
+ "shebang-regex": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
+ "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+ "dev": true
+ },
+ "shimmer": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz",
+ "integrity": "sha1-YQhZ994ye1h+/r9QH7QxF/mv8zc="
+ },
+ "signal-exit": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz",
+ "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0="
+ },
+ "sinon": {
+ "version": "9.0.2",
+ "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.2.tgz",
+ "integrity": "sha512-0uF8Q/QHkizNUmbK3LRFqx5cpTttEVXudywY9Uwzy8bTfZUhljZ7ARzSxnRHWYWtVTeh4Cw+tTb3iU21FQVO9A==",
+ "dev": true,
+ "requires": {
+ "@sinonjs/commons": "^1.7.2",
+ "@sinonjs/fake-timers": "^6.0.1",
+ "@sinonjs/formatio": "^5.0.1",
+ "@sinonjs/samsam": "^5.0.3",
+ "diff": "^4.0.2",
+ "nise": "^4.0.1",
+ "supports-color": "^7.1.0"
+ },
+ "dependencies": {
+ "diff": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
+ "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
+ "dev": true
+ },
+ "has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true
+ },
+ "supports-color": {
+ "version": "7.1.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz",
+ "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^4.0.0"
+ }
+ }
+ }
+ },
+ "slice-ansi": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz",
+ "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^4.0.0",
+ "astral-regex": "^2.0.0"
+ },
+ "dependencies": {
+ "ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true
+ }
+ }
+ },
+ "snakecase-keys": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.2.1.tgz",
+ "integrity": "sha512-CjU5pyRfwOtaOITYv5C8DzpZ8XA/ieRsDpr93HI2r6e3YInC6moZpSQbmUtg8cTk58tq2x3jcG2gv+p1IZGmMA==",
+ "requires": {
+ "map-obj": "^4.1.0",
+ "to-snake-case": "^1.0.0"
+ }
+ },
+ "source-map": {
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
+ "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
+ },
+ "source-map-support": {
+ "version": "0.5.19",
+ "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz",
+ "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==",
+ "requires": {
+ "buffer-from": "^1.0.0",
+ "source-map": "^0.6.0"
+ }
+ },
+ "sparse-bitfield": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz",
+ "integrity": "sha1-/0rm5oZWBWuks+eSqzM004JzyhE=",
+ "optional": true,
+ "requires": {
+ "memory-pager": "^1.0.2"
+ }
+ },
+ "spdx-correct": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz",
+ "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==",
+ "dev": true,
+ "requires": {
+ "spdx-expression-parse": "^3.0.0",
+ "spdx-license-ids": "^3.0.0"
+ }
+ },
+ "spdx-exceptions": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz",
+ "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==",
+ "dev": true
+ },
+ "spdx-expression-parse": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz",
+ "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==",
+ "dev": true,
+ "requires": {
+ "spdx-exceptions": "^2.1.0",
+ "spdx-license-ids": "^3.0.0"
+ }
+ },
+ "spdx-license-ids": {
+ "version": "3.0.9",
+ "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.9.tgz",
+ "integrity": "sha512-Ki212dKK4ogX+xDo4CtOZBVIwhsKBEfsEEcwmJfLQzirgc2jIWdzg40Unxz/HzEUqM1WFzVlQSMF9kZZ2HboLQ==",
+ "dev": true
+ },
+ "split": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz",
+ "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==",
+ "requires": {
+ "through": "2"
+ }
+ },
+ "sprintf-js": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
+ "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=",
+ "dev": true
+ },
+ "sshpk": {
+ "version": "1.16.1",
+ "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
+ "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==",
+ "requires": {
+ "asn1": "~0.2.3",
+ "assert-plus": "^1.0.0",
+ "bcrypt-pbkdf": "^1.0.0",
+ "dashdash": "^1.12.0",
+ "ecc-jsbn": "~0.1.1",
+ "getpass": "^0.1.1",
+ "jsbn": "~0.1.0",
+ "safer-buffer": "^2.0.2",
+ "tweetnacl": "~0.14.0"
+ }
+ },
+ "stack-trace": {
+ "version": "0.0.9",
+ "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz",
+ "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=",
+ "dev": true
+ },
+ "standard-as-callback": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz",
+ "integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A=="
+ },
+ "statuses": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz",
+ "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow="
+ },
+ "stream-events": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz",
+ "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==",
+ "requires": {
+ "stubs": "^3.0.0"
+ }
+ },
+ "stream-shift": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz",
+ "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ=="
+ },
+ "string-width": {
+ "version": "4.2.2",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
+ "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==",
+ "dev": true,
+ "requires": {
+ "emoji-regex": "^8.0.0",
+ "strip-ansi": "^6.0.0"
+ }
+ },
+ "string.prototype.trimend": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz",
+ "integrity": "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2",
+ "define-properties": "^1.1.3"
+ }
+ },
+ "string.prototype.trimstart": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz",
+ "integrity": "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2",
+ "define-properties": "^1.1.3"
+ }
+ },
+ "string_decoder": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
+ "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
+ "requires": {
+ "safe-buffer": "~5.1.0"
+ }
+ },
+ "strip-ansi": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz",
+ "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^5.0.0"
+ }
+ },
+ "strip-bom": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz",
+ "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=",
+ "dev": true
+ },
+ "strip-json-comments": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
+ "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
+ "dev": true
+ },
+ "stubs": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
+ "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls="
+ },
+ "supports-color": {
+ "version": "5.4.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz",
+ "integrity": "sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^3.0.0"
+ }
+ },
+ "table": {
+ "version": "6.7.1",
+ "resolved": "https://registry.npmjs.org/table/-/table-6.7.1.tgz",
+ "integrity": "sha512-ZGum47Yi6KOOFDE8m223td53ath2enHcYLgOCjGr5ngu8bdIARQk6mN/wRMv4yMRcHnCSnHbCEha4sobQx5yWg==",
+ "dev": true,
+ "requires": {
+ "ajv": "^8.0.1",
+ "lodash.clonedeep": "^4.5.0",
+ "lodash.truncate": "^4.4.2",
+ "slice-ansi": "^4.0.0",
+ "string-width": "^4.2.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "dependencies": {
+ "ajv": {
+ "version": "8.6.1",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.6.1.tgz",
+ "integrity": "sha512-42VLtQUOLefAvKFAQIxIZDaThq6om/PrfP0CYk3/vn+y4BMNkKnbli8ON2QCiHov4KkzOSJ/xSoBJdayiiYvVQ==",
+ "dev": true,
+ "requires": {
+ "fast-deep-equal": "^3.1.1",
+ "require-from-string": "^2.0.2",
+ "uri-js": "^4.2.2"
+ }
+ }
+ }
+ },
+ "tar": {
+ "version": "4.4.13",
+ "resolved": "https://registry.npmjs.org/tar/-/tar-4.4.13.tgz",
+ "integrity": "sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==",
+ "requires": {
+ "chownr": "^1.1.1",
+ "fs-minipass": "^1.2.5",
+ "minipass": "^2.8.6",
+ "minizlib": "^1.2.1",
+ "mkdirp": "^0.5.0",
+ "safe-buffer": "^5.1.2",
+ "yallist": "^3.0.3"
+ }
+ },
+ "tdigest": {
+ "version": "0.1.1",
+ "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz",
+ "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=",
+ "requires": {
+ "bintrees": "1.0.1"
+ }
+ },
+ "teeny-request": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz",
+ "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==",
+ "requires": {
+ "http-proxy-agent": "^4.0.0",
+ "https-proxy-agent": "^5.0.0",
+ "node-fetch": "^2.6.1",
+ "stream-events": "^1.0.5",
+ "uuid": "^8.0.0"
+ },
+ "dependencies": {
+ "uuid": {
+ "version": "8.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+ "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
+ }
+ }
+ },
+ "text-table": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
+ "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=",
+ "dev": true
+ },
+ "through": {
+ "version": "2.3.8",
+ "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
+ "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU="
+ },
+ "through2": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz",
+ "integrity": "sha512-M96dvTalPT3YbYLaKaCuwu+j06D/8Jfib0o/PxbVt6Amhv3dUAtW6rTV1jPgJSBG83I/e04Y6xkVdVhSRhi0ww==",
+ "requires": {
+ "readable-stream": "2 || 3"
+ },
+ "dependencies": {
+ "safe-buffer": {
+ "version": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz",
+ "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg=="
+ }
+ }
+ },
+ "timed-out": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz",
+ "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8="
+ },
+ "timekeeper": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz",
+ "integrity": "sha512-W3AmPTJWZkRwu+iSNxPIsLZ2ByADsOLbbLxe46UJyWj3mlYLlwucKiq+/dPm0l9wTzqoF3/2PH0AGFCebjq23A==",
+ "dev": true
+ },
+ "to-no-case": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz",
+ "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo="
+ },
+ "to-regex-range": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
+ "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
+ "dev": true,
+ "requires": {
+ "is-number": "^7.0.0"
+ }
+ },
+ "to-snake-case": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz",
+ "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=",
+ "requires": {
+ "to-space-case": "^1.0.0"
+ }
+ },
+ "to-space-case": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz",
+ "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=",
+ "requires": {
+ "to-no-case": "^1.0.0"
+ }
+ },
+ "toidentifier": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz",
+ "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw=="
+ },
+ "tough-cookie": {
+ "version": "2.5.0",
+ "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
+ "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
+ "requires": {
+ "psl": "^1.1.28",
+ "punycode": "^2.1.1"
+ }
+ },
+ "tsconfig-paths": {
+ "version": "3.10.1",
+ "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.10.1.tgz",
+ "integrity": "sha512-rETidPDgCpltxF7MjBZlAFPUHv5aHH2MymyPvh+vEyWAED4Eb/WeMbsnD/JDr4OKPOA1TssDHgIcpTN5Kh0p6Q==",
+ "dev": true,
+ "requires": {
+ "json5": "^2.2.0",
+ "minimist": "^1.2.0",
+ "strip-bom": "^3.0.0"
+ }
+ },
+ "tunnel-agent": {
+ "version": "0.6.0",
+ "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
+ "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==",
+ "requires": {
+ "safe-buffer": "^5.0.1"
+ }
+ },
+ "tweetnacl": {
+ "version": "0.14.5",
+ "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
+ "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q="
+ },
+ "type-check": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
+ "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
+ "dev": true,
+ "requires": {
+ "prelude-ls": "^1.2.1"
+ }
+ },
+ "type-detect": {
+ "version": "4.0.8",
+ "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz",
+ "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==",
+ "dev": true
+ },
+ "type-fest": {
+ "version": "0.20.2",
+ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
+ "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
+ "dev": true
+ },
+ "type-is": {
+ "version": "1.6.18",
+ "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
+ "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
+ "requires": {
+ "media-typer": "0.3.0",
+ "mime-types": "~2.1.24"
+ }
+ },
+ "unbox-primitive": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz",
+ "integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==",
+ "dev": true,
+ "requires": {
+ "function-bind": "^1.1.1",
+ "has-bigints": "^1.0.1",
+ "has-symbols": "^1.0.2",
+ "which-boxed-primitive": "^1.0.2"
+ },
+ "dependencies": {
+ "has-symbols": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz",
+ "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==",
+ "dev": true
+ }
+ }
+ },
+ "underscore": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz",
+ "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag="
+ },
+ "unpipe": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
+ "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw="
+ },
+ "uri-js": {
+ "version": "4.2.2",
+ "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz",
+ "integrity": "sha1-lMVA4f93KVbiKZUHwBCupsiDjrA=",
+ "requires": {
+ "punycode": "^2.1.0"
+ }
+ },
+ "util-deprecate": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
+ "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8="
+ },
+ "utils-merge": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
+ "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM="
+ },
+ "uuid": {
+ "version": "3.4.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
+ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
+ },
+ "v8-compile-cache": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz",
+ "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==",
+ "dev": true
+ },
+ "validate-npm-package-license": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz",
+ "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==",
+ "dev": true,
+ "requires": {
+ "spdx-correct": "^3.0.0",
+ "spdx-expression-parse": "^3.0.0"
+ }
+ },
+ "vary": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
+ "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw="
+ },
+ "verror": {
+ "version": "1.10.0",
+ "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
+ "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=",
+ "requires": {
+ "assert-plus": "^1.0.0",
+ "core-util-is": "1.0.2",
+ "extsprintf": "^1.2.0"
+ }
+ },
+ "when": {
+ "version": "3.7.8",
+ "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz",
+ "integrity": "sha1-xxMLan6gRpPoQs3J56Hyqjmjn4I="
+ },
+ "which": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+ "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "dev": true,
+ "requires": {
+ "isexe": "^2.0.0"
+ }
+ },
+ "which-boxed-primitive": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz",
+ "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==",
+ "dev": true,
+ "requires": {
+ "is-bigint": "^1.0.1",
+ "is-boolean-object": "^1.1.0",
+ "is-number-object": "^1.0.4",
+ "is-string": "^1.0.5",
+ "is-symbol": "^1.0.3"
+ }
+ },
+ "wide-align": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz",
+ "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==",
+ "requires": {
+ "string-width": "^1.0.2 || 2"
+ },
+ "dependencies": {
+ "ansi-regex": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz",
+ "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg="
+ },
+ "string-width": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz",
+ "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==",
+ "requires": {
+ "is-fullwidth-code-point": "^2.0.0",
+ "strip-ansi": "^4.0.0"
+ }
+ },
+ "strip-ansi": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
+ "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=",
+ "requires": {
+ "ansi-regex": "^3.0.0"
+ }
+ }
+ }
+ },
+ "word-wrap": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
+ "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
+ "dev": true
+ },
+ "workerpool": {
+ "version": "6.1.0",
+ "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.1.0.tgz",
+ "integrity": "sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg==",
+ "dev": true
+ },
+ "wrap-ansi": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "dev": true,
+ "requires": {
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ }
+ },
+ "wrappy": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
+ },
+ "y18n": {
+ "version": "5.0.6",
+ "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.6.tgz",
+ "integrity": "sha512-PlVX4Y0lDTN6E2V4ES2tEdyvXkeKzxa8c/vo0pxPr/TqbztddTP0yn7zZylIyiAuxerqj0Q5GhpJ1YJCP8LaZQ==",
+ "dev": true
+ },
+ "yallist": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
+ "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="
+ },
+ "yargs": {
+ "version": "16.2.0",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
+ "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
+ "dev": true,
+ "requires": {
+ "cliui": "^7.0.2",
+ "escalade": "^3.1.1",
+ "get-caller-file": "^2.0.5",
+ "require-directory": "^2.1.1",
+ "string-width": "^4.2.0",
+ "y18n": "^5.0.5",
+ "yargs-parser": "^20.2.2"
+ }
+ },
+ "yargs-parser": {
+ "version": "20.2.4",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz",
+ "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==",
+ "dev": true
+ },
+ "yargs-unparser": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz",
+ "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==",
+ "dev": true,
+ "requires": {
+ "camelcase": "^6.0.0",
+ "decamelize": "^4.0.0",
+ "flat": "^5.0.2",
+ "is-plain-obj": "^2.1.0"
+ }
+ },
+ "yn": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
+ "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q=="
+ },
+ "yocto-queue": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
+ "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="
+ }
+ }
+}
diff --git a/services/document-updater/package.json b/services/document-updater/package.json
new file mode 100644
index 0000000000..c46eb73c60
--- /dev/null
+++ b/services/document-updater/package.json
@@ -0,0 +1,57 @@
+{
+ "name": "document-updater-sharelatex",
+ "version": "0.1.4",
+ "description": "An API for applying incoming updates to documents in real-time",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/sharelatex/document-updater-sharelatex.git"
+ },
+ "scripts": {
+ "start": "node $NODE_APP_OPTIONS app.js",
+ "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
+ "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
+ "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
+ "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
+ "nodemon": "nodemon --config nodemon.json",
+ "lint": "eslint --max-warnings 0 --format unix .",
+ "format": "prettier --list-different $PWD/'**/*.js'",
+ "format:fix": "prettier --write $PWD/'**/*.js'",
+ "lint:fix": "eslint --fix ."
+ },
+ "dependencies": {
+ "@overleaf/metrics": "^3.5.1",
+ "@overleaf/o-error": "^3.3.1",
+ "@overleaf/redis-wrapper": "^2.0.1",
+ "@overleaf/settings": "^2.1.1",
+ "async": "^2.5.0",
+ "body-parser": "^1.19.0",
+ "bunyan": "^1.8.15",
+ "diff-match-patch": "https://github.com/overleaf/diff-match-patch/archive/89805f9c671a77a263fc53461acd62aa7498f688.tar.gz",
+ "express": "4.17.1",
+ "lodash": "^4.17.21",
+ "logger-sharelatex": "^2.2.0",
+ "mongodb": "^3.6.6",
+ "request": "^2.88.2",
+ "requestretry": "^4.1.2"
+ },
+ "devDependencies": {
+ "chai": "^4.2.0",
+ "chai-as-promised": "^7.1.1",
+ "cluster-key-slot": "^1.0.5",
+ "eslint": "^7.21.0",
+ "eslint-config-prettier": "^8.1.0",
+ "eslint-config-standard": "^16.0.2",
+ "eslint-plugin-chai-expect": "^2.2.0",
+ "eslint-plugin-chai-friendly": "^0.6.0",
+ "eslint-plugin-import": "^2.22.1",
+ "eslint-plugin-mocha": "^8.0.0",
+ "eslint-plugin-node": "^11.1.0",
+ "eslint-plugin-prettier": "^3.1.2",
+ "eslint-plugin-promise": "^4.2.1",
+ "mocha": "^8.3.2",
+ "prettier": "^2.2.1",
+ "sandboxed-module": "^2.0.4",
+ "sinon": "^9.0.2",
+ "timekeeper": "^2.0.0"
+ }
+}
diff --git a/services/document-updater/redis_cluster/7000/redis.conf b/services/document-updater/redis_cluster/7000/redis.conf
new file mode 100644
index 0000000000..10dc9cb85c
--- /dev/null
+++ b/services/document-updater/redis_cluster/7000/redis.conf
@@ -0,0 +1,5 @@
+port 7000
+cluster-enabled yes
+cluster-config-file nodes.conf
+cluster-node-timeout 5000
+appendonly yes
\ No newline at end of file
diff --git a/services/document-updater/redis_cluster/7001/redis.conf b/services/document-updater/redis_cluster/7001/redis.conf
new file mode 100644
index 0000000000..10df3f5a5c
--- /dev/null
+++ b/services/document-updater/redis_cluster/7001/redis.conf
@@ -0,0 +1,5 @@
+port 7001
+cluster-enabled yes
+cluster-config-file nodes.conf
+cluster-node-timeout 5000
+appendonly yes
\ No newline at end of file
diff --git a/services/document-updater/redis_cluster/7002/redis.conf b/services/document-updater/redis_cluster/7002/redis.conf
new file mode 100644
index 0000000000..d16f4e2a7f
--- /dev/null
+++ b/services/document-updater/redis_cluster/7002/redis.conf
@@ -0,0 +1,5 @@
+port 7002
+cluster-enabled yes
+cluster-config-file nodes.conf
+cluster-node-timeout 5000
+appendonly yes
\ No newline at end of file
diff --git a/services/document-updater/redis_cluster/7003/redis.conf b/services/document-updater/redis_cluster/7003/redis.conf
new file mode 100644
index 0000000000..f54103d83f
--- /dev/null
+++ b/services/document-updater/redis_cluster/7003/redis.conf
@@ -0,0 +1,5 @@
+port 7003
+cluster-enabled yes
+cluster-config-file nodes.conf
+cluster-node-timeout 5000
+appendonly yes
\ No newline at end of file
diff --git a/services/document-updater/redis_cluster/7004/redis.conf b/services/document-updater/redis_cluster/7004/redis.conf
new file mode 100644
index 0000000000..8b3af0834b
--- /dev/null
+++ b/services/document-updater/redis_cluster/7004/redis.conf
@@ -0,0 +1,5 @@
+port 7004
+cluster-enabled yes
+cluster-config-file nodes.conf
+cluster-node-timeout 5000
+appendonly yes
\ No newline at end of file
diff --git a/services/document-updater/redis_cluster/7005/redis.conf b/services/document-updater/redis_cluster/7005/redis.conf
new file mode 100644
index 0000000000..ce0b227510
--- /dev/null
+++ b/services/document-updater/redis_cluster/7005/redis.conf
@@ -0,0 +1,5 @@
+port 7005
+cluster-enabled yes
+cluster-config-file nodes.conf
+cluster-node-timeout 5000
+appendonly yes
\ No newline at end of file
diff --git a/services/document-updater/redis_cluster/redis-cluster.sh b/services/document-updater/redis_cluster/redis-cluster.sh
new file mode 100755
index 0000000000..e25359bcf7
--- /dev/null
+++ b/services/document-updater/redis_cluster/redis-cluster.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+(cd 7000 && redis-server redis.conf) &
+PID1="$!"
+
+(cd 7001 && redis-server redis.conf) &
+PID2="$!"
+
+(cd 7002 && redis-server redis.conf) &
+PID3="$!"
+
+(cd 7003 && redis-server redis.conf) &
+PID4="$!"
+
+(cd 7004 && redis-server redis.conf) &
+PID5="$!"
+
+(cd 7005 && redis-server redis.conf) &
+PID6="$!"
+
+trap "kill $PID1 $PID2 $PID3 $PID4 $PID5 $PID6" exit INT TERM
+
+wait
\ No newline at end of file
diff --git a/services/document-updater/redis_cluster/redis-trib.rb b/services/document-updater/redis_cluster/redis-trib.rb
new file mode 100755
index 0000000000..ccb0551586
--- /dev/null
+++ b/services/document-updater/redis_cluster/redis-trib.rb
@@ -0,0 +1,1696 @@
+#!/usr/bin/env ruby
+
+# TODO (temporary here, we'll move this into the Github issues once
+# redis-trib initial implementation is completed).
+#
+# - Make sure that if the rehashing fails in the middle redis-trib will try
+# to recover.
+# - When redis-trib performs a cluster check, if it detects a slot move in
+# progress it should prompt the user to continue the move from where it
+# stopped.
+# - Gracefully handle Ctrl+C in move_slot to prompt the user if really stop
+# while rehashing, and performing the best cleanup possible if the user
+# forces the quit.
+# - When doing "fix" set a global Fix to true, and prompt the user to
+# fix the problem if automatically fixable every time there is something
+# to fix. For instance:
+# 1) If there is a node that pretend to receive a slot, or to migrate a
+# slot, but has no entries in that slot, fix it.
+# 2) If there is a node having keys in slots that are not owned by it
+# fix this condition moving the entries in the same node.
+# 3) Perform more possibly slow tests about the state of the cluster.
+# 4) When aborted slot migration is detected, fix it.
+
+require 'rubygems'
+require 'redis'
+
+ClusterHashSlots = 16384
+MigrateDefaultTimeout = 60000
+MigrateDefaultPipeline = 10
+RebalanceDefaultThreshold = 2
+
+$verbose = false
+
+def xputs(s)
+ case s[0..2]
+ when ">>>"
+ color="29;1"
+ when "[ER"
+ color="31;1"
+ when "[WA"
+ color="31;1"
+ when "[OK"
+ color="32"
+ when "[FA","***"
+ color="33"
+ else
+ color=nil
+ end
+
+ color = nil if ENV['TERM'] != "xterm"
+ print "\033[#{color}m" if color
+ print s
+ print "\033[0m" if color
+ print "\n"
+end
+
+class ClusterNode
+ def initialize(addr)
+ s = addr.split("@")[0].split(":")
+ if s.length < 2
+ puts "Invalid IP or Port (given as #{addr}) - use IP:Port format"
+ exit 1
+ end
+ port = s.pop # removes port from split array
+ ip = s.join(":") # if s.length > 1 here, it's IPv6, so restore address
+ @r = nil
+ @info = {}
+ @info[:host] = ip
+ @info[:port] = port
+ @info[:slots] = {}
+ @info[:migrating] = {}
+ @info[:importing] = {}
+ @info[:replicate] = false
+ @dirty = false # True if we need to flush slots info into node.
+ @friends = []
+ end
+
+ def friends
+ @friends
+ end
+
+ def slots
+ @info[:slots]
+ end
+
+ def has_flag?(flag)
+ @info[:flags].index(flag)
+ end
+
+ def to_s
+ "#{@info[:host]}:#{@info[:port]}"
+ end
+
+ def connect(o={})
+ return if @r
+ print "Connecting to node #{self}: " if $verbose
+ STDOUT.flush
+ begin
+ @r = Redis.new(:host => @info[:host], :port => @info[:port], :timeout => 60)
+ @r.ping
+ rescue
+ xputs "[ERR] Sorry, can't connect to node #{self}"
+ exit 1 if o[:abort]
+ @r = nil
+ end
+ xputs "OK" if $verbose
+ end
+
+ def assert_cluster
+ info = @r.info
+ if !info["cluster_enabled"] || info["cluster_enabled"].to_i == 0
+ xputs "[ERR] Node #{self} is not configured as a cluster node."
+ exit 1
+ end
+ end
+
+ def assert_empty
+ if !(@r.cluster("info").split("\r\n").index("cluster_known_nodes:1")) ||
+ (@r.info['db0'])
+ xputs "[ERR] Node #{self} is not empty. Either the node already knows other nodes (check with CLUSTER NODES) or contains some key in database 0."
+ exit 1
+ end
+ end
+
+ def load_info(o={})
+ self.connect
+ nodes = @r.cluster("nodes").split("\n")
+ nodes.each{|n|
+ # name addr flags role ping_sent ping_recv link_status slots
+ split = n.split
+ name,addr,flags,master_id,ping_sent,ping_recv,config_epoch,link_status = split[0..6]
+ slots = split[8..-1]
+ info = {
+ :name => name,
+ :addr => addr,
+ :flags => flags.split(","),
+ :replicate => master_id,
+ :ping_sent => ping_sent.to_i,
+ :ping_recv => ping_recv.to_i,
+ :link_status => link_status
+ }
+ info[:replicate] = false if master_id == "-"
+
+ if info[:flags].index("myself")
+ @info = @info.merge(info)
+ @info[:slots] = {}
+ slots.each{|s|
+ if s[0..0] == '['
+ if s.index("->-") # Migrating
+ slot,dst = s[1..-1].split("->-")
+ @info[:migrating][slot.to_i] = dst
+ elsif s.index("-<-") # Importing
+ slot,src = s[1..-1].split("-<-")
+ @info[:importing][slot.to_i] = src
+ end
+ elsif s.index("-")
+ start,stop = s.split("-")
+ self.add_slots((start.to_i)..(stop.to_i))
+ else
+ self.add_slots((s.to_i)..(s.to_i))
+ end
+ } if slots
+ @dirty = false
+ @r.cluster("info").split("\n").each{|e|
+ k,v=e.split(":")
+ k = k.to_sym
+ v.chop!
+ if k != :cluster_state
+ @info[k] = v.to_i
+ else
+ @info[k] = v
+ end
+ }
+ elsif o[:getfriends]
+ @friends << info
+ end
+ }
+ end
+
+ def add_slots(slots)
+ slots.each{|s|
+ @info[:slots][s] = :new
+ }
+ @dirty = true
+ end
+
+ def set_as_replica(node_id)
+ @info[:replicate] = node_id
+ @dirty = true
+ end
+
+ def flush_node_config
+ return if !@dirty
+ if @info[:replicate]
+ begin
+ @r.cluster("replicate",@info[:replicate])
+ rescue
+        # If the cluster has not already joined it is possible that
+ # the slave does not know the master node yet. So on errors
+ # we return ASAP leaving the dirty flag set, to flush the
+ # config later.
+ return
+ end
+ else
+ new = []
+ @info[:slots].each{|s,val|
+ if val == :new
+ new << s
+ @info[:slots][s] = true
+ end
+ }
+ @r.cluster("addslots",*new)
+ end
+ @dirty = false
+ end
+
+ def info_string
+ # We want to display the hash slots assigned to this node
+ # as ranges, like in: "1-5,8-9,20-25,30"
+ #
+ # Note: this could be easily written without side effects,
+ # we use 'slots' just to split the computation into steps.
+
+ # First step: we want an increasing array of integers
+ # for instance: [1,2,3,4,5,8,9,20,21,22,23,24,25,30]
+ slots = @info[:slots].keys.sort
+
+ # As we want to aggregate adjacent slots we convert all the
+ # slot integers into ranges (with just one element)
+ # So we have something like [1..1,2..2, ... and so forth.
+ slots.map!{|x| x..x}
+
+ # Finally we group ranges with adjacent elements.
+ slots = slots.reduce([]) {|a,b|
+ if !a.empty? && b.first == (a[-1].last)+1
+ a[0..-2] + [(a[-1].first)..(b.last)]
+ else
+ a + [b]
+ end
+ }
+
+ # Now our task is easy, we just convert ranges with just one
+ # element into a number, and a real range into a start-end format.
+ # Finally we join the array using the comma as separator.
+ slots = slots.map{|x|
+ x.count == 1 ? x.first.to_s : "#{x.first}-#{x.last}"
+ }.join(",")
+
+ role = self.has_flag?("master") ? "M" : "S"
+
+ if self.info[:replicate] and @dirty
+ is = "S: #{self.info[:name]} #{self.to_s}"
+ else
+ is = "#{role}: #{self.info[:name]} #{self.to_s}\n"+
+ " slots:#{slots} (#{self.slots.length} slots) "+
+ "#{(self.info[:flags]-["myself"]).join(",")}"
+ end
+ if self.info[:replicate]
+ is += "\n replicates #{info[:replicate]}"
+ elsif self.has_flag?("master") && self.info[:replicas]
+ is += "\n #{info[:replicas].length} additional replica(s)"
+ end
+ is
+ end
+
+ # Return a single string representing nodes and associated slots.
+ # TODO: remove slaves from config when slaves will be handled
+ # by Redis Cluster.
+ def get_config_signature
+ config = []
+ @r.cluster("nodes").each_line{|l|
+ s = l.split
+ slots = s[8..-1].select {|x| x[0..0] != "["}
+ next if slots.length == 0
+ config << s[0]+":"+(slots.sort.join(","))
+ }
+ config.sort.join("|")
+ end
+
+ def info
+ @info
+ end
+
+ def is_dirty?
+ @dirty
+ end
+
+ def r
+ @r
+ end
+end
+
+class RedisTrib
+ def initialize
+ @nodes = []
+ @fix = false
+ @errors = []
+ @timeout = MigrateDefaultTimeout
+ end
+
+ def check_arity(req_args, num_args)
+ if ((req_args > 0 and num_args != req_args) ||
+ (req_args < 0 and num_args < req_args.abs))
+ xputs "[ERR] Wrong number of arguments for specified sub command"
+ exit 1
+ end
+ end
+
+ def add_node(node)
+ @nodes << node
+ end
+
+ def reset_nodes
+ @nodes = []
+ end
+
+ def cluster_error(msg)
+ @errors << msg
+ xputs msg
+ end
+
+ # Return the node with the specified ID or Nil.
+ def get_node_by_name(name)
+ @nodes.each{|n|
+ return n if n.info[:name] == name.downcase
+ }
+ return nil
+ end
+
+ # Like get_node_by_name but the specified name can be just the first
+ # part of the node ID as long as the prefix in unique across the
+ # cluster.
+ def get_node_by_abbreviated_name(name)
+ l = name.length
+ candidates = []
+ @nodes.each{|n|
+ if n.info[:name][0...l] == name.downcase
+ candidates << n
+ end
+ }
+ return nil if candidates.length != 1
+ candidates[0]
+ end
+
+ # This function returns the master that has the least number of replicas
+ # in the cluster. If there are multiple masters with the same smaller
+ # number of replicas, one at random is returned.
+ def get_master_with_least_replicas
+ masters = @nodes.select{|n| n.has_flag? "master"}
+ sorted = masters.sort{|a,b|
+ a.info[:replicas].length <=> b.info[:replicas].length
+ }
+ sorted[0]
+ end
+
+ def check_cluster(opt={})
+ xputs ">>> Performing Cluster Check (using node #{@nodes[0]})"
+ show_nodes if !opt[:quiet]
+ check_config_consistency
+ check_open_slots
+ check_slots_coverage
+ end
+
+ def show_cluster_info
+ masters = 0
+ keys = 0
+ @nodes.each{|n|
+ if n.has_flag?("master")
+ puts "#{n} (#{n.info[:name][0...8]}...) -> #{n.r.dbsize} keys | #{n.slots.length} slots | "+
+ "#{n.info[:replicas].length} slaves."
+ masters += 1
+ keys += n.r.dbsize
+ end
+ }
+ xputs "[OK] #{keys} keys in #{masters} masters."
+ keys_per_slot = sprintf("%.2f",keys/16384.0)
+ puts "#{keys_per_slot} keys per slot on average."
+ end
+
+ # Merge slots of every known node. If the resulting slots are equal
+ # to ClusterHashSlots, then all slots are served.
+ def covered_slots
+ slots = {}
+ @nodes.each{|n|
+ slots = slots.merge(n.slots)
+ }
+ slots
+ end
+
+ def check_slots_coverage
+ xputs ">>> Check slots coverage..."
+ slots = covered_slots
+ if slots.length == ClusterHashSlots
+ xputs "[OK] All #{ClusterHashSlots} slots covered."
+ else
+ cluster_error \
+ "[ERR] Not all #{ClusterHashSlots} slots are covered by nodes."
+ fix_slots_coverage if @fix
+ end
+ end
+
+ def check_open_slots
+ xputs ">>> Check for open slots..."
+ open_slots = []
+ @nodes.each{|n|
+ if n.info[:migrating].size > 0
+ cluster_error \
+ "[WARNING] Node #{n} has slots in migrating state (#{n.info[:migrating].keys.join(",")})."
+ open_slots += n.info[:migrating].keys
+ end
+ if n.info[:importing].size > 0
+ cluster_error \
+ "[WARNING] Node #{n} has slots in importing state (#{n.info[:importing].keys.join(",")})."
+ open_slots += n.info[:importing].keys
+ end
+ }
+ open_slots.uniq!
+ if open_slots.length > 0
+ xputs "[WARNING] The following slots are open: #{open_slots.join(",")}"
+ end
+ if @fix
+ open_slots.each{|slot| fix_open_slot slot}
+ end
+ end
+
+ def nodes_with_keys_in_slot(slot)
+ nodes = []
+ @nodes.each{|n|
+ next if n.has_flag?("slave")
+ nodes << n if n.r.cluster("getkeysinslot",slot,1).length > 0
+ }
+ nodes
+ end
+
+ def fix_slots_coverage
+ not_covered = (0...ClusterHashSlots).to_a - covered_slots.keys
+ xputs ">>> Fixing slots coverage..."
+ xputs "List of not covered slots: " + not_covered.join(",")
+
+ # For every slot, take action depending on the actual condition:
+ # 1) No node has keys for this slot.
+ # 2) A single node has keys for this slot.
+ # 3) Multiple nodes have keys for this slot.
+ slots = {}
+ not_covered.each{|slot|
+ nodes = nodes_with_keys_in_slot(slot)
+ slots[slot] = nodes
+ xputs "Slot #{slot} has keys in #{nodes.length} nodes: #{nodes.join(", ")}"
+ }
+
+ none = slots.select {|k,v| v.length == 0}
+ single = slots.select {|k,v| v.length == 1}
+ multi = slots.select {|k,v| v.length > 1}
+
+ # Handle case "1": keys in no node.
+ if none.length > 0
+ xputs "The folowing uncovered slots have no keys across the cluster:"
+ xputs none.keys.join(",")
+ yes_or_die "Fix these slots by covering with a random node?"
+ none.each{|slot,nodes|
+ node = @nodes.sample
+ xputs ">>> Covering slot #{slot} with #{node}"
+ node.r.cluster("addslots",slot)
+ }
+ end
+
+ # Handle case "2": keys only in one node.
+ if single.length > 0
+ xputs "The folowing uncovered slots have keys in just one node:"
+ puts single.keys.join(",")
+ yes_or_die "Fix these slots by covering with those nodes?"
+ single.each{|slot,nodes|
+ xputs ">>> Covering slot #{slot} with #{nodes[0]}"
+ nodes[0].r.cluster("addslots",slot)
+ }
+ end
+
+ # Handle case "3": keys in multiple nodes.
+ if multi.length > 0
+ xputs "The folowing uncovered slots have keys in multiple nodes:"
+ xputs multi.keys.join(",")
+ yes_or_die "Fix these slots by moving keys into a single node?"
+ multi.each{|slot,nodes|
+ target = get_node_with_most_keys_in_slot(nodes,slot)
+ xputs ">>> Covering slot #{slot} moving keys to #{target}"
+
+ target.r.cluster('addslots',slot)
+ target.r.cluster('setslot',slot,'stable')
+ nodes.each{|src|
+ next if src == target
+ # Set the source node in 'importing' state (even if we will
+ # actually migrate keys away) in order to avoid receiving
+ # redirections for MIGRATE.
+ src.r.cluster('setslot',slot,'importing',target.info[:name])
+ move_slot(src,target,slot,:dots=>true,:fix=>true,:cold=>true)
+ src.r.cluster('setslot',slot,'stable')
+ }
+ }
+ end
+ end
+
+ # Return the owner of the specified slot
+ def get_slot_owners(slot)
+ owners = []
+ @nodes.each{|n|
+ next if n.has_flag?("slave")
+ n.slots.each{|s,_|
+ owners << n if s == slot
+ }
+ }
+ owners
+ end
+
+ # Return the node, among 'nodes' with the greatest number of keys
+ # in the specified slot.
+ def get_node_with_most_keys_in_slot(nodes,slot)
+ best = nil
+ best_numkeys = 0
+ @nodes.each{|n|
+ next if n.has_flag?("slave")
+ numkeys = n.r.cluster("countkeysinslot",slot)
+ if numkeys > best_numkeys || best == nil
+ best = n
+ best_numkeys = numkeys
+ end
+ }
+ return best
+ end
+
+ # Slot 'slot' was found to be in importing or migrating state in one or
+ # more nodes. This function fixes this condition by migrating keys where
+ # it seems more sensible.
+ def fix_open_slot(slot)
+ puts ">>> Fixing open slot #{slot}"
+
+ # Try to obtain the current slot owner, according to the current
+ # nodes configuration.
+ owners = get_slot_owners(slot)
+ owner = owners[0] if owners.length == 1
+
+ migrating = []
+ importing = []
+ @nodes.each{|n|
+ next if n.has_flag? "slave"
+ if n.info[:migrating][slot]
+ migrating << n
+ elsif n.info[:importing][slot]
+ importing << n
+ elsif n.r.cluster("countkeysinslot",slot) > 0 && n != owner
+ xputs "*** Found keys about slot #{slot} in node #{n}!"
+ importing << n
+ end
+ }
+ puts "Set as migrating in: #{migrating.join(",")}"
+ puts "Set as importing in: #{importing.join(",")}"
+
+ # If there is no slot owner, set as owner the slot with the biggest
+ # number of keys, among the set of migrating / importing nodes.
+ if !owner
+ xputs ">>> Nobody claims ownership, selecting an owner..."
+ owner = get_node_with_most_keys_in_slot(@nodes,slot)
+
+ # If we still don't have an owner, we can't fix it.
+ if !owner
+ xputs "[ERR] Can't select a slot owner. Impossible to fix."
+ exit 1
+ end
+
+ # Use ADDSLOTS to assign the slot.
+ puts "*** Configuring #{owner} as the slot owner"
+ owner.r.cluster("setslot",slot,"stable")
+ owner.r.cluster("addslots",slot)
+ # Make sure this information will propagate. Not strictly needed
+ # since there is no past owner, so all the other nodes will accept
+ # whatever epoch this node will claim the slot with.
+ owner.r.cluster("bumpepoch")
+
+ # Remove the owner from the list of migrating/importing
+ # nodes.
+ migrating.delete(owner)
+ importing.delete(owner)
+ end
+
+ # If there are multiple owners of the slot, we need to fix it
+ # so that a single node is the owner and all the other nodes
+ # are in importing state. Later the fix can be handled by one
+ # of the base cases above.
+ #
+ # Note that this case also covers multiple nodes having the slot
+ # in migrating state, since migrating is a valid state only for
+ # slot owners.
+ if owners.length > 1
+ owner = get_node_with_most_keys_in_slot(owners,slot)
+ owners.each{|n|
+ next if n == owner
+ n.r.cluster('delslots',slot)
+ n.r.cluster('setslot',slot,'importing',owner.info[:name])
+            importing.delete(n) # Avoid duplicates
+ importing << n
+ }
+ owner.r.cluster('bumpepoch')
+ end
+
+ # Case 1: The slot is in migrating state in one slot, and in
+ # importing state in 1 slot. That's trivial to address.
+ if migrating.length == 1 && importing.length == 1
+ move_slot(migrating[0],importing[0],slot,:dots=>true,:fix=>true)
+ # Case 2: There are multiple nodes that claim the slot as importing,
+ # they probably got keys about the slot after a restart so opened
+ # the slot. In this case we just move all the keys to the owner
+ # according to the configuration.
+ elsif migrating.length == 0 && importing.length > 0
+ xputs ">>> Moving all the #{slot} slot keys to its owner #{owner}"
+ importing.each {|node|
+ next if node == owner
+ move_slot(node,owner,slot,:dots=>true,:fix=>true,:cold=>true)
+ xputs ">>> Setting #{slot} as STABLE in #{node}"
+ node.r.cluster("setslot",slot,"stable")
+ }
+ # Case 3: There are no slots claiming to be in importing state, but
+    # there is a migrating node that actually doesn't have any key. We
+ # can just close the slot, probably a reshard interrupted in the middle.
+ elsif importing.length == 0 && migrating.length == 1 &&
+ migrating[0].r.cluster("getkeysinslot",slot,10).length == 0
+ migrating[0].r.cluster("setslot",slot,"stable")
+ else
+ xputs "[ERR] Sorry, Redis-trib can't fix this slot yet (work in progress). Slot is set as migrating in #{migrating.join(",")}, as importing in #{importing.join(",")}, owner is #{owner}"
+ end
+ end
+
+ # Check if all the nodes agree about the cluster configuration
+ def check_config_consistency
+ if !is_config_consistent?
+ cluster_error "[ERR] Nodes don't agree about configuration!"
+ else
+ xputs "[OK] All nodes agree about slots configuration."
+ end
+ end
+
+ def is_config_consistent?
+ signatures=[]
+ @nodes.each{|n|
+ signatures << n.get_config_signature
+ }
+ return signatures.uniq.length == 1
+ end
+
+ def wait_cluster_join
+ print "Waiting for the cluster to join"
+ while !is_config_consistent?
+ print "."
+ STDOUT.flush
+ sleep 1
+ end
+ print "\n"
+ end
+
+ def alloc_slots
+ nodes_count = @nodes.length
+ masters_count = @nodes.length / (@replicas+1)
+ masters = []
+
+ # The first step is to split instances by IP. This is useful as
+ # we'll try to allocate master nodes in different physical machines
+ # (as much as possible) and to allocate slaves of a given master in
+ # different physical machines as well.
+ #
+    # This code assumes just that if the IP is different, then it is more
+ # likely that the instance is running in a different physical host
+ # or at least a different virtual machine.
+ ips = {}
+ @nodes.each{|n|
+ ips[n.info[:host]] = [] if !ips[n.info[:host]]
+ ips[n.info[:host]] << n
+ }
+
+ # Select master instances
+ puts "Using #{masters_count} masters:"
+ interleaved = []
+ stop = false
+ while not stop do
+ # Take one node from each IP until we run out of nodes
+ # across every IP.
+ ips.each do |ip,nodes|
+ if nodes.empty?
+ # if this IP has no remaining nodes, check for termination
+ if interleaved.length == nodes_count
+ # stop when 'interleaved' has accumulated all nodes
+ stop = true
+ next
+ end
+ else
+ # else, move one node from this IP to 'interleaved'
+ interleaved.push nodes.shift
+ end
+ end
+ end
+
+ masters = interleaved.slice!(0, masters_count)
+ nodes_count -= masters.length
+
+ masters.each{|m| puts m}
+
+ # Alloc slots on masters
+ slots_per_node = ClusterHashSlots.to_f / masters_count
+ first = 0
+ cursor = 0.0
+ masters.each_with_index{|n,masternum|
+ last = (cursor+slots_per_node-1).round
+ if last > ClusterHashSlots || masternum == masters.length-1
+ last = ClusterHashSlots-1
+ end
+ last = first if last < first # Min step is 1.
+ n.add_slots first..last
+ first = last+1
+ cursor += slots_per_node
+ }
+
+ # Select N replicas for every master.
+ # We try to split the replicas among all the IPs with spare nodes
+ # trying to avoid the host where the master is running, if possible.
+ #
+ # Note we loop two times. The first loop assigns the requested
+ # number of replicas to each master. The second loop assigns any
+ # remaining instances as extra replicas to masters. Some masters
+ # may end up with more than their requested number of replicas, but
+ # all nodes will be used.
+ assignment_verbose = false
+
+ [:requested,:unused].each do |assign|
+ masters.each do |m|
+ assigned_replicas = 0
+ while assigned_replicas < @replicas
+ break if nodes_count == 0
+ if assignment_verbose
+ if assign == :requested
+ puts "Requesting total of #{@replicas} replicas " \
+ "(#{assigned_replicas} replicas assigned " \
+ "so far with #{nodes_count} total remaining)."
+ elsif assign == :unused
+ puts "Assigning extra instance to replication " \
+ "role too (#{nodes_count} remaining)."
+ end
+ end
+
+ # Return the first node not matching our current master
+ node = interleaved.find{|n| n.info[:host] != m.info[:host]}
+
+ # If we found a node, use it as a best-first match.
+ # Otherwise, we didn't find a node on a different IP, so we
+ # go ahead and use a same-IP replica.
+ if node
+ slave = node
+ interleaved.delete node
+ else
+ slave = interleaved.shift
+ end
+ slave.set_as_replica(m.info[:name])
+ nodes_count -= 1
+ assigned_replicas += 1
+ puts "Adding replica #{slave} to #{m}"
+
+ # If we are in the "assign extra nodes" loop,
+ # we want to assign one extra replica to each
+ # master before repeating masters.
+ # This break lets us assign extra replicas to masters
+ # in a round-robin way.
+ break if assign == :unused
+ end
+ end
+ end
+ end
+
+ def flush_nodes_config
+ @nodes.each{|n|
+ n.flush_node_config
+ }
+ end
+
+ def show_nodes
+ @nodes.each{|n|
+ xputs n.info_string
+ }
+ end
+
+ # Redis Cluster config epoch collision resolution code is able to eventually
+ # set a different epoch to each node after a new cluster is created, but
+ # it is slow compared to assign a progressive config epoch to each node
+ # before joining the cluster. However we do just a best-effort try here
+ # since if we fail is not a problem.
+ def assign_config_epoch
+ config_epoch = 1
+ @nodes.each{|n|
+ begin
+ n.r.cluster("set-config-epoch",config_epoch)
+ rescue
+ end
+ config_epoch += 1
+ }
+ end
+
+ def join_cluster
+    # We use a brute force approach to make sure the nodes will meet
+ # each other, that is, sending CLUSTER MEET messages to all the nodes
+ # about the very same node.
+ # Thanks to gossip this information should propagate across all the
+ # cluster in a matter of seconds.
+ first = false
+ @nodes.each{|n|
+ if !first then first = n.info; next; end # Skip the first node
+ n.r.cluster("meet",first[:host],first[:port])
+ }
+ end
+
+ def yes_or_die(msg)
+ print "#{msg} (type 'yes' to accept): "
+ STDOUT.flush
+ if !(STDIN.gets.chomp.downcase == "yes")
+ xputs "*** Aborting..."
+ exit 1
+ end
+ end
+
+ def load_cluster_info_from_node(nodeaddr)
+ node = ClusterNode.new(nodeaddr)
+ node.connect(:abort => true)
+ node.assert_cluster
+ node.load_info(:getfriends => true)
+ add_node(node)
+ node.friends.each{|f|
+ next if f[:flags].index("noaddr") ||
+ f[:flags].index("disconnected") ||
+ f[:flags].index("fail")
+ fnode = ClusterNode.new(f[:addr])
+ fnode.connect()
+ next if !fnode.r
+ begin
+ fnode.load_info()
+ add_node(fnode)
+ rescue => e
+ xputs "[ERR] Unable to load info for node #{fnode}"
+ end
+ }
+ populate_nodes_replicas_info
+ end
+
+ # This function is called by load_cluster_info_from_node in order to
+ # add additional information to every node as a list of replicas.
+ def populate_nodes_replicas_info
+ # Start adding the new field to every node.
+ @nodes.each{|n|
+ n.info[:replicas] = []
+ }
+
+ # Populate the replicas field using the replicate field of slave
+ # nodes.
+ @nodes.each{|n|
+ if n.info[:replicate]
+ master = get_node_by_name(n.info[:replicate])
+ if !master
+ xputs "*** WARNING: #{n} claims to be slave of unknown node ID #{n.info[:replicate]}."
+ else
+ master.info[:replicas] << n
+ end
+ end
+ }
+ end
+
+ # Given a list of source nodes return a "resharding plan"
+ # with what slots to move in order to move "numslots" slots to another
+ # instance.
+ def compute_reshard_table(sources,numslots)
+ moved = []
+ # Sort from bigger to smaller instance, for two reasons:
+ # 1) If we take less slots than instances it is better to start
+ # getting from the biggest instances.
+ # 2) We take one slot more from the first instance in the case of not
+ # perfect divisibility. Like we have 3 nodes and need to get 10
+ # slots, we take 4 from the first, and 3 from the rest. So the
+ # biggest is always the first.
+ sources = sources.sort{|a,b| b.slots.length <=> a.slots.length}
+ source_tot_slots = sources.inject(0) {|sum,source|
+ sum+source.slots.length
+ }
+ sources.each_with_index{|s,i|
+ # Every node will provide a number of slots proportional to the
+ # slots it has assigned.
+ n = (numslots.to_f/source_tot_slots*s.slots.length)
+ if i == 0
+ n = n.ceil
+ else
+ n = n.floor
+ end
+ s.slots.keys.sort[(0...n)].each{|slot|
+ if moved.length < numslots
+ moved << {:source => s, :slot => slot}
+ end
+ }
+ }
+ return moved
+ end
+
+ def show_reshard_table(table)
+ table.each{|e|
+ puts " Moving slot #{e[:slot]} from #{e[:source].info[:name]}"
+ }
+ end
+
+ # Move slots between source and target nodes using MIGRATE.
+ #
+ # Options:
+ # :verbose -- Print a dot for every moved key.
+ # :fix -- We are moving in the context of a fix. Use REPLACE.
+ # :cold -- Move keys without opening slots / reconfiguring the nodes.
+ # :update -- Update nodes.info[:slots] for source/target nodes.
+ # :quiet -- Don't print info messages.
+ def move_slot(source,target,slot,o={})
+ o = {:pipeline => MigrateDefaultPipeline}.merge(o)
+
+ # We start marking the slot as importing in the destination node,
+ # and the slot as migrating in the target host. Note that the order of
+ # the operations is important, as otherwise a client may be redirected
+ # to the target node that does not yet know it is importing this slot.
+ if !o[:quiet]
+ print "Moving slot #{slot} from #{source} to #{target}: "
+ STDOUT.flush
+ end
+
+ if !o[:cold]
+ target.r.cluster("setslot",slot,"importing",source.info[:name])
+ source.r.cluster("setslot",slot,"migrating",target.info[:name])
+ end
+ # Migrate all the keys from source to target using the MIGRATE command
+ while true
+ keys = source.r.cluster("getkeysinslot",slot,o[:pipeline])
+ break if keys.length == 0
+ begin
+ source.r.client.call(["migrate",target.info[:host],target.info[:port],"",0,@timeout,:keys,*keys])
+ rescue => e
+ if o[:fix] && e.to_s =~ /BUSYKEY/
+ xputs "*** Target key exists. Replacing it for FIX."
+ source.r.client.call(["migrate",target.info[:host],target.info[:port],"",0,@timeout,:replace,:keys,*keys])
+ else
+ puts ""
+ xputs "[ERR] Calling MIGRATE: #{e}"
+ exit 1
+ end
+ end
+ print "."*keys.length if o[:dots]
+ STDOUT.flush
+ end
+
+ puts if !o[:quiet]
+ # Set the new node as the owner of the slot in all the known nodes.
+ if !o[:cold]
+ @nodes.each{|n|
+ next if n.has_flag?("slave")
+ n.r.cluster("setslot",slot,"node",target.info[:name])
+ }
+ end
+
+ # Update the node logical config
+ if o[:update] then
+ source.info[:slots].delete(slot)
+ target.info[:slots][slot] = true
+ end
+ end
+
+ # redis-trib subcommands implementations.
+
+ def check_cluster_cmd(argv,opt)
+ load_cluster_info_from_node(argv[0])
+ check_cluster
+ end
+
+ def info_cluster_cmd(argv,opt)
+ load_cluster_info_from_node(argv[0])
+ show_cluster_info
+ end
+
+ def rebalance_cluster_cmd(argv,opt)
+ opt = {
+ 'pipeline' => MigrateDefaultPipeline,
+ 'threshold' => RebalanceDefaultThreshold
+ }.merge(opt)
+
+ # Load nodes info before parsing options, otherwise we can't
+ # handle --weight.
+ load_cluster_info_from_node(argv[0])
+
+ # Options parsing
+ threshold = opt['threshold'].to_i
+ autoweights = opt['auto-weights']
+ weights = {}
+ opt['weight'].each{|w|
+ fields = w.split("=")
+ node = get_node_by_abbreviated_name(fields[0])
+ if !node || !node.has_flag?("master")
+ puts "*** No such master node #{fields[0]}"
+ exit 1
+ end
+ weights[node.info[:name]] = fields[1].to_f
+ } if opt['weight']
+ useempty = opt['use-empty-masters']
+
+ # Assign a weight to each node, and compute the total cluster weight.
+ total_weight = 0
+ nodes_involved = 0
+ @nodes.each{|n|
+ if n.has_flag?("master")
+ next if !useempty && n.slots.length == 0
+ n.info[:w] = weights[n.info[:name]] ? weights[n.info[:name]] : 1
+ total_weight += n.info[:w]
+ nodes_involved += 1
+ end
+ }
+
+ # Check cluster, only proceed if it looks sane.
+ check_cluster(:quiet => true)
+ if @errors.length != 0
+ puts "*** Please fix your cluster problems before rebalancing"
+ exit 1
+ end
+
+ # Calculate the slots balance for each node. It's the number of
+ # slots the node should lose (if positive) or gain (if negative)
+ # in order to be balanced.
+ threshold = opt['threshold'].to_f
+ threshold_reached = false
+ @nodes.each{|n|
+ if n.has_flag?("master")
+ next if !n.info[:w]
+ expected = ((ClusterHashSlots.to_f / total_weight) *
+ n.info[:w]).to_i
+ n.info[:balance] = n.slots.length - expected
+ # Compute the percentage of difference between the
+ # expected number of slots and the real one, to see
+ # if it's over the threshold specified by the user.
+ over_threshold = false
+ if threshold > 0
+ if n.slots.length > 0
+ err_perc = (100-(100.0*expected/n.slots.length)).abs
+ over_threshold = true if err_perc > threshold
+ elsif expected > 0
+ over_threshold = true
+ end
+ end
+ threshold_reached = true if over_threshold
+ end
+ }
+ if !threshold_reached
+ xputs "*** No rebalancing needed! All nodes are within the #{threshold}% threshold."
+ return
+ end
+
+ # Only consider nodes we want to change
+ sn = @nodes.select{|n|
+ n.has_flag?("master") && n.info[:w]
+ }
+
+ # Because of rounding, it is possible that the balance of all nodes
+ # summed does not give 0. Make sure that nodes that have to provide
+ # slots are always matched by nodes receiving slots.
+ total_balance = sn.map{|x| x.info[:balance]}.reduce{|a,b| a+b}
+ while total_balance > 0
+ sn.each{|n|
+ if n.info[:balance] < 0 && total_balance > 0
+ n.info[:balance] -= 1
+ total_balance -= 1
+ end
+ }
+ end
+
+ # Sort nodes by their slots balance.
+ sn = sn.sort{|a,b|
+ a.info[:balance] <=> b.info[:balance]
+ }
+
+ xputs ">>> Rebalancing across #{nodes_involved} nodes. Total weight = #{total_weight}"
+
+ if $verbose
+ sn.each{|n|
+ puts "#{n} balance is #{n.info[:balance]} slots"
+ }
+ end
+
+ # Now we have at the start of the 'sn' array nodes that should get
+ # slots, at the end nodes that must give slots.
+ # We take two indexes, one at the start, and one at the end,
+ # incrementing or decrementing the indexes accordingly til we
+ # find nodes that need to get/provide slots.
+ dst_idx = 0
+ src_idx = sn.length - 1
+
+ while dst_idx < src_idx
+ dst = sn[dst_idx]
+ src = sn[src_idx]
+ numslots = [dst.info[:balance],src.info[:balance]].map{|n|
+ n.abs
+ }.min
+
+ if numslots > 0
+ puts "Moving #{numslots} slots from #{src} to #{dst}"
+
+ # Actaully move the slots.
+ reshard_table = compute_reshard_table([src],numslots)
+ if reshard_table.length != numslots
+ xputs "*** Assertio failed: Reshard table != number of slots"
+ exit 1
+ end
+ if opt['simulate']
+ print "#"*reshard_table.length
+ else
+ reshard_table.each{|e|
+ move_slot(e[:source],dst,e[:slot],
+ :quiet=>true,
+ :dots=>false,
+ :update=>true,
+ :pipeline=>opt['pipeline'])
+ print "#"
+ STDOUT.flush
+ }
+ end
+ puts
+ end
+
+ # Update nodes balance.
+ dst.info[:balance] += numslots
+ src.info[:balance] -= numslots
+ dst_idx += 1 if dst.info[:balance] == 0
+ src_idx -= 1 if src.info[:balance] == 0
+ end
+ end
+
+ def fix_cluster_cmd(argv,opt)
+ @fix = true
+ @timeout = opt['timeout'].to_i if opt['timeout']
+
+ load_cluster_info_from_node(argv[0])
+ check_cluster
+ end
+
+ def reshard_cluster_cmd(argv,opt)
+ opt = {'pipeline' => MigrateDefaultPipeline}.merge(opt)
+
+ load_cluster_info_from_node(argv[0])
+ check_cluster
+ if @errors.length != 0
+ puts "*** Please fix your cluster problems before resharding"
+ exit 1
+ end
+
+ @timeout = opt['timeout'].to_i if opt['timeout'].to_i
+
+ # Get number of slots
+ if opt['slots']
+ numslots = opt['slots'].to_i
+ else
+ numslots = 0
+ while numslots <= 0 or numslots > ClusterHashSlots
+ print "How many slots do you want to move (from 1 to #{ClusterHashSlots})? "
+ numslots = STDIN.gets.to_i
+ end
+ end
+
+ # Get the target instance
+ if opt['to']
+ target = get_node_by_name(opt['to'])
+ if !target || target.has_flag?("slave")
+ xputs "*** The specified node is not known or not a master, please retry."
+ exit 1
+ end
+ else
+ target = nil
+ while not target
+ print "What is the receiving node ID? "
+ target = get_node_by_name(STDIN.gets.chop)
+ if !target || target.has_flag?("slave")
+ xputs "*** The specified node is not known or not a master, please retry."
+ target = nil
+ end
+ end
+ end
+
+ # Get the source instances
+ sources = []
+ if opt['from']
+ opt['from'].split(',').each{|node_id|
+ if node_id == "all"
+ sources = "all"
+ break
+ end
+ src = get_node_by_name(node_id)
+ if !src || src.has_flag?("slave")
+ xputs "*** The specified node is not known or is not a master, please retry."
+ exit 1
+ end
+ sources << src
+ }
+ else
+ xputs "Please enter all the source node IDs."
+ xputs " Type 'all' to use all the nodes as source nodes for the hash slots."
+ xputs " Type 'done' once you entered all the source nodes IDs."
+ while true
+ print "Source node ##{sources.length+1}:"
+ line = STDIN.gets.chop
+ src = get_node_by_name(line)
+ if line == "done"
+ break
+ elsif line == "all"
+ sources = "all"
+ break
+ elsif !src || src.has_flag?("slave")
+ xputs "*** The specified node is not known or is not a master, please retry."
+ elsif src.info[:name] == target.info[:name]
+ xputs "*** It is not possible to use the target node as source node."
+ else
+ sources << src
+ end
+ end
+ end
+
+ if sources.length == 0
+ puts "*** No source nodes given, operation aborted"
+ exit 1
+ end
+
+ # Handle soures == all.
+ if sources == "all"
+ sources = []
+ @nodes.each{|n|
+ next if n.info[:name] == target.info[:name]
+ next if n.has_flag?("slave")
+ sources << n
+ }
+ end
+
+ # Check if the destination node is the same of any source nodes.
+ if sources.index(target)
+ xputs "*** Target node is also listed among the source nodes!"
+ exit 1
+ end
+
+ puts "\nReady to move #{numslots} slots."
+ puts " Source nodes:"
+ sources.each{|s| puts " "+s.info_string}
+ puts " Destination node:"
+ puts " #{target.info_string}"
+ reshard_table = compute_reshard_table(sources,numslots)
+ puts " Resharding plan:"
+ show_reshard_table(reshard_table)
+ if !opt['yes']
+ print "Do you want to proceed with the proposed reshard plan (yes/no)? "
+ yesno = STDIN.gets.chop
+ exit(1) if (yesno != "yes")
+ end
+ reshard_table.each{|e|
+ move_slot(e[:source],target,e[:slot],
+ :dots=>true,
+ :pipeline=>opt['pipeline'])
+ }
+ end
+
+    # Helper for create_cluster_cmd: verify that the number of nodes and
+    # the requested replicas-per-master yield at least three masters, the
+    # minimum Redis Cluster configuration. Terminates the process with an
+    # explanation when the layout is invalid.
+    def check_create_parameters
+        masters = @nodes.length/(@replicas+1)
+        return if masters >= 3
+        puts "*** ERROR: Invalid configuration for cluster creation."
+        puts "*** Redis Cluster requires at least 3 master nodes."
+        puts "*** This is not possible with #{@nodes.length} nodes and #{@replicas} replicas per node."
+        puts "*** At least #{3*(@replicas+1)} nodes are required."
+        exit 1
+    end
+
+    # Entry point for the "create" subcommand: build a brand new cluster
+    # from the empty nodes listed in argv (one "host:port" per argument),
+    # allocating hash slots and, with --replicas N, master/replica roles.
+    def create_cluster_cmd(argv,opt)
+        opt = {'replicas' => 0}.merge(opt)
+        @replicas = opt['replicas'].to_i
+
+        xputs ">>> Creating cluster"
+        argv[0..-1].each{|n|
+            # Every node must be reachable, cluster-enabled and empty
+            # (no keys, no previous cluster config); otherwise abort.
+            node = ClusterNode.new(n)
+            node.connect(:abort => true)
+            node.assert_cluster
+            node.load_info
+            node.assert_empty
+            add_node(node)
+        }
+        check_create_parameters
+        xputs ">>> Performing hash slots allocation on #{@nodes.length} nodes..."
+        alloc_slots
+        show_nodes
+        # Interactive confirmation before any configuration is written.
+        yes_or_die "Can I set the above configuration?"
+        flush_nodes_config
+        xputs ">>> Nodes configuration updated"
+        xputs ">>> Assign a different config epoch to each node"
+        assign_config_epoch
+        xputs ">>> Sending CLUSTER MEET messages to join the cluster"
+        join_cluster
+        # Give one second for the join to start, in order to avoid that
+        # wait_cluster_join will find all the nodes agree about the config as
+        # they are still empty with unassigned slots.
+        sleep 1
+        wait_cluster_join
+        flush_nodes_config # Useful for the replicas
+        check_cluster
+    end
+
+ def addnode_cluster_cmd(argv,opt)
+ xputs ">>> Adding node #{argv[0]} to cluster #{argv[1]}"
+
+ # Check the existing cluster
+ load_cluster_info_from_node(argv[1])
+ check_cluster
+
+ # If --master-id was specified, try to resolve it now so that we
+ # abort before starting with the node configuration.
+ if opt['slave']
+ if opt['master-id']
+ master = get_node_by_name(opt['master-id'])
+ if !master
+ xputs "[ERR] No such master ID #{opt['master-id']}"
+ end
+ else
+ master = get_master_with_least_replicas
+ xputs "Automatically selected master #{master}"
+ end
+ end
+
+ # Add the new node
+ new = ClusterNode.new(argv[0])
+ new.connect(:abort => true)
+ new.assert_cluster
+ new.load_info
+ new.assert_empty
+ first = @nodes.first.info
+ add_node(new)
+
+ # Send CLUSTER MEET command to the new node
+ xputs ">>> Send CLUSTER MEET to node #{new} to make it join the cluster."
+ new.r.cluster("meet",first[:host],first[:port])
+
+ # Additional configuration is needed if the node is added as
+ # a slave.
+ if opt['slave']
+ wait_cluster_join
+ xputs ">>> Configure node as replica of #{master}."
+ new.r.cluster("replicate",master.info[:name])
+ end
+ xputs "[OK] New node added correctly."
+ end
+
+    # Entry point for the "del-node" subcommand: remove the node whose ID
+    # is argv[1] from the cluster reachable via argv[0]. The node must not
+    # hold any slot; every other node is told to FORGET it, then the node
+    # itself is shut down.
+    def delnode_cluster_cmd(argv,opt)
+        id = argv[1].downcase
+        xputs ">>> Removing node #{id} from cluster #{argv[0]}"
+
+        # Load cluster information
+        load_cluster_info_from_node(argv[0])
+
+        # Check if the node exists and is not empty
+        node = get_node_by_name(id)
+
+        if !node
+            xputs "[ERR] No such node ID #{id}"
+            exit 1
+        end
+
+        if node.slots.length != 0
+            xputs "[ERR] Node #{node} is not empty! Reshard data away and try again."
+            exit 1
+        end
+
+        # Send CLUSTER FORGET to all the nodes but the node to remove
+        xputs ">>> Sending CLUSTER FORGET messages to the cluster..."
+        @nodes.each{|n|
+            next if n == node
+            if n.info[:replicate] && n.info[:replicate].downcase == id
+                # Reconfigure the slave to replicate with some other node,
+                # otherwise it would be left replicating a removed master.
+                master = get_master_with_least_replicas
+                xputs ">>> #{n} as replica of #{master}"
+                n.r.cluster("replicate",master.info[:name])
+            end
+            n.r.cluster("forget",argv[1])
+        }
+
+        # Finally shutdown the node
+        xputs ">>> SHUTDOWN the node."
+        node.r.shutdown
+    end
+
+ def set_timeout_cluster_cmd(argv,opt)
+ timeout = argv[1].to_i
+ if timeout < 100
+ puts "Setting a node timeout of less than 100 milliseconds is a bad idea."
+ exit 1
+ end
+
+ # Load cluster information
+ load_cluster_info_from_node(argv[0])
+ ok_count = 0
+ err_count = 0
+
+ # Send CLUSTER FORGET to all the nodes but the node to remove
+ xputs ">>> Reconfiguring node timeout in every cluster node..."
+ @nodes.each{|n|
+ begin
+ n.r.config("set","cluster-node-timeout",timeout)
+ n.r.config("rewrite")
+ ok_count += 1
+ xputs "*** New timeout set for #{n}"
+ rescue => e
+ puts "ERR setting node-timeot for #{n}: #{e}"
+ err_count += 1
+ end
+ }
+ xputs ">>> New node timeout set. #{ok_count} OK, #{err_count} ERR."
+ end
+
+ def call_cluster_cmd(argv,opt)
+ cmd = argv[1..-1]
+ cmd[0] = cmd[0].upcase
+
+ # Load cluster information
+ load_cluster_info_from_node(argv[0])
+ xputs ">>> Calling #{cmd.join(" ")}"
+ @nodes.each{|n|
+ begin
+ res = n.r.send(*cmd)
+ puts "#{n}: #{res}"
+ rescue => e
+ puts "#{n}: #{e}"
+ end
+ }
+ end
+
+ def import_cluster_cmd(argv,opt)
+ source_addr = opt['from']
+ xputs ">>> Importing data from #{source_addr} to cluster #{argv[1]}"
+ use_copy = opt['copy']
+ use_replace = opt['replace']
+
+ # Check the existing cluster.
+ load_cluster_info_from_node(argv[0])
+ check_cluster
+
+ # Connect to the source node.
+ xputs ">>> Connecting to the source Redis instance"
+ src_host,src_port = source_addr.split(":")
+ source = Redis.new(:host =>src_host, :port =>src_port)
+ if source.info['cluster_enabled'].to_i == 1
+ xputs "[ERR] The source node should not be a cluster node."
+ end
+ xputs "*** Importing #{source.dbsize} keys from DB 0"
+
+ # Build a slot -> node map
+ slots = {}
+ @nodes.each{|n|
+ n.slots.each{|s,_|
+ slots[s] = n
+ }
+ }
+
+ # Use SCAN to iterate over the keys, migrating to the
+ # right node as needed.
+ cursor = nil
+ while cursor != 0
+ cursor,keys = source.scan(cursor, :count => 1000)
+ cursor = cursor.to_i
+ keys.each{|k|
+ # Migrate keys using the MIGRATE command.
+ slot = key_to_slot(k)
+ target = slots[slot]
+ print "Migrating #{k} to #{target}: "
+ STDOUT.flush
+ begin
+ cmd = ["migrate",target.info[:host],target.info[:port],k,0,@timeout]
+ cmd << :copy if use_copy
+ cmd << :replace if use_replace
+ source.client.call(cmd)
+ rescue => e
+ puts e
+ else
+ puts "OK"
+ end
+ }
+ end
+ end
+
+    # Entry point for the "help" subcommand: print usage and quit.
+    def help_cluster_cmd(argv,opt)
+        show_help
+        exit 0
+    end
+
+ # Parse the options for the specific command "cmd".
+ # Returns an hash populate with option => value pairs, and the index of
+ # the first non-option argument in ARGV.
+ def parse_options(cmd)
+ idx = 1 ; # Current index into ARGV
+ options={}
+ while idx < ARGV.length && ARGV[idx][0..1] == '--'
+ if ARGV[idx][0..1] == "--"
+ option = ARGV[idx][2..-1]
+ idx += 1
+
+ # --verbose is a global option
+ if option == "verbose"
+ $verbose = true
+ next
+ end
+
+ if ALLOWED_OPTIONS[cmd] == nil || ALLOWED_OPTIONS[cmd][option] == nil
+ puts "Unknown option '#{option}' for command '#{cmd}'"
+ exit 1
+ end
+ if ALLOWED_OPTIONS[cmd][option] != false
+ value = ARGV[idx]
+ idx += 1
+ else
+ value = true
+ end
+
+ # If the option is set to [], it's a multiple arguments
+ # option. We just queue every new value into an array.
+ if ALLOWED_OPTIONS[cmd][option] == []
+ options[option] = [] if !options[option]
+ options[option] << value
+ else
+ options[option] = value
+ end
+ else
+ # Remaining arguments are not options.
+ break
+ end
+ end
+
+ # Enforce mandatory options
+ if ALLOWED_OPTIONS[cmd]
+ ALLOWED_OPTIONS[cmd].each {|option,val|
+ if !options[option] && val == :required
+ puts "Option '--#{option}' is required "+ \
+ "for subcommand '#{cmd}'"
+ exit 1
+ end
+ }
+ end
+ return options,idx
+ end
+end
+
+#################################################################################
+# Libraries
+#
+# We try to don't depend on external libs since this is a critical part
+# of Redis Cluster.
+#################################################################################
+
+# This is the CRC16 algorithm used by Redis Cluster to hash keys.
+# Implementation according to CCITT standards.
+#
+# This is actually the XMODEM CRC 16 algorithm, using the
+# following parameters:
+#
+# Name : "XMODEM", also known as "ZMODEM", "CRC-16/ACORN"
+# Width : 16 bit
+# Poly : 1021 (That is actually x^16 + x^12 + x^5 + 1)
+# Initialization : 0000
+# Reflect Input byte : False
+# Reflect Output CRC : False
+# Xor constant to output CRC : 0000
+# Output for "123456789" : 31C3
+
+module RedisClusterCRC16
+    # Compute the CRC16/XMODEM checksum of the given string and return it
+    # as an Integer in 0..0xffff. Each byte of the input is folded into
+    # the 16 bit accumulator via the precomputed lookup table below.
+    def RedisClusterCRC16.crc16(bytes)
+        crc = 0
+        bytes.each_byte{|b|
+            # Table driven update: shift the accumulator left one byte and
+            # XOR with the remainder precomputed for the outgoing byte.
+            crc = ((crc<<8) & 0xffff) ^ XMODEMCRC16Lookup[((crc>>8)^b) & 0xff]
+        }
+        crc
+    end
+
+private
+    # Precomputed CRC remainders for all 256 byte values (poly 0x1021).
+    XMODEMCRC16Lookup = [
+        0x0000,0x1021,0x2042,0x3063,0x4084,0x50a5,0x60c6,0x70e7,
+        0x8108,0x9129,0xa14a,0xb16b,0xc18c,0xd1ad,0xe1ce,0xf1ef,
+        0x1231,0x0210,0x3273,0x2252,0x52b5,0x4294,0x72f7,0x62d6,
+        0x9339,0x8318,0xb37b,0xa35a,0xd3bd,0xc39c,0xf3ff,0xe3de,
+        0x2462,0x3443,0x0420,0x1401,0x64e6,0x74c7,0x44a4,0x5485,
+        0xa56a,0xb54b,0x8528,0x9509,0xe5ee,0xf5cf,0xc5ac,0xd58d,
+        0x3653,0x2672,0x1611,0x0630,0x76d7,0x66f6,0x5695,0x46b4,
+        0xb75b,0xa77a,0x9719,0x8738,0xf7df,0xe7fe,0xd79d,0xc7bc,
+        0x48c4,0x58e5,0x6886,0x78a7,0x0840,0x1861,0x2802,0x3823,
+        0xc9cc,0xd9ed,0xe98e,0xf9af,0x8948,0x9969,0xa90a,0xb92b,
+        0x5af5,0x4ad4,0x7ab7,0x6a96,0x1a71,0x0a50,0x3a33,0x2a12,
+        0xdbfd,0xcbdc,0xfbbf,0xeb9e,0x9b79,0x8b58,0xbb3b,0xab1a,
+        0x6ca6,0x7c87,0x4ce4,0x5cc5,0x2c22,0x3c03,0x0c60,0x1c41,
+        0xedae,0xfd8f,0xcdec,0xddcd,0xad2a,0xbd0b,0x8d68,0x9d49,
+        0x7e97,0x6eb6,0x5ed5,0x4ef4,0x3e13,0x2e32,0x1e51,0x0e70,
+        0xff9f,0xefbe,0xdfdd,0xcffc,0xbf1b,0xaf3a,0x9f59,0x8f78,
+        0x9188,0x81a9,0xb1ca,0xa1eb,0xd10c,0xc12d,0xf14e,0xe16f,
+        0x1080,0x00a1,0x30c2,0x20e3,0x5004,0x4025,0x7046,0x6067,
+        0x83b9,0x9398,0xa3fb,0xb3da,0xc33d,0xd31c,0xe37f,0xf35e,
+        0x02b1,0x1290,0x22f3,0x32d2,0x4235,0x5214,0x6277,0x7256,
+        0xb5ea,0xa5cb,0x95a8,0x8589,0xf56e,0xe54f,0xd52c,0xc50d,
+        0x34e2,0x24c3,0x14a0,0x0481,0x7466,0x6447,0x5424,0x4405,
+        0xa7db,0xb7fa,0x8799,0x97b8,0xe75f,0xf77e,0xc71d,0xd73c,
+        0x26d3,0x36f2,0x0691,0x16b0,0x6657,0x7676,0x4615,0x5634,
+        0xd94c,0xc96d,0xf90e,0xe92f,0x99c8,0x89e9,0xb98a,0xa9ab,
+        0x5844,0x4865,0x7806,0x6827,0x18c0,0x08e1,0x3882,0x28a3,
+        0xcb7d,0xdb5c,0xeb3f,0xfb1e,0x8bf9,0x9bd8,0xabbb,0xbb9a,
+        0x4a75,0x5a54,0x6a37,0x7a16,0x0af1,0x1ad0,0x2ab3,0x3a92,
+        0xfd2e,0xed0f,0xdd6c,0xcd4d,0xbdaa,0xad8b,0x9de8,0x8dc9,
+        0x7c26,0x6c07,0x5c64,0x4c45,0x3ca2,0x2c83,0x1ce0,0x0cc1,
+        0xef1f,0xff3e,0xcf5d,0xdf7c,0xaf9b,0xbfba,0x8fd9,0x9ff8,
+        0x6e17,0x7e36,0x4e55,0x5e74,0x2e93,0x3eb2,0x0ed1,0x1ef0
+    ]
+end
+
+# Turn a key name into the corrisponding Redis Cluster slot.
+def key_to_slot(key)
+ # Only hash what is inside {...} if there is such a pattern in the key.
+ # Note that the specification requires the content that is between
+ # the first { and the first } after the first {. If we found {} without
+ # nothing in the middle, the whole key is hashed as usually.
+ s = key.index "{"
+ if s
+ e = key.index "}",s+1
+ if e && e != s+1
+ key = key[s+1..e-1]
+ end
+ end
+ RedisClusterCRC16.crc16(key) % 16384
+end
+
+#################################################################################
+# Definition of commands
+#################################################################################
+
+# Map of subcommand name -> [RedisTrib method name, arity, usage string].
+# Arity is checked by rt.check_arity at the bottom of this file; a
+# negative value appears to mean "at least that many arguments" (check_arity
+# is defined elsewhere in the file) -- TODO confirm.
+COMMANDS={
+    "create"  => ["create_cluster_cmd", -2, "host1:port1 ... hostN:portN"],
+    "check"   => ["check_cluster_cmd", 2, "host:port"],
+    "info"    => ["info_cluster_cmd", 2, "host:port"],
+    "fix"     => ["fix_cluster_cmd", 2, "host:port"],
+    "reshard" => ["reshard_cluster_cmd", 2, "host:port"],
+    "rebalance" => ["rebalance_cluster_cmd", -2, "host:port"],
+    "add-node" => ["addnode_cluster_cmd", 3, "new_host:new_port existing_host:existing_port"],
+    "del-node" => ["delnode_cluster_cmd", 3, "host:port node_id"],
+    "set-timeout" => ["set_timeout_cluster_cmd", 3, "host:port milliseconds"],
+    "call" => ["call_cluster_cmd", -3, "host:port command arg arg .. arg"],
+    "import" => ["import_cluster_cmd", 2, "host:port"],
+    "help" => ["help_cluster_cmd", 1, "(show this help)"]
+}
+
+# Per-subcommand option specs, consumed by RedisTrib#parse_options:
+#   true      -> option takes one argument
+#   false     -> boolean flag, no argument
+#   :required -> takes one argument and is mandatory
+#   []        -> takes one argument and may be repeated (values collected)
+# NOTE(review): the MigrateDefaultTimeout value for "fix" is treated like
+# `true` by parse_options (it only compares against false/:required/[]);
+# it does not act as a default value -- confirm.
+ALLOWED_OPTIONS={
+    "create" => {"replicas" => true},
+    "add-node" => {"slave" => false, "master-id" => true},
+    "import" => {"from" => :required, "copy" => false, "replace" => false},
+    "reshard" => {"from" => true, "to" => true, "slots" => true, "yes" => false, "timeout" => true, "pipeline" => true},
+    "rebalance" => {"weight" => [], "auto-weights" => false, "use-empty-masters" => false, "timeout" => true, "simulate" => false, "pipeline" => true, "threshold" => true},
+    "fix" => {"timeout" => MigrateDefaultTimeout},
+}
+
+def show_help
+ puts "Usage: redis-trib \n\n"
+ COMMANDS.each{|k,v|
+ o = ""
+ puts " #{k.ljust(15)} #{v[2]}"
+ if ALLOWED_OPTIONS[k]
+ ALLOWED_OPTIONS[k].each{|optname,has_arg|
+ puts " --#{optname}" + (has_arg ? " " : "")
+ }
+ end
+ }
+ puts "\nFor check, fix, reshard, del-node, set-timeout you can specify the host and port of any working node in the cluster.\n"
+end
+
+# Script entry point: validate the command line, parse options for the
+# requested subcommand and dispatch to the matching RedisTrib method.
+
+# Sanity check
+if ARGV.length == 0
+    show_help
+    exit 1
+end
+
+rt = RedisTrib.new
+cmd_spec = COMMANDS[ARGV[0].downcase]
+if !cmd_spec
+    puts "Unknown redis-trib subcommand '#{ARGV[0]}'"
+    exit 1
+end
+
+# Parse options
+cmd_options,first_non_option = rt.parse_options(ARGV[0].downcase)
+# Verify the remaining (non-option) argument count against the spec.
+rt.check_arity(cmd_spec[1],ARGV.length-(first_non_option-1))
+
+# Dispatch
+rt.send(cmd_spec[0],ARGV[first_non_option..-1],cmd_options)
\ No newline at end of file
diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js
new file mode 100644
index 0000000000..5ae6f4eabd
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js
@@ -0,0 +1,849 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const async = require('async')
+const Settings = require('@overleaf/settings')
+const rclient_history = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.history
+) // note: this is track changes, not project-history
+const rclient_project_history = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.project_history
+)
+const rclient_du = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.documentupdater
+)
+const Keys = Settings.redis.documentupdater.key_schema
+const HistoryKeys = Settings.redis.history.key_schema
+const ProjectHistoryKeys = Settings.redis.project_history.key_schema
+
+const MockTrackChangesApi = require('./helpers/MockTrackChangesApi')
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
+describe('Applying updates to a doc', function () {
+  before(function (done) {
+    // Shared fixture: a three-line document at version 42, one insert op,
+    // and the expected post-update content. Each sub-suite generates its
+    // own project/doc ids so the suites do not interfere.
+    this.lines = ['one', 'two', 'three']
+    this.version = 42
+    // NOTE(review): this.doc_id is still undefined at this point (it is
+    // assigned per-suite later); sendUpdate takes explicit ids, so the
+    // stale `doc` field here is presumably unused -- confirm.
+    this.update = {
+      doc: this.doc_id,
+      op: [
+        {
+          i: 'one and a half\n',
+          p: 4,
+        },
+      ],
+      v: this.version,
+    }
+    this.result = ['one', 'one and a half', 'two', 'three']
+    // Make sure the document-updater app is running before any test.
+    return DocUpdaterApp.ensureRunning(done)
+  })
+
+  describe('when the document is not loaded', function () {
+    // Cold path: the doc is not in redis yet, so applying an update must
+    // first fetch it from the (mocked) web API.
+    before(function (done) {
+      ;[this.project_id, this.doc_id] = Array.from([
+        DocUpdaterClient.randomId(),
+        DocUpdaterClient.randomId(),
+      ])
+      sinon.spy(MockWebApi, 'getDocument')
+      this.startTime = Date.now()
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines,
+        version: this.version,
+      })
+      DocUpdaterClient.sendUpdate(
+        this.project_id,
+        this.doc_id,
+        this.update,
+        error => {
+          if (error != null) {
+            throw error
+          }
+          // Updates are applied asynchronously; give the service a moment
+          // to process before the assertions run.
+          return setTimeout(done, 200)
+        }
+      )
+      return null
+    })
+
+    after(function () {
+      return MockWebApi.getDocument.restore()
+    })
+
+    it('should load the document from the web API', function () {
+      return MockWebApi.getDocument
+        .calledWith(this.project_id, this.doc_id)
+        .should.equal(true)
+    })
+
+    it('should update the doc', function (done) {
+      DocUpdaterClient.getDoc(
+        this.project_id,
+        this.doc_id,
+        (error, res, doc) => {
+          doc.lines.should.deep.equal(this.result)
+          return done()
+        }
+      )
+      return null
+    })
+
+    it('should push the applied updates to the track changes api', function (done) {
+      rclient_history.lrange(
+        HistoryKeys.uncompressedHistoryOps({ doc_id: this.doc_id }),
+        0,
+        -1,
+        (error, updates) => {
+          if (error != null) {
+            throw error
+          }
+          JSON.parse(updates[0]).op.should.deep.equal(this.update.op)
+          // The doc must also be flagged as having pending history ops.
+          return rclient_history.sismember(
+            HistoryKeys.docsWithHistoryOps({ project_id: this.project_id }),
+            this.doc_id,
+            (error, result) => {
+              if (error != null) {
+                throw error
+              }
+              result.should.equal(1)
+              return done()
+            }
+          )
+        }
+      )
+      return null
+    })
+
+    it('should push the applied updates to the project history changes api', function (done) {
+      rclient_project_history.lrange(
+        ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
+        0,
+        -1,
+        (error, updates) => {
+          if (error != null) {
+            throw error
+          }
+          JSON.parse(updates[0]).op.should.deep.equal(this.update.op)
+          return done()
+        }
+      )
+      return null
+    })
+
+    it('should set the first op timestamp', function (done) {
+      rclient_project_history.get(
+        ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
+          project_id: this.project_id,
+        }),
+        (error, result) => {
+          if (error != null) {
+            throw error
+          }
+          result = parseInt(result, 10)
+          // Timestamp must fall between test start and now.
+          result.should.be.within(this.startTime, Date.now())
+          this.firstOpTimestamp = result
+          return done()
+        }
+      )
+      return null
+    })
+
+    return describe('when sending another update', function () {
+      before(function (done) {
+        this.timeout = 10000
+        this.second_update = Object.create(this.update)
+        this.second_update.v = this.version + 1
+        DocUpdaterClient.sendUpdate(
+          this.project_id,
+          this.doc_id,
+          this.second_update,
+          error => {
+            if (error != null) {
+              throw error
+            }
+            return setTimeout(done, 200)
+          }
+        )
+        return null
+      })
+
+      return it('should not change the first op timestamp', function (done) {
+        rclient_project_history.get(
+          ProjectHistoryKeys.projectHistoryFirstOpTimestamp({
+            project_id: this.project_id,
+          }),
+          (error, result) => {
+            if (error != null) {
+              throw error
+            }
+            result = parseInt(result, 10)
+            result.should.equal(this.firstOpTimestamp)
+            return done()
+          }
+        )
+        return null
+      })
+    })
+  })
+
+  describe('when the document is loaded', function () {
+    // Warm path: the doc is preloaded into redis first, so applying an
+    // update must not hit the web API again.
+    before(function (done) {
+      ;[this.project_id, this.doc_id] = Array.from([
+        DocUpdaterClient.randomId(),
+        DocUpdaterClient.randomId(),
+      ])
+
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines,
+        version: this.version,
+      })
+      DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
+        if (error != null) {
+          throw error
+        }
+        // Spy only after preloading, so any recorded call must have been
+        // triggered by the update itself.
+        sinon.spy(MockWebApi, 'getDocument')
+        return DocUpdaterClient.sendUpdate(
+          this.project_id,
+          this.doc_id,
+          this.update,
+          error => {
+            if (error != null) {
+              throw error
+            }
+            return setTimeout(done, 200)
+          }
+        )
+      })
+      return null
+    })
+
+    after(function () {
+      return MockWebApi.getDocument.restore()
+    })
+
+    it('should not need to call the web api', function () {
+      return MockWebApi.getDocument.called.should.equal(false)
+    })
+
+    it('should update the doc', function (done) {
+      DocUpdaterClient.getDoc(
+        this.project_id,
+        this.doc_id,
+        (error, res, doc) => {
+          doc.lines.should.deep.equal(this.result)
+          return done()
+        }
+      )
+      return null
+    })
+
+    it('should push the applied updates to the track changes api', function (done) {
+      rclient_history.lrange(
+        HistoryKeys.uncompressedHistoryOps({ doc_id: this.doc_id }),
+        0,
+        -1,
+        (error, updates) => {
+          JSON.parse(updates[0]).op.should.deep.equal(this.update.op)
+          return rclient_history.sismember(
+            HistoryKeys.docsWithHistoryOps({ project_id: this.project_id }),
+            this.doc_id,
+            (error, result) => {
+              result.should.equal(1)
+              return done()
+            }
+          )
+        }
+      )
+      return null
+    })
+
+    return it('should push the applied updates to the project history changes api', function (done) {
+      rclient_project_history.lrange(
+        ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
+        0,
+        -1,
+        (error, updates) => {
+          JSON.parse(updates[0]).op.should.deep.equal(this.update.op)
+          return done()
+        }
+      )
+      return null
+    })
+  })
+
+  describe('when the document is loaded and is using project-history only', function () {
+    // With projectHistoryType set, updates go to project-history only and
+    // must NOT be queued for the legacy track-changes service.
+    before(function (done) {
+      ;[this.project_id, this.doc_id] = Array.from([
+        DocUpdaterClient.randomId(),
+        DocUpdaterClient.randomId(),
+      ])
+
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines,
+        version: this.version,
+        projectHistoryType: 'project-history',
+      })
+      DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
+        if (error != null) {
+          throw error
+        }
+        sinon.spy(MockWebApi, 'getDocument')
+        return DocUpdaterClient.sendUpdate(
+          this.project_id,
+          this.doc_id,
+          this.update,
+          error => {
+            if (error != null) {
+              throw error
+            }
+            return setTimeout(done, 200)
+          }
+        )
+      })
+      return null
+    })
+
+    after(function () {
+      return MockWebApi.getDocument.restore()
+    })
+
+    it('should update the doc', function (done) {
+      DocUpdaterClient.getDoc(
+        this.project_id,
+        this.doc_id,
+        (error, res, doc) => {
+          doc.lines.should.deep.equal(this.result)
+          return done()
+        }
+      )
+      return null
+    })
+
+    it('should not push any applied updates to the track changes api', function (done) {
+      rclient_history.lrange(
+        HistoryKeys.uncompressedHistoryOps({ doc_id: this.doc_id }),
+        0,
+        -1,
+        (error, updates) => {
+          updates.length.should.equal(0)
+          return done()
+        }
+      )
+      return null
+    })
+
+    return it('should push the applied updates to the project history changes api', function (done) {
+      rclient_project_history.lrange(
+        ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
+        0,
+        -1,
+        (error, updates) => {
+          JSON.parse(updates[0]).op.should.deep.equal(this.update.op)
+          return done()
+        }
+      )
+      return null
+    })
+  })
+
+  describe('when the document has been deleted', function () {
+    describe('when the ops come in a single linear order', function () {
+      // Builds "hello world" one character per op; a deleteDoc is injected
+      // mid-stream to prove the doc can be flushed out and reloaded while
+      // updates keep arriving in version order.
+      before(function (done) {
+        ;[this.project_id, this.doc_id] = Array.from([
+          DocUpdaterClient.randomId(),
+          DocUpdaterClient.randomId(),
+        ])
+        const lines = ['', '', '']
+        MockWebApi.insertDoc(this.project_id, this.doc_id, {
+          lines,
+          version: 0,
+        })
+        this.updates = [
+          { doc_id: this.doc_id, v: 0, op: [{ i: 'h', p: 0 }] },
+          { doc_id: this.doc_id, v: 1, op: [{ i: 'e', p: 1 }] },
+          { doc_id: this.doc_id, v: 2, op: [{ i: 'l', p: 2 }] },
+          { doc_id: this.doc_id, v: 3, op: [{ i: 'l', p: 3 }] },
+          { doc_id: this.doc_id, v: 4, op: [{ i: 'o', p: 4 }] },
+          { doc_id: this.doc_id, v: 5, op: [{ i: ' ', p: 5 }] },
+          { doc_id: this.doc_id, v: 6, op: [{ i: 'w', p: 6 }] },
+          { doc_id: this.doc_id, v: 7, op: [{ i: 'o', p: 7 }] },
+          { doc_id: this.doc_id, v: 8, op: [{ i: 'r', p: 8 }] },
+          { doc_id: this.doc_id, v: 9, op: [{ i: 'l', p: 9 }] },
+          { doc_id: this.doc_id, v: 10, op: [{ i: 'd', p: 10 }] },
+        ]
+        this.my_result = ['hello world', '', '']
+        return done()
+      })
+
+      it('should be able to continue applying updates when the project has been deleted', function (done) {
+        let update
+        const actions = []
+        // First six updates, then a delete, then the remaining updates.
+        for (update of Array.from(this.updates.slice(0, 6))) {
+          ;(update => {
+            return actions.push(callback =>
+              DocUpdaterClient.sendUpdate(
+                this.project_id,
+                this.doc_id,
+                update,
+                callback
+              )
+            )
+          })(update)
+        }
+        actions.push(callback =>
+          DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback)
+        )
+        for (update of Array.from(this.updates.slice(6))) {
+          ;(update => {
+            return actions.push(callback =>
+              DocUpdaterClient.sendUpdate(
+                this.project_id,
+                this.doc_id,
+                update,
+                callback
+              )
+            )
+          })(update)
+        }
+
+        async.series(actions, error => {
+          if (error != null) {
+            throw error
+          }
+          return DocUpdaterClient.getDoc(
+            this.project_id,
+            this.doc_id,
+            (error, res, doc) => {
+              doc.lines.should.deep.equal(this.my_result)
+              return done()
+            }
+          )
+        })
+        return null
+      })
+
+      it('should push the applied updates to the track changes api', function (done) {
+        rclient_history.lrange(
+          HistoryKeys.uncompressedHistoryOps({ doc_id: this.doc_id }),
+          0,
+          -1,
+          (error, updates) => {
+            updates = Array.from(updates).map(u => JSON.parse(u))
+            for (let i = 0; i < this.updates.length; i++) {
+              const appliedUpdate = this.updates[i]
+              appliedUpdate.op.should.deep.equal(updates[i].op)
+            }
+
+            return rclient_history.sismember(
+              HistoryKeys.docsWithHistoryOps({ project_id: this.project_id }),
+              this.doc_id,
+              (error, result) => {
+                result.should.equal(1)
+                return done()
+              }
+            )
+          }
+        )
+        return null
+      })
+
+      return it('should store the doc ops in the correct order', function (done) {
+        rclient_du.lrange(
+          Keys.docOps({ doc_id: this.doc_id }),
+          0,
+          -1,
+          (error, updates) => {
+            updates = Array.from(updates).map(u => JSON.parse(u))
+            for (let i = 0; i < this.updates.length; i++) {
+              const appliedUpdate = this.updates[i]
+              appliedUpdate.op.should.deep.equal(updates[i].op)
+            }
+            return done()
+          }
+        )
+        return null
+      })
+    })
+
+    return describe('when older ops come in after the delete', function () {
+      // The final update has an old version (v: 0), so operational
+      // transform must rebase it against the five already-applied ops.
+      before(function (done) {
+        ;[this.project_id, this.doc_id] = Array.from([
+          DocUpdaterClient.randomId(),
+          DocUpdaterClient.randomId(),
+        ])
+        const lines = ['', '', '']
+        MockWebApi.insertDoc(this.project_id, this.doc_id, {
+          lines,
+          version: 0,
+        })
+        this.updates = [
+          { doc_id: this.doc_id, v: 0, op: [{ i: 'h', p: 0 }] },
+          { doc_id: this.doc_id, v: 1, op: [{ i: 'e', p: 1 }] },
+          { doc_id: this.doc_id, v: 2, op: [{ i: 'l', p: 2 }] },
+          { doc_id: this.doc_id, v: 3, op: [{ i: 'l', p: 3 }] },
+          { doc_id: this.doc_id, v: 4, op: [{ i: 'o', p: 4 }] },
+          { doc_id: this.doc_id, v: 0, op: [{ i: 'world', p: 1 }] },
+        ]
+        this.my_result = ['hello', 'world', '']
+        return done()
+      })
+
+      return it('should be able to continue applying updates when the project has been deleted', function (done) {
+        let update
+        const actions = []
+        for (update of Array.from(this.updates.slice(0, 5))) {
+          ;(update => {
+            return actions.push(callback =>
+              DocUpdaterClient.sendUpdate(
+                this.project_id,
+                this.doc_id,
+                update,
+                callback
+              )
+            )
+          })(update)
+        }
+        actions.push(callback =>
+          DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback)
+        )
+        for (update of Array.from(this.updates.slice(5))) {
+          ;(update => {
+            return actions.push(callback =>
+              DocUpdaterClient.sendUpdate(
+                this.project_id,
+                this.doc_id,
+                update,
+                callback
+              )
+            )
+          })(update)
+        }
+
+        async.series(actions, error => {
+          if (error != null) {
+            throw error
+          }
+          return DocUpdaterClient.getDoc(
+            this.project_id,
+            this.doc_id,
+            (error, res, doc) => {
+              doc.lines.should.deep.equal(this.my_result)
+              return done()
+            }
+          )
+        })
+        return null
+      })
+    })
+  })
+
+  describe('with a broken update', function () {
+    // An op whose delete component does not match the doc content must be
+    // rejected: the doc stays unchanged and an error is broadcast on the
+    // applied-ops pub/sub channel.
+    before(function (done) {
+      ;[this.project_id, this.doc_id] = Array.from([
+        DocUpdaterClient.randomId(),
+        DocUpdaterClient.randomId(),
+      ])
+      this.broken_update = {
+        doc_id: this.doc_id,
+        v: this.version,
+        op: [{ d: 'not the correct content', p: 0 }],
+      }
+      MockWebApi.insertDoc(this.project_id, this.doc_id, {
+        lines: this.lines,
+        version: this.version,
+      })
+
+      DocUpdaterClient.subscribeToAppliedOps(
+        (this.messageCallback = sinon.stub())
+      )
+
+      DocUpdaterClient.sendUpdate(
+        this.project_id,
+        this.doc_id,
+        this.broken_update,
+        error => {
+          if (error != null) {
+            throw error
+          }
+          return setTimeout(done, 200)
+        }
+      )
+      return null
+    })
+
+    it('should not update the doc', function (done) {
+      DocUpdaterClient.getDoc(
+        this.project_id,
+        this.doc_id,
+        (error, res, doc) => {
+          doc.lines.should.deep.equal(this.lines)
+          return done()
+        }
+      )
+      return null
+    })
+
+    return it('should send a message with an error', function () {
+      this.messageCallback.called.should.equal(true)
+      const [channel, message] = Array.from(this.messageCallback.args[0])
+      channel.should.equal('applied-ops')
+      return JSON.parse(message).should.deep.include({
+        project_id: this.project_id,
+        doc_id: this.doc_id,
+        error: 'Delete component does not match',
+      })
+    })
+  })
+
+ describe('with enough updates to flush to the track changes api', function () {
+ before(function (done) {
+ ;[this.project_id, this.doc_id] = Array.from([
+ DocUpdaterClient.randomId(),
+ DocUpdaterClient.randomId(),
+ ])
+ const updates = []
+ for (let v = 0; v <= 199; v++) {
+ // Should flush after 100 ops
+ updates.push({
+ doc_id: this.doc_id,
+ op: [{ i: v.toString(), p: 0 }],
+ v,
+ })
+ }
+
+ sinon.spy(MockTrackChangesApi, 'flushDoc')
+
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ version: 0,
+ })
+
+ // Send updates in chunks to cause multiple flushes
+ const actions = []
+ for (let i = 0; i <= 19; i++) {
+ ;(i => {
+ return actions.push(cb => {
+ return DocUpdaterClient.sendUpdates(
+ this.project_id,
+ this.doc_id,
+ updates.slice(i * 10, (i + 1) * 10),
+ cb
+ )
+ })
+ })(i)
+ }
+ async.series(actions, error => {
+ if (error != null) {
+ throw error
+ }
+ return setTimeout(done, 2000)
+ })
+ return null
+ })
+
+ after(function () {
+ return MockTrackChangesApi.flushDoc.restore()
+ })
+
+ return it('should flush the doc twice', function () {
+ return MockTrackChangesApi.flushDoc.calledTwice.should.equal(true)
+ })
+ })
+
+ describe('when there is no version in Mongo', function () {
+ before(function (done) {
+ ;[this.project_id, this.doc_id] = Array.from([
+ DocUpdaterClient.randomId(),
+ DocUpdaterClient.randomId(),
+ ])
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ })
+
+ const update = {
+ doc: this.doc_id,
+ op: this.update.op,
+ v: 0,
+ }
+ DocUpdaterClient.sendUpdate(
+ this.project_id,
+ this.doc_id,
+ update,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return setTimeout(done, 200)
+ }
+ )
+ return null
+ })
+
+ return it('should update the doc (using version = 0)', function (done) {
+ DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, doc) => {
+ doc.lines.should.deep.equal(this.result)
+ return done()
+ }
+ )
+ return null
+ })
+ })
+
+ describe('when the sending duplicate ops', function () {
+ before(function (done) {
+ ;[this.project_id, this.doc_id] = Array.from([
+ DocUpdaterClient.randomId(),
+ DocUpdaterClient.randomId(),
+ ])
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ version: this.version,
+ })
+
+ DocUpdaterClient.subscribeToAppliedOps(
+ (this.messageCallback = sinon.stub())
+ )
+
+ // Send the same insert op twice at the same version; the second (marked via dupIfSource) is detected as a duplicate and becomes a no-op with dup: true.
+ DocUpdaterClient.sendUpdate(
+ this.project_id,
+ this.doc_id,
+ {
+ doc: this.doc_id,
+ op: [
+ {
+ i: 'one and a half\n',
+ p: 4,
+ },
+ ],
+ v: this.version,
+ meta: {
+ source: 'ikHceq3yfAdQYzBo4-xZ',
+ },
+ },
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return setTimeout(() => {
+ return DocUpdaterClient.sendUpdate(
+ this.project_id,
+ this.doc_id,
+ {
+ doc: this.doc_id,
+ op: [
+ {
+ i: 'one and a half\n',
+ p: 4,
+ },
+ ],
+ v: this.version,
+ dupIfSource: ['ikHceq3yfAdQYzBo4-xZ'],
+ meta: {
+ source: 'ikHceq3yfAdQYzBo4-xZ',
+ },
+ },
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return setTimeout(done, 200)
+ }
+ )
+ }, 200)
+ }
+ )
+ return null
+ })
+
+ it('should update the doc', function (done) {
+ DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, doc) => {
+ doc.lines.should.deep.equal(this.result)
+ return done()
+ }
+ )
+ return null
+ })
+
+ return it('should return a message about duplicate ops', function () {
+ this.messageCallback.calledTwice.should.equal(true)
+ this.messageCallback.args[0][0].should.equal('applied-ops')
+ expect(JSON.parse(this.messageCallback.args[0][1]).op.dup).to.be.undefined
+ this.messageCallback.args[1][0].should.equal('applied-ops')
+ return expect(
+ JSON.parse(this.messageCallback.args[1][1]).op.dup
+ ).to.equal(true)
+ })
+ })
+
+ return describe('when sending updates for a non-existing doc id', function () {
+ before(function (done) {
+ ;[this.project_id, this.doc_id] = Array.from([
+ DocUpdaterClient.randomId(),
+ DocUpdaterClient.randomId(),
+ ])
+ this.non_existing = {
+ doc_id: this.doc_id,
+ v: this.version,
+ op: [{ d: 'content', p: 0 }],
+ }
+
+ DocUpdaterClient.subscribeToAppliedOps(
+ (this.messageCallback = sinon.stub())
+ )
+
+ DocUpdaterClient.sendUpdate(
+ this.project_id,
+ this.doc_id,
+ this.non_existing,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return setTimeout(done, 200)
+ }
+ )
+ return null
+ })
+
+ it('should not update or create a doc', function (done) {
+ DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, doc) => {
+ res.statusCode.should.equal(404)
+ return done()
+ }
+ )
+ return null
+ })
+
+ return it('should send a message with an error', function () {
+ this.messageCallback.called.should.equal(true)
+ const [channel, message] = Array.from(this.messageCallback.args[0])
+ channel.should.equal('applied-ops')
+ return JSON.parse(message).should.deep.include({
+ project_id: this.project_id,
+ doc_id: this.doc_id,
+ error: `doc not not found: /project/${this.project_id}/doc/${this.doc_id}`,
+ })
+ })
+ })
+})
diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js
new file mode 100644
index 0000000000..6d4fc3c019
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js
@@ -0,0 +1,503 @@
+const sinon = require('sinon')
+const Settings = require('@overleaf/settings')
+const rclientProjectHistory = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.project_history
+)
+const ProjectHistoryKeys = Settings.redis.project_history.key_schema
+
+const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi')
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
+describe("Applying updates to a project's structure", function () {
+ before(function () {
+ this.user_id = 'user-id-123'
+ this.version = 1234
+ })
+
+ describe('renaming a file', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.fileUpdate = {
+ type: 'rename-file',
+ id: DocUpdaterClient.randomId(),
+ pathname: '/file-path',
+ newPathname: '/new-file-path',
+ }
+ this.updates = [this.fileUpdate]
+ DocUpdaterApp.ensureRunning(error => {
+ if (error) {
+ return done(error)
+ }
+ DocUpdaterClient.sendProjectUpdate(
+ this.project_id,
+ this.user_id,
+ this.updates,
+ this.version,
+ error => {
+ if (error) {
+ return done(error)
+ }
+ setTimeout(done, 200)
+ }
+ )
+ })
+ })
+
+ it('should push the applied file renames to the project history api', function (done) {
+ rclientProjectHistory.lrange(
+ ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
+ 0,
+ -1,
+ (error, updates) => {
+ if (error) {
+ return done(error)
+ }
+
+ const update = JSON.parse(updates[0])
+ update.file.should.equal(this.fileUpdate.id)
+ update.pathname.should.equal('/file-path')
+ update.new_pathname.should.equal('/new-file-path')
+ update.meta.user_id.should.equal(this.user_id)
+ update.meta.ts.should.be.a('string')
+ update.version.should.equal(`${this.version}.0`)
+
+ done()
+ }
+ )
+ })
+ })
+
+ describe('renaming a document', function () {
+ before(function () {
+ this.update = {
+ type: 'rename-doc',
+ id: DocUpdaterClient.randomId(),
+ pathname: '/doc-path',
+ newPathname: '/new-doc-path',
+ }
+ this.updates = [this.update]
+ })
+
+ describe('when the document is not loaded', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ DocUpdaterClient.sendProjectUpdate(
+ this.project_id,
+ this.user_id,
+ this.updates,
+ this.version,
+ error => {
+ if (error) {
+ return done(error)
+ }
+ setTimeout(done, 200)
+ }
+ )
+ })
+
+ it('should push the applied doc renames to the project history api', function (done) {
+ rclientProjectHistory.lrange(
+ ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
+ 0,
+ -1,
+ (error, updates) => {
+ if (error) {
+ return done(error)
+ }
+
+ const update = JSON.parse(updates[0])
+ update.doc.should.equal(this.update.id)
+ update.pathname.should.equal('/doc-path')
+ update.new_pathname.should.equal('/new-doc-path')
+ update.meta.user_id.should.equal(this.user_id)
+ update.meta.ts.should.be.a('string')
+ update.version.should.equal(`${this.version}.0`)
+
+ done()
+ }
+ )
+ })
+ })
+
+ describe('when the document is loaded', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ MockWebApi.insertDoc(this.project_id, this.update.id, {})
+ DocUpdaterClient.preloadDoc(this.project_id, this.update.id, error => {
+ if (error) {
+ return done(error)
+ }
+ sinon.spy(MockWebApi, 'getDocument')
+ DocUpdaterClient.sendProjectUpdate(
+ this.project_id,
+ this.user_id,
+ this.updates,
+ this.version,
+ error => {
+ if (error) {
+ return done(error)
+ }
+ setTimeout(done, 200)
+ }
+ )
+ })
+ })
+
+ after(function () {
+ MockWebApi.getDocument.restore()
+ })
+
+ it('should update the doc', function (done) {
+ DocUpdaterClient.getDoc(
+ this.project_id,
+ this.update.id,
+ (error, res, doc) => {
+ if (error) {
+ return done(error)
+ }
+ doc.pathname.should.equal(this.update.newPathname)
+ done()
+ }
+ )
+ })
+
+ it('should push the applied doc renames to the project history api', function (done) {
+ rclientProjectHistory.lrange(
+ ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
+ 0,
+ -1,
+ (error, updates) => {
+ if (error) {
+ return done(error)
+ }
+
+ const update = JSON.parse(updates[0])
+ update.doc.should.equal(this.update.id)
+ update.pathname.should.equal('/doc-path')
+ update.new_pathname.should.equal('/new-doc-path')
+ update.meta.user_id.should.equal(this.user_id)
+ update.meta.ts.should.be.a('string')
+ update.version.should.equal(`${this.version}.0`)
+
+ done()
+ }
+ )
+ })
+ })
+ })
+
+ describe('renaming multiple documents and files', function () {
+ before(function () {
+ this.docUpdate0 = {
+ type: 'rename-doc',
+ id: DocUpdaterClient.randomId(),
+ pathname: '/doc-path0',
+ newPathname: '/new-doc-path0',
+ }
+ this.docUpdate1 = {
+ type: 'rename-doc',
+ id: DocUpdaterClient.randomId(),
+ pathname: '/doc-path1',
+ newPathname: '/new-doc-path1',
+ }
+ this.fileUpdate0 = {
+ type: 'rename-file',
+ id: DocUpdaterClient.randomId(),
+ pathname: '/file-path0',
+ newPathname: '/new-file-path0',
+ }
+ this.fileUpdate1 = {
+ type: 'rename-file',
+ id: DocUpdaterClient.randomId(),
+ pathname: '/file-path1',
+ newPathname: '/new-file-path1',
+ }
+ this.updates = [
+ this.docUpdate0,
+ this.docUpdate1,
+ this.fileUpdate0,
+ this.fileUpdate1,
+ ]
+ })
+
+ describe('when the documents are not loaded', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ DocUpdaterClient.sendProjectUpdate(
+ this.project_id,
+ this.user_id,
+ this.updates,
+ this.version,
+ error => {
+ if (error) {
+ return done(error)
+ }
+ setTimeout(done, 200)
+ }
+ )
+ })
+
+ it('should push the applied doc renames to the project history api', function (done) {
+ rclientProjectHistory.lrange(
+ ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
+ 0,
+ -1,
+ (error, updates) => {
+ if (error) {
+ return done(error)
+ }
+
+ let update = JSON.parse(updates[0])
+ update.doc.should.equal(this.docUpdate0.id)
+ update.pathname.should.equal('/doc-path0')
+ update.new_pathname.should.equal('/new-doc-path0')
+ update.meta.user_id.should.equal(this.user_id)
+ update.meta.ts.should.be.a('string')
+ update.version.should.equal(`${this.version}.0`)
+
+ update = JSON.parse(updates[1])
+ update.doc.should.equal(this.docUpdate1.id)
+ update.pathname.should.equal('/doc-path1')
+ update.new_pathname.should.equal('/new-doc-path1')
+ update.meta.user_id.should.equal(this.user_id)
+ update.meta.ts.should.be.a('string')
+ update.version.should.equal(`${this.version}.1`)
+
+ update = JSON.parse(updates[2])
+ update.file.should.equal(this.fileUpdate0.id)
+ update.pathname.should.equal('/file-path0')
+ update.new_pathname.should.equal('/new-file-path0')
+ update.meta.user_id.should.equal(this.user_id)
+ update.meta.ts.should.be.a('string')
+ update.version.should.equal(`${this.version}.2`)
+
+ update = JSON.parse(updates[3])
+ update.file.should.equal(this.fileUpdate1.id)
+ update.pathname.should.equal('/file-path1')
+ update.new_pathname.should.equal('/new-file-path1')
+ update.meta.user_id.should.equal(this.user_id)
+ update.meta.ts.should.be.a('string')
+ update.version.should.equal(`${this.version}.3`)
+
+ done()
+ }
+ )
+ })
+ })
+ })
+
+ describe('adding a file', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.fileUpdate = {
+ type: 'add-file',
+ id: DocUpdaterClient.randomId(),
+ pathname: '/file-path',
+ url: 'filestore.example.com',
+ }
+ this.updates = [this.fileUpdate]
+ DocUpdaterClient.sendProjectUpdate(
+ this.project_id,
+ this.user_id,
+ this.updates,
+ this.version,
+ error => {
+ if (error) {
+ return done(error)
+ }
+ setTimeout(done, 200)
+ }
+ )
+ })
+
+ it('should push the file addition to the project history api', function (done) {
+ rclientProjectHistory.lrange(
+ ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
+ 0,
+ -1,
+ (error, updates) => {
+ if (error) {
+ return done(error)
+ }
+
+ const update = JSON.parse(updates[0])
+ update.file.should.equal(this.fileUpdate.id)
+ update.pathname.should.equal('/file-path')
+ update.url.should.equal('filestore.example.com')
+ update.meta.user_id.should.equal(this.user_id)
+ update.meta.ts.should.be.a('string')
+ update.version.should.equal(`${this.version}.0`)
+
+ done()
+ }
+ )
+ })
+ })
+
+ describe('adding a doc', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.docUpdate = {
+ type: 'add-doc',
+ id: DocUpdaterClient.randomId(),
+ pathname: '/file-path',
+ docLines: 'a\nb',
+ }
+ this.updates = [this.docUpdate]
+ DocUpdaterClient.sendProjectUpdate(
+ this.project_id,
+ this.user_id,
+ this.updates,
+ this.version,
+ error => {
+ if (error) {
+ return done(error)
+ }
+ setTimeout(done, 200)
+ }
+ )
+ })
+
+ it('should push the doc addition to the project history api', function (done) {
+ rclientProjectHistory.lrange(
+ ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
+ 0,
+ -1,
+ (error, updates) => {
+ if (error) {
+ return done(error)
+ }
+
+ const update = JSON.parse(updates[0])
+ update.doc.should.equal(this.docUpdate.id)
+ update.pathname.should.equal('/file-path')
+ update.docLines.should.equal('a\nb')
+ update.meta.user_id.should.equal(this.user_id)
+ update.meta.ts.should.be.a('string')
+ update.version.should.equal(`${this.version}.0`)
+
+ done()
+ }
+ )
+ })
+ })
+
+ describe('with enough updates to flush to the history service', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.user_id = DocUpdaterClient.randomId()
+ this.version0 = 12345
+ this.version1 = this.version0 + 1
+ const updates = []
+ for (let v = 0; v <= 599; v++) {
+ // Should flush after 500 ops
+ updates.push({
+ type: 'add-doc',
+ id: DocUpdaterClient.randomId(),
+ pathname: '/file-' + v,
+ docLines: 'a\nb',
+ })
+ }
+
+ sinon.spy(MockProjectHistoryApi, 'flushProject')
+
+ // Send updates in chunks to cause multiple flushes
+ const projectId = this.project_id
+ const userId = this.user_id
+ DocUpdaterClient.sendProjectUpdate(
+ projectId,
+ userId,
+ updates.slice(0, 250),
+ this.version0,
+ error => {
+ if (error) {
+ return done(error)
+ }
+ DocUpdaterClient.sendProjectUpdate(
+ projectId,
+ userId,
+ updates.slice(250),
+ this.version1,
+ error => {
+ if (error) {
+ return done(error)
+ }
+ setTimeout(done, 2000)
+ }
+ )
+ }
+ )
+ })
+
+ after(function () {
+ MockProjectHistoryApi.flushProject.restore()
+ })
+
+ it('should flush project history', function () {
+ MockProjectHistoryApi.flushProject
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+ })
+
+ describe('with too few updates to flush to the history service', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.user_id = DocUpdaterClient.randomId()
+ this.version0 = 12345
+ this.version1 = this.version0 + 1
+
+ const updates = []
+ for (let v = 0; v <= 42; v++) {
+ // Too few ops to trigger a flush (threshold is 500 ops)
+ updates.push({
+ type: 'add-doc',
+ id: DocUpdaterClient.randomId(),
+ pathname: '/file-' + v,
+ docLines: 'a\nb',
+ })
+ }
+
+ sinon.spy(MockProjectHistoryApi, 'flushProject')
+
+ // Send updates in chunks
+ const projectId = this.project_id
+ const userId = this.user_id
+ DocUpdaterClient.sendProjectUpdate(
+ projectId,
+ userId,
+ updates.slice(0, 10),
+ this.version0,
+ error => {
+ if (error) {
+ return done(error)
+ }
+ DocUpdaterClient.sendProjectUpdate(
+ projectId,
+ userId,
+ updates.slice(10),
+ this.version1,
+ error => {
+ if (error) {
+ return done(error)
+ }
+ setTimeout(done, 2000)
+ }
+ )
+ }
+ )
+ })
+
+ after(function () {
+ MockProjectHistoryApi.flushProject.restore()
+ })
+
+ it('should not flush project history', function () {
+ MockProjectHistoryApi.flushProject
+ .calledWith(this.project_id)
+ .should.equal(false)
+ })
+ })
+})
diff --git a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js
new file mode 100644
index 0000000000..9fa53d8052
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js
@@ -0,0 +1,193 @@
+/* eslint-disable
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const MockTrackChangesApi = require('./helpers/MockTrackChangesApi')
+const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi')
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
+describe('Deleting a document', function () {
+ before(function (done) {
+ this.lines = ['one', 'two', 'three']
+ this.version = 42
+ this.update = {
+ doc: this.doc_id,
+ op: [
+ {
+ i: 'one and a half\n',
+ p: 4,
+ },
+ ],
+ v: this.version,
+ }
+ this.result = ['one', 'one and a half', 'two', 'three']
+
+ sinon.spy(MockTrackChangesApi, 'flushDoc')
+ sinon.spy(MockProjectHistoryApi, 'flushProject')
+ return DocUpdaterApp.ensureRunning(done)
+ })
+
+ after(function () {
+ MockTrackChangesApi.flushDoc.restore()
+ return MockProjectHistoryApi.flushProject.restore()
+ })
+
+ describe('when the updated doc exists in the doc updater', function () {
+ before(function (done) {
+ ;[this.project_id, this.doc_id] = Array.from([
+ DocUpdaterClient.randomId(),
+ DocUpdaterClient.randomId(),
+ ])
+ sinon.spy(MockWebApi, 'setDocument')
+ sinon.spy(MockWebApi, 'getDocument')
+
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ version: this.version,
+ })
+ return DocUpdaterClient.preloadDoc(
+ this.project_id,
+ this.doc_id,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return DocUpdaterClient.sendUpdate(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return setTimeout(() => {
+ return DocUpdaterClient.deleteDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, body) => {
+ this.statusCode = res.statusCode
+ return setTimeout(done, 200)
+ }
+ )
+ }, 200)
+ }
+ )
+ }
+ )
+ })
+
+ after(function () {
+ MockWebApi.setDocument.restore()
+ return MockWebApi.getDocument.restore()
+ })
+
+ it('should return a 204 status code', function () {
+ return this.statusCode.should.equal(204)
+ })
+
+ it('should send the updated document and version to the web api', function () {
+ return MockWebApi.setDocument
+ .calledWith(this.project_id, this.doc_id, this.result, this.version + 1)
+ .should.equal(true)
+ })
+
+ it('should need to reload the doc if read again', function (done) {
+ MockWebApi.getDocument.resetHistory()
+ MockWebApi.getDocument.called.should.equals(false)
+ return DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, doc) => {
+ MockWebApi.getDocument
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ return done()
+ }
+ )
+ })
+
+ it('should flush track changes', function () {
+ return MockTrackChangesApi.flushDoc
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should flush project history', function () {
+ return MockProjectHistoryApi.flushProject
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+ })
+
+ return describe('when the doc is not in the doc updater', function () {
+ before(function (done) {
+ ;[this.project_id, this.doc_id] = Array.from([
+ DocUpdaterClient.randomId(),
+ DocUpdaterClient.randomId(),
+ ])
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ })
+ sinon.spy(MockWebApi, 'setDocument')
+ sinon.spy(MockWebApi, 'getDocument')
+ return DocUpdaterClient.deleteDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, body) => {
+ this.statusCode = res.statusCode
+ return setTimeout(done, 200)
+ }
+ )
+ })
+
+ after(function () {
+ MockWebApi.setDocument.restore()
+ return MockWebApi.getDocument.restore()
+ })
+
+ it('should return a 204 status code', function () {
+ return this.statusCode.should.equal(204)
+ })
+
+ it('should not need to send the updated document to the web api', function () {
+ return MockWebApi.setDocument.called.should.equal(false)
+ })
+
+ it('should need to reload the doc if read again', function (done) {
+ MockWebApi.getDocument.called.should.equals(false)
+ return DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, doc) => {
+ MockWebApi.getDocument
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ return done()
+ }
+ )
+ })
+
+ it('should flush track changes', function () {
+ return MockTrackChangesApi.flushDoc
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should flush project history', function () {
+ return MockProjectHistoryApi.flushProject
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+ })
+})
diff --git a/services/document-updater/test/acceptance/js/DeletingAProjectTests.js b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js
new file mode 100644
index 0000000000..e9ffeb8806
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js
@@ -0,0 +1,293 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const async = require('async')
+
+const MockTrackChangesApi = require('./helpers/MockTrackChangesApi')
+const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi')
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
+describe('Deleting a project', function () {
+ before(function (done) {
+ let doc_id0, doc_id1
+ this.project_id = DocUpdaterClient.randomId()
+ this.docs = [
+ {
+ id: (doc_id0 = DocUpdaterClient.randomId()),
+ lines: ['one', 'two', 'three'],
+ update: {
+ doc: doc_id0,
+ op: [
+ {
+ i: 'one and a half\n',
+ p: 4,
+ },
+ ],
+ v: 0,
+ },
+ updatedLines: ['one', 'one and a half', 'two', 'three'],
+ },
+ {
+ id: (doc_id1 = DocUpdaterClient.randomId()),
+ lines: ['four', 'five', 'six'],
+ update: {
+ doc: doc_id1,
+ op: [
+ {
+ i: 'four and a half\n',
+ p: 5,
+ },
+ ],
+ v: 0,
+ },
+ updatedLines: ['four', 'four and a half', 'five', 'six'],
+ },
+ ]
+ for (const doc of Array.from(this.docs)) {
+ MockWebApi.insertDoc(this.project_id, doc.id, {
+ lines: doc.lines,
+ version: doc.update.v,
+ })
+ }
+
+ return DocUpdaterApp.ensureRunning(done)
+ })
+
+ describe('with documents which have been updated', function () {
+ before(function (done) {
+ sinon.spy(MockWebApi, 'setDocument')
+ sinon.spy(MockTrackChangesApi, 'flushDoc')
+ sinon.spy(MockProjectHistoryApi, 'flushProject')
+
+ return async.series(
+ this.docs.map(doc => {
+ return callback => {
+ return DocUpdaterClient.preloadDoc(
+ this.project_id,
+ doc.id,
+ error => {
+ if (error != null) {
+ return callback(error)
+ }
+ return DocUpdaterClient.sendUpdate(
+ this.project_id,
+ doc.id,
+ doc.update,
+ error => {
+ return callback(error)
+ }
+ )
+ }
+ )
+ }
+ }),
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return setTimeout(() => {
+ return DocUpdaterClient.deleteProject(
+ this.project_id,
+ (error, res, body) => {
+ this.statusCode = res.statusCode
+ return done()
+ }
+ )
+ }, 200)
+ }
+ )
+ })
+
+ after(function () {
+ MockWebApi.setDocument.restore()
+ MockTrackChangesApi.flushDoc.restore()
+ return MockProjectHistoryApi.flushProject.restore()
+ })
+
+ it('should return a 204 status code', function () {
+ return this.statusCode.should.equal(204)
+ })
+
+ it('should send each document to the web api', function () {
+ return Array.from(this.docs).map(doc =>
+ MockWebApi.setDocument
+ .calledWith(this.project_id, doc.id, doc.updatedLines)
+ .should.equal(true)
+ )
+ })
+
+ it('should need to reload the docs if read again', function (done) {
+ sinon.spy(MockWebApi, 'getDocument')
+ return async.series(
+ this.docs.map(doc => {
+ return callback => {
+ MockWebApi.getDocument
+ .calledWith(this.project_id, doc.id)
+ .should.equal(false)
+ return DocUpdaterClient.getDoc(
+ this.project_id,
+ doc.id,
+ (error, res, returnedDoc) => {
+ MockWebApi.getDocument
+ .calledWith(this.project_id, doc.id)
+ .should.equal(true)
+ return callback()
+ }
+ )
+ }
+ }),
+ () => {
+ MockWebApi.getDocument.restore()
+ return done()
+ }
+ )
+ })
+
+ it('should flush each doc in track changes', function () {
+ return Array.from(this.docs).map(doc =>
+ MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true)
+ )
+ })
+
+ return it('should flush each doc in project history', function () {
+ return MockProjectHistoryApi.flushProject
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+ })
+
+ describe('with the background=true parameter from realtime and no request to flush the queue', function () {
+ before(function (done) {
+ sinon.spy(MockWebApi, 'setDocument')
+ sinon.spy(MockTrackChangesApi, 'flushDoc')
+ sinon.spy(MockProjectHistoryApi, 'flushProject')
+
+ return async.series(
+ this.docs.map(doc => {
+ return callback => {
+ return DocUpdaterClient.preloadDoc(
+ this.project_id,
+ doc.id,
+ callback
+ )
+ }
+ }),
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return setTimeout(() => {
+ return DocUpdaterClient.deleteProjectOnShutdown(
+ this.project_id,
+ (error, res, body) => {
+ this.statusCode = res.statusCode
+ return done()
+ }
+ )
+ }, 200)
+ }
+ )
+ })
+
+ after(function () {
+ MockWebApi.setDocument.restore()
+ MockTrackChangesApi.flushDoc.restore()
+ return MockProjectHistoryApi.flushProject.restore()
+ })
+
+ it('should return a 204 status code', function () {
+ return this.statusCode.should.equal(204)
+ })
+
+ it('should not send any documents to the web api', function () {
+ return MockWebApi.setDocument.called.should.equal(false)
+ })
+
+ it('should not flush any docs in track changes', function () {
+ return MockTrackChangesApi.flushDoc.called.should.equal(false)
+ })
+
+ return it('should not flush to project history', function () {
+ return MockProjectHistoryApi.flushProject.called.should.equal(false)
+ })
+ })
+
+ return describe('with the background=true parameter from realtime and a request to flush the queue', function () {
+ before(function (done) {
+ sinon.spy(MockWebApi, 'setDocument')
+ sinon.spy(MockTrackChangesApi, 'flushDoc')
+ sinon.spy(MockProjectHistoryApi, 'flushProject')
+
+ return async.series(
+ this.docs.map(doc => {
+ return callback => {
+ return DocUpdaterClient.preloadDoc(
+ this.project_id,
+ doc.id,
+ callback
+ )
+ }
+ }),
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return setTimeout(() => {
+ return DocUpdaterClient.deleteProjectOnShutdown(
+ this.project_id,
+ (error, res, body) => {
+ this.statusCode = res.statusCode
+ // after deleting the project and putting it in the queue, flush the queue
+ return setTimeout(
+ () => DocUpdaterClient.flushOldProjects(done),
+ 2000
+ )
+ }
+ )
+ }, 200)
+ }
+ )
+ })
+
+ after(function () {
+ MockWebApi.setDocument.restore()
+ MockTrackChangesApi.flushDoc.restore()
+ return MockProjectHistoryApi.flushProject.restore()
+ })
+
+ it('should return a 204 status code', function () {
+ return this.statusCode.should.equal(204)
+ })
+
+ it('should send each document to the web api', function () {
+ return Array.from(this.docs).map(doc =>
+ MockWebApi.setDocument
+ .calledWith(this.project_id, doc.id, doc.updatedLines)
+ .should.equal(true)
+ )
+ })
+
+ it('should flush each doc in track changes', function () {
+ return Array.from(this.docs).map(doc =>
+ MockTrackChangesApi.flushDoc.calledWith(doc.id).should.equal(true)
+ )
+ })
+
+ return it('should flush to project history', function () {
+ return MockProjectHistoryApi.flushProject.called.should.equal(true)
+ })
+ })
+})
diff --git a/services/document-updater/test/acceptance/js/FlushingAProjectTests.js b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js
new file mode 100644
index 0000000000..0d7a46a990
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js
@@ -0,0 +1,143 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const async = require('async')
+
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
// Acceptance test: flushing a whole project persists every updated doc
// back through the web API.
describe('Flushing a project', function () {
  before(function (done) {
    const docId0 = DocUpdaterClient.randomId()
    const docId1 = DocUpdaterClient.randomId()
    this.project_id = DocUpdaterClient.randomId()
    // Two docs, each with one pending insert op and the lines expected
    // once that op has been applied.
    this.docs = [
      {
        id: docId0,
        lines: ['one', 'two', 'three'],
        update: {
          doc: docId0,
          op: [
            {
              i: 'one and a half\n',
              p: 4,
            },
          ],
          v: 0,
        },
        updatedLines: ['one', 'one and a half', 'two', 'three'],
      },
      {
        id: docId1,
        lines: ['four', 'five', 'six'],
        update: {
          doc: docId1,
          op: [
            {
              i: 'four and a half\n',
              p: 5,
            },
          ],
          v: 0,
        },
        updatedLines: ['four', 'four and a half', 'five', 'six'],
      },
    ]
    for (const doc of this.docs) {
      MockWebApi.insertDoc(this.project_id, doc.id, {
        lines: doc.lines,
        version: doc.update.v,
      })
    }
    DocUpdaterApp.ensureRunning(done)
  })

  describe('with documents which have been updated', function () {
    before(function (done) {
      sinon.spy(MockWebApi, 'setDocument')

      // Preload each doc and apply its single update, one doc at a time.
      const jobs = this.docs.map(doc => callback => {
        DocUpdaterClient.preloadDoc(this.project_id, doc.id, err => {
          if (err != null) {
            return callback(err)
          }
          DocUpdaterClient.sendUpdate(
            this.project_id,
            doc.id,
            doc.update,
            callback
          )
        })
      })

      async.series(jobs, err => {
        if (err != null) {
          throw err
        }
        // Give the updates a moment to be processed before flushing.
        setTimeout(() => {
          DocUpdaterClient.flushProject(this.project_id, (err2, res) => {
            this.statusCode = res.statusCode
            done()
          })
        }, 200)
      })
    })

    after(function () {
      MockWebApi.setDocument.restore()
    })

    it('should return a 204 status code', function () {
      this.statusCode.should.equal(204)
    })

    it('should send each document to the web api', function () {
      for (const doc of this.docs) {
        MockWebApi.setDocument
          .calledWith(this.project_id, doc.id, doc.updatedLines)
          .should.equal(true)
      }
    })

    it('should update the lines in the doc updater', function (done) {
      // Read each doc back and check the op has been applied.
      async.eachSeries(
        this.docs,
        (doc, callback) => {
          DocUpdaterClient.getDoc(
            this.project_id,
            doc.id,
            (err, res, returnedDoc) => {
              returnedDoc.lines.should.deep.equal(doc.updatedLines)
              callback()
            }
          )
        },
        done
      )
    })
  })
})
diff --git a/services/document-updater/test/acceptance/js/FlushingDocsTests.js b/services/document-updater/test/acceptance/js/FlushingDocsTests.js
new file mode 100644
index 0000000000..fdb82dc40d
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/FlushingDocsTests.js
@@ -0,0 +1,163 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const async = require('async')
+
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
// Acceptance tests for flushing a single doc from the doc updater back to
// the persistence layer via the web API.
describe('Flushing a doc to Mongo', function () {
  before(function (done) {
    this.lines = ['one', 'two', 'three']
    this.version = 42
    this.update = {
      // NOTE(review): this.doc_id has not been assigned yet at this point, so
      // `doc` is undefined here; the real doc id is passed separately to
      // DocUpdaterClient.sendUpdates below — confirm this field is unused.
      doc: this.doc_id,
      meta: { user_id: 'last-author-fake-id' },
      op: [
        {
          i: 'one and a half\n',
          p: 4,
        },
      ],
      v: this.version,
    }
    // Expected lines after the insert op has been applied.
    this.result = ['one', 'one and a half', 'two', 'three']
    return DocUpdaterApp.ensureRunning(done)
  })

  describe('when the updated doc exists in the doc updater', function () {
    before(function (done) {
      ;[this.project_id, this.doc_id] = Array.from([
        DocUpdaterClient.randomId(),
        DocUpdaterClient.randomId(),
      ])
      sinon.spy(MockWebApi, 'setDocument')

      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: this.lines,
        version: this.version,
      })
      return DocUpdaterClient.sendUpdates(
        this.project_id,
        this.doc_id,
        [this.update],
        error => {
          if (error != null) {
            throw error
          }
          // Let the update settle before requesting the flush.
          return setTimeout(() => {
            return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done)
          }, 200)
        }
      )
    })

    after(function () {
      return MockWebApi.setDocument.restore()
    })

    it('should flush the updated doc lines and version to the web api', function () {
      // One update was applied, so the flushed version is the original + 1.
      return MockWebApi.setDocument
        .calledWith(this.project_id, this.doc_id, this.result, this.version + 1)
        .should.equal(true)
    })

    return it('should flush the last update author and time to the web api', function () {
      // setDocument positional args 5/6 carry lastUpdatedAt / lastUpdatedBy.
      const lastUpdatedAt = MockWebApi.setDocument.lastCall.args[5]
      parseInt(lastUpdatedAt).should.be.closeTo(new Date().getTime(), 30000)

      const lastUpdatedBy = MockWebApi.setDocument.lastCall.args[6]
      return lastUpdatedBy.should.equal('last-author-fake-id')
    })
  })

  describe('when the doc does not exist in the doc updater', function () {
    before(function (done) {
      ;[this.project_id, this.doc_id] = Array.from([
        DocUpdaterClient.randomId(),
        DocUpdaterClient.randomId(),
      ])
      // The doc exists in the web API, but is never loaded into the updater.
      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: this.lines,
      })
      sinon.spy(MockWebApi, 'setDocument')
      return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done)
    })

    after(function () {
      return MockWebApi.setDocument.restore()
    })

    return it('should not flush the doc to the web api', function () {
      return MockWebApi.setDocument.called.should.equal(false)
    })
  })

  return describe('when the web api http request takes a long time on first request', function () {
    before(function (done) {
      ;[this.project_id, this.doc_id] = Array.from([
        DocUpdaterClient.randomId(),
        DocUpdaterClient.randomId(),
      ])
      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: this.lines,
        version: this.version,
      })
      // First setDocument call stalls for 30s, all later calls return
      // immediately — exercises the flush retry/timeout path.
      let t = 30000
      sinon
        .stub(MockWebApi, 'setDocument')
        .callsFake(
          (
            project_id,
            doc_id,
            lines,
            version,
            ranges,
            lastUpdatedAt,
            lastUpdatedBy,
            callback
          ) => {
            if (callback == null) {
              callback = function (error) {}
            }
            setTimeout(callback, t)
            return (t = 0)
          }
        )
      return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, done)
    })

    after(function () {
      return MockWebApi.setDocument.restore()
    })

    return it('should still work', function (done) {
      const start = Date.now()
      return DocUpdaterClient.flushDoc(
        this.project_id,
        this.doc_id,
        (error, res, doc) => {
          res.statusCode.should.equal(204)
          // Must complete well before the stubbed 30s stall would allow.
          const delta = Date.now() - start
          expect(delta).to.be.below(20000)
          return done()
        }
      )
    })
  })
})
diff --git a/services/document-updater/test/acceptance/js/GettingADocumentTests.js b/services/document-updater/test/acceptance/js/GettingADocumentTests.js
new file mode 100644
index 0000000000..b6acd9ebfb
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/GettingADocumentTests.js
@@ -0,0 +1,290 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
// Acceptance tests for the single-doc GET endpoint: cache hit/miss against
// the web API, recent-ops retrieval, and error/timeout handling.
describe('Getting a document', function () {
  before(function (done) {
    this.lines = ['one', 'two', 'three']
    this.version = 42
    return DocUpdaterApp.ensureRunning(done)
  })

  describe('when the document is not loaded', function () {
    before(function (done) {
      ;[this.project_id, this.doc_id] = Array.from([
        DocUpdaterClient.randomId(),
        DocUpdaterClient.randomId(),
      ])
      // Spy installed before the request: a cache miss must hit the web API.
      sinon.spy(MockWebApi, 'getDocument')

      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: this.lines,
        version: this.version,
      })

      return DocUpdaterClient.getDoc(
        this.project_id,
        this.doc_id,
        (error, res, returnedDoc) => {
          this.returnedDoc = returnedDoc
          return done()
        }
      )
    })

    after(function () {
      return MockWebApi.getDocument.restore()
    })

    it('should load the document from the web API', function () {
      return MockWebApi.getDocument
        .calledWith(this.project_id, this.doc_id)
        .should.equal(true)
    })

    it('should return the document lines', function () {
      return this.returnedDoc.lines.should.deep.equal(this.lines)
    })

    return it('should return the document at its current version', function () {
      return this.returnedDoc.version.should.equal(this.version)
    })
  })

  describe('when the document is already loaded', function () {
    before(function (done) {
      ;[this.project_id, this.doc_id] = Array.from([
        DocUpdaterClient.randomId(),
        DocUpdaterClient.randomId(),
      ])

      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: this.lines,
        version: this.version,
      })
      return DocUpdaterClient.preloadDoc(
        this.project_id,
        this.doc_id,
        error => {
          if (error != null) {
            throw error
          }
          // Spy installed only after the preload, so any getDocument call
          // recorded here would be a spurious reload.
          sinon.spy(MockWebApi, 'getDocument')
          return DocUpdaterClient.getDoc(
            this.project_id,
            this.doc_id,
            (error, res, returnedDoc) => {
              this.returnedDoc = returnedDoc
              return done()
            }
          )
        }
      )
    })

    after(function () {
      return MockWebApi.getDocument.restore()
    })

    it('should not load the document from the web API', function () {
      return MockWebApi.getDocument.called.should.equal(false)
    })

    return it('should return the document lines', function () {
      return this.returnedDoc.lines.should.deep.equal(this.lines)
    })
  })

  describe('when the request asks for some recent ops', function () {
    before(function (done) {
      ;[this.project_id, this.doc_id] = Array.from([
        DocUpdaterClient.randomId(),
        DocUpdaterClient.randomId(),
      ])
      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: (this.lines = ['one', 'two', 'three']),
      })

      // 200 single-character inserts, versions 0..199.
      this.updates = __range__(0, 199, true).map(v => ({
        doc_id: this.doc_id,
        op: [{ i: v.toString(), p: 0 }],
        v,
      }))

      return DocUpdaterClient.sendUpdates(
        this.project_id,
        this.doc_id,
        this.updates,
        error => {
          if (error != null) {
            throw error
          }
          sinon.spy(MockWebApi, 'getDocument')
          return done()
        }
      )
    })

    after(function () {
      return MockWebApi.getDocument.restore()
    })

    describe('when the ops are loaded', function () {
      before(function (done) {
        // Request ops from version 190 onwards — recent enough to be kept.
        return DocUpdaterClient.getDocAndRecentOps(
          this.project_id,
          this.doc_id,
          190,
          (error, res, returnedDoc) => {
            this.returnedDoc = returnedDoc
            return done()
          }
        )
      })

      return it('should return the recent ops', function () {
        this.returnedDoc.ops.length.should.equal(10)
        return Array.from(this.updates.slice(190, -1)).map((update, i) =>
          this.returnedDoc.ops[i].op.should.deep.equal(update.op)
        )
      })
    })

    return describe('when the ops are not all loaded', function () {
      before(function (done) {
        // We only track 100 ops
        return DocUpdaterClient.getDocAndRecentOps(
          this.project_id,
          this.doc_id,
          10,
          (error, res, returnedDoc) => {
            this.res = res
            this.returnedDoc = returnedDoc
            return done()
          }
        )
      })

      return it('should return UnprocessableEntity', function () {
        return this.res.statusCode.should.equal(422)
      })
    })
  })

  describe('when the document does not exist', function () {
    before(function (done) {
      // Neither the doc updater nor MockWebApi knows these ids.
      ;[this.project_id, this.doc_id] = Array.from([
        DocUpdaterClient.randomId(),
        DocUpdaterClient.randomId(),
      ])
      return DocUpdaterClient.getDoc(
        this.project_id,
        this.doc_id,
        (error, res, doc) => {
          this.statusCode = res.statusCode
          return done()
        }
      )
    })

    return it('should return 404', function () {
      return this.statusCode.should.equal(404)
    })
  })

  describe('when the web api returns an error', function () {
    before(function (done) {
      ;[this.project_id, this.doc_id] = Array.from([
        DocUpdaterClient.randomId(),
        DocUpdaterClient.randomId(),
      ])
      // Force the upstream fetch to fail immediately.
      sinon
        .stub(MockWebApi, 'getDocument')
        .callsFake((project_id, doc_id, callback) => {
          if (callback == null) {
            callback = function (error, doc) {}
          }
          return callback(new Error('oops'))
        })
      return DocUpdaterClient.getDoc(
        this.project_id,
        this.doc_id,
        (error, res, doc) => {
          this.statusCode = res.statusCode
          return done()
        }
      )
    })

    after(function () {
      return MockWebApi.getDocument.restore()
    })

    return it('should return 500', function () {
      return this.statusCode.should.equal(500)
    })
  })

  return describe('when the web api http request takes a long time', function () {
    before(function (done) {
      this.timeout = 10000
      ;[this.project_id, this.doc_id] = Array.from([
        DocUpdaterClient.randomId(),
        DocUpdaterClient.randomId(),
      ])
      // Upstream never answers within a reasonable time (30s stall).
      sinon
        .stub(MockWebApi, 'getDocument')
        .callsFake((project_id, doc_id, callback) => {
          if (callback == null) {
            callback = function (error, doc) {}
          }
          return setTimeout(callback, 30000)
        })
      return done()
    })

    after(function () {
      return MockWebApi.getDocument.restore()
    })

    return it('should return quickly(ish)', function (done) {
      const start = Date.now()
      return DocUpdaterClient.getDoc(
        this.project_id,
        this.doc_id,
        (error, res, doc) => {
          // The service must give up (500) well before the 30s stall.
          res.statusCode.should.equal(500)
          const delta = Date.now() - start
          expect(delta).to.be.below(20000)
          return done()
        }
      )
    })
  })
})
+
/**
 * Decaffeinate-generated numeric range helper.
 *
 * Returns the integers running from `left` toward `right`, ascending or
 * descending as needed; `inclusive` controls whether `right` itself is
 * included. When `left === right` the descending branch applies, so the
 * inclusive range is `[left]` and the exclusive range is empty.
 */
function __range__(left, right, inclusive) {
  const step = left < right ? 1 : -1
  const stop = inclusive ? right + step : right
  const length = Math.max(0, (stop - left) * step)
  return Array.from({ length }, (_, i) => left + i * step)
}
diff --git a/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js
new file mode 100644
index 0000000000..7d72d8161c
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js
@@ -0,0 +1,171 @@
+/* eslint-disable
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
// Acceptance tests for the bulk "get all docs for a project" endpoint and
// its project-state-hash conflict detection (409 on hash mismatch).
describe('Getting documents for project', function () {
  before(function (done) {
    this.lines = ['one', 'two', 'three']
    this.version = 42
    DocUpdaterApp.ensureRunning(done)
  })

  describe('when project state hash does not match', function () {
    before(function (done) {
      this.projectStateHash = DocUpdaterClient.randomId()
      this.project_id = DocUpdaterClient.randomId()
      this.doc_id = DocUpdaterClient.randomId()

      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: this.lines,
        version: this.version,
      })
      DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, err => {
        if (err != null) {
          throw err
        }
        // First request with a fresh hash: the stored state cannot match yet.
        DocUpdaterClient.getProjectDocs(
          this.project_id,
          this.projectStateHash,
          (err2, res, returnedDocs) => {
            this.res = res
            this.returnedDocs = returnedDocs
            done()
          }
        )
      })
    })

    it('should return a 409 Conflict response', function () {
      this.res.statusCode.should.equal(409)
    })
  })

  describe('when project state hash matches', function () {
    before(function (done) {
      this.projectStateHash = DocUpdaterClient.randomId()
      this.project_id = DocUpdaterClient.randomId()
      this.doc_id = DocUpdaterClient.randomId()

      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: this.lines,
        version: this.version,
      })
      DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, err => {
        if (err != null) {
          throw err
        }
        // First request records the state hash server-side ...
        DocUpdaterClient.getProjectDocs(
          this.project_id,
          this.projectStateHash,
          (err2, res0, returnedDocs0) => {
            this.res0 = res0
            this.returnedDocs0 = returnedDocs0
            // ... so the second request with the same hash should match.
            DocUpdaterClient.getProjectDocs(
              this.project_id,
              this.projectStateHash,
              (err3, res, returnedDocs) => {
                this.res = res
                this.returnedDocs = returnedDocs
                done()
              }
            )
          }
        )
      })
    })

    it('should return a 200 response', function () {
      this.res.statusCode.should.equal(200)
    })

    it('should return the documents', function () {
      this.returnedDocs.should.deep.equal([
        { _id: this.doc_id, lines: this.lines, v: this.version },
      ])
    })
  })

  describe('when the doc has been removed', function () {
    before(function (done) {
      this.projectStateHash = DocUpdaterClient.randomId()
      this.project_id = DocUpdaterClient.randomId()
      this.doc_id = DocUpdaterClient.randomId()

      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: this.lines,
        version: this.version,
      })
      DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, err => {
        if (err != null) {
          throw err
        }
        // First request records the state hash server-side.
        DocUpdaterClient.getProjectDocs(
          this.project_id,
          this.projectStateHash,
          (err2, res0, returnedDocs0) => {
            this.res0 = res0
            this.returnedDocs0 = returnedDocs0
            // Deleting the doc invalidates the stored project state.
            DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, () => {
              DocUpdaterClient.getProjectDocs(
                this.project_id,
                this.projectStateHash,
                (err3, res1, returnedDocs) => {
                  // the hash would match, but the doc has been deleted
                  this.res = res1
                  this.returnedDocs = returnedDocs
                  done()
                }
              )
            })
          }
        )
      })
    })

    it('should return a 409 Conflict response', function () {
      this.res.statusCode.should.equal(409)
    })
  })
})
diff --git a/services/document-updater/test/acceptance/js/PeekingADoc.js b/services/document-updater/test/acceptance/js/PeekingADoc.js
new file mode 100644
index 0000000000..43e463ca51
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/PeekingADoc.js
@@ -0,0 +1,99 @@
+const sinon = require('sinon')
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
// Acceptance tests for the read-only "peek" endpoint, which must never
// trigger a load from the web API.
describe('Peeking a document', function () {
  before(function (done) {
    this.lines = ['one', 'two', 'three']
    this.version = 42
    DocUpdaterApp.ensureRunning(done)
  })

  describe('when the document is not loaded', function () {
    before(function (done) {
      this.project_id = DocUpdaterClient.randomId()
      this.doc_id = DocUpdaterClient.randomId()
      sinon.spy(MockWebApi, 'getDocument')

      // The doc exists upstream, but peek must not fetch it.
      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: this.lines,
        version: this.version,
      })

      DocUpdaterClient.peekDoc(
        this.project_id,
        this.doc_id,
        (error, res, returnedDoc) => {
          this.error = error
          this.res = res
          this.returnedDoc = returnedDoc
          done()
        }
      )
    })

    after(function () {
      MockWebApi.getDocument.restore()
    })

    it('should return a 404 response', function () {
      this.res.statusCode.should.equal(404)
    })

    it('should not load the document from the web API', function () {
      MockWebApi.getDocument.called.should.equal(false)
    })
  })

  describe('when the document is already loaded', function () {
    before(function (done) {
      this.project_id = DocUpdaterClient.randomId()
      this.doc_id = DocUpdaterClient.randomId()

      MockWebApi.insertDoc(this.project_id, this.doc_id, {
        lines: this.lines,
        version: this.version,
      })
      DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
        if (error != null) {
          throw error
        }
        // Spy installed only after the preload, so any recorded call would
        // be a spurious reload.
        sinon.spy(MockWebApi, 'getDocument')
        // NOTE(review): this sub-suite fetches via getDoc rather than
        // peekDoc — looks like it may have been intended to use peekDoc;
        // confirm before changing.
        DocUpdaterClient.getDoc(
          this.project_id,
          this.doc_id,
          (error2, res, returnedDoc) => {
            this.res = res
            this.returnedDoc = returnedDoc
            done()
          }
        )
      })
    })

    after(function () {
      MockWebApi.getDocument.restore()
    })

    it('should return a 200 response', function () {
      this.res.statusCode.should.equal(200)
    })

    it('should return the document lines', function () {
      this.returnedDoc.lines.should.deep.equal(this.lines)
    })

    it('should return the document version', function () {
      this.returnedDoc.version.should.equal(this.version)
    })

    it('should not load the document from the web API', function () {
      MockWebApi.getDocument.called.should.equal(false)
    })
  })
})
diff --git a/services/document-updater/test/acceptance/js/RangesTests.js b/services/document-updater/test/acceptance/js/RangesTests.js
new file mode 100644
index 0000000000..0275bab1fb
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/RangesTests.js
@@ -0,0 +1,688 @@
+/* eslint-disable
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const async = require('async')
+
+const { db, ObjectId } = require('../../../app/js/mongodb')
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
+describe('Ranges', function () {
+ before(function (done) {
+ return DocUpdaterApp.ensureRunning(done)
+ })
+
  // Tracked-changes behaviour: updates flagged with meta.tc get recorded as
  // change ranges, and comment ops get operationally transformed against
  // concurrent edits.
  describe('tracking changes from ops', function () {
    before(function (done) {
      this.project_id = DocUpdaterClient.randomId()
      this.user_id = DocUpdaterClient.randomId()
      // Seed from which tracked-change ids are derived (see the
      // `id_seed + '000001'` assertion below).
      this.id_seed = '587357bd35e64f6157'
      this.doc = {
        id: DocUpdaterClient.randomId(),
        lines: ['aaa'],
      }
      // v0 insert (untracked), v1 insert (tracked via meta.tc), v2 delete
      // that removes the '12' inserted at v0 — shifting the tracked insert.
      this.updates = [
        {
          doc: this.doc.id,
          op: [{ i: '123', p: 1 }],
          v: 0,
          meta: { user_id: this.user_id },
        },
        {
          doc: this.doc.id,
          op: [{ i: '456', p: 5 }],
          v: 1,
          meta: { user_id: this.user_id, tc: this.id_seed },
        },
        {
          doc: this.doc.id,
          op: [{ d: '12', p: 1 }],
          v: 2,
          meta: { user_id: this.user_id },
        },
      ]
      MockWebApi.insertDoc(this.project_id, this.doc.id, {
        lines: this.doc.lines,
        version: 0,
      })
      // IIFE captures each `update` for its job closure (decaffeinate
      // loop-capture artifact).
      const jobs = []
      for (const update of Array.from(this.updates)) {
        ;(update => {
          return jobs.push(callback =>
            DocUpdaterClient.sendUpdate(
              this.project_id,
              this.doc.id,
              update,
              callback
            )
          )
        })(update)
      }

      return DocUpdaterApp.ensureRunning(error => {
        if (error != null) {
          throw error
        }
        return DocUpdaterClient.preloadDoc(
          this.project_id,
          this.doc.id,
          error => {
            if (error != null) {
              throw error
            }
            return async.series(jobs, error => {
              if (error != null) {
                throw error
              }
              return done()
            })
          }
        )
      })
    })

    it('should update the ranges', function (done) {
      return DocUpdaterClient.getDoc(
        this.project_id,
        this.doc.id,
        (error, res, data) => {
          if (error != null) {
            throw error
          }
          const { ranges } = data
          const change = ranges.changes[0]
          // The tracked insert at p:5 shifts to p:3 after the v2 delete of
          // the two characters before it.
          change.op.should.deep.equal({ i: '456', p: 3 })
          change.id.should.equal(this.id_seed + '000001')
          change.metadata.user_id.should.equal(this.user_id)
          return done()
        }
      )
    })

    return describe('Adding comments', function () {
      describe('standalone', function () {
        before(function (done) {
          this.project_id = DocUpdaterClient.randomId()
          this.user_id = DocUpdaterClient.randomId()
          this.doc = {
            id: DocUpdaterClient.randomId(),
            lines: ['foo bar baz'],
          }
          // A single comment op on 'bar'; `t` is the comment/thread id.
          this.updates = [
            {
              doc: this.doc.id,
              op: [
                { c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) },
              ],
              v: 0,
            },
          ]
          MockWebApi.insertDoc(this.project_id, this.doc.id, {
            lines: this.doc.lines,
            version: 0,
          })
          const jobs = []
          for (const update of Array.from(this.updates)) {
            ;(update => {
              return jobs.push(callback =>
                DocUpdaterClient.sendUpdate(
                  this.project_id,
                  this.doc.id,
                  update,
                  callback
                )
              )
            })(update)
          }
          return DocUpdaterClient.preloadDoc(
            this.project_id,
            this.doc.id,
            error => {
              if (error != null) {
                throw error
              }
              return async.series(jobs, error => {
                if (error != null) {
                  throw error
                }
                // Let the updates settle before the assertions run.
                return setTimeout(done, 200)
              })
            }
          )
        })

        return it('should update the ranges', function (done) {
          return DocUpdaterClient.getDoc(
            this.project_id,
            this.doc.id,
            (error, res, data) => {
              if (error != null) {
                throw error
              }
              const { ranges } = data
              const comment = ranges.comments[0]
              comment.op.should.deep.equal({ c: 'bar', p: 4, t: this.tid })
              comment.id.should.equal(this.tid)
              return done()
            }
          )
        })
      })

      return describe('with conflicting ops needing OT', function () {
        before(function (done) {
          this.project_id = DocUpdaterClient.randomId()
          this.user_id = DocUpdaterClient.randomId()
          this.doc = {
            id: DocUpdaterClient.randomId(),
            lines: ['foo bar baz'],
          }
          // Both updates are at v:0 — concurrent — so the comment op must be
          // transformed against the insert.
          this.updates = [
            {
              doc: this.doc.id,
              op: [{ i: 'ABC', p: 3 }],
              v: 0,
              meta: { user_id: this.user_id },
            },
            {
              doc: this.doc.id,
              op: [
                { c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) },
              ],
              v: 0,
            },
          ]
          MockWebApi.insertDoc(this.project_id, this.doc.id, {
            lines: this.doc.lines,
            version: 0,
          })
          const jobs = []
          for (const update of Array.from(this.updates)) {
            ;(update => {
              return jobs.push(callback =>
                DocUpdaterClient.sendUpdate(
                  this.project_id,
                  this.doc.id,
                  update,
                  callback
                )
              )
            })(update)
          }
          return DocUpdaterClient.preloadDoc(
            this.project_id,
            this.doc.id,
            error => {
              if (error != null) {
                throw error
              }
              return async.series(jobs, error => {
                if (error != null) {
                  throw error
                }
                return setTimeout(done, 200)
              })
            }
          )
        })

        return it('should update the comments with the OT shifted comment', function (done) {
          return DocUpdaterClient.getDoc(
            this.project_id,
            this.doc.id,
            (error, res, data) => {
              if (error != null) {
                throw error
              }
              const { ranges } = data
              const comment = ranges.comments[0]
              // p:4 shifted by the 3-char insert at p:3 → p:7.
              comment.op.should.deep.equal({ c: 'bar', p: 7, t: this.tid })
              return done()
            }
          )
        })
      })
    })
  })
+
  // Ranges stored in the persistence layer must survive a load → update →
  // flush round trip.
  describe('Loading ranges from persistence layer', function () {
    before(function (done) {
      this.project_id = DocUpdaterClient.randomId()
      this.user_id = DocUpdaterClient.randomId()
      this.id_seed = '587357bd35e64f6157'
      this.doc = {
        id: DocUpdaterClient.randomId(),
        lines: ['a123aa'],
      }
      // New tracked insert applied on top of the pre-existing range.
      this.update = {
        doc: this.doc.id,
        op: [{ i: '456', p: 5 }],
        v: 0,
        meta: { user_id: this.user_id, tc: this.id_seed },
      }
      // The doc arrives from the web API with one change range already
      // recorded.
      MockWebApi.insertDoc(this.project_id, this.doc.id, {
        lines: this.doc.lines,
        version: 0,
        ranges: {
          changes: [
            {
              op: { i: '123', p: 1 },
              metadata: {
                user_id: this.user_id,
                ts: new Date(),
              },
            },
          ],
        },
      })
      return DocUpdaterClient.preloadDoc(
        this.project_id,
        this.doc.id,
        error => {
          if (error != null) {
            throw error
          }
          return DocUpdaterClient.sendUpdate(
            this.project_id,
            this.doc.id,
            this.update,
            error => {
              if (error != null) {
                throw error
              }
              // Let the update settle before the assertions run.
              return setTimeout(done, 200)
            }
          )
        }
      )
    })

    it('should have preloaded the existing ranges', function (done) {
      return DocUpdaterClient.getDoc(
        this.project_id,
        this.doc.id,
        (error, res, data) => {
          if (error != null) {
            throw error
          }
          // Both the pre-existing change and the new tracked insert.
          const { changes } = data.ranges
          changes[0].op.should.deep.equal({ i: '123', p: 1 })
          changes[1].op.should.deep.equal({ i: '456', p: 5 })
          return done()
        }
      )
    })

    return it('should flush the ranges to the persistence layer again', function (done) {
      return DocUpdaterClient.flushDoc(this.project_id, this.doc.id, error => {
        if (error != null) {
          throw error
        }
        return MockWebApi.getDocument(
          this.project_id,
          this.doc.id,
          (error, doc) => {
            const { changes } = doc.ranges
            changes[0].op.should.deep.equal({ i: '123', p: 1 })
            changes[1].op.should.deep.equal({ i: '456', p: 5 })
            return done()
          }
        )
      })
    })
  })
+
  // Accepting a tracked change removes its range entry.
  describe('accepting a change', function () {
    before(function (done) {
      this.project_id = DocUpdaterClient.randomId()
      this.user_id = DocUpdaterClient.randomId()
      // Seed for the tracked-change id; the first change gets id
      // `id_seed + '000001'`.
      this.id_seed = '587357bd35e64f6157'
      this.doc = {
        id: DocUpdaterClient.randomId(),
        lines: ['aaa'],
      }
      this.update = {
        doc: this.doc.id,
        op: [{ i: '456', p: 1 }],
        v: 0,
        meta: { user_id: this.user_id, tc: this.id_seed },
      }
      MockWebApi.insertDoc(this.project_id, this.doc.id, {
        lines: this.doc.lines,
        version: 0,
      })
      return DocUpdaterClient.preloadDoc(
        this.project_id,
        this.doc.id,
        error => {
          if (error != null) {
            throw error
          }
          return DocUpdaterClient.sendUpdate(
            this.project_id,
            this.doc.id,
            this.update,
            error => {
              if (error != null) {
                throw error
              }
              // Sanity-check the change was recorded before the test runs.
              return setTimeout(() => {
                return DocUpdaterClient.getDoc(
                  this.project_id,
                  this.doc.id,
                  (error, res, data) => {
                    if (error != null) {
                      throw error
                    }
                    const { ranges } = data
                    const change = ranges.changes[0]
                    change.op.should.deep.equal({ i: '456', p: 1 })
                    change.id.should.equal(this.id_seed + '000001')
                    change.metadata.user_id.should.equal(this.user_id)
                    return done()
                  }
                )
              }, 200)
            }
          )
        }
      )
    })

    return it('should remove the change after accepting', function (done) {
      return DocUpdaterClient.acceptChange(
        this.project_id,
        this.doc.id,
        this.id_seed + '000001',
        error => {
          if (error != null) {
            throw error
          }
          return DocUpdaterClient.getDoc(
            this.project_id,
            this.doc.id,
            (error, res, data) => {
              if (error != null) {
                throw error
              }
              expect(data.ranges.changes).to.be.undefined
              return done()
            }
          )
        }
      )
    })
  })
+
  // Removing a comment deletes its range entry.
  describe('deleting a comment range', function () {
    before(function (done) {
      this.project_id = DocUpdaterClient.randomId()
      this.user_id = DocUpdaterClient.randomId()
      this.doc = {
        id: DocUpdaterClient.randomId(),
        lines: ['foo bar'],
      }
      // Comment op on 'bar'; `t` is the comment/thread id used for removal.
      this.update = {
        doc: this.doc.id,
        op: [{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }],
        v: 0,
      }
      MockWebApi.insertDoc(this.project_id, this.doc.id, {
        lines: this.doc.lines,
        version: 0,
      })
      return DocUpdaterClient.preloadDoc(
        this.project_id,
        this.doc.id,
        error => {
          if (error != null) {
            throw error
          }
          return DocUpdaterClient.sendUpdate(
            this.project_id,
            this.doc.id,
            this.update,
            error => {
              if (error != null) {
                throw error
              }
              // Sanity-check the comment was recorded before the test runs.
              return setTimeout(() => {
                return DocUpdaterClient.getDoc(
                  this.project_id,
                  this.doc.id,
                  (error, res, data) => {
                    if (error != null) {
                      throw error
                    }
                    const { ranges } = data
                    const change = ranges.comments[0]
                    change.op.should.deep.equal({ c: 'bar', p: 4, t: this.tid })
                    change.id.should.equal(this.tid)
                    return done()
                  }
                )
              }, 200)
            }
          )
        }
      )
    })

    return it('should remove the comment range', function (done) {
      return DocUpdaterClient.removeComment(
        this.project_id,
        this.doc.id,
        this.tid,
        (error, res) => {
          if (error != null) {
            throw error
          }
          expect(res.statusCode).to.equal(204)
          return DocUpdaterClient.getDoc(
            this.project_id,
            this.doc.id,
            (error, res, data) => {
              if (error != null) {
                throw error
              }
              expect(data.ranges.comments).to.be.undefined
              return done()
            }
          )
        }
      )
    })
  })
+
+ describe('tripping range size limit', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.user_id = DocUpdaterClient.randomId()
+ this.id_seed = DocUpdaterClient.randomId()
+ this.doc = {
+ id: DocUpdaterClient.randomId(),
+ lines: ['aaa'],
+ }
+ this.i = new Array(3 * 1024 * 1024).join('a')
+ this.updates = [
+ {
+ doc: this.doc.id,
+ op: [{ i: this.i, p: 1 }],
+ v: 0,
+ meta: { user_id: this.user_id, tc: this.id_seed },
+ },
+ ]
+ MockWebApi.insertDoc(this.project_id, this.doc.id, {
+ lines: this.doc.lines,
+ version: 0,
+ })
+ const jobs = []
+ for (const update of Array.from(this.updates)) {
+ ;(update => {
+ return jobs.push(callback =>
+ DocUpdaterClient.sendUpdate(
+ this.project_id,
+ this.doc.id,
+ update,
+ callback
+ )
+ )
+ })(update)
+ }
+ return DocUpdaterClient.preloadDoc(
+ this.project_id,
+ this.doc.id,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return async.series(jobs, error => {
+ if (error != null) {
+ throw error
+ }
+ return setTimeout(done, 200)
+ })
+ }
+ )
+ })
+
+ return it('should not update the ranges', function (done) {
+ return DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc.id,
+ (error, res, data) => {
+ if (error != null) {
+ throw error
+ }
+ const { ranges } = data
+ expect(ranges.changes).to.be.undefined
+ return done()
+ }
+ )
+ })
+ })
+
+ return describe('deleting text surrounding a comment', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.user_id = DocUpdaterClient.randomId()
+ this.doc_id = DocUpdaterClient.randomId()
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: ['foo bar baz'],
+ version: 0,
+ ranges: {
+ comments: [
+ {
+ op: {
+ c: 'a',
+ p: 5,
+ tid: (this.tid = DocUpdaterClient.randomId()),
+ },
+ metadata: {
+ user_id: this.user_id,
+ ts: new Date(),
+ },
+ },
+ ],
+ },
+ })
+ this.updates = [
+ {
+ doc: this.doc_id,
+ op: [{ d: 'foo ', p: 0 }],
+ v: 0,
+ meta: { user_id: this.user_id },
+ },
+ {
+ doc: this.doc_id,
+ op: [{ d: 'bar ', p: 0 }],
+ v: 1,
+ meta: { user_id: this.user_id },
+ },
+ ]
+ const jobs = []
+ for (const update of Array.from(this.updates)) {
+ ;(update => {
+ return jobs.push(callback =>
+ DocUpdaterClient.sendUpdate(
+ this.project_id,
+ this.doc_id,
+ update,
+ callback
+ )
+ )
+ })(update)
+ }
+ return DocUpdaterClient.preloadDoc(
+ this.project_id,
+ this.doc_id,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return async.series(jobs, function (error) {
+ if (error != null) {
+ throw error
+ }
+ return setTimeout(() => {
+ return DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, data) => {
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }, 200)
+ })
+ }
+ )
+ })
+
+ return it('should write a snapshot from before the destructive change', function (done) {
+ return DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, data) => {
+ if (error != null) {
+ return done(error)
+ }
+ db.docSnapshots
+ .find({
+ project_id: ObjectId(this.project_id),
+ doc_id: ObjectId(this.doc_id),
+ })
+ .toArray((error, docSnapshots) => {
+ if (error != null) {
+ return done(error)
+ }
+ expect(docSnapshots.length).to.equal(1)
+ expect(docSnapshots[0].version).to.equal(1)
+ expect(docSnapshots[0].lines).to.deep.equal(['bar baz'])
+ expect(docSnapshots[0].ranges.comments[0].op).to.deep.equal({
+ c: 'a',
+ p: 1,
+ tid: this.tid,
+ })
+ return done()
+ })
+ }
+ )
+ })
+ })
+})
diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js
new file mode 100644
index 0000000000..05955df5b0
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js
@@ -0,0 +1,473 @@
+const sinon = require('sinon')
+const { expect } = require('chai')
+const Settings = require('@overleaf/settings')
+const docUpdaterRedis = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.documentupdater
+)
+const Keys = Settings.redis.documentupdater.key_schema
+
+const MockTrackChangesApi = require('./helpers/MockTrackChangesApi')
+const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi')
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
+describe('Setting a document', function () {
  before(function (done) {
    // Shared fixture data used by all the suites below.
    this.lines = ['one', 'two', 'three']
    this.version = 42
    // NOTE(review): `this.doc_id` is only assigned in the nested suites'
    // before hooks, so `doc` is undefined at this point. sendUpdate keys
    // off its projectId/docId arguments, so this looks harmless — confirm.
    this.update = {
      doc: this.doc_id,
      op: [
        {
          i: 'one and a half\n',
          p: 4,
        },
      ],
      v: this.version,
    }
    this.result = ['one', 'one and a half', 'two', 'three']
    this.newLines = ['these', 'are', 'the', 'new', 'lines']
    this.source = 'dropbox'
    this.user_id = 'user-id-123'

    // Spy on the mock services so the suites can assert what got flushed
    // and persisted; restored in the matching top-level after hook.
    sinon.spy(MockTrackChangesApi, 'flushDoc')
    sinon.spy(MockProjectHistoryApi, 'flushProject')
    sinon.spy(MockWebApi, 'setDocument')
    DocUpdaterApp.ensureRunning(done)
  })
+
+ after(function () {
+ MockTrackChangesApi.flushDoc.restore()
+ MockProjectHistoryApi.flushProject.restore()
+ MockWebApi.setDocument.restore()
+ })
+
+ describe('when the updated doc exists in the doc updater', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.doc_id = DocUpdaterClient.randomId()
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ version: this.version,
+ })
+ DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
+ if (error) {
+ throw error
+ }
+ DocUpdaterClient.sendUpdate(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ error => {
+ if (error) {
+ throw error
+ }
+ setTimeout(() => {
+ DocUpdaterClient.setDocLines(
+ this.project_id,
+ this.doc_id,
+ this.newLines,
+ this.source,
+ this.user_id,
+ false,
+ (error, res, body) => {
+ if (error) {
+ return done(error)
+ }
+ this.statusCode = res.statusCode
+ done()
+ }
+ )
+ }, 200)
+ }
+ )
+ })
+ })
+
+ after(function () {
+ MockTrackChangesApi.flushDoc.resetHistory()
+ MockProjectHistoryApi.flushProject.resetHistory()
+ MockWebApi.setDocument.resetHistory()
+ })
+
+ it('should return a 204 status code', function () {
+ this.statusCode.should.equal(204)
+ })
+
+ it('should send the updated doc lines and version to the web api', function () {
+ MockWebApi.setDocument
+ .calledWith(this.project_id, this.doc_id, this.newLines)
+ .should.equal(true)
+ })
+
+ it('should update the lines in the doc updater', function (done) {
+ DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, doc) => {
+ if (error) {
+ return done(error)
+ }
+ doc.lines.should.deep.equal(this.newLines)
+ done()
+ }
+ )
+ })
+
+ it('should bump the version in the doc updater', function (done) {
+ DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, doc) => {
+ if (error) {
+ return done(error)
+ }
+ doc.version.should.equal(this.version + 2)
+ done()
+ }
+ )
+ })
+
+ it('should leave the document in redis', function (done) {
+ docUpdaterRedis.get(
+ Keys.docLines({ doc_id: this.doc_id }),
+ (error, lines) => {
+ if (error) {
+ throw error
+ }
+ expect(JSON.parse(lines)).to.deep.equal(this.newLines)
+ done()
+ }
+ )
+ })
+ })
+
+ describe('when the updated doc does not exist in the doc updater', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.doc_id = DocUpdaterClient.randomId()
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ version: this.version,
+ })
+ DocUpdaterClient.setDocLines(
+ this.project_id,
+ this.doc_id,
+ this.newLines,
+ this.source,
+ this.user_id,
+ false,
+ (error, res, body) => {
+ if (error) {
+ return done(error)
+ }
+ this.statusCode = res.statusCode
+ setTimeout(done, 200)
+ }
+ )
+ })
+
+ after(function () {
+ MockTrackChangesApi.flushDoc.resetHistory()
+ MockProjectHistoryApi.flushProject.resetHistory()
+ MockWebApi.setDocument.resetHistory()
+ })
+
+ it('should return a 204 status code', function () {
+ this.statusCode.should.equal(204)
+ })
+
+ it('should send the updated doc lines to the web api', function () {
+ MockWebApi.setDocument
+ .calledWith(this.project_id, this.doc_id, this.newLines)
+ .should.equal(true)
+ })
+
+ it('should flush track changes', function () {
+ MockTrackChangesApi.flushDoc.calledWith(this.doc_id).should.equal(true)
+ })
+
+ it('should flush project history', function () {
+ MockProjectHistoryApi.flushProject
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should remove the document from redis', function (done) {
+ docUpdaterRedis.get(
+ Keys.docLines({ doc_id: this.doc_id }),
+ (error, lines) => {
+ if (error) {
+ throw error
+ }
+ expect(lines).to.not.exist
+ done()
+ }
+ )
+ })
+ })
+
+ const DOC_TOO_LARGE_TEST_CASES = [
+ {
+ desc: 'when the updated doc is too large for the body parser',
+ size: Settings.maxJsonRequestSize,
+ expectedStatusCode: 413,
+ },
+ {
+ desc: 'when the updated doc is larger than the HTTP controller limit',
+ size: Settings.max_doc_length,
+ expectedStatusCode: 406,
+ },
+ ]
+
+ DOC_TOO_LARGE_TEST_CASES.forEach(testCase => {
+ describe(testCase.desc, function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.doc_id = DocUpdaterClient.randomId()
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ version: this.version,
+ })
+ this.newLines = []
+ while (JSON.stringify(this.newLines).length <= testCase.size) {
+ this.newLines.push('(a long line of text)'.repeat(10000))
+ }
+ DocUpdaterClient.setDocLines(
+ this.project_id,
+ this.doc_id,
+ this.newLines,
+ this.source,
+ this.user_id,
+ false,
+ (error, res, body) => {
+ if (error) {
+ return done(error)
+ }
+ this.statusCode = res.statusCode
+ setTimeout(done, 200)
+ }
+ )
+ })
+
+ after(function () {
+ MockTrackChangesApi.flushDoc.resetHistory()
+ MockProjectHistoryApi.flushProject.resetHistory()
+ MockWebApi.setDocument.resetHistory()
+ })
+
+ it(`should return a ${testCase.expectedStatusCode} status code`, function () {
+ this.statusCode.should.equal(testCase.expectedStatusCode)
+ })
+
+ it('should not send the updated doc lines to the web api', function () {
+ MockWebApi.setDocument.called.should.equal(false)
+ })
+
+ it('should not flush track changes', function () {
+ MockTrackChangesApi.flushDoc.called.should.equal(false)
+ })
+
+ it('should not flush project history', function () {
+ MockProjectHistoryApi.flushProject.called.should.equal(false)
+ })
+ })
+ })
+
+ describe('when the updated doc is large but under the bodyParser and HTTPController size limit', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.doc_id = DocUpdaterClient.randomId()
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ version: this.version,
+ })
+
+ this.newLines = []
+ while (JSON.stringify(this.newLines).length < 2 * 1024 * 1024) {
+ // limit in HTTPController
+ this.newLines.push('(a long line of text)'.repeat(10000))
+ }
+ this.newLines.pop() // remove the line which took it over the limit
+ DocUpdaterClient.setDocLines(
+ this.project_id,
+ this.doc_id,
+ this.newLines,
+ this.source,
+ this.user_id,
+ false,
+ (error, res, body) => {
+ if (error) {
+ return done(error)
+ }
+ this.statusCode = res.statusCode
+ setTimeout(done, 200)
+ }
+ )
+ })
+
+ after(function () {
+ MockTrackChangesApi.flushDoc.resetHistory()
+ MockProjectHistoryApi.flushProject.resetHistory()
+ MockWebApi.setDocument.resetHistory()
+ })
+
+ it('should return a 204 status code', function () {
+ this.statusCode.should.equal(204)
+ })
+
+ it('should send the updated doc lines to the web api', function () {
+ MockWebApi.setDocument
+ .calledWith(this.project_id, this.doc_id, this.newLines)
+ .should.equal(true)
+ })
+ })
+
+ describe('with track changes', function () {
+ before(function () {
+ this.lines = ['one', 'one and a half', 'two', 'three']
+ this.id_seed = '587357bd35e64f6157'
+ this.update = {
+ doc: this.doc_id,
+ op: [
+ {
+ d: 'one and a half\n',
+ p: 4,
+ },
+ ],
+ meta: {
+ tc: this.id_seed,
+ user_id: this.user_id,
+ },
+ v: this.version,
+ }
+ })
+
+ describe('with the undo flag', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.doc_id = DocUpdaterClient.randomId()
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ version: this.version,
+ })
+ DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
+ if (error) {
+ throw error
+ }
+ DocUpdaterClient.sendUpdate(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ error => {
+ if (error) {
+ throw error
+ }
+ // Go back to old lines, with undo flag
+ DocUpdaterClient.setDocLines(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.source,
+ this.user_id,
+ true,
+ (error, res, body) => {
+ if (error) {
+ return done(error)
+ }
+ this.statusCode = res.statusCode
+ setTimeout(done, 200)
+ }
+ )
+ }
+ )
+ })
+ })
+
+ after(function () {
+ MockTrackChangesApi.flushDoc.resetHistory()
+ MockProjectHistoryApi.flushProject.resetHistory()
+ MockWebApi.setDocument.resetHistory()
+ })
+
+ it('should undo the tracked changes', function (done) {
+ DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, data) => {
+ if (error) {
+ throw error
+ }
+ const { ranges } = data
+ expect(ranges.changes).to.be.undefined
+ done()
+ }
+ )
+ })
+ })
+
+ describe('without the undo flag', function () {
+ before(function (done) {
+ this.project_id = DocUpdaterClient.randomId()
+ this.doc_id = DocUpdaterClient.randomId()
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ version: this.version,
+ })
+ DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => {
+ if (error) {
+ throw error
+ }
+ DocUpdaterClient.sendUpdate(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ error => {
+ if (error) {
+ throw error
+ }
+ // Go back to old lines, without undo flag
+ DocUpdaterClient.setDocLines(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.source,
+ this.user_id,
+ false,
+ (error, res, body) => {
+ if (error) {
+ return done(error)
+ }
+ this.statusCode = res.statusCode
+ setTimeout(done, 200)
+ }
+ )
+ }
+ )
+ })
+ })
+
+ after(function () {
+ MockTrackChangesApi.flushDoc.resetHistory()
+ MockProjectHistoryApi.flushProject.resetHistory()
+ MockWebApi.setDocument.resetHistory()
+ })
+
+ it('should not undo the tracked changes', function (done) {
+ DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, data) => {
+ if (error) {
+ throw error
+ }
+ const { ranges } = data
+ expect(ranges.changes.length).to.equal(1)
+ done()
+ }
+ )
+ })
+ })
+ })
+})
diff --git a/services/document-updater/test/acceptance/js/SizeCheckTests.js b/services/document-updater/test/acceptance/js/SizeCheckTests.js
new file mode 100644
index 0000000000..6267df3b1a
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/SizeCheckTests.js
@@ -0,0 +1,129 @@
+const { expect } = require('chai')
+const Settings = require('@overleaf/settings')
+
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+
+describe('SizeChecks', function () {
+ before(function (done) {
+ DocUpdaterApp.ensureRunning(done)
+ })
+ beforeEach(function () {
+ this.version = 0
+ this.update = {
+ doc: this.doc_id,
+ op: [
+ {
+ i: 'insert some more lines that will bring it above the limit\n',
+ p: 42,
+ },
+ ],
+ v: this.version,
+ }
+ this.project_id = DocUpdaterClient.randomId()
+ this.doc_id = DocUpdaterClient.randomId()
+ })
+
+ describe('when a doc is above the doc size limit already', function () {
+ beforeEach(function () {
+ this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 + 1)]
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ v: this.version,
+ })
+ })
+
+ it('should error when fetching the doc', function (done) {
+ DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res) => {
+ if (error) return done(error)
+ expect(res.statusCode).to.equal(500)
+ done()
+ })
+ })
+
+ describe('when trying to update', function () {
+ beforeEach(function (done) {
+ const update = {
+ doc: this.doc_id,
+ op: this.update.op,
+ v: this.version,
+ }
+ DocUpdaterClient.sendUpdate(
+ this.project_id,
+ this.doc_id,
+ update,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ setTimeout(done, 200)
+ }
+ )
+ })
+
+ it('should still error when fetching the doc', function (done) {
+ DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res) => {
+ if (error) return done(error)
+ expect(res.statusCode).to.equal(500)
+ done()
+ })
+ })
+ })
+ })
+
+ describe('when a doc is just below the doc size limit', function () {
+ beforeEach(function () {
+ this.lines = ['0123456789'.repeat(Settings.max_doc_length / 10 - 1)]
+ MockWebApi.insertDoc(this.project_id, this.doc_id, {
+ lines: this.lines,
+ v: this.version,
+ })
+ })
+
+ it('should be able to fetch the doc', function (done) {
+ DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, doc) => {
+ if (error) return done(error)
+ expect(doc.lines).to.deep.equal(this.lines)
+ done()
+ }
+ )
+ })
+
+ describe('when trying to update', function () {
+ beforeEach(function (done) {
+ const update = {
+ doc: this.doc_id,
+ op: this.update.op,
+ v: this.version,
+ }
+ DocUpdaterClient.sendUpdate(
+ this.project_id,
+ this.doc_id,
+ update,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ setTimeout(done, 200)
+ }
+ )
+ })
+
+ it('should not update the doc', function (done) {
+ DocUpdaterClient.getDoc(
+ this.project_id,
+ this.doc_id,
+ (error, res, doc) => {
+ if (error) return done(error)
+ expect(doc.lines).to.deep.equal(this.lines)
+ done()
+ }
+ )
+ })
+ })
+ })
+})
diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js
new file mode 100644
index 0000000000..270f4ca0c6
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js
@@ -0,0 +1,49 @@
+/* eslint-disable
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const app = require('../../../../app')
+const { waitForDb } = require('../../../../app/js/mongodb')
+require('logger-sharelatex').logger.level('fatal')
+
+module.exports = {
+ running: false,
+ initing: false,
+ callbacks: [],
+ ensureRunning(callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ if (this.running) {
+ return callback()
+ } else if (this.initing) {
+ return this.callbacks.push(callback)
+ }
+ this.initing = true
+ this.callbacks.push(callback)
+ waitForDb().then(() => {
+ return app.listen(3003, 'localhost', error => {
+ if (error != null) {
+ throw error
+ }
+ this.running = true
+ return (() => {
+ const result = []
+ for (callback of Array.from(this.callbacks)) {
+ result.push(callback())
+ }
+ return result
+ })()
+ })
+ })
+ },
+}
diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
new file mode 100644
index 0000000000..9a3234628c
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js
@@ -0,0 +1,224 @@
+let DocUpdaterClient
+const Settings = require('@overleaf/settings')
+const _ = require('lodash')
+const rclient = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.documentupdater
+)
+const keys = Settings.redis.documentupdater.key_schema
+const request = require('request').defaults({ jar: false })
+const async = require('async')
+
+const rclientSub = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.pubsub
+)
+rclientSub.subscribe('applied-ops')
+rclientSub.setMaxListeners(0)
+
+module.exports = DocUpdaterClient = {
+ randomId() {
+ let str = ''
+ for (let i = 0; i < 24; i++) {
+ str += Math.floor(Math.random() * 16).toString(16)
+ }
+ return str
+ },
+
+ subscribeToAppliedOps(callback) {
+ rclientSub.on('message', callback)
+ },
+
+ _getPendingUpdateListKey() {
+ const shard = _.random(0, Settings.dispatcherCount - 1)
+ if (shard === 0) {
+ return 'pending-updates-list'
+ } else {
+ return `pending-updates-list-${shard}`
+ }
+ },
+
+ sendUpdate(projectId, docId, update, callback) {
+ rclient.rpush(
+ keys.pendingUpdates({ doc_id: docId }),
+ JSON.stringify(update),
+ error => {
+ if (error) {
+ return callback(error)
+ }
+ const docKey = `${projectId}:${docId}`
+ rclient.sadd('DocsWithPendingUpdates', docKey, error => {
+ if (error) {
+ return callback(error)
+ }
+
+ rclient.rpush(
+ DocUpdaterClient._getPendingUpdateListKey(),
+ docKey,
+ callback
+ )
+ })
+ }
+ )
+ },
+
+ sendUpdates(projectId, docId, updates, callback) {
+ DocUpdaterClient.preloadDoc(projectId, docId, error => {
+ if (error) {
+ return callback(error)
+ }
+ const jobs = updates.map(update => callback => {
+ DocUpdaterClient.sendUpdate(projectId, docId, update, callback)
+ })
+ async.series(jobs, err => {
+ if (err) {
+ return callback(err)
+ }
+ DocUpdaterClient.waitForPendingUpdates(projectId, docId, callback)
+ })
+ })
+ },
+
+ waitForPendingUpdates(projectId, docId, callback) {
+ async.retry(
+ { times: 30, interval: 100 },
+ cb =>
+ rclient.llen(keys.pendingUpdates({ doc_id: docId }), (err, length) => {
+ if (err) {
+ return cb(err)
+ }
+ if (length > 0) {
+ cb(new Error('updates still pending'))
+ } else {
+ cb()
+ }
+ }),
+ callback
+ )
+ },
+
+ getDoc(projectId, docId, callback) {
+ request.get(
+ `http://localhost:3003/project/${projectId}/doc/${docId}`,
+ (error, res, body) => {
+ if (body != null && res.statusCode >= 200 && res.statusCode < 300) {
+ body = JSON.parse(body)
+ }
+ callback(error, res, body)
+ }
+ )
+ },
+
+ getDocAndRecentOps(projectId, docId, fromVersion, callback) {
+ request.get(
+ `http://localhost:3003/project/${projectId}/doc/${docId}?fromVersion=${fromVersion}`,
+ (error, res, body) => {
+ if (body != null && res.statusCode >= 200 && res.statusCode < 300) {
+ body = JSON.parse(body)
+ }
+ callback(error, res, body)
+ }
+ )
+ },
+
+ preloadDoc(projectId, docId, callback) {
+ DocUpdaterClient.getDoc(projectId, docId, callback)
+ },
+
+ peekDoc(projectId, docId, callback) {
+ request.get(
+ `http://localhost:3003/project/${projectId}/doc/${docId}/peek`,
+ (error, res, body) => {
+ if (body != null && res.statusCode >= 200 && res.statusCode < 300) {
+ body = JSON.parse(body)
+ }
+ callback(error, res, body)
+ }
+ )
+ },
+
+ flushDoc(projectId, docId, callback) {
+ request.post(
+ `http://localhost:3003/project/${projectId}/doc/${docId}/flush`,
+ (error, res, body) => callback(error, res, body)
+ )
+ },
+
+ setDocLines(projectId, docId, lines, source, userId, undoing, callback) {
+ request.post(
+ {
+ url: `http://localhost:3003/project/${projectId}/doc/${docId}`,
+ json: {
+ lines,
+ source,
+ user_id: userId,
+ undoing,
+ },
+ },
+ (error, res, body) => callback(error, res, body)
+ )
+ },
+
+ deleteDoc(projectId, docId, callback) {
+ request.del(
+ `http://localhost:3003/project/${projectId}/doc/${docId}`,
+ (error, res, body) => callback(error, res, body)
+ )
+ },
+
+ flushProject(projectId, callback) {
+ request.post(`http://localhost:3003/project/${projectId}/flush`, callback)
+ },
+
+ deleteProject(projectId, callback) {
+ request.del(`http://localhost:3003/project/${projectId}`, callback)
+ },
+
+ deleteProjectOnShutdown(projectId, callback) {
+ request.del(
+ `http://localhost:3003/project/${projectId}?background=true&shutdown=true`,
+ callback
+ )
+ },
+
+ flushOldProjects(callback) {
+ request.get(
+ 'http://localhost:3003/flush_queued_projects?min_delete_age=1',
+ callback
+ )
+ },
+
+ acceptChange(projectId, docId, changeId, callback) {
+ request.post(
+ `http://localhost:3003/project/${projectId}/doc/${docId}/change/${changeId}/accept`,
+ callback
+ )
+ },
+
+ removeComment(projectId, docId, comment, callback) {
+ request.del(
+ `http://localhost:3003/project/${projectId}/doc/${docId}/comment/${comment}`,
+ callback
+ )
+ },
+
+ getProjectDocs(projectId, projectStateHash, callback) {
+ request.get(
+ `http://localhost:3003/project/${projectId}/doc?state=${projectStateHash}`,
+ (error, res, body) => {
+ if (body != null && res.statusCode >= 200 && res.statusCode < 300) {
+ body = JSON.parse(body)
+ }
+ callback(error, res, body)
+ }
+ )
+ },
+
+ sendProjectUpdate(projectId, userId, updates, version, callback) {
+ request.post(
+ {
+ url: `http://localhost:3003/project/${projectId}`,
+ json: { userId, updates, version },
+ },
+ (error, res, body) => callback(error, res, body)
+ )
+ },
+}
diff --git a/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js b/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js
new file mode 100644
index 0000000000..513475da3d
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js
@@ -0,0 +1,44 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let MockProjectHistoryApi
+const express = require('express')
+const app = express()
+
+module.exports = MockProjectHistoryApi = {
+ flushProject(doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return callback()
+ },
+
+ run() {
+ app.post('/project/:project_id/flush', (req, res, next) => {
+ return this.flushProject(req.params.project_id, error => {
+ if (error != null) {
+ return res.sendStatus(500)
+ } else {
+ return res.sendStatus(204)
+ }
+ })
+ })
+
+ return app.listen(3054, error => {
+ if (error != null) {
+ throw error
+ }
+ })
+ },
+}
+
+MockProjectHistoryApi.run()
diff --git a/services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js b/services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js
new file mode 100644
index 0000000000..eb66b2b3b5
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/helpers/MockTrackChangesApi.js
@@ -0,0 +1,49 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let MockTrackChangesApi
+const express = require('express')
+const app = express()
+
+module.exports = MockTrackChangesApi = {
+ flushDoc(doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return callback()
+ },
+
+ run() {
+ app.post('/project/:project_id/doc/:doc_id/flush', (req, res, next) => {
+ return this.flushDoc(req.params.doc_id, error => {
+ if (error != null) {
+ return res.sendStatus(500)
+ } else {
+ return res.sendStatus(204)
+ }
+ })
+ })
+
+ return app
+ .listen(3015, error => {
+ if (error != null) {
+ throw error
+ }
+ })
+ .on('error', error => {
+ console.error('error starting MockTrackChangesApi:', error.message)
+ return process.exit(1)
+ })
+ },
+}
+
+MockTrackChangesApi.run()
diff --git a/services/document-updater/test/acceptance/js/helpers/MockWebApi.js b/services/document-updater/test/acceptance/js/helpers/MockWebApi.js
new file mode 100644
index 0000000000..818895fcba
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/helpers/MockWebApi.js
@@ -0,0 +1,123 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// In-memory stub of the web API that the document-updater loads docs from
// and persists them back to. Docs are stored keyed by "project_id:doc_id".
let MockWebApi
const express = require('express')
const bodyParser = require('body-parser')
const app = express()
// Body-parser limit for the set-document endpoint; presumably sized well
// above the document-updater's own doc-size limit so the size-limit tests
// hit the real limit first — TODO confirm against the HTTP controller.
const MAX_REQUEST_SIZE = 2 * (2 * 1024 * 1024 + 64 * 1024)

module.exports = MockWebApi = {
  // Map of "project_id:doc_id" -> doc record ({ lines, version, ranges, ... }).
  docs: {},

  // Drop all stored docs (used to isolate tests from each other).
  clearDocs() {
    return (this.docs = {})
  },

  // Store `doc` under project_id:doc_id, defaulting version to 0 and
  // lines to [], and forcing a fixed pathname.
  insertDoc(project_id, doc_id, doc) {
    if (doc.version == null) {
      doc.version = 0
    }
    if (doc.lines == null) {
      doc.lines = []
    }
    doc.pathname = '/a/b/c.tex'
    return (this.docs[`${project_id}:${doc_id}`] = doc)
  },

  // Overwrite (creating if needed) the stored doc's content and metadata.
  // The acceptance tests spy on this to assert what got persisted.
  setDocument(
    project_id,
    doc_id,
    lines,
    version,
    ranges,
    lastUpdatedAt,
    lastUpdatedBy,
    callback
  ) {
    if (callback == null) {
      callback = function (error) {}
    }
    const doc =
      this.docs[`${project_id}:${doc_id}`] ||
      (this.docs[`${project_id}:${doc_id}`] = {})
    doc.lines = lines
    doc.version = version
    doc.ranges = ranges
    doc.pathname = '/a/b/c.tex'
    doc.lastUpdatedAt = lastUpdatedAt
    doc.lastUpdatedBy = lastUpdatedBy
    return callback(null)
  },

  // Look up a stored doc; yields undefined when it does not exist.
  getDocument(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (error, doc) {}
    }
    return callback(null, this.docs[`${project_id}:${doc_id}`])
  },

  // Start the HTTP server on port 3000 exposing the GET/POST doc routes.
  run() {
    app.get('/project/:project_id/doc/:doc_id', (req, res, next) => {
      return this.getDocument(
        req.params.project_id,
        req.params.doc_id,
        (error, doc) => {
          if (error != null) {
            return res.sendStatus(500)
          } else if (doc != null) {
            return res.send(JSON.stringify(doc))
          } else {
            // Unknown doc -> 404, so tests can exercise the missing-doc path.
            return res.sendStatus(404)
          }
        }
      )
    })

    app.post(
      '/project/:project_id/doc/:doc_id',
      bodyParser.json({ limit: MAX_REQUEST_SIZE }),
      (req, res, next) => {
        return MockWebApi.setDocument(
          req.params.project_id,
          req.params.doc_id,
          req.body.lines,
          req.body.version,
          req.body.ranges,
          req.body.lastUpdatedAt,
          req.body.lastUpdatedBy,
          error => {
            if (error != null) {
              return res.sendStatus(500)
            } else {
              return res.sendStatus(204)
            }
          }
        )
      }
    )

    return app
      .listen(3000, error => {
        if (error != null) {
          throw error
        }
      })
      .on('error', error => {
        // Fail fast if the port is already taken; a silent failure would
        // leave every acceptance test hanging.
        console.error('error starting MockWebApi:', error.message)
        return process.exit(1)
      })
  },
}

MockWebApi.run()
diff --git a/services/document-updater/test/cluster_failover/js/test_blpop_failover.js b/services/document-updater/test/cluster_failover/js/test_blpop_failover.js
new file mode 100644
index 0000000000..b6a83e3b34
--- /dev/null
+++ b/services/document-updater/test/cluster_failover/js/test_blpop_failover.js
@@ -0,0 +1,65 @@
let listenInBackground, sendPings
const redis = require('@overleaf/redis-wrapper')
// Two separate cluster clients: one pushes, one pops, so a failover on
// either side of the queue is exercised independently.
const rclient1 = redis.createClient({
  cluster: [
    {
      port: '7000',
      host: 'localhost',
    },
  ],
})

const rclient2 = redis.createClient({
  cluster: [
    {
      port: '7000',
      host: 'localhost',
    },
  ],
})

// Push a monotonically increasing counter onto the test list; the
// consumer checks the sequence for gaps to detect lost items.
let counter = 0
const sendPing = function (cb) {
  if (cb == null) {
    cb = function () {}
  }
  return rclient1.rpush('test-blpop', counter, error => {
    if (error != null) {
      console.error('[SENDING ERROR]', error.message)
    }
    // Only advance the counter when the push actually succeeded.
    if (error == null) {
      counter += 1
    }
    return cb()
  })
}

let previous = null
// Blocking-pop the next value and verify values arrive strictly in order.
const listenForPing = cb =>
  rclient2.blpop('test-blpop', 200, (error, result) => {
    if (error != null) {
      return cb(error)
    }
    // BLPOP yields a null result (no error) when its timeout expires
    // with nothing available; destructuring it unconditionally would
    // throw. Treat a timeout as "nothing received" and keep listening.
    if (result == null) {
      return cb()
    }
    let [, value] = result
    value = parseInt(value, 10)
    // Heartbeat dot every tenth message.
    if (value % 10 === 0) {
      console.log('.')
    }
    if (previous != null && value !== previous + 1) {
      error = new Error(
        `Counter not in order. Got ${value}, expected ${previous + 1}`
      )
    }
    previous = value
    return cb(error, value)
  })

const PING_DELAY = 100
// Kick off the producer and consumer loops; both run forever.
;(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))()
;(listenInBackground = () =>
  listenForPing(error => {
    if (error) {
      console.error('[RECEIVING ERROR]', error.message)
    }
    return setTimeout(listenInBackground)
  }))()
diff --git a/services/document-updater/test/cluster_failover/js/test_pubsub_failover.js b/services/document-updater/test/cluster_failover/js/test_pubsub_failover.js
new file mode 100644
index 0000000000..44ad70c6ec
--- /dev/null
+++ b/services/document-updater/test/cluster_failover/js/test_pubsub_failover.js
@@ -0,0 +1,54 @@
let sendPings
const redis = require('@overleaf/redis-wrapper')

// Build a fresh cluster client; publisher and subscriber must be
// separate connections so a failover affects them independently.
const createClusterClient = () =>
  redis.createClient({
    cluster: [
      {
        port: '7000',
        host: 'localhost',
      },
    ],
  })

const rclient1 = createClusterClient()
const rclient2 = createClusterClient()

// Publish a monotonically increasing counter; the subscriber checks the
// sequence for gaps to detect messages dropped during a failover.
let counter = 0
const sendPing = function (cb) {
  if (cb == null) {
    cb = function () {}
  }
  return rclient1.publish('test-pubsub', counter, error => {
    if (error) {
      console.error('[SENDING ERROR]', error.message)
    }
    // Only advance the counter when the publish succeeded.
    if (error == null) {
      counter += 1
    }
    return cb()
  })
}

let previous = null
rclient2.subscribe('test-pubsub')
rclient2.on('message', (channel, value) => {
  const received = parseInt(value, 10)
  // Heartbeat dot every tenth message.
  if (received % 10 === 0) {
    console.log('.')
  }
  if (previous != null && received !== previous + 1) {
    console.error(
      '[RECEIVING ERROR]',
      `Counter not in order. Got ${received}, expected ${previous + 1}`
    )
  }
  previous = received
})

const PING_DELAY = 100
;(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))()
diff --git a/services/document-updater/test/setup.js b/services/document-updater/test/setup.js
new file mode 100644
index 0000000000..0212544400
--- /dev/null
+++ b/services/document-updater/test/setup.js
@@ -0,0 +1,37 @@
const chai = require('chai')
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')

// Chai configuration
// Enable the `should` assertion style on all objects.
chai.should()

// Global stubs
// One shared sandbox owns every logger stub so all recorded calls can be
// cleared together between tests.
const sandbox = sinon.createSandbox()
const stubs = {
  logger: {
    debug: sandbox.stub(),
    log: sandbox.stub(),
    warn: sandbox.stub(),
    err: sandbox.stub(),
    error: sandbox.stub(),
  },
}

// SandboxedModule configuration
// Every module loaded through SandboxedModule receives the stubbed
// logger in place of 'logger-sharelatex', plus a minimal set of real
// globals (everything else is sandboxed away).
SandboxedModule.configure({
  requires: {
    'logger-sharelatex': stubs.logger,
  },
  globals: { Buffer, JSON, Math, console, process },
})

// Mocha hooks
// Root hooks (loaded via mocha --require): expose the logger stubs to
// each test as `this.logger`, and reset recorded calls after each test.
exports.mochaHooks = {
  beforeEach() {
    this.logger = stubs.logger
  },

  afterEach() {
    sandbox.reset()
  },
}
diff --git a/services/document-updater/test/stress/js/run.js b/services/document-updater/test/stress/js/run.js
new file mode 100644
index 0000000000..8b0a9f353b
--- /dev/null
+++ b/services/document-updater/test/stress/js/run.js
@@ -0,0 +1,392 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-return-assign,
+ no-undef,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS202: Simplify dynamic range loops
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const DocUpdaterClient = require('../../acceptance/js/helpers/DocUpdaterClient')
+// MockTrackChangesApi = require "../../acceptance/js/helpers/MockTrackChangesApi"
+// MockWebApi = require "../../acceptance/js/helpers/MockWebApi"
+const assert = require('assert')
+const async = require('async')
+
// Return `string` with `content` spliced in at index `pos`.
const insert = function (string, pos, content) {
  const head = string.slice(0, pos)
  const tail = string.slice(pos)
  return head + content + tail
}
+
// Operational-transform of insert op `op1` against a concurrently
// applied insert op `op2`: when op2 landed strictly before op1's
// position, shift op1 right by op2's inserted length; otherwise op1 is
// returned unchanged (ties do not shift).
const transform = function (op1, op2) {
  const needsShift = op2.p < op1.p
  if (!needsShift) {
    return op1
  }
  return {
    p: op1.p + op2.i.length,
    i: op1.i,
  }
}
+
/**
 * One simulated collaborative editor used by the stress-test runner.
 *
 * Each client owns a cursor position in a shared document, repeatedly
 * inserts single characters, and listens on the applied-ops channel to
 * transform its in-flight op against concurrent remote edits.
 * `check()` compares the locally tracked content with the server copy.
 */
class StressTestClient {
  constructor(options) {
    if (options == null) {
      options = {}
    }
    this.options = options
    if (this.options.updateDelay == null) {
      this.options.updateDelay = 200
    }
    this.project_id = this.options.project_id || DocUpdaterClient.randomId()
    this.doc_id = this.options.doc_id || DocUpdaterClient.randomId()
    this.pos = this.options.pos || 0
    this.content = this.options.content || ''

    this.client_id = DocUpdaterClient.randomId()
    this.version = this.options.version || 0
    this.inflight_op = null
    this.charCode = 0

    // Per-sample-window statistics; reset externally by printSummary.
    this.counts = {
      conflicts: 0,
      local_updates: 0,
      remote_updates: 0,
      max_delay: 0,
    }

    DocUpdaterClient.subscribeToAppliedOps((channel, update) => {
      update = JSON.parse(update)
      if (update.error != null) {
        console.error(new Error(`Error from server: '${update.error}'`))
        return
      }
      // The channel is shared between docs; only process our own.
      if (update.doc_id === this.doc_id) {
        return this.processReply(update)
      }
    })
  }

  // Insert the next letter of the A-Z cycle at our cursor position and
  // submit it as an op; records the send time to measure ack latency.
  sendUpdate() {
    const data = String.fromCharCode(65 + (this.charCode++ % 26))
    this.content = insert(this.content, this.pos, data)
    this.inflight_op = {
      i: data,
      p: this.pos++,
    }
    this.resendUpdate()
    return (this.inflight_op_sent = Date.now())
  }

  // Send the current in-flight op; keeps retrying every 5 seconds until
  // an ack arrives (dupIfSource makes resends safe server-side).
  resendUpdate() {
    assert(this.inflight_op != null)
    DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, {
      doc: this.doc_id,
      op: [this.inflight_op],
      v: this.version,
      meta: {
        source: this.client_id,
      },
      dupIfSource: [this.client_id],
    })
    return (this.update_timer = setTimeout(() => {
      console.log(
        `[${new Date()}] \t[${this.client_id.slice(
          0,
          4
        )}] WARN: Resending update after 5 seconds`
      )
      return this.resendUpdate()
    }, 5000))
  }

  // Handle a broadcast applied-op for our doc: either the ack of our
  // own in-flight op, or a remote op we must transform against.
  processReply(update) {
    if (update.op.v !== this.version) {
      if (update.op.v < this.version) {
        console.log(
          `[${new Date()}] \t[${this.client_id.slice(
            0,
            4
          )}] WARN: Duplicate ack (already seen version)`
        )
        return
      } else {
        console.error(
          `[${new Date()}] \t[${this.client_id.slice(
            0,
            4
          )}] ERROR: Version jumped ahead (client: ${this.version}, op: ${
            update.op.v
          })`
        )
      }
    }
    this.version++
    if (update.op.meta.source === this.client_id) {
      if (this.inflight_op != null) {
        this.counts.local_updates++
        this.inflight_op = null
        clearTimeout(this.update_timer)
        const delay = Date.now() - this.inflight_op_sent
        this.counts.max_delay = Math.max(this.counts.max_delay, delay)
        return this.continue()
      } else {
        return console.log(
          `[${new Date()}] \t[${this.client_id.slice(
            0,
            4
          )}] WARN: Duplicate ack`
        )
      }
    } else {
      assert(update.op.op.length === 1)
      this.counts.remote_updates++
      let external_op = update.op.op[0]
      if (this.inflight_op != null) {
        this.counts.conflicts++
        // Transform both ops against each other so local content and
        // the pending op stay consistent.
        this.inflight_op = transform(this.inflight_op, external_op)
        external_op = transform(external_op, this.inflight_op)
      }
      if (external_op.p < this.pos) {
        this.pos += external_op.i.length
      }
      return (this.content = insert(this.content, external_op.p, external_op.i))
    }
  }

  // Schedule the next update after a jittered delay, or signal the
  // batch callback when this sample window's quota is used up.
  continue() {
    if (this.updateCount > 0) {
      this.updateCount--
      return setTimeout(() => {
        return this.sendUpdate()
      }, this.options.updateDelay * (0.5 + Math.random()))
    } else {
      return this.updateCallback()
    }
  }

  // Send `n` updates (paced by continue()), then invoke `callback`.
  runForNUpdates(n, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    this.updateCallback = callback
    this.updateCount = n
    return this.continue()
  }

  // Fetch the doc from the server and compare content/version with our
  // local state, logging any divergence; throws on structurally invalid
  // content.
  check(callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return DocUpdaterClient.getDoc(
      this.project_id,
      this.doc_id,
      (error, res, body) => {
        if (error != null) {
          throw error
        }
        if (body.lines == null) {
          // Fixed: the original referenced an undefined bare `doc_id`
          // here (masked by the file-wide no-undef disable).
          return console.error(
            `[${new Date()}] \t[${this.client_id.slice(
              0,
              4
            )}] ERROR: Invalid response from get doc (${this.doc_id})`,
            body
          )
        }
        const content = body.lines.join('\n')
        const { version } = body
        if (content !== this.content) {
          if (version === this.version) {
            console.error(
              `[${new Date()}] \t[${this.client_id.slice(
                0,
                4
              )}] Error: Client content does not match server.`
            )
            console.error(`Server: ${content.split('a')}`)
            console.error(`Client: ${this.content.split('a')}`)
          } else {
            console.error(
              `[${new Date()}] \t[${this.client_id.slice(
                0,
                4
              )}] Error: Version mismatch (Server: '${version}', Client: '${
                this.version
              }')`
            )
          }
        }

        if (!this.isContentValid(this.content)) {
          const iterable = this.content.split('')
          for (let i = 0; i < iterable.length; i++) {
            const chunk = iterable[i]
            if (chunk != null && chunk !== 'a') {
              console.log(chunk, i)
            }
          }
          throw new Error('bad content')
        }
        return callback()
      }
    )
  }

  // A chunk (text between the 'a' filler characters) is valid when it
  // spells the A-Z cycle from the start.
  isChunkValid(chunk) {
    for (let i = 0; i < chunk.length; i++) {
      const letter = chunk[i]
      if (letter.charCodeAt(0) !== 65 + (i % 26)) {
        console.error(
          `[${new Date()}] \t[${this.client_id.slice(0, 4)}] Invalid Chunk:`,
          chunk
        )
        return false
      }
    }
    return true
  }

  // Content is valid when every non-filler chunk passes isChunkValid.
  isContentValid(content) {
    for (const chunk of content.split('a')) {
      if (chunk != null && chunk !== '') {
        if (!this.isChunkValid(chunk)) {
          console.error(
            `[${new Date()}] \t[${this.client_id.slice(0, 4)}] Invalid content`,
            content
          )
          return false
        }
      }
    }
    return true
  }
}
+
// Ask every client to verify its local content against the server copy
// of the doc, in parallel; `callback` fires once all checks complete.
const checkDocument = function (project_id, doc_id, clients, callback) {
  if (callback == null) {
    callback = function (error) {}
  }
  return async.parallel(
    clients.map(client => cb => client.check(cb)),
    callback
  )
}
+
// Log one stats line per client for the current sample window, then
// reset every client's counters; returns the array of fresh counter
// objects.
const printSummary = function (doc_id, clients) {
  const slot = require('cluster-key-slot')
  const now = new Date()
  console.log(
    `[${now}] [${doc_id.slice(0, 4)} (slot: ${slot(doc_id)})] ${
      clients.length
    } clients...`
  )
  const result = []
  for (const client of clients) {
    const counts = client.counts
    console.log(
      `[${now}] \t[${client.client_id.slice(0, 4)}] { local: ${
        counts.local_updates
      }, remote: ${counts.remote_updates}, conflicts: ${
        counts.conflicts
      }, max_delay: ${counts.max_delay} }`
    )
    client.counts = {
      local_updates: 0,
      remote_updates: 0,
      conflicts: 0,
      max_delay: 0,
    }
    result.push(client.counts)
  }
  return result
}
+
// CLI arguments: <client count> <update delay ms> <sample interval ms>
// followed by one or more <project_id>:<doc_id> pairs to stress.
const CLIENT_COUNT = parseInt(process.argv[2], 10)
const UPDATE_DELAY = parseInt(process.argv[3], 10)
const SAMPLE_INTERVAL = parseInt(process.argv[4], 10)

for (const doc_and_project_id of Array.from(process.argv.slice(5))) {
  // IIFE keeps per-document variables scoped (decaffeinate artifact).
  ;(function (doc_and_project_id) {
    const [project_id, doc_id] = Array.from(doc_and_project_id.split(':'))
    console.log({ project_id, doc_id })
    // Seed the doc with CLIENT_COUNT+1 'a' filler characters — one
    // insertion slot per client cursor.
    return DocUpdaterClient.setDocLines(
      project_id,
      doc_id,
      [new Array(CLIENT_COUNT + 2).join('a')],
      null,
      null,
      error => {
        if (error != null) {
          throw error
        }
        return DocUpdaterClient.getDoc(
          project_id,
          doc_id,
          (error, res, body) => {
            let runBatch
            if (error != null) {
              throw error
            }
            if (body.lines == null) {
              return console.error(
                `[${new Date()}] ERROR: Invalid response from get doc (${doc_id})`,
                body
              )
            }
            const content = body.lines.join('\n')
            const { version } = body

            // Start one client per cursor position 1..CLIENT_COUNT.
            const clients = []
            for (
              let pos = 1, end = CLIENT_COUNT, asc = end >= 1;
              asc ? pos <= end : pos >= end;
              asc ? pos++ : pos--
            ) {
              ;(function (pos) {
                const client = new StressTestClient({
                  doc_id,
                  project_id,
                  content,
                  pos,
                  version,
                  updateDelay: UPDATE_DELAY,
                })
                return clients.push(client)
              })(pos)
            }

            // Loop forever: each batch sends updates for one sample
            // interval, prints stats, then verifies the doc contents.
            return (runBatch = function () {
              const jobs = clients.map(
                client => cb =>
                  client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb)
              )
              return async.parallel(jobs, error => {
                if (error != null) {
                  throw error
                }
                printSummary(doc_id, clients)
                return checkDocument(project_id, doc_id, clients, error => {
                  if (error != null) {
                    throw error
                  }
                  return runBatch()
                })
              })
            })()
          }
        )
      }
    )
  })(doc_and_project_id)
}
diff --git a/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js
new file mode 100644
index 0000000000..4f8b188c15
--- /dev/null
+++ b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js
@@ -0,0 +1,93 @@
/* eslint-disable
    handle-callback-err,
*/
// Unit tests for DiffCodec, which converts a before/after pair of doc
// line arrays into ShareJS insert/delete ops.
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/DiffCodec.js'
const SandboxedModule = require('sandboxed-module')

describe('DiffCodec', function () {
  beforeEach(function () {
    this.callback = sinon.stub()
    this.DiffCodec = SandboxedModule.require(modulePath)
  })

  describe('diffAsShareJsOps', function () {
    it('should insert new text correctly', function (done) {
      const before = ['hello world']
      const after = ['hello beautiful world']
      this.DiffCodec.diffAsShareJsOp(before, after, (error, ops) => {
        expect(ops).to.deep.equal([
          {
            i: 'beautiful ',
            p: 6,
          },
        ])
        done()
      })
    })

    it('should shift later inserts by previous inserts', function (done) {
      const before = ['the boy played with the ball']
      const after = ['the tall boy played with the red ball']
      this.DiffCodec.diffAsShareJsOp(before, after, (error, ops) => {
        expect(ops).to.deep.equal([
          { i: 'tall ', p: 4 },
          { i: 'red ', p: 29 },
        ])
        done()
      })
    })

    it('should delete text correctly', function (done) {
      const before = ['hello beautiful world']
      const after = ['hello world']
      this.DiffCodec.diffAsShareJsOp(before, after, (error, ops) => {
        expect(ops).to.deep.equal([
          {
            d: 'beautiful ',
            p: 6,
          },
        ])
        done()
      })
    })

    it('should shift later deletes by the first deletes', function (done) {
      const before = ['the tall boy played with the red ball']
      const after = ['the boy played with the ball']
      this.DiffCodec.diffAsShareJsOp(before, after, (error, ops) => {
        expect(ops).to.deep.equal([
          { d: 'tall ', p: 4 },
          { d: 'red ', p: 24 },
        ])
        done()
      })
    })
  })
})
diff --git a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js
new file mode 100644
index 0000000000..4e17d58fff
--- /dev/null
+++ b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js
@@ -0,0 +1,199 @@
/* eslint-disable
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
// Unit tests for DispatchManager, which pulls pending-update keys off
// redis lists (via blocking BLPOP) and dispatches them to UpdateManager.
const sinon = require('sinon')
const modulePath = '../../../../app/js/DispatchManager.js'
const SandboxedModule = require('sandboxed-module')
const Errors = require('../../../../app/js/Errors.js')

describe('DispatchManager', function () {
  beforeEach(function () {
    let Timer
    this.timeout(3000)
    this.DispatchManager = SandboxedModule.require(modulePath, {
      requires: {
        './UpdateManager': (this.UpdateManager = {}),
        '@overleaf/settings': (this.settings = {
          redis: {
            documentupdater: {},
          },
        }),
        '@overleaf/redis-wrapper': (this.redis = {}),
        './RateLimitManager': {},
        './Errors': Errors,
        './Metrics': (this.Metrics = {
          // Stub Timer class whose done() is shared via the prototype so
          // tests can assert it was called.
          Timer: (Timer = (function () {
            Timer = class Timer {
              static initClass() {
                this.prototype.done = sinon.stub()
              }
            }
            Timer.initClass()
            return Timer
          })()),
        }),
      },
    })
    this.callback = sinon.stub()
    // Pass-through rate limiter: run each task immediately.
    return (this.RateLimiter = {
      run(task, cb) {
        return task(cb)
      },
    })
  }) // run task without rate limit

  return describe('each worker', function () {
    beforeEach(function () {
      this.client = { auth: sinon.stub() }
      this.redis.createClient = sinon.stub().returns(this.client)
      // Shard 0 maps to the un-suffixed "pending-updates-list" key.
      return (this.worker = this.DispatchManager.createDispatcher(
        this.RateLimiter,
        0
      ))
    })

    it('should create a new redis client', function () {
      return this.redis.createClient.called.should.equal(true)
    })

    describe('_waitForUpdateThenDispatchWorker', function () {
      beforeEach(function () {
        this.project_id = 'project-id-123'
        this.doc_id = 'doc-id-123'
        this.doc_key = `${this.project_id}:${this.doc_id}`
        // BLPOP resolves with [listName, poppedKey].
        return (this.client.blpop = sinon
          .stub()
          .callsArgWith(2, null, ['pending-updates-list', this.doc_key]))
      })

      describe('in the normal case', function () {
        beforeEach(function () {
          this.UpdateManager.processOutstandingUpdatesWithLock = sinon
            .stub()
            .callsArg(2)
          return this.worker._waitForUpdateThenDispatchWorker(this.callback)
        })

        it('should call redis with BLPOP', function () {
          return this.client.blpop
            .calledWith('pending-updates-list', 0)
            .should.equal(true)
        })

        it('should call processOutstandingUpdatesWithLock', function () {
          return this.UpdateManager.processOutstandingUpdatesWithLock
            .calledWith(this.project_id, this.doc_id)
            .should.equal(true)
        })

        it('should not log any errors', function () {
          // this.logger is the shared stub from test/setup.js root hooks.
          this.logger.error.called.should.equal(false)
          return this.logger.warn.called.should.equal(false)
        })

        return it('should call the callback', function () {
          return this.callback.called.should.equal(true)
        })
      })

      describe('with an error', function () {
        beforeEach(function () {
          this.UpdateManager.processOutstandingUpdatesWithLock = sinon
            .stub()
            .callsArgWith(2, new Error('a generic error'))
          return this.worker._waitForUpdateThenDispatchWorker(this.callback)
        })

        it('should log an error', function () {
          return this.logger.error.called.should.equal(true)
        })

        return it('should call the callback', function () {
          return this.callback.called.should.equal(true)
        })
      })

      describe("with a 'Delete component' error", function () {
        beforeEach(function () {
          // DeleteMismatchError is an expected race, so only a warning.
          this.UpdateManager.processOutstandingUpdatesWithLock = sinon
            .stub()
            .callsArgWith(2, new Errors.DeleteMismatchError())
          return this.worker._waitForUpdateThenDispatchWorker(this.callback)
        })

        it('should log a warning', function () {
          return this.logger.warn.called.should.equal(true)
        })

        return it('should call the callback', function () {
          return this.callback.called.should.equal(true)
        })
      })

      describe('pending updates list with shard key', function () {
        beforeEach(function (done) {
          this.client = {
            auth: sinon.stub(),
            blpop: sinon.stub().callsArgWith(2),
          }
          this.redis.createClient = sinon.stub().returns(this.client)
          this.queueShardNumber = 7
          this.worker = this.DispatchManager.createDispatcher(
            this.RateLimiter,
            this.queueShardNumber
          )
          this.worker._waitForUpdateThenDispatchWorker(done)
        })

        it('should call redis with BLPOP with the correct key', function () {
          // Non-zero shards get a numeric suffix on the list key.
          this.client.blpop
            .calledWith(`pending-updates-list-${this.queueShardNumber}`, 0)
            .should.equal(true)
        })
      })
    })

    return describe('run', function () {
      return it('should call _waitForUpdateThenDispatchWorker until shutting down', function (done) {
        let callCount = 0
        // Replace the worker loop body with a counter that flips the
        // shutdown flag on the third iteration.
        this.worker._waitForUpdateThenDispatchWorker = callback => {
          if (callback == null) {
            callback = function (error) {}
          }
          callCount++
          if (callCount === 3) {
            this.settings.shuttingDown = true
          }
          return setTimeout(() => callback(), 10)
        }
        sinon.spy(this.worker, '_waitForUpdateThenDispatchWorker')

        this.worker.run()

        var checkStatus = () => {
          if (!this.settings.shuttingDown) {
            // retry until shutdown
            setTimeout(checkStatus, 100)
          } else {
            this.worker._waitForUpdateThenDispatchWorker.callCount.should.equal(
              3
            )
            return done()
          }
        }

        return checkStatus()
      })
    })
  })
})
diff --git a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js
new file mode 100644
index 0000000000..bac5ae6a85
--- /dev/null
+++ b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js
@@ -0,0 +1,1103 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/DocumentManager.js'
+const SandboxedModule = require('sandboxed-module')
+const Errors = require('../../../../app/js/Errors')
+const tk = require('timekeeper')
+
+describe('DocumentManager', function () {
+ beforeEach(function () {
+ let Timer
+ tk.freeze(new Date())
+ this.DocumentManager = SandboxedModule.require(modulePath, {
+ requires: {
+ './RedisManager': (this.RedisManager = {}),
+ './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}),
+ './PersistenceManager': (this.PersistenceManager = {}),
+ './HistoryManager': (this.HistoryManager = {
+ flushDocChangesAsync: sinon.stub(),
+ flushProjectChangesAsync: sinon.stub(),
+ }),
+ './Metrics': (this.Metrics = {
+ Timer: (Timer = (function () {
+ Timer = class Timer {
+ static initClass() {
+ this.prototype.done = sinon.stub()
+ }
+ }
+ Timer.initClass()
+ return Timer
+ })()),
+ }),
+ './RealTimeRedisManager': (this.RealTimeRedisManager = {}),
+ './DiffCodec': (this.DiffCodec = {}),
+ './UpdateManager': (this.UpdateManager = {}),
+ './RangesManager': (this.RangesManager = {}),
+ './Errors': Errors,
+ },
+ })
+ this.project_id = 'project-id-123'
+ this.projectHistoryId = 'history-id-123'
+ this.projectHistoryType = 'project-history'
+ this.doc_id = 'doc-id-123'
+ this.user_id = 1234
+ this.callback = sinon.stub()
+ this.lines = ['one', 'two', 'three']
+ this.version = 42
+ this.ranges = { comments: 'mock', entries: 'mock' }
+ this.pathname = '/a/b/c.tex'
+ this.unflushedTime = Date.now()
+ this.lastUpdatedAt = Date.now()
+ return (this.lastUpdatedBy = 'last-author-id')
+ })
+
+ afterEach(function () {
+ return tk.reset()
+ })
+
+ describe('flushAndDeleteDoc', function () {
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.RedisManager.removeDocFromMemory = sinon.stub().callsArg(2)
+ this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArgWith(2)
+ return this.DocumentManager.flushAndDeleteDoc(
+ this.project_id,
+ this.doc_id,
+ {},
+ this.callback
+ )
+ })
+
+ it('should flush the doc', function () {
+ return this.DocumentManager.flushDocIfLoaded
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should remove the doc from redis', function () {
+ return this.RedisManager.removeDocFromMemory
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should call the callback without error', function () {
+ return this.callback.calledWith(null).should.equal(true)
+ })
+
+ it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+
+ return it('should flush to the history api', function () {
+ return this.HistoryManager.flushDocChangesAsync
+ .calledWithExactly(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+ })
+
+ return describe('when a flush error occurs', function () {
+ beforeEach(function () {
+ this.DocumentManager.flushDocIfLoaded = sinon
+ .stub()
+ .callsArgWith(2, new Error('boom!'))
+ return (this.RedisManager.removeDocFromMemory = sinon
+ .stub()
+ .callsArg(2))
+ })
+
+ it('should not remove the doc from redis', function (done) {
+ return this.DocumentManager.flushAndDeleteDoc(
+ this.project_id,
+ this.doc_id,
+ {},
+ error => {
+ error.should.exist
+ this.RedisManager.removeDocFromMemory.called.should.equal(false)
+ return done()
+ }
+ )
+ })
+
+ return describe('when ignoring flush errors', function () {
+ return it('should remove the doc from redis', function (done) {
+ return this.DocumentManager.flushAndDeleteDoc(
+ this.project_id,
+ this.doc_id,
+ { ignoreFlushErrors: true },
+ error => {
+ if (error != null) {
+ return done(error)
+ }
+ this.RedisManager.removeDocFromMemory.called.should.equal(true)
+ return done()
+ }
+ )
+ })
+ })
+ })
+ })
+
+ describe('flushDocIfLoaded', function () {
+ describe('when the doc is in Redis', function () {
+ beforeEach(function () {
+ this.RedisManager.getDoc = sinon
+ .stub()
+ .callsArgWith(
+ 2,
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId,
+ this.unflushedTime,
+ this.lastUpdatedAt,
+ this.lastUpdatedBy
+ )
+ this.RedisManager.clearUnflushedTime = sinon
+ .stub()
+ .callsArgWith(1, null)
+ this.PersistenceManager.setDoc = sinon.stub().yields()
+ return this.DocumentManager.flushDocIfLoaded(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should get the doc from redis', function () {
+ return this.RedisManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should write the doc lines to the persistence layer', function () {
+ return this.PersistenceManager.setDoc
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.lastUpdatedAt,
+ this.lastUpdatedBy
+ )
+ .should.equal(true)
+ })
+
+ it('should call the callback without error', function () {
+ return this.callback.calledWith(null).should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ return describe('when the document is not in Redis', function () {
+ beforeEach(function () {
+ this.RedisManager.getDoc = sinon
+ .stub()
+ .callsArgWith(2, null, null, null, null)
+ this.PersistenceManager.setDoc = sinon.stub().yields()
+ return this.DocumentManager.flushDocIfLoaded(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should get the doc from redis', function () {
+ return this.RedisManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should not write anything to the persistence layer', function () {
+ this.PersistenceManager.setDoc.called.should.equal(false)
+ })
+
+ it('should call the callback without error', function () {
+ return this.callback.calledWith(null).should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+ })
+
+ describe('getDocAndRecentOps', function () {
+ describe('with a previous version specified', function () {
+ beforeEach(function () {
+ this.DocumentManager.getDoc = sinon
+ .stub()
+ .callsArgWith(
+ 2,
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId
+ )
+ this.RedisManager.getPreviousDocOps = sinon
+ .stub()
+ .callsArgWith(3, null, this.ops)
+ return this.DocumentManager.getDocAndRecentOps(
+ this.project_id,
+ this.doc_id,
+ this.fromVersion,
+ this.callback
+ )
+ })
+
+ it('should get the doc', function () {
+ return this.DocumentManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should get the doc ops', function () {
+ return this.RedisManager.getPreviousDocOps
+ .calledWith(this.doc_id, this.fromVersion, this.version)
+ .should.equal(true)
+ })
+
+ it('should call the callback with the doc info', function () {
+ return this.callback
+ .calledWith(
+ null,
+ this.lines,
+ this.version,
+ this.ops,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId
+ )
+ .should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ return describe('with no previous version specified', function () {
+ beforeEach(function () {
+ this.DocumentManager.getDoc = sinon
+ .stub()
+ .callsArgWith(
+ 2,
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId
+ )
+ this.RedisManager.getPreviousDocOps = sinon
+ .stub()
+ .callsArgWith(3, null, this.ops)
+ return this.DocumentManager.getDocAndRecentOps(
+ this.project_id,
+ this.doc_id,
+ -1,
+ this.callback
+ )
+ })
+
+ it('should get the doc', function () {
+ return this.DocumentManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should not need to get the doc ops', function () {
+ return this.RedisManager.getPreviousDocOps.called.should.equal(false)
+ })
+
+ it('should call the callback with the doc info', function () {
+ return this.callback
+ .calledWith(
+ null,
+ this.lines,
+ this.version,
+ [],
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId
+ )
+ .should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+ })
+
+ describe('getDoc', function () {
+ describe('when the doc exists in Redis', function () {
+ beforeEach(function () {
+ this.RedisManager.getDoc = sinon
+ .stub()
+ .callsArgWith(
+ 2,
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId,
+ this.unflushedTime
+ )
+ return this.DocumentManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should get the doc from Redis', function () {
+ return this.RedisManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should call the callback with the doc info', function () {
+ return this.callback
+ .calledWith(
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId,
+ this.unflushedTime,
+ true
+ )
+ .should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ return describe('when the doc does not exist in Redis', function () {
+ beforeEach(function () {
+ this.RedisManager.getDoc = sinon
+ .stub()
+ .callsArgWith(2, null, null, null, null, null, null)
+ this.PersistenceManager.getDoc = sinon
+ .stub()
+ .callsArgWith(
+ 2,
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId,
+ this.projectHistoryType
+ )
+ this.RedisManager.putDocInMemory = sinon.stub().yields()
+ this.RedisManager.setHistoryType = sinon.stub().yields()
+ return this.DocumentManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should try to get the doc from Redis', function () {
+ return this.RedisManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should get the doc from the PersistenceManager', function () {
+ return this.PersistenceManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should set the doc in Redis', function () {
+ return this.RedisManager.putDocInMemory
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId
+ )
+ .should.equal(true)
+ })
+
+ it('should set the history type in Redis', function () {
+ return this.RedisManager.setHistoryType
+ .calledWith(this.doc_id, this.projectHistoryType)
+ .should.equal(true)
+ })
+
+ it('should call the callback with the doc info', function () {
+ return this.callback
+ .calledWith(
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId,
+ null,
+ false
+ )
+ .should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+ })
+
+ describe('setDoc', function () {
+ return describe('with plain tex lines', function () {
+ beforeEach(function () {
+ this.beforeLines = ['before', 'lines']
+ this.afterLines = ['after', 'lines']
+ this.ops = [
+ { i: 'foo', p: 4 },
+ { d: 'bar', p: 42 },
+ ]
+ this.DocumentManager.getDoc = sinon
+ .stub()
+ .callsArgWith(
+ 2,
+ null,
+ this.beforeLines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId,
+ this.unflushedTime,
+ true
+ )
+ this.DiffCodec.diffAsShareJsOp = sinon
+ .stub()
+ .callsArgWith(2, null, this.ops)
+ this.UpdateManager.applyUpdate = sinon.stub().callsArgWith(3, null)
+ this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2)
+ return (this.DocumentManager.flushAndDeleteDoc = sinon
+ .stub()
+ .callsArg(3))
+ })
+
+ describe('when already loaded', function () {
+ beforeEach(function () {
+ return this.DocumentManager.setDoc(
+ this.project_id,
+ this.doc_id,
+ this.afterLines,
+ this.source,
+ this.user_id,
+ false,
+ this.callback
+ )
+ })
+
+ it('should get the current doc lines', function () {
+ return this.DocumentManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should return a diff of the old and new lines', function () {
+ return this.DiffCodec.diffAsShareJsOp
+ .calledWith(this.beforeLines, this.afterLines)
+ .should.equal(true)
+ })
+
+ it('should apply the diff as a ShareJS op', function () {
+ return this.UpdateManager.applyUpdate
+ .calledWith(this.project_id, this.doc_id, {
+ doc: this.doc_id,
+ v: this.version,
+ op: this.ops,
+ meta: {
+ type: 'external',
+ source: this.source,
+ user_id: this.user_id,
+ },
+ })
+ .should.equal(true)
+ })
+
+ it('should flush the doc to Mongo', function () {
+ return this.DocumentManager.flushDocIfLoaded
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should not flush the project history', function () {
+ return this.HistoryManager.flushProjectChangesAsync.called.should.equal(
+ false
+ )
+ })
+
+ it('should call the callback', function () {
+ return this.callback.calledWith(null).should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('when not already loaded', function () {
+ beforeEach(function () {
+ this.DocumentManager.getDoc = sinon
+ .stub()
+ .callsArgWith(
+ 2,
+ null,
+ this.beforeLines,
+ this.version,
+ this.pathname,
+ null,
+ false
+ )
+ return this.DocumentManager.setDoc(
+ this.project_id,
+ this.doc_id,
+ this.afterLines,
+ this.source,
+ this.user_id,
+ false,
+ this.callback
+ )
+ })
+
+ it('should flush and delete the doc from the doc updater', function () {
+ return this.DocumentManager.flushAndDeleteDoc
+ .calledWith(this.project_id, this.doc_id, {})
+ .should.equal(true)
+ })
+
+ return it('should not flush the project history', function () {
+ return this.HistoryManager.flushProjectChangesAsync
+ .calledWithExactly(this.project_id)
+ .should.equal(true)
+ })
+ })
+
+ describe('without new lines', function () {
+ beforeEach(function () {
+ return this.DocumentManager.setDoc(
+ this.project_id,
+ this.doc_id,
+ null,
+ this.source,
+ this.user_id,
+ false,
+ this.callback
+ )
+ })
+
+ it('should return the callback with an error', function () {
+ return this.callback.calledWith(
+ new Error('No lines were passed to setDoc')
+ )
+ })
+
+ return it('should not try to get the doc lines', function () {
+ return this.DocumentManager.getDoc.called.should.equal(false)
+ })
+ })
+
+ return describe('with the undoing flag', function () {
+ beforeEach(function () {
+ // Copy ops so we don't interfere with other tests
+ this.ops = [
+ { i: 'foo', p: 4 },
+ { d: 'bar', p: 42 },
+ ]
+ this.DiffCodec.diffAsShareJsOp = sinon
+ .stub()
+ .callsArgWith(2, null, this.ops)
+ return this.DocumentManager.setDoc(
+ this.project_id,
+ this.doc_id,
+ this.afterLines,
+ this.source,
+ this.user_id,
+ true,
+ this.callback
+ )
+ })
+
+ return it('should set the undo flag on each op', function () {
+ return Array.from(this.ops).map(op => op.u.should.equal(true))
+ })
+ })
+ })
+ })
+
+ describe('acceptChanges', function () {
+ beforeEach(function () {
+ this.change_id = 'mock-change-id'
+ this.change_ids = [
+ 'mock-change-id-1',
+ 'mock-change-id-2',
+ 'mock-change-id-3',
+ 'mock-change-id-4',
+ ]
+ this.version = 34
+ this.lines = ['original', 'lines']
+ this.ranges = { entries: 'mock', comments: 'mock' }
+ this.updated_ranges = { entries: 'updated', comments: 'updated' }
+ this.DocumentManager.getDoc = sinon
+ .stub()
+ .yields(null, this.lines, this.version, this.ranges)
+ this.RangesManager.acceptChanges = sinon
+ .stub()
+ .yields(null, this.updated_ranges)
+ return (this.RedisManager.updateDocument = sinon.stub().yields())
+ })
+
+ describe('successfully with a single change', function () {
+ beforeEach(function () {
+ return this.DocumentManager.acceptChanges(
+ this.project_id,
+ this.doc_id,
+ [this.change_id],
+ this.callback
+ )
+ })
+
+ it("should get the document's current ranges", function () {
+ return this.DocumentManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should apply the accept change to the ranges', function () {
+ return this.RangesManager.acceptChanges
+ .calledWith([this.change_id], this.ranges)
+ .should.equal(true)
+ })
+
+ it('should save the updated ranges', function () {
+ return this.RedisManager.updateDocument
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ [],
+ this.updated_ranges,
+ {}
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('successfully with multiple changes', function () {
+ beforeEach(function () {
+ return this.DocumentManager.acceptChanges(
+ this.project_id,
+ this.doc_id,
+ this.change_ids,
+ this.callback
+ )
+ })
+
+ return it('should apply the accept change to the ranges', function () {
+ return this.RangesManager.acceptChanges
+ .calledWith(this.change_ids, this.ranges)
+ .should.equal(true)
+ })
+ })
+
+ return describe('when the doc is not found', function () {
+ beforeEach(function () {
+ this.DocumentManager.getDoc = sinon
+ .stub()
+ .yields(null, null, null, null)
+ return this.DocumentManager.acceptChanges(
+ this.project_id,
+ this.doc_id,
+ [this.change_id],
+ this.callback
+ )
+ })
+
+ it('should not save anything', function () {
+ return this.RedisManager.updateDocument.called.should.equal(false)
+ })
+
+ return it('should call the callback with a not found error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Errors.NotFoundError))
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('deleteComment', function () {
+ beforeEach(function () {
+ this.comment_id = 'mock-comment-id'
+ this.version = 34
+ this.lines = ['original', 'lines']
+ this.ranges = { comments: ['one', 'two', 'three'] }
+ this.updated_ranges = { comments: ['one', 'three'] }
+ this.DocumentManager.getDoc = sinon
+ .stub()
+ .yields(null, this.lines, this.version, this.ranges)
+ this.RangesManager.deleteComment = sinon
+ .stub()
+ .yields(null, this.updated_ranges)
+ return (this.RedisManager.updateDocument = sinon.stub().yields())
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ return this.DocumentManager.deleteComment(
+ this.project_id,
+ this.doc_id,
+ this.comment_id,
+ this.callback
+ )
+ })
+
+ it("should get the document's current ranges", function () {
+ return this.DocumentManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should delete the comment from the ranges', function () {
+ return this.RangesManager.deleteComment
+ .calledWith(this.comment_id, this.ranges)
+ .should.equal(true)
+ })
+
+ it('should save the updated ranges', function () {
+ return this.RedisManager.updateDocument
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ [],
+ this.updated_ranges,
+ {}
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ return describe('when the doc is not found', function () {
+ beforeEach(function () {
+ this.DocumentManager.getDoc = sinon
+ .stub()
+ .yields(null, null, null, null)
+ return this.DocumentManager.acceptChanges(
+ this.project_id,
+ this.doc_id,
+ [this.comment_id],
+ this.callback
+ )
+ })
+
+ it('should not save anything', function () {
+ return this.RedisManager.updateDocument.called.should.equal(false)
+ })
+
+ return it('should call the callback with a not found error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Errors.NotFoundError))
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('getDocAndFlushIfOld', function () {
+ beforeEach(function () {
+ return (this.DocumentManager.flushDocIfLoaded = sinon.stub().callsArg(2))
+ })
+
+ describe('when the doc is in Redis', function () {
+ describe('and has changes to be flushed', function () {
+ beforeEach(function () {
+ this.DocumentManager.getDoc = sinon
+ .stub()
+ .callsArgWith(
+ 2,
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.projectHistoryId,
+ this.pathname,
+ Date.now() - 1e9,
+ true
+ )
+ return this.DocumentManager.getDocAndFlushIfOld(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should get the doc', function () {
+ return this.DocumentManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should flush the doc', function () {
+ return this.DocumentManager.flushDocIfLoaded
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should call the callback with the lines and versions', function () {
+ return this.callback
+ .calledWith(null, this.lines, this.version)
+ .should.equal(true)
+ })
+ })
+
+ return describe("and has only changes that don't need to be flushed", function () {
+ beforeEach(function () {
+ this.DocumentManager.getDoc = sinon
+ .stub()
+ .callsArgWith(
+ 2,
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ Date.now() - 100,
+ true
+ )
+ return this.DocumentManager.getDocAndFlushIfOld(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should get the doc', function () {
+ return this.DocumentManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should not flush the doc', function () {
+ return this.DocumentManager.flushDocIfLoaded.called.should.equal(
+ false
+ )
+ })
+
+ return it('should call the callback with the lines and versions', function () {
+ return this.callback
+ .calledWith(null, this.lines, this.version)
+ .should.equal(true)
+ })
+ })
+ })
+
+ return describe('when the doc is not in Redis', function () {
+ beforeEach(function () {
+ this.DocumentManager.getDoc = sinon
+ .stub()
+ .callsArgWith(
+ 2,
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ null,
+ false
+ )
+ return this.DocumentManager.getDocAndFlushIfOld(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should get the doc', function () {
+ return this.DocumentManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should not flush the doc', function () {
+ return this.DocumentManager.flushDocIfLoaded.called.should.equal(false)
+ })
+
+ return it('should call the callback with the lines and versions', function () {
+ return this.callback
+ .calledWith(null, this.lines, this.version)
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('renameDoc', function () {
+ beforeEach(function () {
+ this.update = 'some-update'
+ return (this.RedisManager.renameDoc = sinon.stub().yields())
+ })
+
+ return describe('successfully', function () {
+ beforeEach(function () {
+ return this.DocumentManager.renameDoc(
+ this.project_id,
+ this.doc_id,
+ this.user_id,
+ this.update,
+ this.projectHistoryId,
+ this.callback
+ )
+ })
+
+ it('should rename the document', function () {
+ return this.RedisManager.renameDoc
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.user_id,
+ this.update,
+ this.projectHistoryId
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+ })
+
+ return describe('resyncDocContents', function () {
+ describe('when doc is loaded in redis', function () {
+ beforeEach(function () {
+ this.RedisManager.getDoc = sinon
+ .stub()
+ .callsArgWith(
+ 2,
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId
+ )
+ this.ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub()
+ return this.DocumentManager.resyncDocContents(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('gets the doc contents from redis', function () {
+ return this.RedisManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('queues a resync doc content update', function () {
+ return this.ProjectHistoryRedisManager.queueResyncDocContent
+ .calledWith(
+ this.project_id,
+ this.projectHistoryId,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.pathname,
+ this.callback
+ )
+ .should.equal(true)
+ })
+ })
+
+ return describe('when doc is not loaded in redis', function () {
+ beforeEach(function () {
+ this.RedisManager.getDoc = sinon.stub().callsArgWith(2, null)
+ this.PersistenceManager.getDoc = sinon
+ .stub()
+ .callsArgWith(
+ 2,
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId
+ )
+ this.ProjectHistoryRedisManager.queueResyncDocContent = sinon.stub()
+ return this.DocumentManager.resyncDocContents(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('tries to get the doc contents from redis', function () {
+ return this.RedisManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('gets the doc contents from web', function () {
+ return this.PersistenceManager.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('queues a resync doc content update', function () {
+ return this.ProjectHistoryRedisManager.queueResyncDocContent
+ .calledWith(
+ this.project_id,
+ this.projectHistoryId,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.pathname,
+ this.callback
+ )
+ .should.equal(true)
+ })
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js
new file mode 100644
index 0000000000..988333c9b8
--- /dev/null
+++ b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js
@@ -0,0 +1,419 @@
+/* eslint-disable
+ mocha/no-nested-tests,
+ no-return-assign,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const SandboxedModule = require('sandboxed-module')
+const sinon = require('sinon')
+const modulePath = require('path').join(
+ __dirname,
+ '../../../../app/js/HistoryManager'
+)
+
+describe('HistoryManager', function () {
+ beforeEach(function () {
+ this.HistoryManager = SandboxedModule.require(modulePath, {
+ requires: {
+ request: (this.request = {}),
+ '@overleaf/settings': (this.Settings = {
+ apis: {
+ project_history: {
+ enabled: true,
+ url: 'http://project_history.example.com',
+ },
+ trackchanges: {
+ url: 'http://trackchanges.example.com',
+ },
+ },
+ }),
+ './DocumentManager': (this.DocumentManager = {}),
+ './HistoryRedisManager': (this.HistoryRedisManager = {}),
+ './RedisManager': (this.RedisManager = {}),
+ './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}),
+ './Metrics': (this.metrics = { inc: sinon.stub() }),
+ },
+ })
+ this.project_id = 'mock-project-id'
+ this.doc_id = 'mock-doc-id'
+ return (this.callback = sinon.stub())
+ })
+
+ describe('flushDocChangesAsync', function () {
+ beforeEach(function () {
+ return (this.request.post = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 204 }))
+ })
+
+ describe('when the project uses track changes', function () {
+ beforeEach(function () {
+ this.RedisManager.getHistoryType = sinon
+ .stub()
+ .yields(null, 'track-changes')
+ return this.HistoryManager.flushDocChangesAsync(
+ this.project_id,
+ this.doc_id
+ )
+ })
+
+ return it('should send a request to the track changes api', function () {
+ return this.request.post
+ .calledWith(
+ `${this.Settings.apis.trackchanges.url}/project/${this.project_id}/doc/${this.doc_id}/flush`
+ )
+ .should.equal(true)
+ })
+ })
+
+ describe('when the project uses project history and double flush is not disabled', function () {
+ beforeEach(function () {
+ this.RedisManager.getHistoryType = sinon
+ .stub()
+ .yields(null, 'project-history')
+ return this.HistoryManager.flushDocChangesAsync(
+ this.project_id,
+ this.doc_id
+ )
+ })
+
+ return it('should send a request to the track changes api', function () {
+ return this.request.post.called.should.equal(true)
+ })
+ })
+
+ return describe('when the project uses project history and double flush is disabled', function () {
+ beforeEach(function () {
+ this.Settings.disableDoubleFlush = true
+ this.RedisManager.getHistoryType = sinon
+ .stub()
+ .yields(null, 'project-history')
+ return this.HistoryManager.flushDocChangesAsync(
+ this.project_id,
+ this.doc_id
+ )
+ })
+
+ return it('should not send a request to the track changes api', function () {
+ return this.request.post.called.should.equal(false)
+ })
+ })
+ })
+
+ describe('flushProjectChangesAsync', function () {
+ beforeEach(function () {
+ this.request.post = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 204 })
+
+ return this.HistoryManager.flushProjectChangesAsync(this.project_id)
+ })
+
+ return it('should send a request to the project history api', function () {
+ return this.request.post
+ .calledWith({
+ url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`,
+ qs: { background: true },
+ })
+ .should.equal(true)
+ })
+ })
+
+ describe('flushProjectChanges', function () {
+ describe('in the normal case', function () {
+ beforeEach(function () {
+ this.request.post = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 204 })
+ return this.HistoryManager.flushProjectChanges(this.project_id, {
+ background: true,
+ })
+ })
+
+ return it('should send a request to the project history api', function () {
+ return this.request.post
+ .calledWith({
+ url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`,
+ qs: { background: true },
+ })
+ .should.equal(true)
+ })
+ })
+
+ return describe('with the skip_history_flush option', function () {
+ beforeEach(function () {
+ this.request.post = sinon.stub()
+ return this.HistoryManager.flushProjectChanges(this.project_id, {
+ skip_history_flush: true,
+ })
+ })
+
+ return it('should not send a request to the project history api', function () {
+ return this.request.post.called.should.equal(false)
+ })
+ })
+ })
+
+ describe('recordAndFlushHistoryOps', function () {
+ beforeEach(function () {
+ this.ops = ['mock-ops']
+ this.project_ops_length = 10
+ this.doc_ops_length = 5
+
+ this.HistoryManager.flushProjectChangesAsync = sinon.stub()
+ this.HistoryRedisManager.recordDocHasHistoryOps = sinon.stub().callsArg(3)
+ return (this.HistoryManager.flushDocChangesAsync = sinon.stub())
+ })
+
+ describe('with no ops', function () {
+ beforeEach(function () {
+ return this.HistoryManager.recordAndFlushHistoryOps(
+ this.project_id,
+ this.doc_id,
+ [],
+ this.doc_ops_length,
+ this.project_ops_length,
+ this.callback
+ )
+ })
+
+ it('should not flush project changes', function () {
+ return this.HistoryManager.flushProjectChangesAsync.called.should.equal(
+ false
+ )
+ })
+
+ it('should not record doc has history ops', function () {
+ return this.HistoryRedisManager.recordDocHasHistoryOps.called.should.equal(
+ false
+ )
+ })
+
+ it('should not flush doc changes', function () {
+ return this.HistoryManager.flushDocChangesAsync.called.should.equal(
+ false
+ )
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('with enough ops to flush project changes', function () {
+ beforeEach(function () {
+ this.HistoryManager.shouldFlushHistoryOps = sinon.stub()
+ this.HistoryManager.shouldFlushHistoryOps
+ .withArgs(this.project_ops_length)
+ .returns(true)
+ this.HistoryManager.shouldFlushHistoryOps
+ .withArgs(this.doc_ops_length)
+ .returns(false)
+
+ return this.HistoryManager.recordAndFlushHistoryOps(
+ this.project_id,
+ this.doc_id,
+ this.ops,
+ this.doc_ops_length,
+ this.project_ops_length,
+ this.callback
+ )
+ })
+
+ it('should flush project changes', function () {
+ return this.HistoryManager.flushProjectChangesAsync
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should record doc has history ops', function () {
+ return this.HistoryRedisManager.recordDocHasHistoryOps.calledWith(
+ this.project_id,
+ this.doc_id,
+ this.ops
+ )
+ })
+
+ it('should not flush doc changes', function () {
+ return this.HistoryManager.flushDocChangesAsync.called.should.equal(
+ false
+ )
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('with enough ops to flush doc changes', function () {
+ beforeEach(function () {
+ this.HistoryManager.shouldFlushHistoryOps = sinon.stub()
+ this.HistoryManager.shouldFlushHistoryOps
+ .withArgs(this.project_ops_length)
+ .returns(false)
+ this.HistoryManager.shouldFlushHistoryOps
+ .withArgs(this.doc_ops_length)
+ .returns(true)
+
+ return this.HistoryManager.recordAndFlushHistoryOps(
+ this.project_id,
+ this.doc_id,
+ this.ops,
+ this.doc_ops_length,
+ this.project_ops_length,
+ this.callback
+ )
+ })
+
+ it('should not flush project changes', function () {
+ return this.HistoryManager.flushProjectChangesAsync.called.should.equal(
+ false
+ )
+ })
+
+ it('should record doc has history ops', function () {
+ return this.HistoryRedisManager.recordDocHasHistoryOps.calledWith(
+ this.project_id,
+ this.doc_id,
+ this.ops
+ )
+ })
+
+ it('should flush doc changes', function () {
+ return this.HistoryManager.flushDocChangesAsync
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('when recording doc has history ops errors', function () {
+ beforeEach(function () {
+ this.error = new Error('error')
+ this.HistoryRedisManager.recordDocHasHistoryOps = sinon
+ .stub()
+ .callsArgWith(3, this.error)
+
+ return this.HistoryManager.recordAndFlushHistoryOps(
+ this.project_id,
+ this.doc_id,
+ this.ops,
+ this.doc_ops_length,
+ this.project_ops_length,
+ this.callback
+ )
+ })
+
+ it('should not flush doc changes', function () {
+ return this.HistoryManager.flushDocChangesAsync.called.should.equal(
+ false
+ )
+ })
+
+ return it('should call the callback with the error', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ return describe('shouldFlushHistoryOps', function () {
+ it('should return false if the number of ops is not known', function () {
+ return this.HistoryManager.shouldFlushHistoryOps(
+ null,
+ ['a', 'b', 'c'].length,
+ 1
+ ).should.equal(false)
+ })
+
+ it("should return false if the updates didn't take us past the threshold", function () {
+ // Currently there are 14 ops
+ // Previously we were on 11 ops
+ // We didn't pass over a multiple of 5
+ this.HistoryManager.shouldFlushHistoryOps(
+ 14,
+ ['a', 'b', 'c'].length,
+ 5
+ ).should.equal(false)
+
+ it('should return true if the updates took to the threshold', function () {})
+ // Currently there are 15 ops
+ // Previously we were on 12 ops
+ // We've reached a new multiple of 5
+ return this.HistoryManager.shouldFlushHistoryOps(
+ 15,
+ ['a', 'b', 'c'].length,
+ 5
+ ).should.equal(true)
+ })
+
+ return it('should return true if the updates took past the threshold', function () {
+ // Currently there are 17 ops
+ // Previously we were on 14 ops
+ // We passed over a multiple of 5 (15)
+ return this.HistoryManager.shouldFlushHistoryOps(
+ 17,
+ ['a', 'b', 'c'].length,
+ 5
+ ).should.equal(true)
+ })
+ })
+ })
+
+ return describe('resyncProjectHistory', function () {
+ beforeEach(function () {
+ this.projectHistoryId = 'history-id-1234'
+ this.docs = [
+ {
+ doc: this.doc_id,
+ path: 'main.tex',
+ },
+ ]
+ this.files = [
+ {
+ file: 'mock-file-id',
+ path: 'universe.png',
+ url: `www.filestore.test/${this.project_id}/mock-file-id`,
+ },
+ ]
+ this.ProjectHistoryRedisManager.queueResyncProjectStructure = sinon
+ .stub()
+ .yields()
+ this.DocumentManager.resyncDocContentsWithLock = sinon.stub().yields()
+ return this.HistoryManager.resyncProjectHistory(
+ this.project_id,
+ this.projectHistoryId,
+ this.docs,
+ this.files,
+ this.callback
+ )
+ })
+
+ it('should queue a project structure reync', function () {
+ return this.ProjectHistoryRedisManager.queueResyncProjectStructure
+ .calledWith(
+ this.project_id,
+ this.projectHistoryId,
+ this.docs,
+ this.files
+ )
+ .should.equal(true)
+ })
+
+ it('should queue doc content reyncs', function () {
+ return this.DocumentManager.resyncDocContentsWithLock
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js
new file mode 100644
index 0000000000..942884ec58
--- /dev/null
+++ b/services/document-updater/test/unit/js/HistoryRedisManager/HistoryRedisManagerTests.js
@@ -0,0 +1,100 @@
+/* eslint-disable
+ camelcase,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/HistoryRedisManager.js'
+const SandboxedModule = require('sandboxed-module')
+const Errors = require('../../../../app/js/Errors')
+
+describe('HistoryRedisManager', function () {
+ beforeEach(function () {
+ this.rclient = {
+ auth() {},
+ exec: sinon.stub(),
+ }
+ this.rclient.multi = () => this.rclient
+ this.HistoryRedisManager = SandboxedModule.require(modulePath, {
+ requires: {
+ '@overleaf/redis-wrapper': { createClient: () => this.rclient },
+ '@overleaf/settings': {
+ redis: {
+ history: (this.settings = {
+ key_schema: {
+ uncompressedHistoryOps({ doc_id }) {
+ return `UncompressedHistoryOps:${doc_id}`
+ },
+ docsWithHistoryOps({ project_id }) {
+ return `DocsWithHistoryOps:${project_id}`
+ },
+ },
+ }),
+ },
+ },
+ },
+ })
+ this.doc_id = 'doc-id-123'
+ this.project_id = 'project-id-123'
+ return (this.callback = sinon.stub())
+ })
+
+ return describe('recordDocHasHistoryOps', function () {
+ beforeEach(function () {
+ this.ops = [{ op: [{ i: 'foo', p: 4 }] }, { op: [{ i: 'bar', p: 56 }] }]
+ return (this.rclient.sadd = sinon.stub().yields())
+ })
+
+ describe('with ops', function () {
+ beforeEach(function (done) {
+ return this.HistoryRedisManager.recordDocHasHistoryOps(
+ this.project_id,
+ this.doc_id,
+ this.ops,
+ (...args) => {
+ this.callback(...Array.from(args || []))
+ return done()
+ }
+ )
+ })
+
+ return it('should add the doc_id to the set of which records the project docs', function () {
+ return this.rclient.sadd
+ .calledWith(`DocsWithHistoryOps:${this.project_id}`, this.doc_id)
+ .should.equal(true)
+ })
+ })
+
+ return describe('with no ops', function () {
+ beforeEach(function (done) {
+ return this.HistoryRedisManager.recordDocHasHistoryOps(
+ this.project_id,
+ this.doc_id,
+ [],
+ (...args) => {
+ this.callback(...Array.from(args || []))
+ return done()
+ }
+ )
+ })
+
+ it('should not add the doc_id to the set of which records the project docs', function () {
+ return this.rclient.sadd.called.should.equal(false)
+ })
+
+ return it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js
new file mode 100644
index 0000000000..7bea76edd0
--- /dev/null
+++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js
@@ -0,0 +1,942 @@
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/HttpController.js'
+const SandboxedModule = require('sandboxed-module')
+const Errors = require('../../../../app/js/Errors.js')
+
+describe('HttpController', function () {
+  beforeEach(function () {
+    // Load HttpController in a sandbox with every collaborator stubbed out,
+    // so each test exercises only the controller's request/response handling.
+    this.HttpController = SandboxedModule.require(modulePath, {
+      requires: {
+        './DocumentManager': (this.DocumentManager = {}),
+        './HistoryManager': (this.HistoryManager = {
+          flushProjectChangesAsync: sinon.stub(),
+        }),
+        './ProjectManager': (this.ProjectManager = {}),
+        './ProjectFlusher': { flushAllProjects() {} },
+        './DeleteQueueManager': (this.DeleteQueueManager = {}),
+        './RedisManager': (this.RedisManager = {}),
+        './Metrics': (this.Metrics = {}),
+        './Errors': Errors,
+      },
+    })
+    // Stub the request timer so tests can assert that requests are timed.
+    this.Metrics.Timer = class Timer {}
+    this.Metrics.Timer.prototype.done = sinon.stub()
+
+    this.project_id = 'project-id-123'
+    this.doc_id = 'doc-id-123'
+    this.next = sinon.stub()
+    // Minimal Express-style response double.
+    this.res = {
+      send: sinon.stub(),
+      sendStatus: sinon.stub(),
+      json: sinon.stub(),
+    }
+  })
+
+ describe('getDoc', function () {
+ beforeEach(function () {
+ this.lines = ['one', 'two', 'three']
+ this.ops = ['mock-op-1', 'mock-op-2']
+ this.version = 42
+ this.fromVersion = 42
+ this.ranges = { changes: 'mock', comments: 'mock' }
+ this.pathname = '/a/b/c'
+ this.req = {
+ params: {
+ project_id: this.project_id,
+ doc_id: this.doc_id,
+ },
+ query: {},
+ body: {},
+ }
+ })
+
+ describe('when the document exists and no recent ops are requested', function () {
+ beforeEach(function () {
+ this.DocumentManager.getDocAndRecentOpsWithLock = sinon
+ .stub()
+ .callsArgWith(
+ 3,
+ null,
+ this.lines,
+ this.version,
+ [],
+ this.ranges,
+ this.pathname
+ )
+ this.HttpController.getDoc(this.req, this.res, this.next)
+ })
+
+ it('should get the doc', function () {
+ this.DocumentManager.getDocAndRecentOpsWithLock
+ .calledWith(this.project_id, this.doc_id, -1)
+ .should.equal(true)
+ })
+
+ it('should return the doc as JSON', function () {
+ this.res.json
+ .calledWith({
+ id: this.doc_id,
+ lines: this.lines,
+ version: this.version,
+ ops: [],
+ ranges: this.ranges,
+ pathname: this.pathname,
+ })
+ .should.equal(true)
+ })
+
+ it('should log the request', function () {
+ this.logger.log
+ .calledWith(
+ { docId: this.doc_id, projectId: this.project_id },
+ 'getting doc via http'
+ )
+ .should.equal(true)
+ })
+
+ it('should time the request', function () {
+ this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('when recent ops are requested', function () {
+ beforeEach(function () {
+ this.DocumentManager.getDocAndRecentOpsWithLock = sinon
+ .stub()
+ .callsArgWith(
+ 3,
+ null,
+ this.lines,
+ this.version,
+ this.ops,
+ this.ranges,
+ this.pathname
+ )
+ this.req.query = { fromVersion: `${this.fromVersion}` }
+ this.HttpController.getDoc(this.req, this.res, this.next)
+ })
+
+ it('should get the doc', function () {
+ this.DocumentManager.getDocAndRecentOpsWithLock
+ .calledWith(this.project_id, this.doc_id, this.fromVersion)
+ .should.equal(true)
+ })
+
+ it('should return the doc as JSON', function () {
+ this.res.json
+ .calledWith({
+ id: this.doc_id,
+ lines: this.lines,
+ version: this.version,
+ ops: this.ops,
+ ranges: this.ranges,
+ pathname: this.pathname,
+ })
+ .should.equal(true)
+ })
+
+ it('should log the request', function () {
+ this.logger.log
+ .calledWith(
+ { docId: this.doc_id, projectId: this.project_id },
+ 'getting doc via http'
+ )
+ .should.equal(true)
+ })
+
+ it('should time the request', function () {
+ this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('when the document does not exist', function () {
+ beforeEach(function () {
+ this.DocumentManager.getDocAndRecentOpsWithLock = sinon
+ .stub()
+ .callsArgWith(3, null, null, null)
+ this.HttpController.getDoc(this.req, this.res, this.next)
+ })
+
+ it('should call next with NotFoundError', function () {
+ this.next
+ .calledWith(sinon.match.instanceOf(Errors.NotFoundError))
+ .should.equal(true)
+ })
+ })
+
+ describe('when an errors occurs', function () {
+ beforeEach(function () {
+ this.DocumentManager.getDocAndRecentOpsWithLock = sinon
+ .stub()
+ .callsArgWith(3, new Error('oops'), null, null)
+ this.HttpController.getDoc(this.req, this.res, this.next)
+ })
+
+ it('should call next with the error', function () {
+ this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true)
+ })
+ })
+ })
+
+ describe('setDoc', function () {
+ beforeEach(function () {
+ this.lines = ['one', 'two', 'three']
+ this.source = 'dropbox'
+ this.user_id = 'user-id-123'
+ this.req = {
+ headers: {},
+ params: {
+ project_id: this.project_id,
+ doc_id: this.doc_id,
+ },
+ query: {},
+ body: {
+ lines: this.lines,
+ source: this.source,
+ user_id: this.user_id,
+ undoing: (this.undoing = true),
+ },
+ }
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6)
+ this.HttpController.setDoc(this.req, this.res, this.next)
+ })
+
+ it('should set the doc', function () {
+ this.DocumentManager.setDocWithLock
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.source,
+ this.user_id,
+ this.undoing
+ )
+ .should.equal(true)
+ })
+
+ it('should return a successful No Content response', function () {
+ this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+
+ it('should log the request', function () {
+ this.logger.log
+ .calledWith(
+ {
+ docId: this.doc_id,
+ projectId: this.project_id,
+ lines: this.lines,
+ source: this.source,
+ userId: this.user_id,
+ undoing: this.undoing,
+ },
+ 'setting doc via http'
+ )
+ .should.equal(true)
+ })
+
+ it('should time the request', function () {
+ this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('when an errors occurs', function () {
+ beforeEach(function () {
+ this.DocumentManager.setDocWithLock = sinon
+ .stub()
+ .callsArgWith(6, new Error('oops'))
+ this.HttpController.setDoc(this.req, this.res, this.next)
+ })
+
+ it('should call next with the error', function () {
+ this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true)
+ })
+ })
+
+ describe('when the payload is too large', function () {
+ beforeEach(function () {
+ const lines = []
+ for (let _ = 0; _ <= 200000; _++) {
+ lines.push('test test test')
+ }
+ this.req.body.lines = lines
+ this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6)
+ this.HttpController.setDoc(this.req, this.res, this.next)
+ })
+
+ it('should send back a 406 response', function () {
+ this.res.sendStatus.calledWith(406).should.equal(true)
+ })
+
+ it('should not call setDocWithLock', function () {
+ this.DocumentManager.setDocWithLock.callCount.should.equal(0)
+ })
+ })
+ })
+
+ describe('flushProject', function () {
+ beforeEach(function () {
+ this.req = {
+ params: {
+ project_id: this.project_id,
+ },
+ query: {},
+ body: {},
+ }
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1)
+ this.HttpController.flushProject(this.req, this.res, this.next)
+ })
+
+ it('should flush the project', function () {
+ this.ProjectManager.flushProjectWithLocks
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should return a successful No Content response', function () {
+ this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+
+ it('should log the request', function () {
+ this.logger.log
+ .calledWith(
+ { projectId: this.project_id },
+ 'flushing project via http'
+ )
+ .should.equal(true)
+ })
+
+ it('should time the request', function () {
+ this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('when an errors occurs', function () {
+ beforeEach(function () {
+ this.ProjectManager.flushProjectWithLocks = sinon
+ .stub()
+ .callsArgWith(1, new Error('oops'))
+ this.HttpController.flushProject(this.req, this.res, this.next)
+ })
+
+ it('should call next with the error', function () {
+ this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true)
+ })
+ })
+ })
+
+ describe('flushDocIfLoaded', function () {
+ beforeEach(function () {
+ this.lines = ['one', 'two', 'three']
+ this.version = 42
+ this.req = {
+ params: {
+ project_id: this.project_id,
+ doc_id: this.doc_id,
+ },
+ query: {},
+ body: {},
+ }
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.DocumentManager.flushDocIfLoadedWithLock = sinon
+ .stub()
+ .callsArgWith(2)
+ this.HttpController.flushDocIfLoaded(this.req, this.res, this.next)
+ })
+
+ it('should flush the doc', function () {
+ this.DocumentManager.flushDocIfLoadedWithLock
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should return a successful No Content response', function () {
+ this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+
+ it('should log the request', function () {
+ this.logger.log
+ .calledWith(
+ { docId: this.doc_id, projectId: this.project_id },
+ 'flushing doc via http'
+ )
+ .should.equal(true)
+ })
+
+ it('should time the request', function () {
+ this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('when an errors occurs', function () {
+ beforeEach(function () {
+ this.DocumentManager.flushDocIfLoadedWithLock = sinon
+ .stub()
+ .callsArgWith(2, new Error('oops'))
+ this.HttpController.flushDocIfLoaded(this.req, this.res, this.next)
+ })
+
+ it('should call next with the error', function () {
+ this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true)
+ })
+ })
+ })
+
+ describe('deleteDoc', function () {
+ beforeEach(function () {
+ this.req = {
+ params: {
+ project_id: this.project_id,
+ doc_id: this.doc_id,
+ },
+ query: {},
+ body: {},
+ }
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.DocumentManager.flushAndDeleteDocWithLock = sinon
+ .stub()
+ .callsArgWith(3)
+ this.HttpController.deleteDoc(this.req, this.res, this.next)
+ })
+
+ it('should flush and delete the doc', function () {
+ this.DocumentManager.flushAndDeleteDocWithLock
+ .calledWith(this.project_id, this.doc_id, {
+ ignoreFlushErrors: false,
+ })
+ .should.equal(true)
+ })
+
+ it('should flush project history', function () {
+ this.HistoryManager.flushProjectChangesAsync
+ .calledWithExactly(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should return a successful No Content response', function () {
+ this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+
+ it('should log the request', function () {
+ this.logger.log
+ .calledWith(
+ { docId: this.doc_id, projectId: this.project_id },
+ 'deleting doc via http'
+ )
+ .should.equal(true)
+ })
+
+ it('should time the request', function () {
+ this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('ignoring errors', function () {
+ beforeEach(function () {
+ this.req.query.ignore_flush_errors = 'true'
+ this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().yields()
+ this.HttpController.deleteDoc(this.req, this.res, this.next)
+ })
+
+ it('should delete the doc', function () {
+ this.DocumentManager.flushAndDeleteDocWithLock
+ .calledWith(this.project_id, this.doc_id, { ignoreFlushErrors: true })
+ .should.equal(true)
+ })
+
+ it('should return a successful No Content response', function () {
+ this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+ })
+
+ describe('when an errors occurs', function () {
+ beforeEach(function () {
+ this.DocumentManager.flushAndDeleteDocWithLock = sinon
+ .stub()
+ .callsArgWith(3, new Error('oops'))
+ this.HttpController.deleteDoc(this.req, this.res, this.next)
+ })
+
+ it('should flush project history', function () {
+ this.HistoryManager.flushProjectChangesAsync
+ .calledWithExactly(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should call next with the error', function () {
+ this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true)
+ })
+ })
+ })
+
+ describe('deleteProject', function () {
+ beforeEach(function () {
+ this.req = {
+ params: {
+ project_id: this.project_id,
+ },
+ query: {},
+ body: {},
+ }
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.ProjectManager.flushAndDeleteProjectWithLocks = sinon
+ .stub()
+ .callsArgWith(2)
+ this.HttpController.deleteProject(this.req, this.res, this.next)
+ })
+
+ it('should delete the project', function () {
+ this.ProjectManager.flushAndDeleteProjectWithLocks
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should return a successful No Content response', function () {
+ this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+
+ it('should log the request', function () {
+ this.logger.log
+ .calledWith(
+ { projectId: this.project_id },
+ 'deleting project via http'
+ )
+ .should.equal(true)
+ })
+
+ it('should time the request', function () {
+ this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('with the background=true option from realtime', function () {
+ beforeEach(function () {
+ this.ProjectManager.queueFlushAndDeleteProject = sinon
+ .stub()
+ .callsArgWith(1)
+ this.req.query = { background: true, shutdown: true }
+ this.HttpController.deleteProject(this.req, this.res, this.next)
+ })
+
+ it('should queue the flush and delete', function () {
+ this.ProjectManager.queueFlushAndDeleteProject
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+ })
+
+ describe('when an errors occurs', function () {
+ beforeEach(function () {
+ this.ProjectManager.flushAndDeleteProjectWithLocks = sinon
+ .stub()
+ .callsArgWith(2, new Error('oops'))
+ this.HttpController.deleteProject(this.req, this.res, this.next)
+ })
+
+ it('should call next with the error', function () {
+ this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true)
+ })
+ })
+ })
+
+ describe('acceptChanges', function () {
+ beforeEach(function () {
+ this.req = {
+ params: {
+ project_id: this.project_id,
+ doc_id: this.doc_id,
+ change_id: (this.change_id = 'mock-change-od-1'),
+ },
+ query: {},
+ body: {},
+ }
+ })
+
+ describe('successfully with a single change', function () {
+ beforeEach(function () {
+ this.DocumentManager.acceptChangesWithLock = sinon
+ .stub()
+ .callsArgWith(3)
+ this.HttpController.acceptChanges(this.req, this.res, this.next)
+ })
+
+ it('should accept the change', function () {
+ this.DocumentManager.acceptChangesWithLock
+ .calledWith(this.project_id, this.doc_id, [this.change_id])
+ .should.equal(true)
+ })
+
+ it('should return a successful No Content response', function () {
+ this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+
+ it('should log the request', function () {
+ this.logger.log
+ .calledWith(
+ { projectId: this.project_id, docId: this.doc_id },
+ 'accepting 1 changes via http'
+ )
+ .should.equal(true)
+ })
+
+ it('should time the request', function () {
+ this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('succesfully with with multiple changes', function () {
+ beforeEach(function () {
+ this.change_ids = [
+ 'mock-change-od-1',
+ 'mock-change-od-2',
+ 'mock-change-od-3',
+ 'mock-change-od-4',
+ ]
+ this.req.body = { change_ids: this.change_ids }
+ this.DocumentManager.acceptChangesWithLock = sinon
+ .stub()
+ .callsArgWith(3)
+ this.HttpController.acceptChanges(this.req, this.res, this.next)
+ })
+
+ it('should accept the changes in the body payload', function () {
+ this.DocumentManager.acceptChangesWithLock
+ .calledWith(this.project_id, this.doc_id, this.change_ids)
+ .should.equal(true)
+ })
+
+ it('should log the request with the correct number of changes', function () {
+ this.logger.log
+ .calledWith(
+ { projectId: this.project_id, docId: this.doc_id },
+ `accepting ${this.change_ids.length} changes via http`
+ )
+ .should.equal(true)
+ })
+ })
+
+ describe('when an errors occurs', function () {
+ beforeEach(function () {
+ this.DocumentManager.acceptChangesWithLock = sinon
+ .stub()
+ .callsArgWith(3, new Error('oops'))
+ this.HttpController.acceptChanges(this.req, this.res, this.next)
+ })
+
+ it('should call next with the error', function () {
+ this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true)
+ })
+ })
+ })
+
+ describe('deleteComment', function () {
+ beforeEach(function () {
+ this.req = {
+ params: {
+ project_id: this.project_id,
+ doc_id: this.doc_id,
+ comment_id: (this.comment_id = 'mock-comment-id'),
+ },
+ query: {},
+ body: {},
+ }
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.DocumentManager.deleteCommentWithLock = sinon
+ .stub()
+ .callsArgWith(3)
+ this.HttpController.deleteComment(this.req, this.res, this.next)
+ })
+
+ it('should accept the change', function () {
+ this.DocumentManager.deleteCommentWithLock
+ .calledWith(this.project_id, this.doc_id, this.comment_id)
+ .should.equal(true)
+ })
+
+ it('should return a successful No Content response', function () {
+ this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+
+ it('should log the request', function () {
+ this.logger.log
+ .calledWith(
+ {
+ projectId: this.project_id,
+ docId: this.doc_id,
+ commentId: this.comment_id,
+ },
+ 'deleting comment via http'
+ )
+ .should.equal(true)
+ })
+
+ it('should time the request', function () {
+ this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('when an errors occurs', function () {
+ beforeEach(function () {
+ this.DocumentManager.deleteCommentWithLock = sinon
+ .stub()
+ .callsArgWith(3, new Error('oops'))
+ this.HttpController.deleteComment(this.req, this.res, this.next)
+ })
+
+ it('should call next with the error', function () {
+ this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true)
+ })
+ })
+ })
+
+  // GET /project/:project_id/doc — returns all docs, flushing stale ones.
+  describe('getProjectDocsAndFlushIfOld', function () {
+    beforeEach(function () {
+      // `state` is an opaque client-held token; a mismatch on the server
+      // side is reported as a ProjectStateChangedError (see conflict case).
+      this.state = '01234567890abcdef'
+      this.docs = [
+        { _id: '1234', lines: 'hello', v: 23 },
+        { _id: '4567', lines: 'world', v: 45 },
+      ]
+      this.req = {
+        params: {
+          project_id: this.project_id,
+        },
+        query: {
+          state: this.state,
+        },
+        body: {},
+      }
+    })
+
+    describe('successfully', function () {
+      beforeEach(function () {
+        this.ProjectManager.getProjectDocsAndFlushIfOld = sinon
+          .stub()
+          .callsArgWith(3, null, this.docs)
+        this.HttpController.getProjectDocsAndFlushIfOld(
+          this.req,
+          this.res,
+          this.next
+        )
+      })
+
+      it('should get docs from the project manager', function () {
+        // The empty object is the (unused here) exclude-versions option.
+        this.ProjectManager.getProjectDocsAndFlushIfOld
+          .calledWith(this.project_id, this.state, {})
+          .should.equal(true)
+      })
+
+      it('should return a successful response', function () {
+        this.res.send.calledWith(this.docs).should.equal(true)
+      })
+
+      it('should log the request', function () {
+        this.logger.log
+          .calledWith(
+            { projectId: this.project_id, exclude: [] },
+            'getting docs via http'
+          )
+          .should.equal(true)
+      })
+
+      it('should log the response', function () {
+        // Response log summarises each doc as "<id>:<version>".
+        this.logger.log
+          .calledWith(
+            { projectId: this.project_id, result: ['1234:23', '4567:45'] },
+            'got docs via http'
+          )
+          .should.equal(true)
+      })
+
+      it('should time the request', function () {
+        this.Metrics.Timer.prototype.done.called.should.equal(true)
+      })
+    })
+
+    describe('when there is a conflict', function () {
+      beforeEach(function () {
+        this.ProjectManager.getProjectDocsAndFlushIfOld = sinon
+          .stub()
+          .callsArgWith(
+            3,
+            new Errors.ProjectStateChangedError('project state changed')
+          )
+        this.HttpController.getProjectDocsAndFlushIfOld(
+          this.req,
+          this.res,
+          this.next
+        )
+      })
+
+      it('should return an HTTP 409 Conflict response', function () {
+        this.res.sendStatus.calledWith(409).should.equal(true)
+      })
+    })
+
+    describe('when an error occurs', function () {
+      beforeEach(function () {
+        this.ProjectManager.getProjectDocsAndFlushIfOld = sinon
+          .stub()
+          .callsArgWith(3, new Error('oops'))
+        this.HttpController.getProjectDocsAndFlushIfOld(
+          this.req,
+          this.res,
+          this.next
+        )
+      })
+
+      it('should call next with the error', function () {
+        this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true)
+      })
+    })
+  })
+
+ describe('updateProject', function () {
+ beforeEach(function () {
+ this.projectHistoryId = 'history-id-123'
+ this.userId = 'user-id-123'
+ this.updates = [
+ {
+ type: 'rename-doc',
+ id: 1,
+ pathname: 'thesis.tex',
+ newPathname: 'book.tex',
+ },
+ { type: 'add-doc', id: 2, pathname: 'article.tex', docLines: 'hello' },
+ {
+ type: 'rename-file',
+ id: 3,
+ pathname: 'apple.png',
+ newPathname: 'banana.png',
+ },
+ { type: 'add-file', id: 4, url: 'filestore.example.com/4' },
+ ]
+ this.version = 1234567
+ this.req = {
+ query: {},
+ body: {
+ projectHistoryId: this.projectHistoryId,
+ userId: this.userId,
+ updates: this.updates,
+ version: this.version,
+ },
+ params: {
+ project_id: this.project_id,
+ },
+ }
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.ProjectManager.updateProjectWithLocks = sinon.stub().yields()
+ this.HttpController.updateProject(this.req, this.res, this.next)
+ })
+
+ it('should accept the change', function () {
+ this.ProjectManager.updateProjectWithLocks
+ .calledWith(
+ this.project_id,
+ this.projectHistoryId,
+ this.userId,
+ this.updates,
+ this.version
+ )
+ .should.equal(true)
+ })
+
+ it('should return a successful No Content response', function () {
+ this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+
+ it('should time the request', function () {
+ this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('when an errors occurs', function () {
+ beforeEach(function () {
+ this.ProjectManager.updateProjectWithLocks = sinon
+ .stub()
+ .yields(new Error('oops'))
+ this.HttpController.updateProject(this.req, this.res, this.next)
+ })
+
+ it('should call next with the error', function () {
+ this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true)
+ })
+ })
+ })
+
+ describe('resyncProjectHistory', function () {
+ beforeEach(function () {
+ this.projectHistoryId = 'history-id-123'
+ this.docs = sinon.stub()
+ this.files = sinon.stub()
+ this.fileUpdates = sinon.stub()
+ this.req = {
+ query: {},
+ body: {
+ projectHistoryId: this.projectHistoryId,
+ docs: this.docs,
+ files: this.files,
+ },
+ params: {
+ project_id: this.project_id,
+ },
+ }
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(4)
+ this.HttpController.resyncProjectHistory(this.req, this.res, this.next)
+ })
+
+ it('should accept the change', function () {
+ this.HistoryManager.resyncProjectHistory
+ .calledWith(
+ this.project_id,
+ this.projectHistoryId,
+ this.docs,
+ this.files
+ )
+ .should.equal(true)
+ })
+
+ it('should return a successful No Content response', function () {
+ this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+ })
+
+ describe('when an errors occurs', function () {
+ beforeEach(function () {
+ this.HistoryManager.resyncProjectHistory = sinon
+ .stub()
+ .callsArgWith(4, new Error('oops'))
+ this.HttpController.resyncProjectHistory(this.req, this.res, this.next)
+ })
+
+ it('should call next with the error', function () {
+ this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true)
+ })
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js
new file mode 100644
index 0000000000..034974f805
--- /dev/null
+++ b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js
@@ -0,0 +1,65 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const assert = require('assert')
+const path = require('path')
+const modulePath = path.join(__dirname, '../../../../app/js/LockManager.js')
+const project_id = 1234
+const doc_id = 5678
+const blockingKey = `Blocking:${doc_id}`
+const SandboxedModule = require('sandboxed-module')
+
+describe('LockManager - checking the lock', function () {
+ let Profiler
+ const existsStub = sinon.stub()
+
+ const mocks = {
+ '@overleaf/redis-wrapper': {
+ createClient() {
+ return {
+ auth() {},
+ exists: existsStub,
+ }
+ },
+ },
+ './Metrics': { inc() {} },
+ './Profiler': (Profiler = (function () {
+ Profiler = class Profiler {
+ static initClass() {
+ this.prototype.log = sinon.stub().returns({ end: sinon.stub() })
+ this.prototype.end = sinon.stub()
+ }
+ }
+ Profiler.initClass()
+ return Profiler
+ })()),
+ }
+ const LockManager = SandboxedModule.require(modulePath, { requires: mocks })
+
+ it('should return true if the key does not exists', function (done) {
+ existsStub.yields(null, '0')
+ return LockManager.checkLock(doc_id, (err, free) => {
+ free.should.equal(true)
+ return done()
+ })
+ })
+
+ return it('should return false if the key does exists', function (done) {
+ existsStub.yields(null, '1')
+ return LockManager.checkLock(doc_id, (err, free) => {
+ free.should.equal(false)
+ return done()
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js
new file mode 100644
index 0000000000..47ad907a6b
--- /dev/null
+++ b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js
@@ -0,0 +1,95 @@
+/* eslint-disable
+ camelcase,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const assert = require('assert')
+const path = require('path')
+const modulePath = path.join(__dirname, '../../../../app/js/LockManager.js')
+const project_id = 1234
+const doc_id = 5678
+const SandboxedModule = require('sandboxed-module')
+
+describe('LockManager - releasing the lock', function () {
+ beforeEach(function () {
+ let Profiler
+ this.client = {
+ auth() {},
+ eval: sinon.stub(),
+ }
+ const mocks = {
+ '@overleaf/redis-wrapper': {
+ createClient: () => this.client,
+ },
+ '@overleaf/settings': {
+ redis: {
+ lock: {
+ key_schema: {
+ blockingKey({ doc_id }) {
+ return `Blocking:${doc_id}`
+ },
+ },
+ },
+ },
+ },
+ './Metrics': { inc() {} },
+ './Profiler': (Profiler = (function () {
+ Profiler = class Profiler {
+ static initClass() {
+ this.prototype.log = sinon.stub().returns({ end: sinon.stub() })
+ this.prototype.end = sinon.stub()
+ }
+ }
+ Profiler.initClass()
+ return Profiler
+ })()),
+ }
+ this.LockManager = SandboxedModule.require(modulePath, { requires: mocks })
+ this.lockValue = 'lock-value-stub'
+ return (this.callback = sinon.stub())
+ })
+
+ describe('when the lock is current', function () {
+ beforeEach(function () {
+ this.client.eval = sinon.stub().yields(null, 1)
+ return this.LockManager.releaseLock(doc_id, this.lockValue, this.callback)
+ })
+
+ it('should clear the data from redis', function () {
+ return this.client.eval
+ .calledWith(
+ this.LockManager.unlockScript,
+ 1,
+ `Blocking:${doc_id}`,
+ this.lockValue
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ return describe('when the lock has expired', function () {
+ beforeEach(function () {
+ this.client.eval = sinon.stub().yields(null, 0)
+ return this.LockManager.releaseLock(doc_id, this.lockValue, this.callback)
+ })
+
+ return it('should return an error if the lock has expired', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/LockManager/getLockTests.js b/services/document-updater/test/unit/js/LockManager/getLockTests.js
new file mode 100644
index 0000000000..d1ba5cf728
--- /dev/null
+++ b/services/document-updater/test/unit/js/LockManager/getLockTests.js
@@ -0,0 +1,128 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/LockManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('LockManager - getting the lock', function () {
+ beforeEach(function () {
+ let Profiler
+ this.LockManager = SandboxedModule.require(modulePath, {
+ requires: {
+ '@overleaf/redis-wrapper': {
+ createClient: () => {
+ return { auth() {} }
+ },
+ },
+ './Metrics': { inc() {} },
+ './Profiler': (Profiler = (function () {
+ Profiler = class Profiler {
+ static initClass() {
+ this.prototype.log = sinon.stub().returns({ end: sinon.stub() })
+ this.prototype.end = sinon.stub()
+ }
+ }
+ Profiler.initClass()
+ return Profiler
+ })()),
+ },
+ })
+ this.callback = sinon.stub()
+ return (this.doc_id = 'doc-id-123')
+ })
+
+ describe('when the lock is not set', function () {
+ beforeEach(function (done) {
+ this.lockValue = 'mock-lock-value'
+ this.LockManager.tryLock = sinon
+ .stub()
+ .callsArgWith(1, null, true, this.lockValue)
+ return this.LockManager.getLock(this.doc_id, (...args) => {
+ this.callback(...Array.from(args || []))
+ return done()
+ })
+ })
+
+ it('should try to get the lock', function () {
+ return this.LockManager.tryLock.calledWith(this.doc_id).should.equal(true)
+ })
+
+ it('should only need to try once', function () {
+ return this.LockManager.tryLock.callCount.should.equal(1)
+ })
+
+ return it('should return the callback with the lock value', function () {
+ return this.callback.calledWith(null, this.lockValue).should.equal(true)
+ })
+ })
+
+ describe('when the lock is initially set', function () {
+ beforeEach(function (done) {
+ this.lockValue = 'mock-lock-value'
+ const startTime = Date.now()
+ let tries = 0
+ this.LockManager.LOCK_TEST_INTERVAL = 5
+ this.LockManager.tryLock = (doc_id, callback) => {
+ if (callback == null) {
+ callback = function (error, isFree) {}
+ }
+ if (Date.now() - startTime < 20 || tries < 2) {
+ tries = tries + 1
+ return callback(null, false)
+ } else {
+ return callback(null, true, this.lockValue)
+ }
+ }
+ sinon.spy(this.LockManager, 'tryLock')
+
+ return this.LockManager.getLock(this.doc_id, (...args) => {
+ this.callback(...Array.from(args || []))
+ return done()
+ })
+ })
+
+ it('should call tryLock multiple times until free', function () {
+ return (this.LockManager.tryLock.callCount > 1).should.equal(true)
+ })
+
+ return it('should return the callback with the lock value', function () {
+ return this.callback.calledWith(null, this.lockValue).should.equal(true)
+ })
+ })
+
+ return describe('when the lock times out', function () {
+ beforeEach(function (done) {
+ const time = Date.now()
+ this.LockManager.MAX_LOCK_WAIT_TIME = 5
+ this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, false)
+ return this.LockManager.getLock(this.doc_id, (...args) => {
+ this.callback(...Array.from(args || []))
+ return done()
+ })
+ })
+
+ return it('should return the callback with an error', function () {
+ return this.callback
+ .calledWith(
+ sinon.match
+ .instanceOf(Error)
+ .and(sinon.match.has('doc_id', this.doc_id))
+ )
+ .should.equal(true)
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/LockManager/tryLockTests.js b/services/document-updater/test/unit/js/LockManager/tryLockTests.js
new file mode 100644
index 0000000000..210307d51b
--- /dev/null
+++ b/services/document-updater/test/unit/js/LockManager/tryLockTests.js
@@ -0,0 +1,152 @@
+/* eslint-disable
+ camelcase,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/LockManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('LockManager - trying the lock', function () {
+ beforeEach(function () {
+ let Profiler
+ this.LockManager = SandboxedModule.require(modulePath, {
+ requires: {
+ '@overleaf/redis-wrapper': {
+ createClient: () => {
+ return {
+ auth() {},
+ set: (this.set = sinon.stub()),
+ }
+ },
+ },
+ './Metrics': { inc() {} },
+ '@overleaf/settings': {
+ redis: {
+ lock: {
+ key_schema: {
+ blockingKey({ doc_id }) {
+ return `Blocking:${doc_id}`
+ },
+ },
+ },
+ },
+ },
+ './Profiler':
+ (this.Profiler = Profiler =
+ (function () {
+ Profiler = class Profiler {
+ static initClass() {
+ this.prototype.log = sinon
+ .stub()
+ .returns({ end: sinon.stub() })
+ this.prototype.end = sinon.stub()
+ }
+ }
+ Profiler.initClass()
+ return Profiler
+ })()),
+ },
+ })
+
+ this.callback = sinon.stub()
+ return (this.doc_id = 'doc-id-123')
+ })
+
+ describe('when the lock is not set', function () {
+ beforeEach(function () {
+ this.lockValue = 'mock-lock-value'
+ this.LockManager.randomLock = sinon.stub().returns(this.lockValue)
+ this.set.callsArgWith(5, null, 'OK')
+ return this.LockManager.tryLock(this.doc_id, this.callback)
+ })
+
+ it('should set the lock key with an expiry if it is not set', function () {
+ return this.set
+ .calledWith(`Blocking:${this.doc_id}`, this.lockValue, 'EX', 30, 'NX')
+ .should.equal(true)
+ })
+
+ return it('should return the callback with true and the lock value', function () {
+ return this.callback
+ .calledWith(null, true, this.lockValue)
+ .should.equal(true)
+ })
+ })
+
+ describe('when the lock is already set', function () {
+ beforeEach(function () {
+ this.set.callsArgWith(5, null, null)
+ return this.LockManager.tryLock(this.doc_id, this.callback)
+ })
+
+ return it('should return the callback with false', function () {
+ return this.callback.calledWith(null, false).should.equal(true)
+ })
+ })
+
+ return describe('when it takes a long time for redis to set the lock', function () {
+ beforeEach(function () {
+ this.Profiler.prototype.end = () => 7000 // take a long time
+ this.Profiler.prototype.log = sinon
+ .stub()
+ .returns({ end: this.Profiler.prototype.end })
+ this.lockValue = 'mock-lock-value'
+ this.LockManager.randomLock = sinon.stub().returns(this.lockValue)
+ this.LockManager.releaseLock = sinon.stub().callsArgWith(2, null)
+ return this.set.callsArgWith(5, null, 'OK')
+ })
+
+ describe('in all cases', function () {
+ beforeEach(function () {
+ return this.LockManager.tryLock(this.doc_id, this.callback)
+ })
+
+ it('should set the lock key with an expiry if it is not set', function () {
+ return this.set
+ .calledWith(`Blocking:${this.doc_id}`, this.lockValue, 'EX', 30, 'NX')
+ .should.equal(true)
+ })
+
+ return it('should try to release the lock', function () {
+ return this.LockManager.releaseLock
+ .calledWith(this.doc_id, this.lockValue)
+ .should.equal(true)
+ })
+ })
+
+ describe('if the lock is released successfully', function () {
+ beforeEach(function () {
+ this.LockManager.releaseLock = sinon.stub().callsArgWith(2, null)
+ return this.LockManager.tryLock(this.doc_id, this.callback)
+ })
+
+ return it('should return the callback with false', function () {
+ return this.callback.calledWith(null, false).should.equal(true)
+ })
+ })
+
+ return describe('if the lock has already timed out', function () {
+ beforeEach(function () {
+ this.LockManager.releaseLock = sinon
+ .stub()
+ .callsArgWith(2, new Error('tried to release timed out lock'))
+ return this.LockManager.tryLock(this.doc_id, this.callback)
+ })
+
+ return it('should return the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js
new file mode 100644
index 0000000000..cc669481f7
--- /dev/null
+++ b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js
@@ -0,0 +1,443 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/PersistenceManager.js'
+const SandboxedModule = require('sandboxed-module')
+const Errors = require('../../../../app/js/Errors')
+
+describe('PersistenceManager', function () {
+ beforeEach(function () {
+ let Timer
+ this.request = sinon.stub()
+ this.request.defaults = () => this.request
+ this.PersistenceManager = SandboxedModule.require(modulePath, {
+ requires: {
+ requestretry: this.request,
+ '@overleaf/settings': (this.Settings = {}),
+ './Metrics': (this.Metrics = {
+ Timer: (Timer = (function () {
+ Timer = class Timer {
+ static initClass() {
+ this.prototype.done = sinon.stub()
+ }
+ }
+ Timer.initClass()
+ return Timer
+ })()),
+ inc: sinon.stub(),
+ }),
+ './Errors': Errors,
+ },
+ })
+ this.project_id = 'project-id-123'
+ this.projectHistoryId = 'history-id-123'
+ this.doc_id = 'doc-id-123'
+ this.lines = ['one', 'two', 'three']
+ this.version = 42
+ this.callback = sinon.stub()
+ this.ranges = { comments: 'mock', entries: 'mock' }
+ this.pathname = '/a/b/c.tex'
+ this.lastUpdatedAt = Date.now()
+ this.lastUpdatedBy = 'last-author-id'
+ return (this.Settings.apis = {
+ web: {
+ url: (this.url = 'www.example.com'),
+ user: (this.user = 'sharelatex'),
+ pass: (this.pass = 'password'),
+ },
+ })
+ })
+
+ describe('getDoc', function () {
+ beforeEach(function () {
+ return (this.webResponse = {
+ lines: this.lines,
+ version: this.version,
+ ranges: this.ranges,
+ pathname: this.pathname,
+ projectHistoryId: this.projectHistoryId,
+ })
+ })
+
+ describe('with a successful response from the web api', function () {
+ beforeEach(function () {
+ this.request.callsArgWith(
+ 1,
+ null,
+ { statusCode: 200 },
+ JSON.stringify(this.webResponse)
+ )
+ return this.PersistenceManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should call the web api', function () {
+ return this.request
+ .calledWith({
+ url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`,
+ method: 'GET',
+ headers: {
+ accept: 'application/json',
+ },
+ auth: {
+ user: this.user,
+ pass: this.pass,
+ sendImmediately: true,
+ },
+ jar: false,
+ timeout: 5000,
+ })
+ .should.equal(true)
+ })
+
+ it('should call the callback with the doc lines, version and ranges', function () {
+ return this.callback
+ .calledWith(
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId
+ )
+ .should.equal(true)
+ })
+
+ it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+
+ return it('should increment the metric', function () {
+ return this.Metrics.inc
+ .calledWith('getDoc', 1, { status: 200 })
+ .should.equal(true)
+ })
+ })
+
+ describe('when request returns an error', function () {
+ beforeEach(function () {
+ this.error = new Error('oops')
+ this.error.code = 'EOOPS'
+ this.request.callsArgWith(1, this.error, null, null)
+ return this.PersistenceManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should return a generic connection error', function () {
+ return this.callback
+ .calledWith(
+ sinon.match
+ .instanceOf(Error)
+ .and(sinon.match.has('message', 'error connecting to web API'))
+ )
+ .should.equal(true)
+ })
+
+ it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+
+ return it('should increment the metric', function () {
+ return this.Metrics.inc
+ .calledWith('getDoc', 1, { status: 'EOOPS' })
+ .should.equal(true)
+ })
+ })
+
+ describe('when the request returns 404', function () {
+ beforeEach(function () {
+ this.request.callsArgWith(1, null, { statusCode: 404 }, '')
+ return this.PersistenceManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should return a NotFoundError', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Errors.NotFoundError))
+ .should.equal(true)
+ })
+
+ it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+
+ return it('should increment the metric', function () {
+ return this.Metrics.inc
+ .calledWith('getDoc', 1, { status: 404 })
+ .should.equal(true)
+ })
+ })
+
+ describe('when the request returns an error status code', function () {
+ beforeEach(function () {
+ this.request.callsArgWith(1, null, { statusCode: 500 }, '')
+ return this.PersistenceManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should return an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+
+ it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+
+ return it('should increment the metric', function () {
+ return this.Metrics.inc
+ .calledWith('getDoc', 1, { status: 500 })
+ .should.equal(true)
+ })
+ })
+
+ describe('when request returns a doc without lines', function () {
+ beforeEach(function () {
+ delete this.webResponse.lines
+ this.request.callsArgWith(
+ 1,
+ null,
+ { statusCode: 200 },
+ JSON.stringify(this.webResponse)
+ )
+ return this.PersistenceManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ return it('should return an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+
+ describe('when request returns a doc without a version', function () {
+ beforeEach(function () {
+ delete this.webResponse.version
+ this.request.callsArgWith(
+ 1,
+ null,
+ { statusCode: 200 },
+ JSON.stringify(this.webResponse)
+ )
+ return this.PersistenceManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ return it('should return an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+
+ return describe('when request returns a doc without a pathname', function () {
+ beforeEach(function () {
+ delete this.webResponse.pathname
+ this.request.callsArgWith(
+ 1,
+ null,
+ { statusCode: 200 },
+ JSON.stringify(this.webResponse)
+ )
+ return this.PersistenceManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ return it('should return an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+ })
+
+ return describe('setDoc', function () {
+ describe('with a successful response from the web api', function () {
+ beforeEach(function () {
+ this.request.callsArgWith(1, null, { statusCode: 200 })
+ return this.PersistenceManager.setDoc(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.lastUpdatedAt,
+ this.lastUpdatedBy,
+ this.callback
+ )
+ })
+
+ it('should call the web api', function () {
+ return this.request
+ .calledWith({
+ url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`,
+ json: {
+ lines: this.lines,
+ version: this.version,
+ ranges: this.ranges,
+ lastUpdatedAt: this.lastUpdatedAt,
+ lastUpdatedBy: this.lastUpdatedBy,
+ },
+ method: 'POST',
+ auth: {
+ user: this.user,
+ pass: this.pass,
+ sendImmediately: true,
+ },
+ jar: false,
+ timeout: 5000,
+ })
+ .should.equal(true)
+ })
+
+ it('should call the callback without error', function () {
+ return this.callback.calledWith(null).should.equal(true)
+ })
+
+ it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+
+ return it('should increment the metric', function () {
+ return this.Metrics.inc
+ .calledWith('setDoc', 1, { status: 200 })
+ .should.equal(true)
+ })
+ })
+
+ describe('when request returns an error', function () {
+ beforeEach(function () {
+ this.error = new Error('oops')
+ this.error.code = 'EOOPS'
+ this.request.callsArgWith(1, this.error, null, null)
+ return this.PersistenceManager.setDoc(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.lastUpdatedAt,
+ this.lastUpdatedBy,
+ this.callback
+ )
+ })
+
+ it('should return a generic connection error', function () {
+ return this.callback
+ .calledWith(
+ sinon.match
+ .instanceOf(Error)
+ .and(sinon.match.has('message', 'error connecting to web API'))
+ )
+ .should.equal(true)
+ })
+
+ it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+
+ return it('should increment the metric', function () {
+ return this.Metrics.inc
+ .calledWith('setDoc', 1, { status: 'EOOPS' })
+ .should.equal(true)
+ })
+ })
+
+ describe('when the request returns 404', function () {
+ beforeEach(function () {
+ this.request.callsArgWith(1, null, { statusCode: 404 }, '')
+ return this.PersistenceManager.setDoc(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.lastUpdatedAt,
+ this.lastUpdatedBy,
+ this.callback
+ )
+ })
+
+ it('should return a NotFoundError', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Errors.NotFoundError))
+ .should.equal(true)
+ })
+
+ it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+
+ return it('should increment the metric', function () {
+ return this.Metrics.inc
+ .calledWith('setDoc', 1, { status: 404 })
+ .should.equal(true)
+ })
+ })
+
+ return describe('when the request returns an error status code', function () {
+ beforeEach(function () {
+ this.request.callsArgWith(1, null, { statusCode: 500 }, '')
+ return this.PersistenceManager.setDoc(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.lastUpdatedAt,
+ this.lastUpdatedBy,
+ this.callback
+ )
+ })
+
+ it('should return an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+
+ it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+
+ return it('should increment the metric', function () {
+ return this.Metrics.inc
+ .calledWith('setDoc', 1, { status: 500 })
+ .should.equal(true)
+ })
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js
new file mode 100644
index 0000000000..941d6b2008
--- /dev/null
+++ b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js
@@ -0,0 +1,193 @@
+/* eslint-disable
+ camelcase,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/ProjectHistoryRedisManager.js'
+const SandboxedModule = require('sandboxed-module')
+const tk = require('timekeeper')
+
+describe('ProjectHistoryRedisManager', function () {
+ beforeEach(function () {
+ this.project_id = 'project-id-123'
+ this.projectHistoryId = 'history-id-123'
+ this.user_id = 'user-id-123'
+ this.callback = sinon.stub()
+ this.rclient = {}
+ tk.freeze(new Date())
+ return (this.ProjectHistoryRedisManager = SandboxedModule.require(
+ modulePath,
+ {
+ requires: {
+ '@overleaf/settings': (this.settings = {
+ redis: {
+ project_history: {
+ key_schema: {
+ projectHistoryOps({ project_id }) {
+ return `ProjectHistory:Ops:${project_id}`
+ },
+ projectHistoryFirstOpTimestamp({ project_id }) {
+ return `ProjectHistory:FirstOpTimestamp:${project_id}`
+ },
+ },
+ },
+ },
+ }),
+ '@overleaf/redis-wrapper': {
+ createClient: () => this.rclient,
+ },
+ './Metrics': (this.metrics = { summary: sinon.stub() }),
+ },
+ }
+ ))
+ })
+
+ afterEach(function () {
+ return tk.reset()
+ })
+
+ describe('queueOps', function () {
+ beforeEach(function () {
+ this.ops = ['mock-op-1', 'mock-op-2']
+ this.multi = { exec: sinon.stub() }
+ this.multi.rpush = sinon.stub()
+ this.multi.setnx = sinon.stub()
+ this.rclient.multi = () => this.multi
+ // @rclient = multi: () => @multi
+ return this.ProjectHistoryRedisManager.queueOps(
+ this.project_id,
+ ...Array.from(this.ops),
+ this.callback
+ )
+ })
+
+ it('should queue an update', function () {
+ return this.multi.rpush
+ .calledWithExactly(
+ `ProjectHistory:Ops:${this.project_id}`,
+ this.ops[0],
+ this.ops[1]
+ )
+ .should.equal(true)
+ })
+
+ return it('should set the queue timestamp if not present', function () {
+ return this.multi.setnx
+ .calledWithExactly(
+ `ProjectHistory:FirstOpTimestamp:${this.project_id}`,
+ Date.now()
+ )
+ .should.equal(true)
+ })
+ })
+
+ describe('queueRenameEntity', function () {
+ beforeEach(function () {
+ this.file_id = 1234
+
+ this.rawUpdate = {
+ pathname: (this.pathname = '/old'),
+ newPathname: (this.newPathname = '/new'),
+ version: (this.version = 2),
+ }
+
+ this.ProjectHistoryRedisManager.queueOps = sinon.stub()
+ return this.ProjectHistoryRedisManager.queueRenameEntity(
+ this.project_id,
+ this.projectHistoryId,
+ 'file',
+ this.file_id,
+ this.user_id,
+ this.rawUpdate,
+ this.callback
+ )
+ })
+
+ return it('should queue an update', function () {
+ const update = {
+ pathname: this.pathname,
+ new_pathname: this.newPathname,
+ meta: {
+ user_id: this.user_id,
+ ts: new Date(),
+ },
+ version: this.version,
+ projectHistoryId: this.projectHistoryId,
+ file: this.file_id,
+ }
+
+ return this.ProjectHistoryRedisManager.queueOps
+ .calledWithExactly(
+ this.project_id,
+ JSON.stringify(update),
+ this.callback
+ )
+ .should.equal(true)
+ })
+ })
+
+ return describe('queueAddEntity', function () {
+ beforeEach(function () {
+ this.rclient.rpush = sinon.stub().yields()
+ this.doc_id = 1234
+
+ this.rawUpdate = {
+ pathname: (this.pathname = '/old'),
+ docLines: (this.docLines = 'a\nb'),
+ version: (this.version = 2),
+ url: (this.url = 'filestore.example.com'),
+ }
+
+ this.ProjectHistoryRedisManager.queueOps = sinon.stub()
+ return this.ProjectHistoryRedisManager.queueAddEntity(
+ this.project_id,
+ this.projectHistoryId,
+ 'doc',
+ this.doc_id,
+ this.user_id,
+ this.rawUpdate,
+ this.callback
+ )
+ })
+
+ it('should queue an update', function () {
+ const update = {
+ pathname: this.pathname,
+ docLines: this.docLines,
+ url: this.url,
+ meta: {
+ user_id: this.user_id,
+ ts: new Date(),
+ },
+ version: this.version,
+ projectHistoryId: this.projectHistoryId,
+ doc: this.doc_id,
+ }
+
+ return this.ProjectHistoryRedisManager.queueOps
+ .calledWithExactly(
+ this.project_id,
+ JSON.stringify(update),
+ this.callback
+ )
+ .should.equal(true)
+ })
+
+ describe('queueResyncProjectStructure', function () {
+ return it('should queue an update', function () {})
+ })
+
+ return describe('queueResyncDocContent', function () {
+ return it('should queue an update', function () {})
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js
new file mode 100644
index 0000000000..da4013a534
--- /dev/null
+++ b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js
@@ -0,0 +1,153 @@
+/* eslint-disable
+ camelcase,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/ProjectManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('ProjectManager - flushAndDeleteProject', function () {
+ beforeEach(function () {
+ let Timer
+ this.ProjectManager = SandboxedModule.require(modulePath, {
+ requires: {
+ './RedisManager': (this.RedisManager = {}),
+ './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}),
+ './DocumentManager': (this.DocumentManager = {}),
+ './HistoryManager': (this.HistoryManager = {
+ flushProjectChanges: sinon.stub().callsArg(2),
+ }),
+ './Metrics': (this.Metrics = {
+ Timer: (Timer = (function () {
+ Timer = class Timer {
+ static initClass() {
+ this.prototype.done = sinon.stub()
+ }
+ }
+ Timer.initClass()
+ return Timer
+ })()),
+ }),
+ },
+ })
+ this.project_id = 'project-id-123'
+ return (this.callback = sinon.stub())
+ })
+
+ describe('successfully', function () {
+ beforeEach(function (done) {
+ this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3']
+ this.RedisManager.getDocIdsInProject = sinon
+ .stub()
+ .callsArgWith(1, null, this.doc_ids)
+ this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArg(3)
+ return this.ProjectManager.flushAndDeleteProjectWithLocks(
+ this.project_id,
+ {},
+ error => {
+ this.callback(error)
+ return done()
+ }
+ )
+ })
+
+ it('should get the doc ids in the project', function () {
+ return this.RedisManager.getDocIdsInProject
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should delete each doc in the project', function () {
+ return Array.from(this.doc_ids).map(doc_id =>
+ this.DocumentManager.flushAndDeleteDocWithLock
+ .calledWith(this.project_id, doc_id, {})
+ .should.equal(true)
+ )
+ })
+
+ it('should flush project history', function () {
+ return this.HistoryManager.flushProjectChanges
+ .calledWith(this.project_id, {})
+ .should.equal(true)
+ })
+
+ it('should call the callback without error', function () {
+ return this.callback.calledWith(null).should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ return describe('when a doc errors', function () {
+ beforeEach(function (done) {
+ this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3']
+ this.RedisManager.getDocIdsInProject = sinon
+ .stub()
+ .callsArgWith(1, null, this.doc_ids)
+ this.DocumentManager.flushAndDeleteDocWithLock = sinon.spy(
+ (project_id, doc_id, options, callback) => {
+ if (doc_id === 'doc-id-1') {
+ return callback(
+ (this.error = new Error('oops, something went wrong'))
+ )
+ } else {
+ return callback()
+ }
+ }
+ )
+ return this.ProjectManager.flushAndDeleteProjectWithLocks(
+ this.project_id,
+ {},
+ error => {
+ this.callback(error)
+ return done()
+ }
+ )
+ })
+
+ it('should still flush each doc in the project', function () {
+ return Array.from(this.doc_ids).map(doc_id =>
+ this.DocumentManager.flushAndDeleteDocWithLock
+ .calledWith(this.project_id, doc_id, {})
+ .should.equal(true)
+ )
+ })
+
+ it('should still flush project history', function () {
+ return this.HistoryManager.flushProjectChanges
+ .calledWith(this.project_id, {})
+ .should.equal(true)
+ })
+
+ it('should record the error', function () {
+ return this.logger.error
+ .calledWith(
+ { err: this.error, projectId: this.project_id, docId: 'doc-id-1' },
+ 'error deleting doc'
+ )
+ .should.equal(true)
+ })
+
+ it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js
new file mode 100644
index 0000000000..d607840494
--- /dev/null
+++ b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js
@@ -0,0 +1,142 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/ProjectManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('ProjectManager - flushProject', function () {
+ beforeEach(function () {
+ let Timer
+ this.ProjectManager = SandboxedModule.require(modulePath, {
+ requires: {
+ './RedisManager': (this.RedisManager = {}),
+ './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}),
+ './DocumentManager': (this.DocumentManager = {}),
+ './HistoryManager': (this.HistoryManager = {}),
+ './Metrics': (this.Metrics = {
+ Timer: (Timer = (function () {
+ Timer = class Timer {
+ static initClass() {
+ this.prototype.done = sinon.stub()
+ }
+ }
+ Timer.initClass()
+ return Timer
+ })()),
+ }),
+ },
+ })
+ this.project_id = 'project-id-123'
+ return (this.callback = sinon.stub())
+ })
+
+ describe('successfully', function () {
+ beforeEach(function (done) {
+ this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3']
+ this.RedisManager.getDocIdsInProject = sinon
+ .stub()
+ .callsArgWith(1, null, this.doc_ids)
+ this.DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArg(2)
+ return this.ProjectManager.flushProjectWithLocks(
+ this.project_id,
+ error => {
+ this.callback(error)
+ return done()
+ }
+ )
+ })
+
+ it('should get the doc ids in the project', function () {
+ return this.RedisManager.getDocIdsInProject
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should flush each doc in the project', function () {
+ return Array.from(this.doc_ids).map(doc_id =>
+ this.DocumentManager.flushDocIfLoadedWithLock
+ .calledWith(this.project_id, doc_id)
+ .should.equal(true)
+ )
+ })
+
+ it('should call the callback without error', function () {
+ return this.callback.calledWith(null).should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ return describe('when a doc errors', function () {
+ beforeEach(function (done) {
+ this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3']
+ this.RedisManager.getDocIdsInProject = sinon
+ .stub()
+ .callsArgWith(1, null, this.doc_ids)
+ this.DocumentManager.flushDocIfLoadedWithLock = sinon.spy(
+ (project_id, doc_id, callback) => {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ if (doc_id === 'doc-id-1') {
+ return callback(
+ (this.error = new Error('oops, something went wrong'))
+ )
+ } else {
+ return callback()
+ }
+ }
+ )
+ return this.ProjectManager.flushProjectWithLocks(
+ this.project_id,
+ error => {
+ this.callback(error)
+ return done()
+ }
+ )
+ })
+
+ it('should still flush each doc in the project', function () {
+ return Array.from(this.doc_ids).map(doc_id =>
+ this.DocumentManager.flushDocIfLoadedWithLock
+ .calledWith(this.project_id, doc_id)
+ .should.equal(true)
+ )
+ })
+
+ it('should record the error', function () {
+ return this.logger.error
+ .calledWith(
+ { err: this.error, projectId: this.project_id, docId: 'doc-id-1' },
+ 'error flushing doc'
+ )
+ .should.equal(true)
+ })
+
+ it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js
new file mode 100644
index 0000000000..7bd2c27aa2
--- /dev/null
+++ b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js
@@ -0,0 +1,219 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/ProjectManager.js'
+const SandboxedModule = require('sandboxed-module')
+const Errors = require('../../../../app/js/Errors.js')
+
+describe('ProjectManager - getProjectDocsAndFlushIfOld', function () {
+ beforeEach(function () {
+ let Timer
+ this.ProjectManager = SandboxedModule.require(modulePath, {
+ requires: {
+ './RedisManager': (this.RedisManager = {}),
+ './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}),
+ './DocumentManager': (this.DocumentManager = {}),
+ './HistoryManager': (this.HistoryManager = {}),
+ './Metrics': (this.Metrics = {
+ Timer: (Timer = (function () {
+ Timer = class Timer {
+ static initClass() {
+ this.prototype.done = sinon.stub()
+ }
+ }
+ Timer.initClass()
+ return Timer
+ })()),
+ }),
+ './Errors': Errors,
+ },
+ })
+ this.project_id = 'project-id-123'
+ this.callback = sinon.stub()
+ return (this.doc_versions = [111, 222, 333])
+ })
+
+ describe('successfully', function () {
+ beforeEach(function (done) {
+ this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3']
+ this.doc_lines = [
+ ['aaa', 'aaa'],
+ ['bbb', 'bbb'],
+ ['ccc', 'ccc'],
+ ]
+ this.docs = [
+ {
+ _id: this.doc_ids[0],
+ lines: this.doc_lines[0],
+ v: this.doc_versions[0],
+ },
+ {
+ _id: this.doc_ids[1],
+ lines: this.doc_lines[1],
+ v: this.doc_versions[1],
+ },
+ {
+ _id: this.doc_ids[2],
+ lines: this.doc_lines[2],
+ v: this.doc_versions[2],
+ },
+ ]
+ this.RedisManager.checkOrSetProjectState = sinon
+ .stub()
+ .callsArgWith(2, null)
+ this.RedisManager.getDocIdsInProject = sinon
+ .stub()
+ .callsArgWith(1, null, this.doc_ids)
+ this.DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub()
+ this.DocumentManager.getDocAndFlushIfOldWithLock
+ .withArgs(this.project_id, this.doc_ids[0])
+ .callsArgWith(2, null, this.doc_lines[0], this.doc_versions[0])
+ this.DocumentManager.getDocAndFlushIfOldWithLock
+ .withArgs(this.project_id, this.doc_ids[1])
+ .callsArgWith(2, null, this.doc_lines[1], this.doc_versions[1])
+ this.DocumentManager.getDocAndFlushIfOldWithLock
+ .withArgs(this.project_id, this.doc_ids[2])
+ .callsArgWith(2, null, this.doc_lines[2], this.doc_versions[2])
+ return this.ProjectManager.getProjectDocsAndFlushIfOld(
+ this.project_id,
+ this.projectStateHash,
+ this.excludeVersions,
+ (error, docs) => {
+ this.callback(error, docs)
+ return done()
+ }
+ )
+ })
+
+ it('should check the project state', function () {
+ return this.RedisManager.checkOrSetProjectState
+ .calledWith(this.project_id, this.projectStateHash)
+ .should.equal(true)
+ })
+
+ it('should get the doc ids in the project', function () {
+ return this.RedisManager.getDocIdsInProject
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should call the callback without error', function () {
+ return this.callback.calledWith(null, this.docs).should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('when the state does not match', function () {
+ beforeEach(function (done) {
+ this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3']
+ this.RedisManager.checkOrSetProjectState = sinon
+ .stub()
+ .callsArgWith(2, null, true)
+ return this.ProjectManager.getProjectDocsAndFlushIfOld(
+ this.project_id,
+ this.projectStateHash,
+ this.excludeVersions,
+ (error, docs) => {
+ this.callback(error, docs)
+ return done()
+ }
+ )
+ })
+
+ it('should check the project state', function () {
+ return this.RedisManager.checkOrSetProjectState
+ .calledWith(this.project_id, this.projectStateHash)
+ .should.equal(true)
+ })
+
+ it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Errors.ProjectStateChangedError))
+ .should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('when a doc errors', function () {
+ beforeEach(function (done) {
+ this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3']
+ this.RedisManager.checkOrSetProjectState = sinon
+ .stub()
+ .callsArgWith(2, null)
+ this.RedisManager.getDocIdsInProject = sinon
+ .stub()
+ .callsArgWith(1, null, this.doc_ids)
+ this.DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub()
+ this.DocumentManager.getDocAndFlushIfOldWithLock
+ .withArgs(this.project_id, 'doc-id-1')
+ .callsArgWith(2, null, ['test doc content'], this.doc_versions[1])
+ this.DocumentManager.getDocAndFlushIfOldWithLock
+ .withArgs(this.project_id, 'doc-id-2')
+ .callsArgWith(2, (this.error = new Error('oops'))) // trigger an error
+ return this.ProjectManager.getProjectDocsAndFlushIfOld(
+ this.project_id,
+ this.projectStateHash,
+ this.excludeVersions,
+ (error, docs) => {
+ this.callback(error)
+ return done()
+ }
+ )
+ })
+
+ it('should record the error', function () {
+ return this.logger.error
+ .calledWith(
+ { err: this.error, projectId: this.project_id, docId: 'doc-id-2' },
+ 'error getting project doc lines in getProjectDocsAndFlushIfOld'
+ )
+ .should.equal(true)
+ })
+
+ it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ return describe('clearing the project state with clearProjectState', function () {
+ beforeEach(function (done) {
+ this.RedisManager.clearProjectState = sinon.stub().callsArg(1)
+ return this.ProjectManager.clearProjectState(this.project_id, error => {
+ this.callback(error)
+ return done()
+ })
+ })
+
+ it('should clear the project state', function () {
+ return this.RedisManager.clearProjectState
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js
new file mode 100644
index 0000000000..ffc1257fe0
--- /dev/null
+++ b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js
@@ -0,0 +1,397 @@
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/ProjectManager.js'
+const SandboxedModule = require('sandboxed-module')
+const _ = require('lodash')
+
+describe('ProjectManager', function () {
+ beforeEach(function () {
+ this.RedisManager = {}
+ this.ProjectHistoryRedisManager = {
+ queueRenameEntity: sinon.stub().yields(),
+ queueAddEntity: sinon.stub().yields(),
+ }
+ this.DocumentManager = {
+ renameDocWithLock: sinon.stub().yields(),
+ }
+ this.HistoryManager = {
+ flushProjectChangesAsync: sinon.stub(),
+ shouldFlushHistoryOps: sinon.stub().returns(false),
+ }
+ this.Metrics = {
+ Timer: class Timer {},
+ }
+ this.Metrics.Timer.prototype.done = sinon.stub()
+
+ this.ProjectManager = SandboxedModule.require(modulePath, {
+ requires: {
+ './RedisManager': this.RedisManager,
+ './ProjectHistoryRedisManager': this.ProjectHistoryRedisManager,
+ './DocumentManager': this.DocumentManager,
+ './HistoryManager': this.HistoryManager,
+ './Metrics': this.Metrics,
+ },
+ })
+
+ this.project_id = 'project-id-123'
+ this.projectHistoryId = 'history-id-123'
+ this.user_id = 'user-id-123'
+ this.version = 1234567
+ this.callback = sinon.stub()
+ })
+
+ describe('updateProjectWithLocks', function () {
+ describe('rename operations', function () {
+ beforeEach(function () {
+ this.firstDocUpdate = {
+ type: 'rename-doc',
+ id: 1,
+ pathname: 'foo',
+ newPathname: 'foo',
+ }
+ this.secondDocUpdate = {
+ type: 'rename-doc',
+ id: 2,
+ pathname: 'bar',
+ newPathname: 'bar2',
+ }
+ this.firstFileUpdate = {
+ type: 'rename-file',
+ id: 2,
+ pathname: 'bar',
+ newPathname: 'bar2',
+ }
+ this.updates = [
+ this.firstDocUpdate,
+ this.secondDocUpdate,
+ this.firstFileUpdate,
+ ]
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.ProjectManager.updateProjectWithLocks(
+ this.project_id,
+ this.projectHistoryId,
+ this.user_id,
+ this.updates,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should rename the docs in the updates', function () {
+ const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, {
+ version: `${this.version}.0`,
+ })
+ const secondDocUpdateWithVersion = _.extend(
+ {},
+ this.secondDocUpdate,
+ { version: `${this.version}.1` }
+ )
+ this.DocumentManager.renameDocWithLock
+ .calledWith(
+ this.project_id,
+ this.firstDocUpdate.id,
+ this.user_id,
+ firstDocUpdateWithVersion,
+ this.projectHistoryId
+ )
+ .should.equal(true)
+ this.DocumentManager.renameDocWithLock
+ .calledWith(
+ this.project_id,
+ this.secondDocUpdate.id,
+ this.user_id,
+ secondDocUpdateWithVersion,
+ this.projectHistoryId
+ )
+ .should.equal(true)
+ })
+
+ it('should rename the files in the updates', function () {
+ const firstFileUpdateWithVersion = _.extend(
+ {},
+ this.firstFileUpdate,
+ { version: `${this.version}.2` }
+ )
+ this.ProjectHistoryRedisManager.queueRenameEntity
+ .calledWith(
+ this.project_id,
+ this.projectHistoryId,
+ 'file',
+ this.firstFileUpdate.id,
+ this.user_id,
+ firstFileUpdateWithVersion
+ )
+ .should.equal(true)
+ })
+
+ it('should not flush the history', function () {
+ this.HistoryManager.flushProjectChangesAsync
+ .calledWith(this.project_id)
+ .should.equal(false)
+ })
+
+ it('should call the callback', function () {
+ this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('when renaming a doc fails', function () {
+ beforeEach(function () {
+ this.error = new Error('error')
+ this.DocumentManager.renameDocWithLock.yields(this.error)
+ this.ProjectManager.updateProjectWithLocks(
+ this.project_id,
+ this.projectHistoryId,
+ this.user_id,
+ this.updates,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should call the callback with the error', function () {
+ this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ describe('when renaming a file fails', function () {
+ beforeEach(function () {
+ this.error = new Error('error')
+ this.ProjectHistoryRedisManager.queueRenameEntity.yields(this.error)
+ this.ProjectManager.updateProjectWithLocks(
+ this.project_id,
+ this.projectHistoryId,
+ this.user_id,
+ this.updates,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should call the callback with the error', function () {
+ this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ describe('with enough ops to flush', function () {
+ beforeEach(function () {
+ this.HistoryManager.shouldFlushHistoryOps.returns(true)
+ this.ProjectManager.updateProjectWithLocks(
+ this.project_id,
+ this.projectHistoryId,
+ this.user_id,
+ this.updates,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should flush the history', function () {
+ this.HistoryManager.flushProjectChangesAsync
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('add operations', function () {
+ beforeEach(function () {
+ this.firstDocUpdate = {
+ type: 'add-doc',
+ id: 1,
+ docLines: 'a\nb',
+ }
+ this.secondDocUpdate = {
+ type: 'add-doc',
+ id: 2,
+ docLines: 'a\nb',
+ }
+ this.firstFileUpdate = {
+ type: 'add-file',
+ id: 3,
+ url: 'filestore.example.com/2',
+ }
+ this.secondFileUpdate = {
+ type: 'add-file',
+ id: 4,
+ url: 'filestore.example.com/3',
+ }
+ this.updates = [
+ this.firstDocUpdate,
+ this.secondDocUpdate,
+ this.firstFileUpdate,
+ this.secondFileUpdate,
+ ]
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.ProjectManager.updateProjectWithLocks(
+ this.project_id,
+ this.projectHistoryId,
+ this.user_id,
+ this.updates,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should add the docs in the updates', function () {
+ const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, {
+ version: `${this.version}.0`,
+ })
+ const secondDocUpdateWithVersion = _.extend(
+ {},
+ this.secondDocUpdate,
+ { version: `${this.version}.1` }
+ )
+ this.ProjectHistoryRedisManager.queueAddEntity
+ .getCall(0)
+ .calledWith(
+ this.project_id,
+ this.projectHistoryId,
+ 'doc',
+ this.firstDocUpdate.id,
+ this.user_id,
+ firstDocUpdateWithVersion
+ )
+ .should.equal(true)
+ this.ProjectHistoryRedisManager.queueAddEntity
+ .getCall(1)
+ .calledWith(
+ this.project_id,
+ this.projectHistoryId,
+ 'doc',
+ this.secondDocUpdate.id,
+ this.user_id,
+ secondDocUpdateWithVersion
+ )
+ .should.equal(true)
+ })
+
+ it('should add the files in the updates', function () {
+ const firstFileUpdateWithVersion = _.extend(
+ {},
+ this.firstFileUpdate,
+ { version: `${this.version}.2` }
+ )
+ const secondFileUpdateWithVersion = _.extend(
+ {},
+ this.secondFileUpdate,
+ { version: `${this.version}.3` }
+ )
+ this.ProjectHistoryRedisManager.queueAddEntity
+ .getCall(2)
+ .calledWith(
+ this.project_id,
+ this.projectHistoryId,
+ 'file',
+ this.firstFileUpdate.id,
+ this.user_id,
+ firstFileUpdateWithVersion
+ )
+ .should.equal(true)
+ this.ProjectHistoryRedisManager.queueAddEntity
+ .getCall(3)
+ .calledWith(
+ this.project_id,
+ this.projectHistoryId,
+ 'file',
+ this.secondFileUpdate.id,
+ this.user_id,
+ secondFileUpdateWithVersion
+ )
+ .should.equal(true)
+ })
+
+ it('should not flush the history', function () {
+ this.HistoryManager.flushProjectChangesAsync
+ .calledWith(this.project_id)
+ .should.equal(false)
+ })
+
+ it('should call the callback', function () {
+ this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('when adding a doc fails', function () {
+ beforeEach(function () {
+ this.error = new Error('error')
+ this.ProjectHistoryRedisManager.queueAddEntity.yields(this.error)
+ this.ProjectManager.updateProjectWithLocks(
+ this.project_id,
+ this.projectHistoryId,
+ this.user_id,
+ this.updates,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should call the callback with the error', function () {
+ this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ describe('when adding a file fails', function () {
+ beforeEach(function () {
+ this.error = new Error('error')
+ this.ProjectHistoryRedisManager.queueAddEntity.yields(this.error)
+ this.ProjectManager.updateProjectWithLocks(
+ this.project_id,
+ this.projectHistoryId,
+ this.user_id,
+ this.updates,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should call the callback with the error', function () {
+ this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ describe('with enough ops to flush', function () {
+ beforeEach(function () {
+ this.HistoryManager.shouldFlushHistoryOps.returns(true)
+ this.ProjectManager.updateProjectWithLocks(
+ this.project_id,
+ this.projectHistoryId,
+ this.user_id,
+ this.updates,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should flush the history', function () {
+ this.HistoryManager.flushProjectChangesAsync
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('when given an unknown operation type', function () {
+ beforeEach(function () {
+ this.updates = [{ type: 'brew-coffee' }]
+ this.ProjectManager.updateProjectWithLocks(
+ this.project_id,
+ this.projectHistoryId,
+ this.user_id,
+ this.updates,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should call back with an error', function () {
+ this.callback.calledWith(sinon.match.instanceOf(Error)).should.be.true
+ })
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js
new file mode 100644
index 0000000000..6fbad0557a
--- /dev/null
+++ b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js
@@ -0,0 +1,520 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/RangesManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('RangesManager', function () {
+ beforeEach(function () {
+ this.RangesManager = SandboxedModule.require(modulePath)
+
+ this.doc_id = 'doc-id-123'
+ this.project_id = 'project-id-123'
+ this.user_id = 'user-id-123'
+ return (this.callback = sinon.stub())
+ })
+
+ describe('applyUpdate', function () {
+ beforeEach(function () {
+ this.updates = [
+ {
+ meta: {
+ user_id: this.user_id,
+ },
+ op: [
+ {
+ i: 'two ',
+ p: 4,
+ },
+ ],
+ },
+ ]
+ this.entries = {
+ comments: [
+ {
+ op: {
+ c: 'three ',
+ p: 4,
+ },
+ metadata: {
+ user_id: this.user_id,
+ },
+ },
+ ],
+ changes: [
+ {
+ op: {
+ i: 'five',
+ p: 15,
+ },
+ metadata: {
+ user_id: this.user_id,
+ },
+ },
+ ],
+ }
+ return (this.newDocLines = ['one two three four five'])
+ }) // old is "one three four five"
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ return this.RangesManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.entries,
+ this.updates,
+ this.newDocLines,
+ this.callback
+ )
+ })
+
+ return it('should return the modified comments and changes', function () {
+ this.callback.called.should.equal(true)
+ const [error, entries, ranges_were_collapsed] = Array.from(
+ this.callback.args[0]
+ )
+ expect(error).to.be.null
+ expect(ranges_were_collapsed).to.equal(false)
+ entries.comments[0].op.should.deep.equal({
+ c: 'three ',
+ p: 8,
+ })
+ return entries.changes[0].op.should.deep.equal({
+ i: 'five',
+ p: 19,
+ })
+ })
+ })
+
+ describe('with empty comments', function () {
+ beforeEach(function () {
+ this.entries.comments = []
+ return this.RangesManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.entries,
+ this.updates,
+ this.newDocLines,
+ this.callback
+ )
+ })
+
+ return it('should return an object with no comments', function () {
+ // Save space in redis and don't store just {}
+ this.callback.called.should.equal(true)
+ const [error, entries] = Array.from(this.callback.args[0])
+ expect(error).to.be.null
+ return expect(entries.comments).to.be.undefined
+ })
+ })
+
+ describe('with empty changes', function () {
+ beforeEach(function () {
+ this.entries.changes = []
+ return this.RangesManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.entries,
+ this.updates,
+ this.newDocLines,
+ this.callback
+ )
+ })
+
+ return it('should return an object with no changes', function () {
+ // Save space in redis and don't store just {}
+ this.callback.called.should.equal(true)
+ const [error, entries] = Array.from(this.callback.args[0])
+ expect(error).to.be.null
+ return expect(entries.changes).to.be.undefined
+ })
+ })
+
+ describe('with too many comments', function () {
+ beforeEach(function () {
+ this.RangesManager.MAX_COMMENTS = 2
+ this.updates = [
+ {
+ meta: {
+ user_id: this.user_id,
+ },
+ op: [
+ {
+ c: 'one',
+ p: 0,
+ t: 'thread-id-1',
+ },
+ ],
+ },
+ ]
+ this.entries = {
+ comments: [
+ {
+ op: {
+ c: 'three ',
+ p: 4,
+ t: 'thread-id-2',
+ },
+ metadata: {
+ user_id: this.user_id,
+ },
+ },
+ {
+ op: {
+ c: 'four ',
+ p: 10,
+ t: 'thread-id-3',
+ },
+ metadata: {
+ user_id: this.user_id,
+ },
+ },
+ ],
+ changes: [],
+ }
+ return this.RangesManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.entries,
+ this.updates,
+ this.newDocLines,
+ this.callback
+ )
+ })
+
+ return it('should return an error', function () {
+ this.callback.called.should.equal(true)
+ const [error, entries] = Array.from(this.callback.args[0])
+ expect(error).to.not.be.null
+ return expect(error.message).to.equal(
+ 'too many comments or tracked changes'
+ )
+ })
+ })
+
+ describe('with too many changes', function () {
+ beforeEach(function () {
+ this.RangesManager.MAX_CHANGES = 2
+ this.updates = [
+ {
+ meta: {
+ user_id: this.user_id,
+ tc: 'track-changes-id-yes',
+ },
+ op: [
+ {
+ i: 'one ',
+ p: 0,
+ },
+ ],
+ },
+ ]
+ this.entries = {
+ changes: [
+ {
+ op: {
+ i: 'three',
+ p: 4,
+ },
+ metadata: {
+ user_id: this.user_id,
+ },
+ },
+ {
+ op: {
+ i: 'four',
+ p: 10,
+ },
+ metadata: {
+ user_id: this.user_id,
+ },
+ },
+ ],
+ comments: [],
+ }
+ this.newDocLines = ['one two three four']
+ return this.RangesManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.entries,
+ this.updates,
+ this.newDocLines,
+ this.callback
+ )
+ })
+
+ return it('should return an error', function () {
+ // Save space in redis and don't store just {}
+ this.callback.called.should.equal(true)
+ const [error, entries] = Array.from(this.callback.args[0])
+ expect(error).to.not.be.null
+ return expect(error.message).to.equal(
+ 'too many comments or tracked changes'
+ )
+ })
+ })
+
+ describe('inconsistent changes', function () {
+ beforeEach(function () {
+ this.updates = [
+ {
+ meta: {
+ user_id: this.user_id,
+ },
+ op: [
+ {
+ c: "doesn't match",
+ p: 0,
+ },
+ ],
+ },
+ ]
+ return this.RangesManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.entries,
+ this.updates,
+ this.newDocLines,
+ this.callback
+ )
+ })
+
+ return it('should return an error', function () {
+ // Save space in redis and don't store just {}
+ this.callback.called.should.equal(true)
+ const [error, entries] = Array.from(this.callback.args[0])
+ expect(error).to.not.be.null
+ return expect(error.message).to.equal(
+ 'Change ({"op":{"i":"five","p":15},"metadata":{"user_id":"user-id-123"}}) doesn\'t match text ("our ")'
+ )
+ })
+ })
+
+ return describe('with an update that collapses a range', function () {
+ beforeEach(function () {
+ this.updates = [
+ {
+ meta: {
+ user_id: this.user_id,
+ },
+ op: [
+ {
+ d: 'one',
+ p: 0,
+ t: 'thread-id-1',
+ },
+ ],
+ },
+ ]
+ this.entries = {
+ comments: [
+ {
+ op: {
+ c: 'n',
+ p: 1,
+ t: 'thread-id-2',
+ },
+ metadata: {
+ user_id: this.user_id,
+ },
+ },
+ ],
+ changes: [],
+ }
+ return this.RangesManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.entries,
+ this.updates,
+ this.newDocLines,
+ this.callback
+ )
+ })
+
+ return it('should return ranges_were_collapsed == true', function () {
+ this.callback.called.should.equal(true)
+ const [error, entries, ranges_were_collapsed] = Array.from(
+ this.callback.args[0]
+ )
+ return expect(ranges_were_collapsed).to.equal(true)
+ })
+ })
+ })
+
+ return describe('acceptChanges', function () {
+ beforeEach(function () {
+ this.RangesManager = SandboxedModule.require(modulePath, {
+ requires: {
+ './RangesTracker': (this.RangesTracker = SandboxedModule.require(
+ '../../../../app/js/RangesTracker.js'
+ )),
+ },
+ })
+
+ this.ranges = {
+ comments: [],
+ changes: [
+ {
+ id: 'a1',
+ op: {
+ i: 'lorem',
+ p: 0,
+ },
+ },
+ {
+ id: 'a2',
+ op: {
+ i: 'ipsum',
+ p: 10,
+ },
+ },
+ {
+ id: 'a3',
+ op: {
+ i: 'dolor',
+ p: 20,
+ },
+ },
+ {
+ id: 'a4',
+ op: {
+ i: 'sit',
+ p: 30,
+ },
+ },
+ {
+ id: 'a5',
+ op: {
+ i: 'amet',
+ p: 40,
+ },
+ },
+ ],
+ }
+ return (this.removeChangeIdsSpy = sinon.spy(
+ this.RangesTracker.prototype,
+ 'removeChangeIds'
+ ))
+ })
+
+ describe('successfully with a single change', function () {
+ beforeEach(function (done) {
+ this.change_ids = [this.ranges.changes[1].id]
+ return this.RangesManager.acceptChanges(
+ this.change_ids,
+ this.ranges,
+ (err, ranges) => {
+ this.rangesResponse = ranges
+ return done()
+ }
+ )
+ })
+
+ it('should log the call with the correct number of changes', function () {
+ return this.logger.log
+ .calledWith('accepting 1 changes in ranges')
+ .should.equal(true)
+ })
+
+ it('should delegate the change removal to the ranges tracker', function () {
+ return this.removeChangeIdsSpy
+ .calledWith(this.change_ids)
+ .should.equal(true)
+ })
+
+ it('should remove the change', function () {
+ return expect(
+ this.rangesResponse.changes.find(
+ change => change.id === this.ranges.changes[1].id
+ )
+ ).to.be.undefined
+ })
+
+ it('should return the original number of changes minus 1', function () {
+ return this.rangesResponse.changes.length.should.equal(
+ this.ranges.changes.length - 1
+ )
+ })
+
+ return it('should not touch other changes', function () {
+ return [0, 2, 3, 4].map(i =>
+ expect(
+ this.rangesResponse.changes.find(
+ change => change.id === this.ranges.changes[i].id
+ )
+ ).to.deep.equal(this.ranges.changes[i])
+ )
+ })
+ })
+
+ return describe('successfully with multiple changes', function () {
+ beforeEach(function (done) {
+ this.change_ids = [
+ this.ranges.changes[1].id,
+ this.ranges.changes[3].id,
+ this.ranges.changes[4].id,
+ ]
+ return this.RangesManager.acceptChanges(
+ this.change_ids,
+ this.ranges,
+ (err, ranges) => {
+ this.rangesResponse = ranges
+ return done()
+ }
+ )
+ })
+
+ it('should log the call with the correct number of changes', function () {
+ return this.logger.log
+ .calledWith(`accepting ${this.change_ids.length} changes in ranges`)
+ .should.equal(true)
+ })
+
+ it('should delegate the change removal to the ranges tracker', function () {
+ return this.removeChangeIdsSpy
+ .calledWith(this.change_ids)
+ .should.equal(true)
+ })
+
+ it('should remove the changes', function () {
+ return [1, 3, 4].map(
+ i =>
+ expect(
+ this.rangesResponse.changes.find(
+ change => change.id === this.ranges.changes[i].id
+ )
+ ).to.be.undefined
+ )
+ })
+
+ it('should return the original number of changes minus the number of accepted changes', function () {
+ return this.rangesResponse.changes.length.should.equal(
+ this.ranges.changes.length - 3
+ )
+ })
+
+ return it('should not touch other changes', function () {
+ return [0, 2].map(i =>
+ expect(
+ this.rangesResponse.changes.find(
+ change => change.id === this.ranges.changes[i].id
+ )
+ ).to.deep.equal(this.ranges.changes[i])
+ )
+ })
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js
new file mode 100644
index 0000000000..09c4ebac52
--- /dev/null
+++ b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js
@@ -0,0 +1,133 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/RateLimitManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('RateLimitManager', function () {
+ beforeEach(function () {
+ let Timer
+ this.RateLimitManager = SandboxedModule.require(modulePath, {
+ requires: {
+ '@overleaf/settings': (this.settings = {}),
+ './Metrics': (this.Metrics = {
+ Timer: (Timer = (function () {
+ Timer = class Timer {
+ static initClass() {
+ this.prototype.done = sinon.stub()
+ }
+ }
+ Timer.initClass()
+ return Timer
+ })()),
+ gauge: sinon.stub(),
+ }),
+ },
+ })
+ this.callback = sinon.stub()
+ return (this.RateLimiter = new this.RateLimitManager(1))
+ })
+
+ describe('for a single task', function () {
+ beforeEach(function () {
+ this.task = sinon.stub()
+ return this.RateLimiter.run(this.task, this.callback)
+ })
+
+ it('should execute the task in the background', function () {
+ return this.task.called.should.equal(true)
+ })
+
+ it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+
+ return it('should finish with a worker count of one', function () {
+ // because it's in the background
+ return expect(this.RateLimiter.ActiveWorkerCount).to.equal(1)
+ })
+ })
+
+ describe('for multiple tasks', function () {
+ beforeEach(function (done) {
+ this.task = sinon.stub()
+ this.finalTask = sinon.stub()
+ const task = cb => {
+ this.task()
+ return setTimeout(cb, 100)
+ }
+ const finalTask = cb => {
+ this.finalTask()
+ return setTimeout(cb, 100)
+ }
+ this.RateLimiter.run(task, this.callback)
+ this.RateLimiter.run(task, this.callback)
+ this.RateLimiter.run(task, this.callback)
+ return this.RateLimiter.run(finalTask, err => {
+ this.callback(err)
+ return done()
+ })
+ })
+
+ it('should execute the first three tasks', function () {
+ return this.task.calledThrice.should.equal(true)
+ })
+
+ it('should execute the final task', function () {
+ return this.finalTask.called.should.equal(true)
+ })
+
+ it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+
+ return it('should finish with worker count of zero', function () {
+ return expect(this.RateLimiter.ActiveWorkerCount).to.equal(0)
+ })
+ })
+
+ return describe('for a mixture of long-running tasks', function () {
+ beforeEach(function (done) {
+ this.task = sinon.stub()
+ this.finalTask = sinon.stub()
+ const finalTask = cb => {
+ this.finalTask()
+ return setTimeout(cb, 100)
+ }
+ this.RateLimiter.run(this.task, this.callback)
+ this.RateLimiter.run(this.task, this.callback)
+ this.RateLimiter.run(this.task, this.callback)
+ return this.RateLimiter.run(finalTask, err => {
+ this.callback(err)
+ return done()
+ })
+ })
+
+ it('should execute the first three tasks', function () {
+ return this.task.calledThrice.should.equal(true)
+ })
+
+ it('should execute the final task', function () {
+ return this.finalTask.called.should.equal(true)
+ })
+
+ it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+
+ return it('should finish with worker count of three', function () {
+ return expect(this.RateLimiter.ActiveWorkerCount).to.equal(3)
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js
new file mode 100644
index 0000000000..b0672bc088
--- /dev/null
+++ b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js
@@ -0,0 +1,168 @@
+/* eslint-disable
+ camelcase,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/RealTimeRedisManager.js'
+const SandboxedModule = require('sandboxed-module')
+const Errors = require('../../../../app/js/Errors')
+
+describe('RealTimeRedisManager', function () {
+ // Unit tests for RealTimeRedisManager, loaded via SandboxedModule with two
+ // stubbed redis clients: `rclient` for the documentupdater keyspace and
+ // `pubsubClient` for publishing applied ops.
+ beforeEach(function () {
+ this.rclient = {
+ auth() {},
+ exec: sinon.stub(),
+ }
+ // multi() hands back the same stubbed client, so commands queued in a
+ // transaction can be asserted on directly.
+ this.rclient.multi = () => this.rclient
+ this.pubsubClient = { publish: sinon.stub() }
+ this.RealTimeRedisManager = SandboxedModule.require(modulePath, {
+ requires: {
+ '@overleaf/redis-wrapper': {
+ // Return the pubsub stub or the data stub depending on which
+ // redis config the module requests.
+ createClient: config =>
+ config.name === 'pubsub' ? this.pubsubClient : this.rclient,
+ },
+ '@overleaf/settings': {
+ redis: {
+ documentupdater: (this.settings = {
+ key_schema: {
+ pendingUpdates({ doc_id }) {
+ return `PendingUpdates:${doc_id}`
+ },
+ },
+ }),
+ pubsub: {
+ name: 'pubsub',
+ },
+ },
+ },
+ // Deterministic randomBytes/hostname so the generated message id can
+ // be asserted exactly in the sendData tests below.
+ crypto: (this.crypto = {
+ randomBytes: sinon
+ .stub()
+ .withArgs(4)
+ .returns(Buffer.from([0x1, 0x2, 0x3, 0x4])),
+ }),
+ os: (this.os = { hostname: sinon.stub().returns('somehost') }),
+ './Metrics': (this.metrics = { summary: sinon.stub() }),
+ },
+ })
+
+ this.doc_id = 'doc-id-123'
+ this.project_id = 'project-id-123'
+ return (this.callback = sinon.stub())
+ })
+
+ describe('getPendingUpdatesForDoc', function () {
+ beforeEach(function () {
+ this.rclient.lrange = sinon.stub()
+ return (this.rclient.ltrim = sinon.stub())
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.updates = [
+ { op: [{ i: 'foo', p: 4 }] },
+ { op: [{ i: 'foo', p: 4 }] },
+ ]
+ this.jsonUpdates = this.updates.map(update => JSON.stringify(update))
+ this.rclient.exec = sinon
+ .stub()
+ .callsArgWith(0, null, [this.jsonUpdates])
+ return this.RealTimeRedisManager.getPendingUpdatesForDoc(
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ // lrange 0..7 fetches a batch of (up to) 8 pending updates.
+ it('should get the pending updates', function () {
+ return this.rclient.lrange
+ .calledWith(`PendingUpdates:${this.doc_id}`, 0, 7)
+ .should.equal(true)
+ })
+
+ // ltrim 8..-1 drops the fetched batch from the head of the list.
+ it('should delete the pending updates', function () {
+ return this.rclient.ltrim
+ .calledWith(`PendingUpdates:${this.doc_id}`, 8, -1)
+ .should.equal(true)
+ })
+
+ return it('should call the callback with the updates', function () {
+ return this.callback.calledWith(null, this.updates).should.equal(true)
+ })
+ })
+
+ return describe("when the JSON doesn't parse", function () {
+ beforeEach(function () {
+ this.jsonUpdates = [
+ JSON.stringify({ op: [{ i: 'foo', p: 4 }] }),
+ 'broken json',
+ ]
+ this.rclient.exec = sinon
+ .stub()
+ .callsArgWith(0, null, [this.jsonUpdates])
+ return this.RealTimeRedisManager.getPendingUpdatesForDoc(
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ // The JSON.parse SyntaxError is surfaced through the callback.
+ return it('should return an error to the callback', function () {
+ return this.callback
+ .calledWith(sinon.match.has('name', 'SyntaxError'))
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('getUpdatesLength', function () {
+ beforeEach(function () {
+ this.rclient.llen = sinon.stub().yields(null, (this.length = 3))
+ return this.RealTimeRedisManager.getUpdatesLength(
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should look up the length', function () {
+ return this.rclient.llen
+ .calledWith(`PendingUpdates:${this.doc_id}`)
+ .should.equal(true)
+ })
+
+ return it('should return the length', function () {
+ return this.callback.calledWith(null, this.length).should.equal(true)
+ })
+ })
+
+ return describe('sendData', function () {
+ beforeEach(function () {
+ // Expected id: 'doc:' + stubbed hostname + ':' + stubbed random bytes
+ // (01020304 hex) + '-' + message counter (0 for the first message).
+ this.message_id = 'doc:somehost:01020304-0'
+ return this.RealTimeRedisManager.sendData({ op: 'thisop' })
+ })
+
+ it('should send the op with a message id', function () {
+ return this.pubsubClient.publish
+ .calledWith(
+ 'applied-ops',
+ JSON.stringify({ op: 'thisop', _id: this.message_id })
+ )
+ .should.equal(true)
+ })
+
+ // Payload size is reported to the metrics summary by channel name.
+ return it('should track the payload size', function () {
+ return this.metrics.summary
+ .calledWith(
+ 'redis.publish.applied-ops',
+ JSON.stringify({ op: 'thisop', _id: this.message_id }).length
+ )
+ .should.equal(true)
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js
new file mode 100644
index 0000000000..67ea6c7972
--- /dev/null
+++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js
@@ -0,0 +1,1197 @@
+/* eslint-disable
+ camelcase,
+ mocha/no-identical-title,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/RedisManager.js'
+const SandboxedModule = require('sandboxed-module')
+const Errors = require('../../../../app/js/Errors')
+const crypto = require('crypto')
+const tk = require('timekeeper')
+
+describe('RedisManager', function () {
+ beforeEach(function () {
+ // Load RedisManager via SandboxedModule with a fully stubbed settings
+ // object (including the documentupdater and history key schemas) and a
+ // stubbed redis client whose multi() returns a shared `multi` object.
+ // Time is frozen with timekeeper so Date.now()-based assertions
+ // (lastUpdatedAt, UnflushedTime) are stable.
+ let Timer
+ this.multi = { exec: sinon.stub() }
+ this.rclient = { multi: () => this.multi }
+ tk.freeze(new Date())
+ this.RedisManager = SandboxedModule.require(modulePath, {
+ requires: {
+ './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}),
+ '@overleaf/settings': (this.settings = {
+ documentupdater: { logHashErrors: { write: true, read: true } },
+ apis: {
+ project_history: { enabled: true },
+ },
+ redis: {
+ documentupdater: {
+ key_schema: {
+ blockingKey({ doc_id }) {
+ return `Blocking:${doc_id}`
+ },
+ docLines({ doc_id }) {
+ return `doclines:${doc_id}`
+ },
+ docOps({ doc_id }) {
+ return `DocOps:${doc_id}`
+ },
+ docVersion({ doc_id }) {
+ return `DocVersion:${doc_id}`
+ },
+ docHash({ doc_id }) {
+ return `DocHash:${doc_id}`
+ },
+ projectKey({ doc_id }) {
+ return `ProjectId:${doc_id}`
+ },
+ pendingUpdates({ doc_id }) {
+ return `PendingUpdates:${doc_id}`
+ },
+ docsInProject({ project_id }) {
+ return `DocsIn:${project_id}`
+ },
+ ranges({ doc_id }) {
+ return `Ranges:${doc_id}`
+ },
+ pathname({ doc_id }) {
+ return `Pathname:${doc_id}`
+ },
+ projectHistoryId({ doc_id }) {
+ return `ProjectHistoryId:${doc_id}`
+ },
+ projectHistoryType({ doc_id }) {
+ return `ProjectHistoryType:${doc_id}`
+ },
+ projectState({ project_id }) {
+ return `ProjectState:${project_id}`
+ },
+ unflushedTime({ doc_id }) {
+ return `UnflushedTime:${doc_id}`
+ },
+ lastUpdatedBy({ doc_id }) {
+ return `lastUpdatedBy:${doc_id}`
+ },
+ lastUpdatedAt({ doc_id }) {
+ return `lastUpdatedAt:${doc_id}`
+ },
+ },
+ },
+ history: {
+ key_schema: {
+ uncompressedHistoryOps({ doc_id }) {
+ return `UncompressedHistoryOps:${doc_id}`
+ },
+ docsWithHistoryOps({ project_id }) {
+ return `DocsWithHistoryOps:${project_id}`
+ },
+ },
+ },
+ },
+ }),
+ '@overleaf/redis-wrapper': {
+ createClient: () => this.rclient,
+ },
+ './Metrics': (this.metrics = {
+ inc: sinon.stub(),
+ summary: sinon.stub(),
+ // Minimal Timer so timing calls in the module don't blow up.
+ Timer: (Timer = class Timer {
+ constructor() {
+ this.start = new Date()
+ }
+
+ done() {
+ const timeSpan = new Date() - this.start
+ return timeSpan
+ }
+ }),
+ }),
+ './Errors': Errors,
+ },
+ })
+
+ this.doc_id = 'doc-id-123'
+ this.project_id = 'project-id-123'
+ this.projectHistoryId = 123
+ return (this.callback = sinon.stub())
+ })
+
+ afterEach(function () {
+ // Unfreeze time after each test.
+ return tk.reset()
+ })
+
+ describe('getDoc', function () {
+ beforeEach(function () {
+ this.lines = ['one', 'two', 'three', 'これは'] // include some utf8
+ this.jsonlines = JSON.stringify(this.lines)
+ this.version = 42
+ // Hash computed over the serialized lines, matching what the module
+ // stores under DocHash — so the read-side hash check passes.
+ this.hash = crypto
+ .createHash('sha1')
+ .update(this.jsonlines, 'utf8')
+ .digest('hex')
+ this.ranges = { comments: 'mock', entries: 'mock' }
+ this.json_ranges = JSON.stringify(this.ranges)
+ this.unflushed_time = 12345
+ this.pathname = '/a/b/c.tex'
+ // NOTE(review): only 8 values are stubbed here while the mget
+ // assertion below expects 10 keys, so lastUpdatedAt/lastUpdatedBy come
+ // back undefined (the callback assertion relies on exactly that).
+ this.rclient.mget = sinon
+ .stub()
+ .yields(null, [
+ this.jsonlines,
+ this.version,
+ this.hash,
+ this.project_id,
+ this.json_ranges,
+ this.pathname,
+ this.projectHistoryId.toString(),
+ this.unflushed_time,
+ ])
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ return this.RedisManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should get all the details in one call to redis', function () {
+ this.rclient.mget
+ .calledWith(
+ `doclines:${this.doc_id}`,
+ `DocVersion:${this.doc_id}`,
+ `DocHash:${this.doc_id}`,
+ `ProjectId:${this.doc_id}`,
+ `Ranges:${this.doc_id}`,
+ `Pathname:${this.doc_id}`,
+ `ProjectHistoryId:${this.doc_id}`,
+ `UnflushedTime:${this.doc_id}`,
+ `lastUpdatedAt:${this.doc_id}`,
+ `lastUpdatedBy:${this.doc_id}`
+ )
+ .should.equal(true)
+ })
+
+ it('should return the document', function () {
+ // this.lastUpdatedAt / this.lastUpdatedBy are intentionally
+ // undefined here (not stubbed above).
+ return this.callback
+ .calledWithExactly(
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId,
+ this.unflushed_time,
+ this.lastUpdatedAt,
+ this.lastUpdatedBy
+ )
+ .should.equal(true)
+ })
+
+ return it('should not log any errors', function () {
+ return this.logger.error.calledWith().should.equal(false)
+ })
+ })
+
+ describe('with a corrupted document', function () {
+ // The stored hash does not match the hash of the doclines, so a hash
+ // error is logged, but the document is still returned.
+ beforeEach(function () {
+ this.badHash = 'INVALID-HASH-VALUE'
+ this.rclient.mget = sinon
+ .stub()
+ .yields(null, [
+ this.jsonlines,
+ this.version,
+ this.badHash,
+ this.project_id,
+ this.json_ranges,
+ ])
+ return this.RedisManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should log a hash error', function () {
+ return this.logger.error.calledWith().should.equal(true)
+ })
+
+ return it('should return the document', function () {
+ return this.callback
+ .calledWith(null, this.lines, this.version, this.ranges)
+ .should.equal(true)
+ })
+ })
+
+ describe('with a slow request to redis', function () {
+ beforeEach(function () {
+ // Advance fake time by 6s inside the redis call so the module's
+ // request-duration guard trips and an error is returned.
+ this.clock = sinon.useFakeTimers()
+ this.rclient.mget = (...args) => {
+ const cb = args.pop()
+ this.clock.tick(6000)
+ return cb(null, [
+ this.jsonlines,
+ this.version,
+ this.another_project_id,
+ this.json_ranges,
+ this.pathname,
+ this.unflushed_time,
+ ])
+ }
+
+ return this.RedisManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ afterEach(function () {
+ return this.clock.restore()
+ })
+
+ return it('should return an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+
+ return describe('getDoc with an invalid project id', function () {
+ // The doc's stored ProjectId differs from the requested project id,
+ // which is treated as the doc not being found in this project.
+ beforeEach(function () {
+ this.another_project_id = 'project-id-456'
+ this.rclient.mget = sinon
+ .stub()
+ .yields(null, [
+ this.jsonlines,
+ this.version,
+ this.hash,
+ this.another_project_id,
+ this.json_ranges,
+ this.pathname,
+ this.unflushed_time,
+ ])
+ return this.RedisManager.getDoc(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ return it('should return an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Errors.NotFoundError))
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('getPreviousDocOpsTests', function () {
+ describe('with a start and an end value', function () {
+ beforeEach(function () {
+ // The DocOps list holds ops for versions 30..70 (llen = 40), so the
+ // requested window [start, end] is offset by first_version_in_redis.
+ this.first_version_in_redis = 30
+ this.version = 70
+ this.length = this.version - this.first_version_in_redis
+ this.start = 50
+ this.end = 60
+ this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }]
+ this.jsonOps = this.ops.map(op => JSON.stringify(op))
+ this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length)
+ this.rclient.get = sinon
+ .stub()
+ .callsArgWith(1, null, this.version.toString())
+ this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps)
+ return this.RedisManager.getPreviousDocOps(
+ this.doc_id,
+ this.start,
+ this.end,
+ this.callback
+ )
+ })
+
+ it('should get the length of the existing doc ops', function () {
+ return this.rclient.llen
+ .calledWith(`DocOps:${this.doc_id}`)
+ .should.equal(true)
+ })
+
+ it('should get the current version of the doc', function () {
+ return this.rclient.get
+ .calledWith(`DocVersion:${this.doc_id}`)
+ .should.equal(true)
+ })
+
+ it('should get the appropriate docs ops', function () {
+ return this.rclient.lrange
+ .calledWith(
+ `DocOps:${this.doc_id}`,
+ this.start - this.first_version_in_redis,
+ this.end - this.first_version_in_redis
+ )
+ .should.equal(true)
+ })
+
+ return it('should return the docs with the doc ops deserialized', function () {
+ return this.callback.calledWith(null, this.ops).should.equal(true)
+ })
+ })
+
+ describe('with an end value of -1', function () {
+ // end = -1 means "to the end of the list" and is passed straight
+ // through to lrange.
+ beforeEach(function () {
+ this.first_version_in_redis = 30
+ this.version = 70
+ this.length = this.version - this.first_version_in_redis
+ this.start = 50
+ this.end = -1
+ this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }]
+ this.jsonOps = this.ops.map(op => JSON.stringify(op))
+ this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length)
+ this.rclient.get = sinon
+ .stub()
+ .callsArgWith(1, null, this.version.toString())
+ this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps)
+ return this.RedisManager.getPreviousDocOps(
+ this.doc_id,
+ this.start,
+ this.end,
+ this.callback
+ )
+ })
+
+ it('should get the appropriate docs ops to the end of list', function () {
+ return this.rclient.lrange
+ .calledWith(
+ `DocOps:${this.doc_id}`,
+ this.start - this.first_version_in_redis,
+ -1
+ )
+ .should.equal(true)
+ })
+
+ return it('should return the docs with the doc ops deserialized', function () {
+ return this.callback.calledWith(null, this.ops).should.equal(true)
+ })
+ })
+
+ describe('when the requested range is not in Redis', function () {
+ // start (20) predates the oldest op in redis (30), so the range can
+ // no longer be served and an OpRangeNotAvailableError is expected.
+ beforeEach(function () {
+ this.first_version_in_redis = 30
+ this.version = 70
+ this.length = this.version - this.first_version_in_redis
+ this.start = 20
+ this.end = -1
+ this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }]
+ this.jsonOps = this.ops.map(op => JSON.stringify(op))
+ this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length)
+ this.rclient.get = sinon
+ .stub()
+ .callsArgWith(1, null, this.version.toString())
+ this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps)
+ return this.RedisManager.getPreviousDocOps(
+ this.doc_id,
+ this.start,
+ this.end,
+ this.callback
+ )
+ })
+
+ it('should return an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Errors.OpRangeNotAvailableError))
+ .should.equal(true)
+ })
+
+ return it('should log out the problem', function () {
+ return this.logger.warn.called.should.equal(true)
+ })
+ })
+
+ return describe('with a slow request to redis', function () {
+ beforeEach(function () {
+ this.first_version_in_redis = 30
+ this.version = 70
+ this.length = this.version - this.first_version_in_redis
+ this.start = 50
+ this.end = 60
+ this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }]
+ this.jsonOps = this.ops.map(op => JSON.stringify(op))
+ this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length)
+ this.rclient.get = sinon
+ .stub()
+ .callsArgWith(1, null, this.version.toString())
+ // Advance fake time by 6s inside lrange to trip the module's
+ // request-duration guard.
+ this.clock = sinon.useFakeTimers()
+ this.rclient.lrange = (key, start, end, cb) => {
+ this.clock.tick(6000)
+ return cb(null, this.jsonOps)
+ }
+ return this.RedisManager.getPreviousDocOps(
+ this.doc_id,
+ this.start,
+ this.end,
+ this.callback
+ )
+ })
+
+ afterEach(function () {
+ return this.clock.restore()
+ })
+
+ return it('should return an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('updateDocument', function () {
+ beforeEach(function () {
+ // Shared fixture: two ops bringing the doc to version 42.
+ // getDocVersion is stubbed per-case so the consistency check
+ // (current version + ops.length === new version) can pass or fail.
+ this.lines = ['one', 'two', 'three', 'これは']
+ this.ops = [{ op: [{ i: 'foo', p: 4 }] }, { op: [{ i: 'bar', p: 8 }] }]
+ this.version = 42
+ this.hash = crypto
+ .createHash('sha1')
+ .update(JSON.stringify(this.lines), 'utf8')
+ .digest('hex')
+ this.ranges = { comments: 'mock', entries: 'mock' }
+ this.updateMeta = { user_id: 'last-author-fake-id' }
+ this.doc_update_list_length = sinon.stub()
+ this.project_update_list_length = sinon.stub()
+
+ this.RedisManager.getDocVersion = sinon.stub()
+ this.multi.mset = sinon.stub()
+ this.multi.set = sinon.stub()
+ this.multi.rpush = sinon.stub()
+ this.multi.expire = sinon.stub()
+ this.multi.ltrim = sinon.stub()
+ this.multi.del = sinon.stub()
+ // exec yields one result per queued command; the 5th slot is the
+ // rpush result used as the doc update list length.
+ this.multi.exec = sinon
+ .stub()
+ .callsArgWith(0, null, [
+ null,
+ null,
+ null,
+ null,
+ this.doc_update_list_length,
+ null,
+ null,
+ ])
+ // queueOps(project_id, ...ops, callback): the callback is the arg
+ // after the project id and one arg per op, hence ops.length + 1.
+ return (this.ProjectHistoryRedisManager.queueOps = sinon
+ .stub()
+ .callsArgWith(
+ this.ops.length + 1,
+ null,
+ this.project_update_list_length
+ ))
+ })
+
+ describe('with a consistent version', function () {
+ beforeEach(function () {})
+
+ describe('with project history enabled', function () {
+ beforeEach(function () {
+ this.settings.apis.project_history.enabled = true
+ // Current version = new version - number of ops → consistent.
+ this.RedisManager.getDocVersion
+ .withArgs(this.doc_id)
+ .yields(null, this.version - this.ops.length)
+ return this.RedisManager.updateDocument(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ops,
+ this.ranges,
+ this.updateMeta,
+ this.callback
+ )
+ })
+
+ it('should get the current doc version to check for consistency', function () {
+ return this.RedisManager.getDocVersion
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should set most details in a single MSET call', function () {
+ this.multi.mset
+ .calledWith({
+ [`doclines:${this.doc_id}`]: JSON.stringify(this.lines),
+ [`DocVersion:${this.doc_id}`]: this.version,
+ [`DocHash:${this.doc_id}`]: this.hash,
+ [`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges),
+ [`lastUpdatedAt:${this.doc_id}`]: Date.now(),
+ [`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id',
+ })
+ .should.equal(true)
+ })
+
+ // 'NX' means the unflushed time is only set if not already present.
+ it('should set the unflushed time', function () {
+ return this.multi.set
+ .calledWith(`UnflushedTime:${this.doc_id}`, Date.now(), 'NX')
+ .should.equal(true)
+ })
+
+ it('should push the doc op into the doc ops list', function () {
+ return this.multi.rpush
+ .calledWith(
+ `DocOps:${this.doc_id}`,
+ JSON.stringify(this.ops[0]),
+ JSON.stringify(this.ops[1])
+ )
+ .should.equal(true)
+ })
+
+ it('should renew the expiry ttl on the doc ops array', function () {
+ return this.multi.expire
+ .calledWith(`DocOps:${this.doc_id}`, this.RedisManager.DOC_OPS_TTL)
+ .should.equal(true)
+ })
+
+ it('should truncate the list to 100 members', function () {
+ return this.multi.ltrim
+ .calledWith(
+ `DocOps:${this.doc_id}`,
+ -this.RedisManager.DOC_OPS_MAX_LENGTH,
+ -1
+ )
+ .should.equal(true)
+ })
+
+ it('should push the updates into the history ops list', function () {
+ return this.multi.rpush
+ .calledWith(
+ `UncompressedHistoryOps:${this.doc_id}`,
+ JSON.stringify(this.ops[0]),
+ JSON.stringify(this.ops[1])
+ )
+ .should.equal(true)
+ })
+
+ it('should push the updates into the project history ops list', function () {
+ return this.ProjectHistoryRedisManager.queueOps
+ .calledWith(this.project_id, JSON.stringify(this.ops[0]))
+ .should.equal(true)
+ })
+
+ it('should call the callback', function () {
+ return this.callback
+ .calledWith(
+ null,
+ this.doc_update_list_length,
+ this.project_update_list_length
+ )
+ .should.equal(true)
+ })
+
+ return it('should not log any errors', function () {
+ return this.logger.error.calledWith().should.equal(false)
+ })
+ })
+
+ describe('with project history disabled', function () {
+ beforeEach(function () {
+ this.settings.apis.project_history.enabled = false
+ this.RedisManager.getDocVersion
+ .withArgs(this.doc_id)
+ .yields(null, this.version - this.ops.length)
+ return this.RedisManager.updateDocument(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ops,
+ this.ranges,
+ this.updateMeta,
+ this.callback
+ )
+ })
+
+ it('should not push the updates into the project history ops list', function () {
+ return this.ProjectHistoryRedisManager.queueOps.called.should.equal(
+ false
+ )
+ })
+
+ return it('should call the callback', function () {
+ return this.callback
+ .calledWith(null, this.doc_update_list_length)
+ .should.equal(true)
+ })
+ })
+
+ return describe('with a doc using project history only', function () {
+ beforeEach(function () {
+ // History type 'project-history' bypasses the track-changes list.
+ this.RedisManager.getDocVersion
+ .withArgs(this.doc_id)
+ .yields(null, this.version - this.ops.length, 'project-history')
+ return this.RedisManager.updateDocument(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ops,
+ this.ranges,
+ this.updateMeta,
+ this.callback
+ )
+ })
+
+ it('should not push the updates to the track-changes ops list', function () {
+ return this.multi.rpush
+ .calledWith(`UncompressedHistoryOps:${this.doc_id}`)
+ .should.equal(false)
+ })
+
+ it('should push the updates into the project history ops list', function () {
+ return this.ProjectHistoryRedisManager.queueOps
+ .calledWith(this.project_id, JSON.stringify(this.ops[0]))
+ .should.equal(true)
+ })
+
+ return it('should call the callback with the project update count only', function () {
+ return this.callback
+ .calledWith(null, undefined, this.project_update_list_length)
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('with an inconsistent version', function () {
+ beforeEach(function () {
+ // Current version is one short of what the ops imply → mismatch.
+ this.RedisManager.getDocVersion
+ .withArgs(this.doc_id)
+ .yields(null, this.version - this.ops.length - 1)
+ return this.RedisManager.updateDocument(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ops,
+ this.ranges,
+ this.updateMeta,
+ this.callback
+ )
+ })
+
+ it('should not call multi.exec', function () {
+ return this.multi.exec.called.should.equal(false)
+ })
+
+ return it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+
+ describe('with no updates', function () {
+ beforeEach(function () {
+ this.RedisManager.getDocVersion
+ .withArgs(this.doc_id)
+ .yields(null, this.version)
+ return this.RedisManager.updateDocument(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ [],
+ this.ranges,
+ this.updateMeta,
+ this.callback
+ )
+ })
+
+ it('should not try to enqueue doc updates', function () {
+ return this.multi.rpush.called.should.equal(false)
+ })
+
+ it('should not try to enqueue project updates', function () {
+ return this.ProjectHistoryRedisManager.queueOps.called.should.equal(
+ false
+ )
+ })
+
+ return it('should still set the doclines', function () {
+ this.multi.mset
+ .calledWith({
+ [`doclines:${this.doc_id}`]: JSON.stringify(this.lines),
+ [`DocVersion:${this.doc_id}`]: this.version,
+ [`DocHash:${this.doc_id}`]: this.hash,
+ [`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges),
+ [`lastUpdatedAt:${this.doc_id}`]: Date.now(),
+ [`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id',
+ })
+ .should.equal(true)
+ })
+ })
+
+ describe('with empty ranges', function () {
+ beforeEach(function () {
+ this.RedisManager.getDocVersion
+ .withArgs(this.doc_id)
+ .yields(null, this.version - this.ops.length)
+ return this.RedisManager.updateDocument(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ops,
+ {},
+ this.updateMeta,
+ this.callback
+ )
+ })
+
+ // Empty ranges are stored as null rather than '{}'.
+ it('should set empty ranges', function () {
+ this.multi.mset
+ .calledWith({
+ [`doclines:${this.doc_id}`]: JSON.stringify(this.lines),
+ [`DocVersion:${this.doc_id}`]: this.version,
+ [`DocHash:${this.doc_id}`]: this.hash,
+ [`Ranges:${this.doc_id}`]: null,
+ [`lastUpdatedAt:${this.doc_id}`]: Date.now(),
+ [`lastUpdatedBy:${this.doc_id}`]: 'last-author-fake-id',
+ })
+ .should.equal(true)
+ })
+ })
+
+ describe('with null bytes in the serialized doc lines', function () {
+ beforeEach(function () {
+ this.RedisManager.getDocVersion
+ .withArgs(this.doc_id)
+ .yields(null, this.version - this.ops.length)
+ // Force JSON.stringify to produce a \u0000 so the null-byte guard
+ // in the module rejects the write.
+ this.stringifyStub = sinon
+ .stub(JSON, 'stringify')
+ .callsFake(() => '["bad bytes! \u0000 <- here"]')
+ return this.RedisManager.updateDocument(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ops,
+ this.ranges,
+ this.updateMeta,
+ this.callback
+ )
+ })
+
+ afterEach(function () {
+ this.stringifyStub.restore()
+ })
+
+ it('should log an error', function () {
+ return this.logger.error.called.should.equal(true)
+ })
+
+ return it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+
+ describe('with ranges that are too big', function () {
+ beforeEach(function () {
+ this.RedisManager.getDocVersion
+ .withArgs(this.doc_id)
+ .yields(null, this.version - this.ops.length)
+ this.RedisManager._serializeRanges = sinon
+ .stub()
+ .yields(new Error('ranges are too large'))
+ return this.RedisManager.updateDocument(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ops,
+ this.ranges,
+ this.updateMeta,
+ this.callback
+ )
+ })
+
+ it('should log an error', function () {
+ return this.logger.error.called.should.equal(true)
+ })
+
+ return it('should call the callback with the error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+
+ return describe('without user id from meta', function () {
+ beforeEach(function () {
+ this.RedisManager.getDocVersion
+ .withArgs(this.doc_id)
+ .yields(null, this.version - this.ops.length)
+ return this.RedisManager.updateDocument(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ops,
+ this.ranges,
+ {},
+ this.callback
+ )
+ })
+
+ it('should unset last updater', function () {
+ this.multi.mset
+ .calledWith({
+ [`doclines:${this.doc_id}`]: JSON.stringify(this.lines),
+ [`DocVersion:${this.doc_id}`]: this.version,
+ [`DocHash:${this.doc_id}`]: this.hash,
+ [`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges),
+ [`lastUpdatedAt:${this.doc_id}`]: Date.now(),
+ [`lastUpdatedBy:${this.doc_id}`]: undefined,
+ })
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('putDocInMemory', function () {
+ beforeEach(function () {
+ // Write-side fixture: mset stores the doc fields, sadd registers the
+ // doc in the project's DocsIn set.
+ this.rclient.mset = sinon.stub().yields(null)
+ this.rclient.sadd = sinon.stub().yields()
+ this.lines = ['one', 'two', 'three', 'これは']
+ this.version = 42
+ this.hash = crypto
+ .createHash('sha1')
+ .update(JSON.stringify(this.lines), 'utf8')
+ .digest('hex')
+ this.ranges = { comments: 'mock', entries: 'mock' }
+ return (this.pathname = '/a/b/c.tex')
+ })
+
+ describe('with non-empty ranges', function () {
+ beforeEach(function (done) {
+ return this.RedisManager.putDocInMemory(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId,
+ done
+ )
+ })
+
+ it('should set all the details in a single MSET call', function () {
+ this.rclient.mset
+ .calledWith({
+ [`doclines:${this.doc_id}`]: JSON.stringify(this.lines),
+ [`ProjectId:${this.doc_id}`]: this.project_id,
+ [`DocVersion:${this.doc_id}`]: this.version,
+ [`DocHash:${this.doc_id}`]: this.hash,
+ [`Ranges:${this.doc_id}`]: JSON.stringify(this.ranges),
+ [`Pathname:${this.doc_id}`]: this.pathname,
+ [`ProjectHistoryId:${this.doc_id}`]: this.projectHistoryId,
+ })
+ .should.equal(true)
+ })
+
+ it('should add the doc_id to the project set', function () {
+ return this.rclient.sadd
+ .calledWith(`DocsIn:${this.project_id}`, this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should not log any errors', function () {
+ return this.logger.error.calledWith().should.equal(false)
+ })
+ })
+
+ describe('with empty ranges', function () {
+ beforeEach(function (done) {
+ return this.RedisManager.putDocInMemory(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ {},
+ this.pathname,
+ this.projectHistoryId,
+ done
+ )
+ })
+
+ // Empty ranges are stored as null rather than '{}'.
+ it('should unset ranges', function () {
+ this.rclient.mset
+ .calledWith({
+ [`doclines:${this.doc_id}`]: JSON.stringify(this.lines),
+ [`ProjectId:${this.doc_id}`]: this.project_id,
+ [`DocVersion:${this.doc_id}`]: this.version,
+ [`DocHash:${this.doc_id}`]: this.hash,
+ [`Ranges:${this.doc_id}`]: null,
+ [`Pathname:${this.doc_id}`]: this.pathname,
+ [`ProjectHistoryId:${this.doc_id}`]: this.projectHistoryId,
+ })
+ .should.equal(true)
+ })
+ })
+
+ describe('with null bytes in the serialized doc lines', function () {
+ beforeEach(function () {
+ // Force JSON.stringify to produce a \u0000 so the null-byte guard
+ // rejects the write.
+ this.stringifyStub = sinon
+ .stub(JSON, 'stringify')
+ .callsFake(() => '["bad bytes! \u0000 <- here"]')
+ return this.RedisManager.putDocInMemory(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId,
+ this.callback
+ )
+ })
+
+ afterEach(function () {
+ this.stringifyStub.restore()
+ })
+
+ it('should log an error', function () {
+ return this.logger.error.called.should.equal(true)
+ })
+
+ return it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+
+ return describe('with ranges that are too big', function () {
+ beforeEach(function () {
+ this.RedisManager._serializeRanges = sinon
+ .stub()
+ .yields(new Error('ranges are too large'))
+ return this.RedisManager.putDocInMemory(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId,
+ this.callback
+ )
+ })
+
+ it('should log an error', function () {
+ return this.logger.error.called.should.equal(true)
+ })
+
+ return it('should call the callback with the error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('removeDocFromMemory', function () {
+ beforeEach(function (done) {
+ // Removal uses a multi: strlen (for logging/metrics on the stored
+ // doclines), del of all per-doc keys, and srem from the project set.
+ this.multi.strlen = sinon.stub()
+ this.multi.del = sinon.stub()
+ this.multi.srem = sinon.stub()
+ this.multi.exec.yields()
+ return this.RedisManager.removeDocFromMemory(
+ this.project_id,
+ this.doc_id,
+ done
+ )
+ })
+
+ it('should check the length of the current doclines', function () {
+ return this.multi.strlen
+ .calledWith(`doclines:${this.doc_id}`)
+ .should.equal(true)
+ })
+
+ it('should delete the details in a singe call', function () {
+ return this.multi.del
+ .calledWith(
+ `doclines:${this.doc_id}`,
+ `ProjectId:${this.doc_id}`,
+ `DocVersion:${this.doc_id}`,
+ `DocHash:${this.doc_id}`,
+ `Ranges:${this.doc_id}`,
+ `Pathname:${this.doc_id}`,
+ `ProjectHistoryId:${this.doc_id}`,
+ `ProjectHistoryType:${this.doc_id}`,
+ `UnflushedTime:${this.doc_id}`,
+ `lastUpdatedAt:${this.doc_id}`,
+ `lastUpdatedBy:${this.doc_id}`
+ )
+ .should.equal(true)
+ })
+
+ it('should remove the doc_id from the project set', function () {
+ return this.multi.srem
+ .calledWith(`DocsIn:${this.project_id}`, this.doc_id)
+ .should.equal(true)
+ })
+ })
+
+ describe('clearProjectState', function () {
+ beforeEach(function (done) {
+ this.rclient.del = sinon.stub().callsArg(1)
+ return this.RedisManager.clearProjectState(this.project_id, done)
+ })
+
+ // Clearing project state is a single DEL of the ProjectState key.
+ return it('should delete the project state', function () {
+ return this.rclient.del
+ .calledWith(`ProjectState:${this.project_id}`)
+ .should.equal(true)
+ })
+ })
+
+ return describe('renameDoc', function () {
+ // NOTE(review): the getDocVersion suite at the bottom of this block is
+ // nested inside describe('renameDoc') — likely a bulk-decaffeinate
+ // artifact; it probably belongs at the top level of the RedisManager
+ // suite. Also `this.userId` is never assigned in these tests, so the
+ // calledWithExactly assertions match on undefined — confirm intended.
+ beforeEach(function () {
+ this.rclient.rpush = sinon.stub().yields()
+ this.rclient.set = sinon.stub().yields()
+ return (this.update = {
+ id: this.doc_id,
+ pathname: (this.pathname = 'pathname'),
+ newPathname: (this.newPathname = 'new-pathname'),
+ })
+ })
+
+ describe('the document is cached in redis', function () {
+ beforeEach(function () {
+ // getDoc yields lines + version, i.e. the doc is resident in redis.
+ this.RedisManager.getDoc = sinon
+ .stub()
+ .callsArgWith(2, null, 'lines', 'version')
+ this.ProjectHistoryRedisManager.queueRenameEntity = sinon
+ .stub()
+ .yields()
+ return this.RedisManager.renameDoc(
+ this.project_id,
+ this.doc_id,
+ this.userId,
+ this.update,
+ this.projectHistoryId,
+ this.callback
+ )
+ })
+
+ it('update the cached pathname', function () {
+ return this.rclient.set
+ .calledWith(`Pathname:${this.doc_id}`, this.newPathname)
+ .should.equal(true)
+ })
+
+ return it('should queue an update', function () {
+ return this.ProjectHistoryRedisManager.queueRenameEntity
+ .calledWithExactly(
+ this.project_id,
+ this.projectHistoryId,
+ 'doc',
+ this.doc_id,
+ this.userId,
+ this.update,
+ this.callback
+ )
+ .should.equal(true)
+ })
+ })
+
+ describe('the document is not cached in redis', function () {
+ beforeEach(function () {
+ // getDoc yields null lines/version → not resident, so the cached
+ // pathname must not be touched, but the rename is still queued.
+ this.RedisManager.getDoc = sinon
+ .stub()
+ .callsArgWith(2, null, null, null)
+ this.ProjectHistoryRedisManager.queueRenameEntity = sinon
+ .stub()
+ .yields()
+ return this.RedisManager.renameDoc(
+ this.project_id,
+ this.doc_id,
+ this.userId,
+ this.update,
+ this.projectHistoryId,
+ this.callback
+ )
+ })
+
+ it('does not update the cached pathname', function () {
+ return this.rclient.set.called.should.equal(false)
+ })
+
+ return it('should queue an update', function () {
+ return this.ProjectHistoryRedisManager.queueRenameEntity
+ .calledWithExactly(
+ this.project_id,
+ this.projectHistoryId,
+ 'doc',
+ this.doc_id,
+ this.userId,
+ this.update,
+ this.callback
+ )
+ .should.equal(true)
+ })
+ })
+
+ return describe('getDocVersion', function () {
+ beforeEach(function () {
+ return (this.version = 12345)
+ })
+
+ describe('when the document does not have a project history type set', function () {
+ beforeEach(function () {
+ // mget returns only the version; the history-type slot is absent.
+ this.rclient.mget = sinon
+ .stub()
+ .withArgs(
+ `DocVersion:${this.doc_id}`,
+ `ProjectHistoryType:${this.doc_id}`
+ )
+ .callsArgWith(2, null, [`${this.version}`])
+ return this.RedisManager.getDocVersion(this.doc_id, this.callback)
+ })
+
+ return it('should return the document version and an undefined history type', function () {
+ return this.callback
+ .calledWithExactly(null, this.version, undefined)
+ .should.equal(true)
+ })
+ })
+
+ return describe('when the document has a project history type set', function () {
+ beforeEach(function () {
+ this.rclient.mget = sinon
+ .stub()
+ .withArgs(
+ `DocVersion:${this.doc_id}`,
+ `ProjectHistoryType:${this.doc_id}`
+ )
+ .callsArgWith(2, null, [`${this.version}`, 'project-history'])
+ return this.RedisManager.getDocVersion(this.doc_id, this.callback)
+ })
+
+ return it('should return the document version and history type', function () {
+ return this.callback
+ .calledWithExactly(null, this.version, 'project-history')
+ .should.equal(true)
+ })
+ })
+ })
+ })
diff --git a/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js
new file mode 100644
index 0000000000..0946a78442
--- /dev/null
+++ b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js
@@ -0,0 +1,438 @@
+/* eslint-disable
+ camelcase,
+ mocha/no-identical-title,
+ no-return-assign,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS202: Simplify dynamic range loops
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const text = require('../../../../app/js/sharejs/types/text')
+const RangesTracker = require('../../../../app/js/RangesTracker')
+
+describe('ShareJS text type', function () {
+ beforeEach(function () {
+ return (this.t = 'mock-thread-id')
+ })
+
+ describe('transform', function () {
+ describe('insert / insert', function () {
+ it('with an insert before', function () {
+ const dest = []
+ text._tc(dest, { i: 'foo', p: 9 }, { i: 'bar', p: 3 })
+ return dest.should.deep.equal([{ i: 'foo', p: 12 }])
+ })
+
+ it('with an insert after', function () {
+ const dest = []
+ text._tc(dest, { i: 'foo', p: 3 }, { i: 'bar', p: 9 })
+ return dest.should.deep.equal([{ i: 'foo', p: 3 }])
+ })
+
+ it("with an insert at the same place with side == 'right'", function () {
+ const dest = []
+ text._tc(dest, { i: 'foo', p: 3 }, { i: 'bar', p: 3 }, 'right')
+ return dest.should.deep.equal([{ i: 'foo', p: 6 }])
+ })
+
+ return it("with an insert at the same place with side == 'left'", function () {
+ const dest = []
+ text._tc(dest, { i: 'foo', p: 3 }, { i: 'bar', p: 3 }, 'left')
+ return dest.should.deep.equal([{ i: 'foo', p: 3 }])
+ })
+ })
+
+ describe('insert / delete', function () {
+ it('with a delete before', function () {
+ const dest = []
+ text._tc(dest, { i: 'foo', p: 9 }, { d: 'bar', p: 3 })
+ return dest.should.deep.equal([{ i: 'foo', p: 6 }])
+ })
+
+ it('with a delete after', function () {
+ const dest = []
+ text._tc(dest, { i: 'foo', p: 3 }, { d: 'bar', p: 9 })
+ return dest.should.deep.equal([{ i: 'foo', p: 3 }])
+ })
+
+ it("with a delete at the same place with side == 'right'", function () {
+ const dest = []
+ text._tc(dest, { i: 'foo', p: 3 }, { d: 'bar', p: 3 }, 'right')
+ return dest.should.deep.equal([{ i: 'foo', p: 3 }])
+ })
+
+ return it("with a delete at the same place with side == 'left'", function () {
+ const dest = []
+
+ text._tc(dest, { i: 'foo', p: 3 }, { d: 'bar', p: 3 }, 'left')
+ return dest.should.deep.equal([{ i: 'foo', p: 3 }])
+ })
+ })
+
+ describe('delete / insert', function () {
+ it('with an insert before', function () {
+ const dest = []
+ text._tc(dest, { d: 'foo', p: 9 }, { i: 'bar', p: 3 })
+ return dest.should.deep.equal([{ d: 'foo', p: 12 }])
+ })
+
+ it('with an insert after', function () {
+ const dest = []
+ text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 9 })
+ return dest.should.deep.equal([{ d: 'foo', p: 3 }])
+ })
+
+ it("with an insert at the same place with side == 'right'", function () {
+ const dest = []
+ text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 3 }, 'right')
+ return dest.should.deep.equal([{ d: 'foo', p: 6 }])
+ })
+
+ it("with an insert at the same place with side == 'left'", function () {
+ const dest = []
+ text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 3 }, 'left')
+ return dest.should.deep.equal([{ d: 'foo', p: 6 }])
+ })
+
+ return it('with a delete that overlaps the insert location', function () {
+ const dest = []
+ text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 4 })
+ return dest.should.deep.equal([
+ { d: 'f', p: 3 },
+ { d: 'oo', p: 6 },
+ ])
+ })
+ })
+
+ describe('delete / delete', function () {
+ it('with a delete before', function () {
+ const dest = []
+ text._tc(dest, { d: 'foo', p: 9 }, { d: 'bar', p: 3 })
+ return dest.should.deep.equal([{ d: 'foo', p: 6 }])
+ })
+
+ it('with a delete after', function () {
+ const dest = []
+ text._tc(dest, { d: 'foo', p: 3 }, { d: 'bar', p: 9 })
+ return dest.should.deep.equal([{ d: 'foo', p: 3 }])
+ })
+
+ it('with deleting the same content', function () {
+ const dest = []
+ text._tc(dest, { d: 'foo', p: 3 }, { d: 'foo', p: 3 }, 'right')
+ return dest.should.deep.equal([])
+ })
+
+ it('with the delete overlapping before', function () {
+ const dest = []
+ text._tc(dest, { d: 'foobar', p: 3 }, { d: 'abcfoo', p: 0 }, 'right')
+ return dest.should.deep.equal([{ d: 'bar', p: 0 }])
+ })
+
+ it('with the delete overlapping after', function () {
+ const dest = []
+ text._tc(dest, { d: 'abcfoo', p: 3 }, { d: 'foobar', p: 6 })
+ return dest.should.deep.equal([{ d: 'abc', p: 3 }])
+ })
+
+ it('with the delete overlapping the whole delete', function () {
+ const dest = []
+ text._tc(dest, { d: 'abcfoo123', p: 3 }, { d: 'foo', p: 6 })
+ return dest.should.deep.equal([{ d: 'abc123', p: 3 }])
+ })
+
+ return it('with the delete inside the whole delete', function () {
+ const dest = []
+ text._tc(dest, { d: 'foo', p: 6 }, { d: 'abcfoo123', p: 3 })
+ return dest.should.deep.equal([])
+ })
+ })
+
+ describe('comment / insert', function () {
+ it('with an insert before', function () {
+ const dest = []
+ text._tc(dest, { c: 'foo', p: 9, t: this.t }, { i: 'bar', p: 3 })
+ return dest.should.deep.equal([{ c: 'foo', p: 12, t: this.t }])
+ })
+
+ it('with an insert after', function () {
+ const dest = []
+ text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 9 })
+ return dest.should.deep.equal([{ c: 'foo', p: 3, t: this.t }])
+ })
+
+ it('with an insert at the left edge', function () {
+ const dest = []
+ text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 3 })
+ // RangesTracker doesn't inject inserts into comments on edges, so neither should we
+ return dest.should.deep.equal([{ c: 'foo', p: 6, t: this.t }])
+ })
+
+ it('with an insert at the right edge', function () {
+ const dest = []
+ text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 6 })
+ // RangesTracker doesn't inject inserts into comments on edges, so neither should we
+ return dest.should.deep.equal([{ c: 'foo', p: 3, t: this.t }])
+ })
+
+ return it('with an insert in the middle', function () {
+ const dest = []
+ text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 5 })
+ return dest.should.deep.equal([{ c: 'fobaro', p: 3, t: this.t }])
+ })
+ })
+
+ describe('comment / delete', function () {
+ it('with a delete before', function () {
+ const dest = []
+ text._tc(dest, { c: 'foo', p: 9, t: this.t }, { d: 'bar', p: 3 })
+ return dest.should.deep.equal([{ c: 'foo', p: 6, t: this.t }])
+ })
+
+      it('with a delete after', function () {
+        const dest = []
+        // Was `{ i: 'bar', p: 9 }` (an insert) — copy-paste bug: this suite
+        // tests comment/delete transforms, so the second op must be a delete.
+        // Expected output is unchanged: a delete after the comment leaves it as-is.
+        text._tc(dest, { c: 'foo', p: 3, t: this.t }, { d: 'bar', p: 9 })
+        return dest.should.deep.equal([{ c: 'foo', p: 3, t: this.t }])
+      })
+
+ it('with a delete overlapping the comment content before', function () {
+ const dest = []
+ text._tc(dest, { c: 'foobar', p: 6, t: this.t }, { d: '123foo', p: 3 })
+ return dest.should.deep.equal([{ c: 'bar', p: 3, t: this.t }])
+ })
+
+ it('with a delete overlapping the comment content after', function () {
+ const dest = []
+ text._tc(dest, { c: 'foobar', p: 6, t: this.t }, { d: 'bar123', p: 9 })
+ return dest.should.deep.equal([{ c: 'foo', p: 6, t: this.t }])
+ })
+
+ it('with a delete overlapping the comment content in the middle', function () {
+ const dest = []
+ text._tc(dest, { c: 'foo123bar', p: 6, t: this.t }, { d: '123', p: 9 })
+ return dest.should.deep.equal([{ c: 'foobar', p: 6, t: this.t }])
+ })
+
+ return it('with a delete overlapping the whole comment', function () {
+ const dest = []
+ text._tc(dest, { c: 'foo', p: 6, t: this.t }, { d: '123foo456', p: 3 })
+ return dest.should.deep.equal([{ c: '', p: 3, t: this.t }])
+ })
+ })
+
+    // Renamed from the duplicated title 'comment / insert': this block
+    // transforms an *insert* over a *comment* (note the op order), and the
+    // duplicate title violated mocha/no-identical-title.
+    describe('insert / comment', function () {
+      return it('should not do anything', function () {
+        const dest = []
+        text._tc(dest, { i: 'foo', p: 6 }, { c: 'bar', p: 3 })
+        return dest.should.deep.equal([{ i: 'foo', p: 6 }])
+      })
+    })
+
+    // Renamed from the duplicated title 'comment / delete': this block
+    // transforms a *delete* over a *comment* (note the op order), and the
+    // duplicate title violated mocha/no-identical-title.
+    describe('delete / comment', function () {
+      return it('should not do anything', function () {
+        const dest = []
+        text._tc(dest, { d: 'foo', p: 6 }, { c: 'bar', p: 3 })
+        return dest.should.deep.equal([{ d: 'foo', p: 6 }])
+      })
+    })
+
+ return describe('comment / comment', function () {
+ return it('should not do anything', function () {
+ const dest = []
+ text._tc(dest, { c: 'foo', p: 6 }, { c: 'bar', p: 3 })
+ return dest.should.deep.equal([{ c: 'foo', p: 6 }])
+ })
+ })
+ })
+
+ describe('apply', function () {
+ it('should apply an insert', function () {
+ return text.apply('foo', [{ i: 'bar', p: 2 }]).should.equal('fobaro')
+ })
+
+ it('should apply a delete', function () {
+ return text
+ .apply('foo123bar', [{ d: '123', p: 3 }])
+ .should.equal('foobar')
+ })
+
+ it('should do nothing with a comment', function () {
+ return text
+ .apply('foo123bar', [{ c: '123', p: 3 }])
+ .should.equal('foo123bar')
+ })
+
+ it('should throw an error when deleted content does not match', function () {
+ return (() => text.apply('foo123bar', [{ d: '456', p: 3 }])).should.throw(
+ Error
+ )
+ })
+
+ return it('should throw an error when comment content does not match', function () {
+ return (() => text.apply('foo123bar', [{ c: '456', p: 3 }])).should.throw(
+ Error
+ )
+ })
+ })
+
+ return describe('applying ops and comments in different orders', function () {
+ return it('should not matter which op or comment is applied first', function () {
+ let length, p
+ let asc, end
+ let asc1, end1
+ let asc3, end3
+ const transform = function (op1, op2, side) {
+ const d = []
+ text._tc(d, op1, op2, side)
+ return d
+ }
+
+ const applySnapshot = (snapshot, op) => text.apply(snapshot, op)
+
+ const applyRanges = function (rangesTracker, ops) {
+ for (const op of Array.from(ops)) {
+ rangesTracker.applyOp(op, {})
+ }
+ return rangesTracker
+ }
+
+ const commentsEqual = function (comments1, comments2) {
+ if (comments1.length !== comments2.length) {
+ return false
+ }
+ comments1.sort((a, b) => {
+ if (a.offset - b.offset === 0) {
+ return a.length - b.length
+ } else {
+ return a.offset - b.offset
+ }
+ })
+ comments2.sort((a, b) => {
+ if (a.offset - b.offset === 0) {
+ return a.length - b.length
+ } else {
+ return a.offset - b.offset
+ }
+ })
+ for (let i = 0; i < comments1.length; i++) {
+ const comment1 = comments1[i]
+ const comment2 = comments2[i]
+ if (
+ comment1.offset !== comment2.offset ||
+ comment1.length !== comment2.length
+ ) {
+ return false
+ }
+ }
+ return true
+ }
+
+ const SNAPSHOT = '123'
+
+ const OPS = []
+ // Insert ops
+ for (
+ p = 0, end = SNAPSHOT.length, asc = end >= 0;
+ asc ? p <= end : p >= end;
+ asc ? p++ : p--
+ ) {
+ OPS.push({ i: 'a', p })
+ OPS.push({ i: 'bc', p })
+ }
+ for (
+ p = 0, end1 = SNAPSHOT.length - 1, asc1 = end1 >= 0;
+ asc1 ? p <= end1 : p >= end1;
+ asc1 ? p++ : p--
+ ) {
+ var asc2, end2
+ for (
+ length = 1, end2 = SNAPSHOT.length - p, asc2 = end2 >= 1;
+ asc2 ? length <= end2 : length >= end2;
+ asc2 ? length++ : length--
+ ) {
+ OPS.push({ d: SNAPSHOT.slice(p, p + length), p })
+ }
+ }
+ for (
+ p = 0, end3 = SNAPSHOT.length - 1, asc3 = end3 >= 0;
+ asc3 ? p <= end3 : p >= end3;
+ asc3 ? p++ : p--
+ ) {
+ var asc4, end4
+ for (
+ length = 1, end4 = SNAPSHOT.length - p, asc4 = end4 >= 1;
+ asc4 ? length <= end4 : length >= end4;
+ asc4 ? length++ : length--
+ ) {
+ OPS.push({ c: SNAPSHOT.slice(p, p + length), p, t: this.t })
+ }
+ }
+
+ return (() => {
+ const result = []
+ for (var op1 of Array.from(OPS)) {
+ result.push(
+ (() => {
+ const result1 = []
+ for (const op2 of Array.from(OPS)) {
+ const op1_t = transform(op1, op2, 'left')
+ const op2_t = transform(op2, op1, 'right')
+
+ const rt12 = new RangesTracker()
+ const snapshot12 = applySnapshot(
+ applySnapshot(SNAPSHOT, [op1]),
+ op2_t
+ )
+ applyRanges(rt12, [op1])
+ applyRanges(rt12, op2_t)
+
+ const rt21 = new RangesTracker()
+ const snapshot21 = applySnapshot(
+ applySnapshot(SNAPSHOT, [op2]),
+ op1_t
+ )
+ applyRanges(rt21, [op2])
+ applyRanges(rt21, op1_t)
+
+ if (snapshot12 !== snapshot21) {
+ console.error(
+ { op1, op2, op1_t, op2_t, snapshot12, snapshot21 },
+ 'Ops are not consistent'
+ )
+ throw new Error('OT is inconsistent')
+ }
+
+ if (!commentsEqual(rt12.comments, rt21.comments)) {
+ console.log(rt12.comments)
+ console.log(rt21.comments)
+ console.error(
+ {
+ op1,
+ op2,
+ op1_t,
+ op2_t,
+ rt12_comments: rt12.comments,
+ rt21_comments: rt21.comments,
+ },
+ 'Comments are not consistent'
+ )
+ throw new Error('OT is inconsistent')
+ } else {
+ result1.push(undefined)
+ }
+ }
+ return result1
+ })()
+ )
+ }
+ return result
+ })()
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js
new file mode 100644
index 0000000000..4ac16a8fe7
--- /dev/null
+++ b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js
@@ -0,0 +1,150 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/ShareJsDB.js'
+const SandboxedModule = require('sandboxed-module')
+const Errors = require('../../../../app/js/Errors')
+
+describe('ShareJsDB', function () {
+ beforeEach(function () {
+ this.doc_id = 'document-id'
+ this.project_id = 'project-id'
+ this.doc_key = `${this.project_id}:${this.doc_id}`
+ this.callback = sinon.stub()
+ this.ShareJsDB = SandboxedModule.require(modulePath, {
+ requires: {
+ './RedisManager': (this.RedisManager = {}),
+ './Errors': Errors,
+ },
+ })
+
+ this.version = 42
+ this.lines = ['one', 'two', 'three']
+ return (this.db = new this.ShareJsDB(
+ this.project_id,
+ this.doc_id,
+ this.lines,
+ this.version
+ ))
+ })
+
+ describe('getSnapshot', function () {
+ describe('successfully', function () {
+ beforeEach(function () {
+ return this.db.getSnapshot(this.doc_key, this.callback)
+ })
+
+ it('should return the doc lines', function () {
+ return this.callback.args[0][1].snapshot.should.equal(
+ this.lines.join('\n')
+ )
+ })
+
+ it('should return the doc version', function () {
+ return this.callback.args[0][1].v.should.equal(this.version)
+ })
+
+ return it('should return the type as text', function () {
+ return this.callback.args[0][1].type.should.equal('text')
+ })
+ })
+
+ return describe('when the key does not match', function () {
+ beforeEach(function () {
+ return this.db.getSnapshot('bad:key', this.callback)
+ })
+
+ return it('should return the callback with a NotFoundError', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Errors.NotFoundError))
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('getOps', function () {
+ describe('with start == end', function () {
+ beforeEach(function () {
+ this.start = this.end = 42
+ return this.db.getOps(this.doc_key, this.start, this.end, this.callback)
+ })
+
+ return it('should return an empty array', function () {
+ return this.callback.calledWith(null, []).should.equal(true)
+ })
+ })
+
+    describe('with a non empty range', function () {
+      beforeEach(function () {
+        this.start = 35
+        this.end = 42
+        // Fixture was previously undefined, making the calledWith(null, this.ops)
+        // assertion below vacuous (undefined === undefined). Define concrete ops
+        // so the 'should return the ops' test actually checks the pass-through.
+        this.ops = ['mock-op-1', 'mock-op-2']
+        this.RedisManager.getPreviousDocOps = sinon
+          .stub().callsArgWith(3, null, this.ops)
+        return this.db.getOps(this.doc_key, this.start, this.end, this.callback)
+      })
+
+      it('should get the range from redis', function () {
+        return this.RedisManager.getPreviousDocOps
+          .calledWith(this.doc_id, this.start, this.end - 1)
+          .should.equal(true)
+      })
+
+      return it('should return the ops', function () {
+        return this.callback.calledWith(null, this.ops).should.equal(true)
+      })
+    })
+
+ return describe('with no specified end', function () {
+ beforeEach(function () {
+ this.start = 35
+ this.end = null
+ this.RedisManager.getPreviousDocOps = sinon
+ .stub()
+ .callsArgWith(3, null, this.ops)
+ return this.db.getOps(this.doc_key, this.start, this.end, this.callback)
+ })
+
+ return it('should get until the end of the list', function () {
+ return this.RedisManager.getPreviousDocOps
+ .calledWith(this.doc_id, this.start, -1)
+ .should.equal(true)
+ })
+ })
+ })
+
+ return describe('writeOps', function () {
+ return describe('writing an op', function () {
+ beforeEach(function () {
+ this.opData = {
+ op: { p: 20, t: 'foo' },
+ meta: { source: 'bar' },
+ v: this.version,
+ }
+ return this.db.writeOp(this.doc_key, this.opData, this.callback)
+ })
+
+ it('should write into appliedOps', function () {
+ return expect(this.db.appliedOps[this.doc_key]).to.deep.equal([
+ this.opData,
+ ])
+ })
+
+ return it('should call the callback without an error', function () {
+ this.callback.called.should.equal(true)
+ return (this.callback.args[0][0] != null).should.equal(false)
+ })
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js
new file mode 100644
index 0000000000..6fea29287f
--- /dev/null
+++ b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js
@@ -0,0 +1,235 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/ShareJsUpdateManager.js'
+const SandboxedModule = require('sandboxed-module')
+const crypto = require('crypto')
+
+describe('ShareJsUpdateManager', function () {
+ beforeEach(function () {
+ let Model
+ this.project_id = 'project-id-123'
+ this.doc_id = 'document-id-123'
+ this.callback = sinon.stub()
+ return (this.ShareJsUpdateManager = SandboxedModule.require(modulePath, {
+ requires: {
+ './sharejs/server/model': (Model = class Model {
+ constructor(db) {
+ this.db = db
+ }
+ }),
+ './ShareJsDB': (this.ShareJsDB = { mockDB: true }),
+ '@overleaf/redis-wrapper': {
+ createClient: () => {
+ return (this.rclient = { auth() {} })
+ },
+ },
+ './RealTimeRedisManager': (this.RealTimeRedisManager = {}),
+ './Metrics': (this.metrics = { inc: sinon.stub() }),
+ },
+ globals: {
+ clearTimeout: (this.clearTimeout = sinon.stub()),
+ },
+ }))
+ })
+
+ describe('applyUpdate', function () {
+ beforeEach(function () {
+ this.lines = ['one', 'two']
+ this.version = 34
+ this.updatedDocLines = ['onefoo', 'two']
+ const content = this.updatedDocLines.join('\n')
+ this.hash = crypto
+ .createHash('sha1')
+ .update('blob ' + content.length + '\x00')
+ .update(content, 'utf8')
+ .digest('hex')
+ this.update = { p: 4, t: 'foo', v: this.version, hash: this.hash }
+ this.model = {
+ applyOp: sinon.stub().callsArg(2),
+ getSnapshot: sinon.stub(),
+ db: {
+ appliedOps: {},
+ },
+ }
+ this.ShareJsUpdateManager.getNewShareJsModel = sinon
+ .stub()
+ .returns(this.model)
+ this.ShareJsUpdateManager._listenForOps = sinon.stub()
+ return (this.ShareJsUpdateManager.removeDocFromCache = sinon
+ .stub()
+ .callsArg(1))
+ })
+
+ describe('successfully', function () {
+ beforeEach(function (done) {
+ this.model.getSnapshot.callsArgWith(1, null, {
+ snapshot: this.updatedDocLines.join('\n'),
+ v: this.version,
+ })
+ this.model.db.appliedOps[`${this.project_id}:${this.doc_id}`] =
+ this.appliedOps = ['mock-ops']
+ return this.ShareJsUpdateManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ this.lines,
+ this.version,
+ (err, docLines, version, appliedOps) => {
+ this.callback(err, docLines, version, appliedOps)
+ return done()
+ }
+ )
+ })
+
+ it('should create a new ShareJs model', function () {
+ return this.ShareJsUpdateManager.getNewShareJsModel
+ .calledWith(this.project_id, this.doc_id, this.lines, this.version)
+ .should.equal(true)
+ })
+
+ it('should listen for ops on the model', function () {
+ return this.ShareJsUpdateManager._listenForOps
+ .calledWith(this.model)
+ .should.equal(true)
+ })
+
+ it('should send the update to ShareJs', function () {
+ return this.model.applyOp
+ .calledWith(`${this.project_id}:${this.doc_id}`, this.update)
+ .should.equal(true)
+ })
+
+ it('should get the updated doc lines', function () {
+ return this.model.getSnapshot
+ .calledWith(`${this.project_id}:${this.doc_id}`)
+ .should.equal(true)
+ })
+
+ return it('should return the updated doc lines, version and ops', function () {
+ return this.callback
+ .calledWith(null, this.updatedDocLines, this.version, this.appliedOps)
+ .should.equal(true)
+ })
+ })
+
+ describe('when applyOp fails', function () {
+ beforeEach(function (done) {
+ this.error = new Error('Something went wrong')
+ this.model.applyOp = sinon.stub().callsArgWith(2, this.error)
+ return this.ShareJsUpdateManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ this.lines,
+ this.version,
+ (err, docLines, version) => {
+ this.callback(err, docLines, version)
+ return done()
+ }
+ )
+ })
+
+ return it('should call the callback with the error', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ describe('when getSnapshot fails', function () {
+ beforeEach(function (done) {
+ this.error = new Error('Something went wrong')
+ this.model.getSnapshot.callsArgWith(1, this.error)
+ return this.ShareJsUpdateManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ this.lines,
+ this.version,
+ (err, docLines, version) => {
+ this.callback(err, docLines, version)
+ return done()
+ }
+ )
+ })
+
+ return it('should call the callback with the error', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ return describe('with an invalid hash', function () {
+ beforeEach(function (done) {
+ this.error = new Error('invalid hash')
+ this.model.getSnapshot.callsArgWith(1, null, {
+ snapshot: 'unexpected content',
+ v: this.version,
+ })
+ this.model.db.appliedOps[`${this.project_id}:${this.doc_id}`] =
+ this.appliedOps = ['mock-ops']
+ return this.ShareJsUpdateManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ this.lines,
+ this.version,
+ (err, docLines, version, appliedOps) => {
+ this.callback(err, docLines, version, appliedOps)
+ return done()
+ }
+ )
+ })
+
+ return it('should call the callback with the error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+ })
+
+ return describe('_listenForOps', function () {
+ beforeEach(function () {
+ this.model = {
+ on: (event, callback) => {
+ return (this.callback = callback)
+ },
+ }
+ sinon.spy(this.model, 'on')
+ return this.ShareJsUpdateManager._listenForOps(this.model)
+ })
+
+ it('should listen to the model for updates', function () {
+ return this.model.on.calledWith('applyOp').should.equal(true)
+ })
+
+ return describe('the callback', function () {
+ beforeEach(function () {
+ this.opData = {
+ op: { t: 'foo', p: 1 },
+ meta: { source: 'bar' },
+ }
+ this.RealTimeRedisManager.sendData = sinon.stub()
+ return this.callback(`${this.project_id}:${this.doc_id}`, this.opData)
+ })
+
+ return it('should publish the op to redis', function () {
+ return this.RealTimeRedisManager.sendData
+ .calledWith({
+ project_id: this.project_id,
+ doc_id: this.doc_id,
+ op: this.opData,
+ })
+ .should.equal(true)
+ })
+ })
+ })
+})
diff --git a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js
new file mode 100644
index 0000000000..2f23de7f68
--- /dev/null
+++ b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js
@@ -0,0 +1,689 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/UpdateManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('UpdateManager', function () {
+ beforeEach(function () {
+ let Profiler, Timer
+ this.project_id = 'project-id-123'
+ this.projectHistoryId = 'history-id-123'
+ this.doc_id = 'document-id-123'
+ this.callback = sinon.stub()
+ return (this.UpdateManager = SandboxedModule.require(modulePath, {
+ requires: {
+ './LockManager': (this.LockManager = {}),
+ './RedisManager': (this.RedisManager = {}),
+ './RealTimeRedisManager': (this.RealTimeRedisManager = {}),
+ './ShareJsUpdateManager': (this.ShareJsUpdateManager = {}),
+ './HistoryManager': (this.HistoryManager = {}),
+ './Metrics': (this.Metrics = {
+ Timer: (Timer = (function () {
+ Timer = class Timer {
+ static initClass() {
+ this.prototype.done = sinon.stub()
+ }
+ }
+ Timer.initClass()
+ return Timer
+ })()),
+ }),
+ '@overleaf/settings': (this.Settings = {}),
+ './DocumentManager': (this.DocumentManager = {}),
+ './RangesManager': (this.RangesManager = {}),
+ './SnapshotManager': (this.SnapshotManager = {}),
+ './Profiler': (Profiler = (function () {
+ Profiler = class Profiler {
+ static initClass() {
+ this.prototype.log = sinon.stub().returns({ end: sinon.stub() })
+ this.prototype.end = sinon.stub()
+ }
+ }
+ Profiler.initClass()
+ return Profiler
+ })()),
+ },
+ }))
+ })
+
+ describe('processOutstandingUpdates', function () {
+ beforeEach(function () {
+ this.UpdateManager.fetchAndApplyUpdates = sinon.stub().callsArg(2)
+ return this.UpdateManager.processOutstandingUpdates(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should apply the updates', function () {
+ return this.UpdateManager.fetchAndApplyUpdates
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+
+ return it('should time the execution', function () {
+ return this.Metrics.Timer.prototype.done.called.should.equal(true)
+ })
+ })
+
+ describe('processOutstandingUpdatesWithLock', function () {
+ describe('when the lock is free', function () {
+ beforeEach(function () {
+ this.LockManager.tryLock = sinon
+ .stub()
+ .callsArgWith(1, null, true, (this.lockValue = 'mock-lock-value'))
+ this.LockManager.releaseLock = sinon.stub().callsArg(2)
+ this.UpdateManager.continueProcessingUpdatesWithLock = sinon
+ .stub()
+ .callsArg(2)
+ return (this.UpdateManager.processOutstandingUpdates = sinon
+ .stub()
+ .callsArg(2))
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ return this.UpdateManager.processOutstandingUpdatesWithLock(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should acquire the lock', function () {
+ return this.LockManager.tryLock
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should free the lock', function () {
+ return this.LockManager.releaseLock
+ .calledWith(this.doc_id, this.lockValue)
+ .should.equal(true)
+ })
+
+ it('should process the outstanding updates', function () {
+ return this.UpdateManager.processOutstandingUpdates
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should do everything with the lock acquired', function () {
+ this.UpdateManager.processOutstandingUpdates
+ .calledAfter(this.LockManager.tryLock)
+ .should.equal(true)
+ return this.UpdateManager.processOutstandingUpdates
+ .calledBefore(this.LockManager.releaseLock)
+ .should.equal(true)
+ })
+
+ it('should continue processing new updates that may have come in', function () {
+ return this.UpdateManager.continueProcessingUpdatesWithLock
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should return the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ return describe('when processOutstandingUpdates returns an error', function () {
+ beforeEach(function () {
+ this.UpdateManager.processOutstandingUpdates = sinon
+ .stub()
+ .callsArgWith(2, (this.error = new Error('Something went wrong')))
+ return this.UpdateManager.processOutstandingUpdatesWithLock(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should free the lock', function () {
+ return this.LockManager.releaseLock
+ .calledWith(this.doc_id, this.lockValue)
+ .should.equal(true)
+ })
+
+ return it('should return the error in the callback', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+ })
+
+ return describe('when the lock is taken', function () {
+ beforeEach(function () {
+ this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, false)
+ this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2)
+ return this.UpdateManager.processOutstandingUpdatesWithLock(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should return the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+
+ return it('should not process the updates', function () {
+ return this.UpdateManager.processOutstandingUpdates.called.should.equal(
+ false
+ )
+ })
+ })
+ })
+
+ describe('continueProcessingUpdatesWithLock', function () {
+ describe('when there are outstanding updates', function () {
+ beforeEach(function () {
+ this.RealTimeRedisManager.getUpdatesLength = sinon
+ .stub()
+ .callsArgWith(1, null, 3)
+ this.UpdateManager.processOutstandingUpdatesWithLock = sinon
+ .stub()
+ .callsArg(2)
+ return this.UpdateManager.continueProcessingUpdatesWithLock(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should process the outstanding updates', function () {
+ return this.UpdateManager.processOutstandingUpdatesWithLock
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should return the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ return describe('when there are no outstanding updates', function () {
+ beforeEach(function () {
+ this.RealTimeRedisManager.getUpdatesLength = sinon
+ .stub()
+ .callsArgWith(1, null, 0)
+ this.UpdateManager.processOutstandingUpdatesWithLock = sinon
+ .stub()
+ .callsArg(2)
+ return this.UpdateManager.continueProcessingUpdatesWithLock(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should not try to process the outstanding updates', function () {
+ return this.UpdateManager.processOutstandingUpdatesWithLock.called.should.equal(
+ false
+ )
+ })
+
+ return it('should return the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+ })
+
+ describe('fetchAndApplyUpdates', function () {
+ describe('with updates', function () {
+ beforeEach(function () {
+ this.updates = [{ p: 1, t: 'foo' }]
+ this.updatedDocLines = ['updated', 'lines']
+ this.version = 34
+ this.RealTimeRedisManager.getPendingUpdatesForDoc = sinon
+ .stub()
+ .callsArgWith(1, null, this.updates)
+ this.UpdateManager.applyUpdate = sinon
+ .stub()
+ .callsArgWith(3, null, this.updatedDocLines, this.version)
+ return this.UpdateManager.fetchAndApplyUpdates(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should get the pending updates', function () {
+ return this.RealTimeRedisManager.getPendingUpdatesForDoc
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should apply the updates', function () {
+ return Array.from(this.updates).map(update =>
+ this.UpdateManager.applyUpdate
+ .calledWith(this.project_id, this.doc_id, update)
+ .should.equal(true)
+ )
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ return describe('when there are no updates', function () {
+ beforeEach(function () {
+ this.updates = []
+ this.RealTimeRedisManager.getPendingUpdatesForDoc = sinon
+ .stub()
+ .callsArgWith(1, null, this.updates)
+ this.UpdateManager.applyUpdate = sinon.stub()
+ this.RedisManager.setDocument = sinon.stub()
+ return this.UpdateManager.fetchAndApplyUpdates(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should not call applyUpdate', function () {
+ return this.UpdateManager.applyUpdate.called.should.equal(false)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+ })
+
+ describe('applyUpdate', function () {
+ beforeEach(function () {
+ this.updateMeta = { user_id: 'last-author-fake-id' }
+ this.update = { op: [{ p: 42, i: 'foo' }], meta: this.updateMeta }
+ this.updatedDocLines = ['updated', 'lines']
+ this.version = 34
+ this.lines = ['original', 'lines']
+ this.ranges = { entries: 'mock', comments: 'mock' }
+ this.updated_ranges = { entries: 'updated', comments: 'updated' }
+ this.appliedOps = [
+ { v: 42, op: 'mock-op-42' },
+ { v: 45, op: 'mock-op-45' },
+ ]
+ this.doc_ops_length = sinon.stub()
+ this.project_ops_length = sinon.stub()
+ this.pathname = '/a/b/c.tex'
+ this.DocumentManager.getDoc = sinon
+ .stub()
+ .yields(
+ null,
+ this.lines,
+ this.version,
+ this.ranges,
+ this.pathname,
+ this.projectHistoryId
+ )
+ this.RangesManager.applyUpdate = sinon
+ .stub()
+ .yields(null, this.updated_ranges, false)
+ this.ShareJsUpdateManager.applyUpdate = sinon
+ .stub()
+ .yields(null, this.updatedDocLines, this.version, this.appliedOps)
+ this.RedisManager.updateDocument = sinon
+ .stub()
+ .yields(null, this.doc_ops_length, this.project_ops_length)
+ this.RealTimeRedisManager.sendData = sinon.stub()
+ this.UpdateManager._addProjectHistoryMetadataToOps = sinon.stub()
+ return (this.HistoryManager.recordAndFlushHistoryOps = sinon
+ .stub()
+ .callsArg(5))
+ })
+
+ describe('normally', function () {
+ beforeEach(function () {
+ return this.UpdateManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ this.callback
+ )
+ })
+
+ it('should apply the updates via ShareJS', function () {
+ return this.ShareJsUpdateManager.applyUpdate
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ this.lines,
+ this.version
+ )
+ .should.equal(true)
+ })
+
+ it('should update the ranges', function () {
+ return this.RangesManager.applyUpdate
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.ranges,
+ this.appliedOps,
+ this.updatedDocLines
+ )
+ .should.equal(true)
+ })
+
+ it('should save the document', function () {
+ return this.RedisManager.updateDocument
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.updatedDocLines,
+ this.version,
+ this.appliedOps,
+ this.updated_ranges,
+ this.updateMeta
+ )
+ .should.equal(true)
+ })
+
+ it('should add metadata to the ops', function () {
+ return this.UpdateManager._addProjectHistoryMetadataToOps
+ .calledWith(
+ this.appliedOps,
+ this.pathname,
+ this.projectHistoryId,
+ this.lines
+ )
+ .should.equal(true)
+ })
+
+ it('should push the applied ops into the history queue', function () {
+ return this.HistoryManager.recordAndFlushHistoryOps
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.appliedOps,
+ this.doc_ops_length,
+ this.project_ops_length
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('with UTF-16 surrogate pairs in the update', function () {
+ beforeEach(function () {
+ this.update = { op: [{ p: 42, i: '\uD835\uDC00' }] }
+ return this.UpdateManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ this.callback
+ )
+ })
+
+ return it('should apply the update but with surrogate pairs removed', function () {
+ this.ShareJsUpdateManager.applyUpdate
+ .calledWith(this.project_id, this.doc_id, this.update)
+ .should.equal(true)
+
+ // \uFFFD is 'replacement character'
+ return this.update.op[0].i.should.equal('\uFFFD\uFFFD')
+ })
+ })
+
+ describe('with an error', function () {
+ beforeEach(function () {
+ this.error = new Error('something went wrong')
+ this.ShareJsUpdateManager.applyUpdate = sinon.stub().yields(this.error)
+ return this.UpdateManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ this.callback
+ )
+ })
+
+ it('should call RealTimeRedisManager.sendData with the error', function () {
+ return this.RealTimeRedisManager.sendData
+ .calledWith({
+ project_id: this.project_id,
+ doc_id: this.doc_id,
+ error: this.error.message,
+ })
+ .should.equal(true)
+ })
+
+ return it('should call the callback with the error', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ return describe('when ranges get collapsed', function () {
+ beforeEach(function () {
+ this.RangesManager.applyUpdate = sinon
+ .stub()
+ .yields(null, this.updated_ranges, true)
+ this.SnapshotManager.recordSnapshot = sinon.stub().yields()
+ return this.UpdateManager.applyUpdate(
+ this.project_id,
+ this.doc_id,
+ this.update,
+ this.callback
+ )
+ })
+
+ return it('should call SnapshotManager.recordSnapshot', function () {
+ return this.SnapshotManager.recordSnapshot
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.version,
+ this.pathname,
+ this.lines,
+ this.ranges
+ )
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('_addProjectHistoryMetadataToOps', function () {
+ return it('should add projectHistoryId, pathname and doc_length metadata to the ops', function () {
+ const lines = ['some', 'test', 'data']
+ const appliedOps = [
+ {
+ v: 42,
+ op: [
+ { i: 'foo', p: 4 },
+ { i: 'bar', p: 6 },
+ ],
+ },
+ {
+ v: 45,
+ op: [
+ { d: 'qux', p: 4 },
+ { i: 'bazbaz', p: 14 },
+ ],
+ },
+ { v: 49, op: [{ i: 'penguin', p: 18 }] },
+ ]
+ this.UpdateManager._addProjectHistoryMetadataToOps(
+ appliedOps,
+ this.pathname,
+ this.projectHistoryId,
+ lines
+ )
+ return appliedOps.should.deep.equal([
+ {
+ projectHistoryId: this.projectHistoryId,
+ v: 42,
+ op: [
+ { i: 'foo', p: 4 },
+ { i: 'bar', p: 6 },
+ ],
+ meta: {
+ pathname: this.pathname,
+ doc_length: 14,
+ },
+ },
+ {
+ projectHistoryId: this.projectHistoryId,
+ v: 45,
+ op: [
+ { d: 'qux', p: 4 },
+ { i: 'bazbaz', p: 14 },
+ ],
+ meta: {
+ pathname: this.pathname,
+ doc_length: 20,
+ }, // 14 + 'foo' + 'bar'
+ },
+ {
+ projectHistoryId: this.projectHistoryId,
+ v: 49,
+ op: [{ i: 'penguin', p: 18 }],
+ meta: {
+ pathname: this.pathname,
+ doc_length: 23,
+ }, // 14 - 'qux' + 'bazbaz'
+ },
+ ])
+ })
+ })
+
+ return describe('lockUpdatesAndDo', function () {
+ beforeEach(function () {
+ this.method = sinon.stub().callsArgWith(3, null, this.response_arg1)
+ this.callback = sinon.stub()
+ this.arg1 = 'argument 1'
+ this.response_arg1 = 'response argument 1'
+ this.lockValue = 'mock-lock-value'
+ this.LockManager.getLock = sinon
+ .stub()
+ .callsArgWith(1, null, this.lockValue)
+ return (this.LockManager.releaseLock = sinon.stub().callsArg(2))
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.UpdateManager.continueProcessingUpdatesWithLock = sinon.stub()
+ this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2)
+ return this.UpdateManager.lockUpdatesAndDo(
+ this.method,
+ this.project_id,
+ this.doc_id,
+ this.arg1,
+ this.callback
+ )
+ })
+
+ it('should lock the doc', function () {
+ return this.LockManager.getLock
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should process any outstanding updates', function () {
+ return this.UpdateManager.processOutstandingUpdates
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should call the method', function () {
+ return this.method
+ .calledWith(this.project_id, this.doc_id, this.arg1)
+ .should.equal(true)
+ })
+
+ it('should return the method response to the callback', function () {
+ return this.callback
+ .calledWith(null, this.response_arg1)
+ .should.equal(true)
+ })
+
+ it('should release the lock', function () {
+ return this.LockManager.releaseLock
+ .calledWith(this.doc_id, this.lockValue)
+ .should.equal(true)
+ })
+
+ return it('should continue processing updates', function () {
+ return this.UpdateManager.continueProcessingUpdatesWithLock
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+ })
+
+ describe('when processOutstandingUpdates returns an error', function () {
+ beforeEach(function () {
+ this.UpdateManager.processOutstandingUpdates = sinon
+ .stub()
+ .callsArgWith(2, (this.error = new Error('Something went wrong')))
+ return this.UpdateManager.lockUpdatesAndDo(
+ this.method,
+ this.project_id,
+ this.doc_id,
+ this.arg1,
+ this.callback
+ )
+ })
+
+ it('should free the lock', function () {
+ return this.LockManager.releaseLock
+ .calledWith(this.doc_id, this.lockValue)
+ .should.equal(true)
+ })
+
+ return it('should return the error in the callback', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ return describe('when the method returns an error', function () {
+ beforeEach(function () {
+ this.UpdateManager.processOutstandingUpdates = sinon.stub().callsArg(2)
+ this.method = sinon
+ .stub()
+ .callsArgWith(
+ 3,
+ (this.error = new Error('something went wrong')),
+ this.response_arg1
+ )
+ return this.UpdateManager.lockUpdatesAndDo(
+ this.method,
+ this.project_id,
+ this.doc_id,
+ this.arg1,
+ this.callback
+ )
+ })
+
+ it('should free the lock', function () {
+ return this.LockManager.releaseLock
+ .calledWith(this.doc_id, this.lockValue)
+ .should.equal(true)
+ })
+
+ return it('should return the error in the callback', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+ })
+})