diff --git a/services/track-changes/.dockerignore b/services/track-changes/.dockerignore
new file mode 100644
index 0000000000..ba1c3442de
--- /dev/null
+++ b/services/track-changes/.dockerignore
@@ -0,0 +1,7 @@
+node_modules/*
+gitrev
+.git
+.gitignore
+.npm
+.nvmrc
+nodemon.json
diff --git a/services/track-changes/.eslintignore b/services/track-changes/.eslintignore
new file mode 100644
index 0000000000..c59a33a4df
--- /dev/null
+++ b/services/track-changes/.eslintignore
@@ -0,0 +1 @@
+app/lib/diff_match_patch.js
diff --git a/services/track-changes/.eslintrc b/services/track-changes/.eslintrc
new file mode 100644
index 0000000000..a97661b15f
--- /dev/null
+++ b/services/track-changes/.eslintrc
@@ -0,0 +1,86 @@
+// this file was auto-generated, do not edit it directly.
+// instead run bin/update_build_scripts from
+// https://github.com/sharelatex/sharelatex-dev-environment
+{
+ "extends": [
+ "eslint:recommended",
+ "standard",
+ "prettier"
+ ],
+ "parserOptions": {
+ "ecmaVersion": 2018
+ },
+ "plugins": [
+ "mocha",
+ "chai-expect",
+ "chai-friendly"
+ ],
+ "env": {
+ "node": true,
+ "mocha": true
+ },
+ "rules": {
+ // TODO(das7pad): remove overrides after fixing all the violations manually (https://github.com/overleaf/issues/issues/3882#issuecomment-878999671)
+ // START of temporary overrides
+ "array-callback-return": "off",
+ "no-dupe-else-if": "off",
+ "no-var": "off",
+ "no-empty": "off",
+ "node/handle-callback-err": "off",
+ "no-loss-of-precision": "off",
+ "node/no-callback-literal": "off",
+ "node/no-path-concat": "off",
+ "prefer-regex-literals": "off",
+ // END of temporary overrides
+
+ // Swap the no-unused-expressions rule with a more chai-friendly one
+ "no-unused-expressions": 0,
+ "chai-friendly/no-unused-expressions": "error",
+
+ // Do not allow importing of implicit dependencies.
+ "import/no-extraneous-dependencies": "error"
+ },
+ "overrides": [
+ {
+ // Test specific rules
+ "files": ["test/**/*.js"],
+ "globals": {
+ "expect": true
+ },
+ "rules": {
+ // mocha-specific rules
+ "mocha/handle-done-callback": "error",
+ "mocha/no-exclusive-tests": "error",
+ "mocha/no-global-tests": "error",
+ "mocha/no-identical-title": "error",
+ "mocha/no-nested-tests": "error",
+ "mocha/no-pending-tests": "error",
+ "mocha/no-skipped-tests": "error",
+ "mocha/no-mocha-arrows": "error",
+
+ // chai-specific rules
+ "chai-expect/missing-assertion": "error",
+ "chai-expect/terminating-properties": "error",
+
+ // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests.
+ // we don't enforce this at the top-level - just in tests to manage `this` scope
+ // based on mocha's context mechanism
+ "mocha/prefer-arrow-callback": "error"
+ }
+ },
+ {
+ // Backend specific rules
+ "files": ["app/**/*.js", "app.js", "index.js"],
+ "rules": {
+ // don't allow console.log in backend code
+ "no-console": "error",
+
+ // Do not allow importing of implicit dependencies.
+ "import/no-extraneous-dependencies": ["error", {
+ // Do not allow importing of devDependencies.
+ "devDependencies": false
+ }]
+ }
+ }
+ ]
+}
diff --git a/services/track-changes/.github/ISSUE_TEMPLATE.md b/services/track-changes/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000000..e0093aa90c
--- /dev/null
+++ b/services/track-changes/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,38 @@
+
+
+## Steps to Reproduce
+
+
+
+1.
+2.
+3.
+
+## Expected Behaviour
+
+
+## Observed Behaviour
+
+
+
+## Context
+
+
+## Technical Info
+
+
+* URL:
+* Browser Name and version:
+* Operating System and version (desktop or mobile):
+* Signed in as:
+* Project and/or file:
+
+## Analysis
+
+
+## Who Needs to Know?
+
+
+
+-
+-
diff --git a/services/track-changes/.github/PULL_REQUEST_TEMPLATE.md b/services/track-changes/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..12bb2eeb3f
--- /dev/null
+++ b/services/track-changes/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,48 @@
+
+
+
+
+
+### Description
+
+
+
+#### Screenshots
+
+
+
+#### Related Issues / PRs
+
+
+
+### Review
+
+
+
+#### Potential Impact
+
+
+
+#### Manual Testing Performed
+
+- [ ]
+- [ ]
+
+#### Accessibility
+
+
+
+### Deployment
+
+
+
+#### Deployment Checklist
+
+- [ ] Update documentation not included in the PR (if any)
+- [ ]
+
+#### Metrics and Monitoring
+
+
+
+#### Who Needs to Know?
diff --git a/services/track-changes/.github/dependabot.yml b/services/track-changes/.github/dependabot.yml
new file mode 100644
index 0000000000..c856753655
--- /dev/null
+++ b/services/track-changes/.github/dependabot.yml
@@ -0,0 +1,23 @@
+version: 2
+updates:
+ - package-ecosystem: "npm"
+ directory: "/"
+ schedule:
+ interval: "daily"
+
+ pull-request-branch-name:
+ # Separate sections of the branch name with a hyphen
+ # Docker images use the branch name and do not support slashes in tags
+ # https://github.com/overleaf/google-ops/issues/822
+ # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#pull-request-branch-nameseparator
+ separator: "-"
+
+ # Block informal upgrades -- security upgrades use a separate queue.
+ # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit
+ open-pull-requests-limit: 0
+
+ # currently assign team-magma to all dependabot PRs - this may change in
+ # future if we reorganise teams
+ labels:
+ - "dependencies"
+ - "type:maintenance"
diff --git a/services/track-changes/.gitignore b/services/track-changes/.gitignore
new file mode 100644
index 0000000000..008dc714ba
--- /dev/null
+++ b/services/track-changes/.gitignore
@@ -0,0 +1,7 @@
+**.swp
+node_modules/
+forever/
+*.js.map
+
+# managed by dev-environment$ bin/update_build_scripts
+.npmrc
diff --git a/services/track-changes/.mocharc.json b/services/track-changes/.mocharc.json
new file mode 100644
index 0000000000..dc3280aa96
--- /dev/null
+++ b/services/track-changes/.mocharc.json
@@ -0,0 +1,3 @@
+{
+ "require": "test/setup.js"
+}
diff --git a/services/track-changes/.nvmrc b/services/track-changes/.nvmrc
new file mode 100644
index 0000000000..5a80a7e912
--- /dev/null
+++ b/services/track-changes/.nvmrc
@@ -0,0 +1 @@
+12.22.3
diff --git a/services/track-changes/.prettierrc b/services/track-changes/.prettierrc
new file mode 100644
index 0000000000..c92c3526e7
--- /dev/null
+++ b/services/track-changes/.prettierrc
@@ -0,0 +1,11 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/sharelatex/sharelatex-dev-environment
+{
+ "arrowParens": "avoid",
+ "semi": false,
+ "singleQuote": true,
+ "trailingComma": "es5",
+ "tabWidth": 2,
+ "useTabs": false
+}
diff --git a/services/track-changes/Dockerfile b/services/track-changes/Dockerfile
new file mode 100644
index 0000000000..6b286376dc
--- /dev/null
+++ b/services/track-changes/Dockerfile
@@ -0,0 +1,23 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/sharelatex/sharelatex-dev-environment
+
+FROM node:12.22.3 as base
+
+WORKDIR /app
+
+FROM base as app
+
+#wildcard as some files may not be in all repos
+COPY package*.json npm-shrink*.json /app/
+
+RUN npm ci --quiet
+
+COPY . /app
+
+FROM base
+
+COPY --from=app /app /app
+USER node
+
+CMD ["node", "--expose-gc", "app.js"]
diff --git a/services/track-changes/LICENSE b/services/track-changes/LICENSE
new file mode 100644
index 0000000000..ac8619dcb9
--- /dev/null
+++ b/services/track-changes/LICENSE
@@ -0,0 +1,662 @@
+
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
diff --git a/services/track-changes/Makefile b/services/track-changes/Makefile
new file mode 100644
index 0000000000..664e4ca96d
--- /dev/null
+++ b/services/track-changes/Makefile
@@ -0,0 +1,90 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/sharelatex/sharelatex-dev-environment
+
+BUILD_NUMBER ?= local
+BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
+PROJECT_NAME = track-changes
+BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]')
+
+DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
+DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
+ BRANCH_NAME=$(BRANCH_NAME) \
+ PROJECT_NAME=$(PROJECT_NAME) \
+ MOCHA_GREP=${MOCHA_GREP} \
+ docker-compose ${DOCKER_COMPOSE_FLAGS}
+
+DOCKER_COMPOSE_TEST_ACCEPTANCE = \
+ COMPOSE_PROJECT_NAME=test_acceptance_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)
+
+DOCKER_COMPOSE_TEST_UNIT = \
+ COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)
+
+clean:
+ -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
+ -docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
+ -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local
+ -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local
+
+format:
+ $(DOCKER_COMPOSE) run --rm test_unit npm run --silent format
+
+format_fix:
+ $(DOCKER_COMPOSE) run --rm test_unit npm run --silent format:fix
+
+lint:
+ $(DOCKER_COMPOSE) run --rm test_unit npm run --silent lint
+
+test: format lint test_unit test_acceptance
+
+test_unit:
+ifneq (,$(wildcard test/unit))
+ $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit
+ $(MAKE) test_unit_clean
+endif
+
+test_clean: test_unit_clean
+test_unit_clean:
+ifneq (,$(wildcard test/unit))
+ $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0
+endif
+
+test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run
+ $(MAKE) test_acceptance_clean
+
+test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug
+ $(MAKE) test_acceptance_clean
+
+test_acceptance_run:
+ifneq (,$(wildcard test/acceptance))
+ $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance
+endif
+
+test_acceptance_run_debug:
+ifneq (,$(wildcard test/acceptance))
+ $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk
+endif
+
+test_clean: test_acceptance_clean
+test_acceptance_clean:
+ $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0
+
+test_acceptance_pre_run:
+ifneq (,$(wildcard test/acceptance/js/scripts/pre-run))
+ $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
+endif
+
+build:
+ docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
+ --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
+ .
+
+tar:
+ $(DOCKER_COMPOSE) up tar
+
+publish:
+
+ docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
+
+
+.PHONY: clean test test_unit test_acceptance test_clean build publish
diff --git a/services/track-changes/README.md b/services/track-changes/README.md
new file mode 100644
index 0000000000..6a0f1d79ee
--- /dev/null
+++ b/services/track-changes/README.md
@@ -0,0 +1,20 @@
+overleaf/track-changes
+========================
+
+An API for converting raw editor updates into a compressed and browseable history.
+
+Acceptance tests can be run with the command
+```
+AWS_BUCKET=<bucket-name> AWS_ACCESS_KEY_ID=<aws-access-key> AWS_SECRET_ACCESS_KEY=<aws-secret-access-key> make test
+```
+where `bucket-name` is an AWS S3 bucket you can write to, and `aws-access-key` and `aws-secret-access-key` are AWS credentials with access to that bucket.
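+
+For example, a hypothetical invocation (the bucket name and keys below are placeholders, not real values) might look like:
+```
+AWS_BUCKET=my-test-bucket AWS_ACCESS_KEY_ID=AKIAEXAMPLEKEY AWS_SECRET_ACCESS_KEY=exampleSecretKey make test
+```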
+
+
+
+
+License
+-------
+
+The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file.
+
+Copyright (c) Overleaf, 2014-2019.
diff --git a/services/track-changes/app.js b/services/track-changes/app.js
new file mode 100644
index 0000000000..a009431de0
--- /dev/null
+++ b/services/track-changes/app.js
@@ -0,0 +1,161 @@
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const Metrics = require('@overleaf/metrics')
+Metrics.initialize('track-changes')
+const Settings = require('@overleaf/settings')
+const logger = require('logger-sharelatex')
+const TrackChangesLogger = logger.initialize('track-changes').logger
+
+if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
+ logger.initializeErrorReporting(Settings.sentry.dsn)
+}
+
+// log updates as truncated strings
+const truncateFn = updates =>
+ JSON.parse(
+ JSON.stringify(updates, function (key, value) {
+ let len
+ if (typeof value === 'string' && (len = value.length) > 80) {
+ return (
+ value.substr(0, 32) +
+ `...(message of length ${len} truncated)...` +
+ value.substr(-32)
+ )
+ } else {
+ return value
+ }
+ })
+ )
+TrackChangesLogger.addSerializers({
+ rawUpdate: truncateFn,
+ rawUpdates: truncateFn,
+ newUpdates: truncateFn,
+ lastUpdate: truncateFn,
+})
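+// Example (illustrative): a 200-character string value inside a logged update is
+// serialized as its first 32 characters, then '...(message of length 200 truncated)...',
+// then its last 32 characters, keeping log lines small for large document chunks.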
+
+const Path = require('path')
+
+Metrics.memory.monitor(logger)
+
+const childProcess = require('child_process')
+
+const mongodb = require('./app/js/mongodb')
+const HttpController = require('./app/js/HttpController')
+const express = require('express')
+const bodyParser = require('body-parser')
+
+const app = express()
+
+app.use(bodyParser.json())
+
+app.use(Metrics.http.monitor(logger))
+
+Metrics.injectMetricsRoute(app)
+
+app.post('/project/:project_id/doc/:doc_id/flush', HttpController.flushDoc)
+
+app.get('/project/:project_id/doc/:doc_id/diff', HttpController.getDiff)
+
+app.get('/project/:project_id/doc/:doc_id/check', HttpController.checkDoc)
+
+app.get('/project/:project_id/updates', HttpController.getUpdates)
+app.get('/project/:project_id/export', HttpController.exportProject)
+
+app.post('/project/:project_id/flush', HttpController.flushProject)
+
+app.post(
+ '/project/:project_id/doc/:doc_id/version/:version/restore',
+ HttpController.restore
+)
+
+app.post('/project/:project_id/doc/:doc_id/push', HttpController.pushDocHistory)
+app.post('/project/:project_id/doc/:doc_id/pull', HttpController.pullDocHistory)
+
+app.post('/flush/all', HttpController.flushAll)
+app.post('/check/dangling', HttpController.checkDanglingUpdates)
+
+let packWorker = null // use a single packing worker
+
+app.post('/pack', function (req, res, next) {
+ if (packWorker != null) {
+ return res.send('pack already running')
+ } else {
+ logger.log('running pack')
+ packWorker = childProcess.fork(
+ Path.join(__dirname, '/app/js/PackWorker.js'),
+ [
+ req.query.limit || 1000,
+ req.query.delay || 1000,
+ req.query.timeout || 30 * 60 * 1000,
+ ]
+ )
+ packWorker.on('exit', function (code, signal) {
+ logger.log({ code, signal }, 'history auto pack exited')
+ return (packWorker = null)
+ })
+ return res.send('pack started')
+ }
+})
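+// Illustrative usage (query values are hypothetical): POST /pack?limit=500&delay=2000 forks a
+// single PackWorker with limit 500, delay 2000ms and the default 30 minute timeout;
+// further POSTs respond with 'pack already running' until that worker exits.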
+
+app.get('/status', (req, res, next) => res.send('track-changes is alive'))
+
+app.get('/oops', function (req, res, next) {
+ throw new Error('dummy test error')
+})
+
+app.get('/check_lock', HttpController.checkLock)
+
+app.get('/health_check', HttpController.healthCheck)
+
+app.use(function (error, req, res, next) {
+ logger.error({ err: error, req }, 'an internal error occurred')
+ return res.sendStatus(500)
+})
+
+const port =
+ __guard__(
+ Settings.internal != null ? Settings.internal.trackchanges : undefined,
+ x => x.port
+ ) || 3015
+const host =
+ __guard__(
+ Settings.internal != null ? Settings.internal.trackchanges : undefined,
+ x1 => x1.host
+ ) || 'localhost'
+
+if (!module.parent) {
+ // Called directly
+ mongodb
+ .waitForDb()
+ .then(() => {
+ app.listen(port, host, function (error) {
+ if (error != null) {
+ return logger.error(
+ { err: error },
+ 'could not start track-changes server'
+ )
+ } else {
+ return logger.info(
+ `trackchanges starting up, listening on ${host}:${port}`
+ )
+ }
+ })
+ })
+ .catch(err => {
+ logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
+ process.exit(1)
+ })
+}
+
+module.exports = app
+
+function __guard__(value, transform) {
+ return typeof value !== 'undefined' && value !== null
+ ? transform(value)
+ : undefined
+}
diff --git a/services/track-changes/app/js/DiffGenerator.js b/services/track-changes/app/js/DiffGenerator.js
new file mode 100644
index 0000000000..0d8ef0be94
--- /dev/null
+++ b/services/track-changes/app/js/DiffGenerator.js
@@ -0,0 +1,340 @@
+/* eslint-disable
+ camelcase,
+ no-proto,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let DiffGenerator
+var ConsistencyError = function (message) {
+ const error = new Error(message)
+ error.name = 'ConsistencyError'
+ error.__proto__ = ConsistencyError.prototype
+ return error
+}
+ConsistencyError.prototype.__proto__ = Error.prototype
+
+const logger = require('logger-sharelatex')
+
+module.exports = DiffGenerator = {
+ ConsistencyError,
+
+ rewindUpdate(content, update) {
+ for (let j = update.op.length - 1, i = j; j >= 0; j--, i = j) {
+ const op = update.op[i]
+ if (op.broken !== true) {
+ try {
+ content = DiffGenerator.rewindOp(content, op)
+ } catch (e) {
+ if (e instanceof ConsistencyError && i === update.op.length - 1) {
+ // catch known case where the last op in an array has been
+ // merged into a later op
+ logger.error(
+ { err: e, update, op: JSON.stringify(op) },
+ 'marking op as broken'
+ )
+ op.broken = true
+ } else {
+ throw e // rethrow the exception
+ }
+ }
+ }
+ }
+ return content
+ },
+
+ rewindOp(content, op) {
+ let p
+ if (op.i != null) {
+ // ShareJS will accept an op where p > content.length when applied,
+ // and it applies as though p == content.length. However, the op is
+ // passed to us with the original p > content.length. Detect if that
+ // is the case with this op, and shift p back appropriately to match
+ // ShareJS if so.
+ ;({ p } = op)
+ const max_p = content.length - op.i.length
+ if (p > max_p) {
+ logger.warn({ max_p, p }, 'truncating position to content length')
+ p = max_p
+ }
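+ // Worked example (hypothetical op): content 'aXY' with op { i: 'XY', p: 5 } gives
+ // max_p = 3 - 2 = 1, so p is clamped from 5 to 1; the slice at p then equals op.i
+ // and rewinding returns 'a', matching how ShareJS would have applied the insert.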
+
+ const textToBeRemoved = content.slice(p, p + op.i.length)
+ if (op.i !== textToBeRemoved) {
+ throw new ConsistencyError(
+ `Inserted content, '${op.i}', does not match text to be removed, '${textToBeRemoved}'`
+ )
+ }
+
+ return content.slice(0, p) + content.slice(p + op.i.length)
+ } else if (op.d != null) {
+ return content.slice(0, op.p) + op.d + content.slice(op.p)
+ } else {
+ return content
+ }
+ },
+
+ rewindUpdates(content, updates) {
+ for (const update of Array.from(updates.reverse())) {
+ try {
+ content = DiffGenerator.rewindUpdate(content, update)
+ } catch (e) {
+ e.attempted_update = update // keep a record of the attempted update
+ throw e // rethrow the exception
+ }
+ }
+ return content
+ },
+
+ buildDiff(initialContent, updates) {
+ let diff = [{ u: initialContent }]
+ for (const update of Array.from(updates)) {
+ diff = DiffGenerator.applyUpdateToDiff(diff, update)
+ }
+ diff = DiffGenerator.compressDiff(diff)
+ return diff
+ },
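+ // Based on applyOpToDiff/compressDiff below, each part of a diff built here is one of:
+ //   { u: 'unchanged text' },
+ //   { i: 'inserted text', meta: { user, start_ts, end_ts, ... } }, or
+ //   { d: 'deleted text', meta: { user, start_ts, end_ts, ... } },
+ // and compressDiff merges adjacent inserts/deletes from the same user.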
+
+ compressDiff(diff) {
+ const newDiff = []
+ for (const part of Array.from(diff)) {
+ const lastPart = newDiff[newDiff.length - 1]
+ if (
+ lastPart != null &&
+ (lastPart.meta != null ? lastPart.meta.user : undefined) != null &&
+ (part.meta != null ? part.meta.user : undefined) != null
+ ) {
+ if (
+ lastPart.i != null &&
+ part.i != null &&
+ lastPart.meta.user.id === part.meta.user.id
+ ) {
+ lastPart.i += part.i
+ lastPart.meta.start_ts = Math.min(
+ lastPart.meta.start_ts,
+ part.meta.start_ts
+ )
+ lastPart.meta.end_ts = Math.max(
+ lastPart.meta.end_ts,
+ part.meta.end_ts
+ )
+ } else if (
+ lastPart.d != null &&
+ part.d != null &&
+ lastPart.meta.user.id === part.meta.user.id
+ ) {
+ lastPart.d += part.d
+ lastPart.meta.start_ts = Math.min(
+ lastPart.meta.start_ts,
+ part.meta.start_ts
+ )
+ lastPart.meta.end_ts = Math.max(
+ lastPart.meta.end_ts,
+ part.meta.end_ts
+ )
+ } else {
+ newDiff.push(part)
+ }
+ } else {
+ newDiff.push(part)
+ }
+ }
+ return newDiff
+ },
+
+ applyOpToDiff(diff, op, meta) {
+ let consumedDiff
+ const position = 0
+
+ let remainingDiff = diff.slice()
+ ;({ consumedDiff, remainingDiff } = DiffGenerator._consumeToOffset(
+ remainingDiff,
+ op.p
+ ))
+ const newDiff = consumedDiff
+
+ if (op.i != null) {
+ newDiff.push({
+ i: op.i,
+ meta,
+ })
+ } else if (op.d != null) {
+ ;({ consumedDiff, remainingDiff } =
+ DiffGenerator._consumeDiffAffectedByDeleteOp(remainingDiff, op, meta))
+ newDiff.push(...Array.from(consumedDiff || []))
+ }
+
+ newDiff.push(...Array.from(remainingDiff || []))
+
+ return newDiff
+ },
+
+ applyUpdateToDiff(diff, update) {
+ for (const op of Array.from(update.op)) {
+ if (op.broken !== true) {
+ diff = DiffGenerator.applyOpToDiff(diff, op, update.meta)
+ }
+ }
+ return diff
+ },
+
+ _consumeToOffset(remainingDiff, totalOffset) {
+ let part
+ const consumedDiff = []
+ let position = 0
+ while ((part = remainingDiff.shift())) {
+ const length = DiffGenerator._getLengthOfDiffPart(part)
+ if (part.d != null) {
+ consumedDiff.push(part)
+ } else if (position + length >= totalOffset) {
+ const partOffset = totalOffset - position
+ if (partOffset > 0) {
+ consumedDiff.push(DiffGenerator._slicePart(part, 0, partOffset))
+ }
+ if (partOffset < length) {
+ remainingDiff.unshift(DiffGenerator._slicePart(part, partOffset))
+ }
+ break
+ } else {
+ position += length
+ consumedDiff.push(part)
+ }
+ }
+
+ return {
+ consumedDiff,
+ remainingDiff,
+ }
+ },
+
+ _consumeDiffAffectedByDeleteOp(remainingDiff, deleteOp, meta) {
+ const consumedDiff = []
+ let remainingOp = deleteOp
+ while (remainingOp && remainingDiff.length > 0) {
+ let newPart
+ ;({ newPart, remainingDiff, remainingOp } =
+ DiffGenerator._consumeDeletedPart(remainingDiff, remainingOp, meta))
+ if (newPart != null) {
+ consumedDiff.push(newPart)
+ }
+ }
+ return {
+ consumedDiff,
+ remainingDiff,
+ }
+ },
+
+ _consumeDeletedPart(remainingDiff, op, meta) {
+ let deletedContent, newPart, remainingOp
+ const part = remainingDiff.shift()
+ const partLength = DiffGenerator._getLengthOfDiffPart(part)
+
+ if (part.d != null) {
+ // Skip existing deletes
+ remainingOp = op
+ newPart = part
+ } else if (partLength > op.d.length) {
+ // Only the first bit of the part has been deleted
+ const remainingPart = DiffGenerator._slicePart(part, op.d.length)
+ remainingDiff.unshift(remainingPart)
+
+ deletedContent = DiffGenerator._getContentOfPart(part).slice(
+ 0,
+ op.d.length
+ )
+ if (deletedContent !== op.d) {
+ throw new ConsistencyError(
+ `deleted content, '${deletedContent}', does not match delete op, '${op.d}'`
+ )
+ }
+
+ if (part.u != null) {
+ newPart = {
+ d: op.d,
+ meta,
+ }
+ } else if (part.i != null) {
+ newPart = null
+ }
+
+ remainingOp = null
+ } else if (partLength === op.d.length) {
+ // The entire part has been deleted, but it is the last part
+
+ deletedContent = DiffGenerator._getContentOfPart(part)
+ if (deletedContent !== op.d) {
+ throw new ConsistencyError(
+ `deleted content, '${deletedContent}', does not match delete op, '${op.d}'`
+ )
+ }
+
+ if (part.u != null) {
+ newPart = {
+ d: op.d,
+ meta,
+ }
+ } else if (part.i != null) {
+ newPart = null
+ }
+
+ remainingOp = null
+ } else if (partLength < op.d.length) {
+ // The entire part has been deleted and there is more
+
+ deletedContent = DiffGenerator._getContentOfPart(part)
+ const opContent = op.d.slice(0, deletedContent.length)
+ if (deletedContent !== opContent) {
+ throw new ConsistencyError(
+ `deleted content, '${deletedContent}', does not match delete op, '${opContent}'`
+ )
+ }
+
+ if (part.u) {
+ newPart = {
+ d: part.u,
+ meta,
+ }
+ } else if (part.i != null) {
+ newPart = null
+ }
+
+ remainingOp = {
+ p: op.p,
+ d: op.d.slice(DiffGenerator._getLengthOfDiffPart(part)),
+ }
+ }
+
+ return {
+ newPart,
+ remainingDiff,
+ remainingOp,
+ }
+ },
+
+ _slicePart(basePart, from, to) {
+ let part
+ if (basePart.u != null) {
+ part = { u: basePart.u.slice(from, to) }
+ } else if (basePart.i != null) {
+ part = { i: basePart.i.slice(from, to) }
+ }
+ if (basePart.meta != null) {
+ part.meta = basePart.meta
+ }
+ return part
+ },
+
+ _getLengthOfDiffPart(part) {
+ return (part.u || part.d || part.i || '').length
+ },
+
+ _getContentOfPart(part) {
+ return part.u || part.d || part.i || ''
+ },
+}
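As a rough illustration of how the helpers above fit together, here is a minimal sketch of driving `DiffGenerator.buildDiff` directly. The update and `meta.user` shapes are assumptions based on the fields the code reads (not taken from the service's fixtures), and the require path is relative to the service root.

```js
// Hypothetical inputs: two single-op updates from the same user.
const DiffGenerator = require('./app/js/DiffGenerator')

const initialContent = 'hello world'
const updates = [
  {
    v: 42,
    op: [{ p: 5, i: ' brave' }], // insert " brave" at offset 5
    meta: { user: { id: 'user-1' }, start_ts: 1000, end_ts: 1000 },
  },
  {
    v: 43,
    op: [{ p: 0, d: 'hello' }], // delete "hello" from the start
    meta: { user: { id: 'user-1' }, start_ts: 2000, end_ts: 2000 },
  },
]

// buildDiff applies each update in order, then compressDiff merges adjacent
// inserts/deletes made by the same user.
const diff = DiffGenerator.buildDiff(initialContent, updates)
// Expected shape, roughly:
// [ { d: 'hello', meta }, { i: ' brave', meta }, { u: ' world' } ]
console.log(diff)
```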
diff --git a/services/track-changes/app/js/DiffManager.js b/services/track-changes/app/js/DiffManager.js
new file mode 100644
index 0000000000..7c8a9d4ceb
--- /dev/null
+++ b/services/track-changes/app/js/DiffManager.js
@@ -0,0 +1,188 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let DiffManager
+const UpdatesManager = require('./UpdatesManager')
+const DocumentUpdaterManager = require('./DocumentUpdaterManager')
+const DiffGenerator = require('./DiffGenerator')
+const logger = require('logger-sharelatex')
+
+module.exports = DiffManager = {
+ getLatestDocAndUpdates(project_id, doc_id, fromVersion, callback) {
+ // Get updates last, since then they must be ahead and it
+ // might be possible to rewind to the same version as the doc.
+ if (callback == null) {
+ callback = function (error, content, version, updates) {}
+ }
+ return DocumentUpdaterManager.getDocument(
+ project_id,
+ doc_id,
+ function (error, content, version) {
+ if (error != null) {
+ return callback(error)
+ }
+ if (fromVersion == null) {
+ // If we haven't been given a version, just return the latest doc and no updates
+ return callback(null, content, version, [])
+ }
+ return UpdatesManager.getDocUpdatesWithUserInfo(
+ project_id,
+ doc_id,
+ { from: fromVersion },
+ function (error, updates) {
+ if (error != null) {
+ return callback(error)
+ }
+ return callback(null, content, version, updates)
+ }
+ )
+ }
+ )
+ },
+
+ getDiff(project_id, doc_id, fromVersion, toVersion, callback) {
+ if (callback == null) {
+ callback = function (error, diff) {}
+ }
+ return DiffManager.getDocumentBeforeVersion(
+ project_id,
+ doc_id,
+ fromVersion,
+ function (error, startingContent, updates) {
+ let diff
+ if (error != null) {
+ if (error.message === 'broken-history') {
+ return callback(null, 'history unavailable')
+ } else {
+ return callback(error)
+ }
+ }
+
+ const updatesToApply = []
+ for (const update of Array.from(updates.slice().reverse())) {
+ if (update.v <= toVersion) {
+ updatesToApply.push(update)
+ }
+ }
+
+ try {
+ diff = DiffGenerator.buildDiff(startingContent, updatesToApply)
+ } catch (e) {
+ return callback(e)
+ }
+
+ return callback(null, diff)
+ }
+ )
+ },
+
+ getDocumentBeforeVersion(project_id, doc_id, version, _callback) {
+ // Whichever order we get the latest document and the latest updates,
+ // there is potential for updates to be applied between them so that
+ // they do not return the same 'latest' versions.
+ // If this happens, we just retry and hopefully get them at the compatible
+ // versions.
+ let retry
+ if (_callback == null) {
+ _callback = function (error, document, rewoundUpdates) {}
+ }
+ let retries = 3
+ const callback = function (error, ...args) {
+ if (error != null) {
+ if (error.retry && retries > 0) {
+ logger.warn(
+ { error, project_id, doc_id, version, retries },
+ 'retrying getDocumentBeforeVersion'
+ )
+ return retry()
+ } else {
+ return _callback(error)
+ }
+ } else {
+ return _callback(null, ...Array.from(args))
+ }
+ }
+
+ return (retry = function () {
+ retries--
+ return DiffManager._tryGetDocumentBeforeVersion(
+ project_id,
+ doc_id,
+ version,
+ callback
+ )
+ })()
+ },
+
+ _tryGetDocumentBeforeVersion(project_id, doc_id, version, callback) {
+ if (callback == null) {
+ callback = function (error, document, rewoundUpdates) {}
+ }
+ logger.log(
+ { project_id, doc_id, version },
+ 'getting document before version'
+ )
+ return DiffManager.getLatestDocAndUpdates(
+ project_id,
+ doc_id,
+ version,
+ function (error, content, version, updates) {
+ let startingContent
+ if (error != null) {
+ return callback(error)
+ }
+
+ // bail out if we hit a broken update
+ for (const u of Array.from(updates)) {
+ if (u.broken) {
+ return callback(new Error('broken-history'))
+ }
+ }
+
+ // discard any updates which are ahead of this document version
+ while ((updates[0] != null ? updates[0].v : undefined) >= version) {
+ updates.shift()
+ }
+
+ const lastUpdate = updates[0]
+ if (lastUpdate != null && lastUpdate.v !== version - 1) {
+ error = new Error(
+ `latest update version, ${lastUpdate.v}, does not match doc version, ${version}`
+ )
+ error.retry = true
+ return callback(error)
+ }
+
+ logger.log(
+ {
+ docVersion: version,
+ lastUpdateVersion: lastUpdate != null ? lastUpdate.v : undefined,
+ updateCount: updates.length,
+ },
+ 'rewinding updates'
+ )
+
+ const tryUpdates = updates.slice().reverse()
+
+ try {
+ startingContent = DiffGenerator.rewindUpdates(content, tryUpdates)
+ // tryUpdates is reversed, and any unapplied ops are marked as broken
+ } catch (e) {
+ return callback(e)
+ }
+
+ return callback(null, startingContent, tryUpdates)
+ }
+ )
+ },
+}
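A sketch of how a caller might consume `DiffManager.getDiff`; the ids are placeholders, and the `'history unavailable'` string is the broken-history fallback visible in the code above.

```js
const DiffManager = require('./app/js/DiffManager')

// Placeholder ids; getDiff rewinds to `from` and applies updates up to `to`.
DiffManager.getDiff('project-id', 'doc-id', 40, 45, function (error, diff) {
  if (error) return console.error('diff failed', error)
  if (diff === 'history unavailable') {
    // the stored history contains broken updates and cannot be rewound
    return
  }
  // diff is an array of { u | i | d } parts carrying per-user metadata
  console.log(diff)
})
```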
diff --git a/services/track-changes/app/js/DocumentUpdaterManager.js b/services/track-changes/app/js/DocumentUpdaterManager.js
new file mode 100644
index 0000000000..01cdf9e07c
--- /dev/null
+++ b/services/track-changes/app/js/DocumentUpdaterManager.js
@@ -0,0 +1,90 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let DocumentUpdaterManager
+const request = require('request')
+const logger = require('logger-sharelatex')
+const Settings = require('@overleaf/settings')
+
+module.exports = DocumentUpdaterManager = {
+ getDocument(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error, content, version) {}
+ }
+ const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}`
+ logger.log({ project_id, doc_id }, 'getting doc from document updater')
+ return request.get(url, function (error, res, body) {
+ if (error != null) {
+ return callback(error)
+ }
+ if (res.statusCode >= 200 && res.statusCode < 300) {
+ try {
+ body = JSON.parse(body)
+ } catch (error1) {
+ error = error1
+ return callback(error)
+ }
+ logger.log(
+ { project_id, doc_id, version: body.version },
+ 'got doc from document updater'
+ )
+ return callback(null, body.lines.join('\n'), body.version)
+ } else {
+ error = new Error(
+ `doc updater returned a non-success status code: ${res.statusCode}`
+ )
+ logger.error(
+ { err: error, project_id, doc_id, url },
+ 'error accessing doc updater'
+ )
+ return callback(error)
+ }
+ })
+ },
+
+ setDocument(project_id, doc_id, content, user_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}`
+ logger.log({ project_id, doc_id }, 'setting doc in document updater')
+ return request.post(
+ {
+ url,
+ json: {
+ lines: content.split('\n'),
+ source: 'restore',
+ user_id,
+ undoing: true,
+ },
+ },
+ function (error, res, body) {
+ if (error != null) {
+ return callback(error)
+ }
+ if (res.statusCode >= 200 && res.statusCode < 300) {
+ return callback(null)
+ } else {
+ error = new Error(
+ `doc updater returned a non-success status code: ${res.statusCode}`
+ )
+ logger.error(
+ { err: error, project_id, doc_id, url },
+ 'error accessing doc updater'
+ )
+ return callback(error)
+ }
+ }
+ )
+ },
+}
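The document-updater contract assumed by the two calls above: the GET returns `{ lines, version }` (joined into a single string here), and the POST sends the split lines back with `source: 'restore'`. A small sketch of the read side, with placeholder ids:

```js
const DocumentUpdaterManager = require('./app/js/DocumentUpdaterManager')

DocumentUpdaterManager.getDocument(
  'project-id',
  'doc-id',
  function (error, content, version) {
    if (error) return console.error(error)
    // content is the doc joined with '\n'; version is the sharejs version
    console.log(`doc at v${version}, ${content.split('\n').length} lines`)
  }
)
```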
diff --git a/services/track-changes/app/js/HealthChecker.js b/services/track-changes/app/js/HealthChecker.js
new file mode 100644
index 0000000000..978f586f6e
--- /dev/null
+++ b/services/track-changes/app/js/HealthChecker.js
@@ -0,0 +1,81 @@
+/* eslint-disable
+ camelcase,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const { ObjectId } = require('./mongodb')
+const request = require('request')
+const async = require('async')
+const settings = require('@overleaf/settings')
+const { port } = settings.internal.trackchanges
+const logger = require('logger-sharelatex')
+const LockManager = require('./LockManager')
+
+module.exports = {
+ check(callback) {
+ const project_id = ObjectId(settings.trackchanges.healthCheck.project_id)
+ const url = `http://localhost:${port}/project/${project_id}`
+ logger.log({ project_id }, 'running health check')
+ const jobs = [
+ cb =>
+ request.get(
+ { url: `http://localhost:${port}/check_lock`, timeout: 3000 },
+ function (err, res, body) {
+ if (err != null) {
+ logger.err(
+ { err, project_id },
+ 'error checking lock for health check'
+ )
+ return cb(err)
+ } else if ((res != null ? res.statusCode : undefined) !== 200) {
+ return cb(`status code not 200, it's ${res.statusCode}`)
+ } else {
+ return cb()
+ }
+ }
+ ),
+ cb =>
+ request.post(
+ { url: `${url}/flush`, timeout: 10000 },
+ function (err, res, body) {
+ if (err != null) {
+ logger.err({ err, project_id }, 'error flushing for health check')
+ return cb(err)
+ } else if ((res != null ? res.statusCode : undefined) !== 204) {
+ return cb(`status code not 204, it's ${res.statusCode}`)
+ } else {
+ return cb()
+ }
+ }
+ ),
+ cb =>
+ request.get(
+ { url: `${url}/updates`, timeout: 10000 },
+ function (err, res, body) {
+ if (err != null) {
+ logger.err(
+ { err, project_id },
+ 'error getting updates for health check'
+ )
+ return cb(err)
+ } else if ((res != null ? res.statusCode : undefined) !== 200) {
+ return cb(`status code not 200, it's ${res.statusCode}`)
+ } else {
+ return cb()
+ }
+ }
+ ),
+ ]
+ return async.series(jobs, callback)
+ },
+
+ checkLock(callback) {
+ return LockManager.healthCheck(callback)
+ },
+}
diff --git a/services/track-changes/app/js/HttpController.js b/services/track-changes/app/js/HttpController.js
new file mode 100644
index 0000000000..8f665682eb
--- /dev/null
+++ b/services/track-changes/app/js/HttpController.js
@@ -0,0 +1,340 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let HttpController
+const UpdatesManager = require('./UpdatesManager')
+const DiffManager = require('./DiffManager')
+const PackManager = require('./PackManager')
+const RestoreManager = require('./RestoreManager')
+const logger = require('logger-sharelatex')
+const HealthChecker = require('./HealthChecker')
+const _ = require('underscore')
+
+module.exports = HttpController = {
+ flushDoc(req, res, next) {
+ if (next == null) {
+ next = function (error) {}
+ }
+ const { doc_id } = req.params
+ const { project_id } = req.params
+ logger.log({ project_id, doc_id }, 'compressing doc history')
+ return UpdatesManager.processUncompressedUpdatesWithLock(
+ project_id,
+ doc_id,
+ function (error) {
+ if (error != null) {
+ return next(error)
+ }
+ return res.sendStatus(204)
+ }
+ )
+ },
+
+ flushProject(req, res, next) {
+ if (next == null) {
+ next = function (error) {}
+ }
+ const { project_id } = req.params
+ logger.log({ project_id }, 'compressing project history')
+ return UpdatesManager.processUncompressedUpdatesForProject(
+ project_id,
+ function (error) {
+ if (error != null) {
+ return next(error)
+ }
+ return res.sendStatus(204)
+ }
+ )
+ },
+
+ flushAll(req, res, next) {
+ // limit on projects to flush or -1 for all (default)
+ if (next == null) {
+ next = function (error) {}
+ }
+ const limit = req.query.limit != null ? parseInt(req.query.limit, 10) : -1
+ logger.log({ limit }, 'flushing all projects')
+ return UpdatesManager.flushAll(limit, function (error, result) {
+ if (error != null) {
+ return next(error)
+ }
+ const { failed, succeeded, all } = result
+ const status = `${succeeded.length} succeeded, ${failed.length} failed`
+ if (limit === 0) {
+ return res
+ .status(200)
+ .send(`${status}\nwould flush:\n${all.join('\n')}\n`)
+ } else if (failed.length > 0) {
+ logger.log({ failed, succeeded }, 'error flushing projects')
+ return res
+ .status(500)
+ .send(`${status}\nfailed to flush:\n${failed.join('\n')}\n`)
+ } else {
+ return res
+ .status(200)
+ .send(
+ `${status}\nflushed ${succeeded.length} projects of ${all.length}\n`
+ )
+ }
+ })
+ },
+
+ checkDanglingUpdates(req, res, next) {
+ if (next == null) {
+ next = function (error) {}
+ }
+ logger.log('checking dangling updates')
+ return UpdatesManager.getDanglingUpdates(function (error, result) {
+ if (error != null) {
+ return next(error)
+ }
+ if (result.length > 0) {
+ logger.log({ dangling: result }, 'found dangling updates')
+ return res.status(500).send(`dangling updates:\n${result.join('\n')}\n`)
+ } else {
+ return res.status(200).send('no dangling updates found\n')
+ }
+ })
+ },
+
+ checkDoc(req, res, next) {
+ if (next == null) {
+ next = function (error) {}
+ }
+ const { doc_id } = req.params
+ const { project_id } = req.params
+ logger.log({ project_id, doc_id }, 'checking doc history')
+ return DiffManager.getDocumentBeforeVersion(
+ project_id,
+ doc_id,
+ 1,
+ function (error, document, rewoundUpdates) {
+ if (error != null) {
+ return next(error)
+ }
+ const broken = []
+ for (const update of Array.from(rewoundUpdates)) {
+ for (const op of Array.from(update.op)) {
+ if (op.broken === true) {
+ broken.push(op)
+ }
+ }
+ }
+ if (broken.length > 0) {
+ return res.send(broken)
+ } else {
+ return res.sendStatus(204)
+ }
+ }
+ )
+ },
+
+ getDiff(req, res, next) {
+ let from, to
+ if (next == null) {
+ next = function (error) {}
+ }
+ const { doc_id } = req.params
+ const { project_id } = req.params
+
+ if (req.query.from != null) {
+ from = parseInt(req.query.from, 10)
+ } else {
+ from = null
+ }
+ if (req.query.to != null) {
+ to = parseInt(req.query.to, 10)
+ } else {
+ to = null
+ }
+
+ logger.log({ project_id, doc_id, from, to }, 'getting diff')
+ return DiffManager.getDiff(
+ project_id,
+ doc_id,
+ from,
+ to,
+ function (error, diff) {
+ if (error != null) {
+ return next(error)
+ }
+ return res.json({ diff })
+ }
+ )
+ },
+
+ getUpdates(req, res, next) {
+ let before, min_count
+ if (next == null) {
+ next = function (error) {}
+ }
+ const { project_id } = req.params
+
+ if (req.query.before != null) {
+ before = parseInt(req.query.before, 10)
+ }
+ if (req.query.min_count != null) {
+ min_count = parseInt(req.query.min_count, 10)
+ }
+
+ return UpdatesManager.getSummarizedProjectUpdates(
+ project_id,
+ { before, min_count },
+ function (error, updates, nextBeforeTimestamp) {
+ if (error != null) {
+ return next(error)
+ }
+ return res.json({
+ updates,
+ nextBeforeTimestamp,
+ })
+ }
+ )
+ },
+
+ exportProject(req, res, next) {
+ // The project history can be huge:
+ // - updates can weigh MBs for insert/delete of a full doc
+ // - multiple updates form a pack
+ // Flush updates per pack onto the wire.
+ const { project_id } = req.params
+ logger.log({ project_id }, 'exporting project history')
+ UpdatesManager.exportProject(
+ project_id,
+ function (err, { updates, userIds }, confirmWrite) {
+ const abortStreaming = req.aborted || res.finished || res.destroyed
+ if (abortStreaming) {
+ // Tell the producer to stop emitting data
+ if (confirmWrite) confirmWrite(new Error('stop'))
+ return
+ }
+ const hasStartedStreamingResponse = res.headersSent
+ if (err) {
+ logger.error({ project_id, err }, 'export failed')
+ if (!hasStartedStreamingResponse) {
+ // Generate a nice 500
+ return next(err)
+ } else {
+ // Stop streaming
+ return res.destroy()
+ }
+ }
+ // Compose the response incrementally
+ const isFirstWrite = !hasStartedStreamingResponse
+ const isLastWrite = updates.length === 0
+ if (isFirstWrite) {
+ // The first write will emit the 200 status, headers and start of the
+ // response payload (open array)
+ res.setHeader('Content-Type', 'application/json')
+ res.setHeader('Trailer', 'X-User-Ids')
+ res.writeHead(200)
+ res.write('[')
+ }
+ if (!isFirstWrite && !isLastWrite) {
+ // Starting from the 2nd non-empty write, emit a continuing comma.
+ // write 1: [updates1
+ // write 2: ,updates2
+ // write 3: ,updates3
+ // write N: ]
+ res.write(',')
+ }
+
+ // Every write will emit a blob onto the response stream:
+ // '[update1,update2,...]'
+ // ^^^^^^^^^^^^^^^^^^^
+ res.write(JSON.stringify(updates).slice(1, -1), confirmWrite)
+
+ if (isLastWrite) {
+ // The last write will have no updates and will finish the response
+ // payload (close array) and emit the userIds as trailer.
+ res.addTrailers({ 'X-User-Ids': JSON.stringify(userIds) })
+ res.end(']')
+ }
+ }
+ )
+ },
+
+ restore(req, res, next) {
+ if (next == null) {
+ next = function (error) {}
+ }
+ let { doc_id, project_id, version } = req.params
+ const user_id = req.headers['x-user-id']
+ version = parseInt(version, 10)
+ return RestoreManager.restoreToBeforeVersion(
+ project_id,
+ doc_id,
+ version,
+ user_id,
+ function (error) {
+ if (error != null) {
+ return next(error)
+ }
+ return res.sendStatus(204)
+ }
+ )
+ },
+
+ pushDocHistory(req, res, next) {
+ if (next == null) {
+ next = function (error) {}
+ }
+ const { project_id } = req.params
+ const { doc_id } = req.params
+ logger.log({ project_id, doc_id }, 'pushing all finalised changes to s3')
+ return PackManager.pushOldPacks(project_id, doc_id, function (error) {
+ if (error != null) {
+ return next(error)
+ }
+ return res.sendStatus(204)
+ })
+ },
+
+ pullDocHistory(req, res, next) {
+ if (next == null) {
+ next = function (error) {}
+ }
+ const { project_id } = req.params
+ const { doc_id } = req.params
+ logger.log({ project_id, doc_id }, 'pulling all packs from s3')
+ return PackManager.pullOldPacks(project_id, doc_id, function (error) {
+ if (error != null) {
+ return next(error)
+ }
+ return res.sendStatus(204)
+ })
+ },
+
+ healthCheck(req, res) {
+ return HealthChecker.check(function (err) {
+ if (err != null) {
+ logger.err({ err }, 'error performing health check')
+ return res.sendStatus(500)
+ } else {
+ return res.sendStatus(200)
+ }
+ })
+ },
+
+ checkLock(req, res) {
+ return HealthChecker.checkLock(function (err) {
+ if (err != null) {
+ logger.err({ err }, 'error performing lock check')
+ return res.sendStatus(500)
+ } else {
+ return res.sendStatus(200)
+ }
+ })
+ },
+}
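The export endpoint streams a JSON array in chunks and sends the user ids as an HTTP trailer once the stream ends. A hypothetical Node client for it might look like the sketch below; the route path and port are assumptions (the actual wiring lives in the service's app.js).

```js
const http = require('http')

// Assumed route: GET /project/:project_id/export on the track-changes port
http.get('http://localhost:3015/project/PROJECT_ID/export', res => {
  const chunks = []
  res.on('data', chunk => chunks.push(chunk))
  res.on('end', () => {
    const updates = JSON.parse(Buffer.concat(chunks).toString())
    // trailers are only populated once the response has ended
    const userIds = JSON.parse(res.trailers['x-user-ids'] || '[]')
    console.log(`${updates.length} updates from ${userIds.length} users`)
  })
})
```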
diff --git a/services/track-changes/app/js/LockManager.js b/services/track-changes/app/js/LockManager.js
new file mode 100644
index 0000000000..8be04dbaae
--- /dev/null
+++ b/services/track-changes/app/js/LockManager.js
@@ -0,0 +1,164 @@
+/* eslint-disable
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let LockManager
+const Settings = require('@overleaf/settings')
+const redis = require('@overleaf/redis-wrapper')
+const rclient = redis.createClient(Settings.redis.lock)
+const os = require('os')
+const crypto = require('crypto')
+const logger = require('logger-sharelatex')
+
+const HOST = os.hostname()
+const PID = process.pid
+const RND = crypto.randomBytes(4).toString('hex')
+let COUNT = 0
+
+module.exports = LockManager = {
+ LOCK_TEST_INTERVAL: 50, // 50ms between each test of the lock
+ MAX_LOCK_WAIT_TIME: 10000, // 10s maximum time to spend trying to get the lock
+ LOCK_TTL: 300, // seconds (allow 5 minutes for any operation to complete)
+
+ // Use a signed lock value as described in
+ // http://redis.io/topics/distlock#correct-implementation-with-a-single-instance
+ // to prevent accidental unlocking by multiple processes
+ randomLock() {
+ const time = Date.now()
+ return `locked:host=${HOST}:pid=${PID}:random=${RND}:time=${time}:count=${COUNT++}`
+ },
+
+ unlockScript:
+ 'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end',
+
+ tryLock(key, callback) {
+ if (callback == null) {
+ callback = function (err, gotLock) {}
+ }
+ const lockValue = LockManager.randomLock()
+ return rclient.set(
+ key,
+ lockValue,
+ 'EX',
+ this.LOCK_TTL,
+ 'NX',
+ function (err, gotLock) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (gotLock === 'OK') {
+ return callback(err, true, lockValue)
+ } else {
+ return callback(err, false)
+ }
+ }
+ )
+ },
+
+ getLock(key, callback) {
+ let attempt
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const startTime = Date.now()
+ return (attempt = function () {
+ if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) {
+ const e = new Error('Timeout')
+ e.key = key
+ return callback(e)
+ }
+
+ return LockManager.tryLock(key, function (error, gotLock, lockValue) {
+ if (error != null) {
+ return callback(error)
+ }
+ if (gotLock) {
+ return callback(null, lockValue)
+ } else {
+ return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL)
+ }
+ })
+ })()
+ },
+
+ checkLock(key, callback) {
+ if (callback == null) {
+ callback = function (err, isFree) {}
+ }
+ return rclient.exists(key, function (err, exists) {
+ if (err != null) {
+ return callback(err)
+ }
+ exists = parseInt(exists)
+ if (exists === 1) {
+ return callback(err, false)
+ } else {
+ return callback(err, true)
+ }
+ })
+ },
+
+ releaseLock(key, lockValue, callback) {
+ return rclient.eval(
+ LockManager.unlockScript,
+ 1,
+ key,
+ lockValue,
+ function (err, result) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (result != null && result !== 1) {
+ // successful unlock should release exactly one key
+ logger.error(
+ { key, lockValue, redis_err: err, redis_result: result },
+ 'unlocking error'
+ )
+ return callback(new Error('tried to release timed out lock'))
+ }
+ return callback(err, result)
+ }
+ )
+ },
+
+ runWithLock(key, runner, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return LockManager.getLock(key, function (error, lockValue) {
+ if (error != null) {
+ return callback(error)
+ }
+ return runner(error1 =>
+ LockManager.releaseLock(key, lockValue, function (error2) {
+ error = error1 || error2
+ if (error != null) {
+ return callback(error)
+ }
+ return callback()
+ })
+ )
+ })
+ },
+
+ healthCheck(callback) {
+ const action = releaseLock => releaseLock()
+ return LockManager.runWithLock(
+ `HistoryLock:HealthCheck:host=${HOST}:pid=${PID}:random=${RND}`,
+ action,
+ callback
+ )
+ },
+
+ close(callback) {
+ rclient.quit()
+ return rclient.once('end', callback)
+ },
+}
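A minimal sketch of wrapping work in the history lock via `runWithLock`; the lock key comes from the `key_schema` referenced elsewhere in this diff, and the runner must call the release function exactly once (optionally with an error).

```js
const Settings = require('@overleaf/settings')
const LockManager = require('./app/js/LockManager')
const keys = Settings.redis.lock.key_schema

LockManager.runWithLock(
  keys.historyLock({ doc_id: 'doc-id' }), // placeholder doc id
  releaseLock => {
    // ... critical section: only one process holds this doc's history lock ...
    releaseLock() // or releaseLock(err) to propagate a failure
  },
  error => {
    if (error) console.error('could not get lock or work failed', error)
  }
)
```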
diff --git a/services/track-changes/app/js/MongoAWS.js b/services/track-changes/app/js/MongoAWS.js
new file mode 100644
index 0000000000..f9e69b12c2
--- /dev/null
+++ b/services/track-changes/app/js/MongoAWS.js
@@ -0,0 +1,199 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let MongoAWS
+const settings = require('@overleaf/settings')
+const logger = require('logger-sharelatex')
+const AWS = require('aws-sdk')
+const S3S = require('s3-streams')
+const { db, ObjectId } = require('./mongodb')
+const JSONStream = require('JSONStream')
+const ReadlineStream = require('byline')
+const zlib = require('zlib')
+const Metrics = require('@overleaf/metrics')
+
+const DAYS = 24 * 3600 * 1000 // one day in milliseconds
+
+const createStream = function (streamConstructor, project_id, doc_id, pack_id) {
+ const AWS_CONFIG = {
+ accessKeyId: settings.trackchanges.s3.key,
+ secretAccessKey: settings.trackchanges.s3.secret,
+ endpoint: settings.trackchanges.s3.endpoint,
+ s3ForcePathStyle: settings.trackchanges.s3.pathStyle,
+ }
+
+ return streamConstructor(new AWS.S3(AWS_CONFIG), {
+ Bucket: settings.trackchanges.stores.doc_history,
+ Key: project_id + '/changes-' + doc_id + '/pack-' + pack_id,
+ })
+}
+
+module.exports = MongoAWS = {
+ archivePack(project_id, doc_id, pack_id, _callback) {
+ if (_callback == null) {
+ _callback = function (error) {}
+ }
+ const callback = function (...args) {
+ _callback(...Array.from(args || []))
+ return (_callback = function () {})
+ }
+
+ const query = {
+ _id: ObjectId(pack_id),
+ doc_id: ObjectId(doc_id),
+ }
+
+ if (project_id == null) {
+ return callback(new Error('invalid project id'))
+ }
+ if (doc_id == null) {
+ return callback(new Error('invalid doc id'))
+ }
+ if (pack_id == null) {
+ return callback(new Error('invalid pack id'))
+ }
+
+ logger.log({ project_id, doc_id, pack_id }, 'uploading data to s3')
+
+ const upload = createStream(S3S.WriteStream, project_id, doc_id, pack_id)
+
+ return db.docHistory.findOne(query, function (err, result) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (result == null) {
+ return callback(new Error('cannot find pack to send to s3'))
+ }
+ if (result.expiresAt != null) {
+ return callback(new Error('refusing to send pack with TTL to s3'))
+ }
+ const uncompressedData = JSON.stringify(result)
+ if (uncompressedData.indexOf('\u0000') !== -1) {
+ const error = new Error('null bytes found in upload')
+ logger.error({ err: error, project_id, doc_id, pack_id }, error.message)
+ return callback(error)
+ }
+ return zlib.gzip(uncompressedData, function (err, buf) {
+ logger.log(
+ {
+ project_id,
+ doc_id,
+ pack_id,
+ origSize: uncompressedData.length,
+ newSize: buf.length,
+ },
+ 'compressed pack'
+ )
+ if (err != null) {
+ return callback(err)
+ }
+ upload.on('error', err => callback(err))
+ upload.on('finish', function () {
+ Metrics.inc('archive-pack')
+ logger.log({ project_id, doc_id, pack_id }, 'upload to s3 completed')
+ return callback(null)
+ })
+ upload.write(buf)
+ return upload.end()
+ })
+ })
+ },
+
+ readArchivedPack(project_id, doc_id, pack_id, _callback) {
+ if (_callback == null) {
+ _callback = function (error, result) {}
+ }
+ const callback = function (...args) {
+ _callback(...Array.from(args || []))
+ return (_callback = function () {})
+ }
+
+ if (project_id == null) {
+ return callback(new Error('invalid project id'))
+ }
+ if (doc_id == null) {
+ return callback(new Error('invalid doc id'))
+ }
+ if (pack_id == null) {
+ return callback(new Error('invalid pack id'))
+ }
+
+ logger.log({ project_id, doc_id, pack_id }, 'downloading data from s3')
+
+ const download = createStream(S3S.ReadStream, project_id, doc_id, pack_id)
+
+ const inputStream = download
+ .on('open', obj => 1)
+ .on('error', err => callback(err))
+
+ const gunzip = zlib.createGunzip()
+ gunzip.setEncoding('utf8')
+ gunzip.on('error', function (err) {
+ logger.log(
+ { project_id, doc_id, pack_id, err },
+ 'error uncompressing gzip stream'
+ )
+ return callback(err)
+ })
+
+ const outputStream = inputStream.pipe(gunzip)
+ const parts = []
+ outputStream.on('error', err => callback(err))
+ outputStream.on('end', function () {
+ let object
+ logger.log({ project_id, doc_id, pack_id }, 'download from s3 completed')
+ try {
+ object = JSON.parse(parts.join(''))
+ } catch (e) {
+ return callback(e)
+ }
+ object._id = ObjectId(object._id)
+ object.doc_id = ObjectId(object.doc_id)
+ object.project_id = ObjectId(object.project_id)
+ for (const op of Array.from(object.pack)) {
+ if (op._id != null) {
+ op._id = ObjectId(op._id)
+ }
+ }
+ return callback(null, object)
+ })
+ return outputStream.on('data', data => parts.push(data))
+ },
+
+ unArchivePack(project_id, doc_id, pack_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return MongoAWS.readArchivedPack(
+ project_id,
+ doc_id,
+ pack_id,
+ function (err, object) {
+ if (err != null) {
+ return callback(err)
+ }
+ Metrics.inc('unarchive-pack')
+ // allow the object to expire; we can always retrieve it again
+ object.expiresAt = new Date(Date.now() + 7 * DAYS)
+ logger.log({ project_id, doc_id, pack_id }, 'inserting object from s3')
+ return db.docHistory.insertOne(object, (err, confirmation) => {
+ if (err) return callback(err)
+ object._id = confirmation.insertedId
+ callback(null, object)
+ })
+ }
+ )
+ },
+}
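A sketch of pulling an archived pack back out of S3 and into mongo with `unArchivePack`; the ids are placeholders, and note that the restored document is given a 7-day `expiresAt` so it will be evicted again.

```js
const MongoAWS = require('./app/js/MongoAWS')

MongoAWS.unArchivePack('project-id', 'doc-id', 'pack-id', (err, pack) => {
  if (err) return console.error('unarchive failed', err)
  console.log('restored pack', pack._id.toString(), 'with', pack.pack.length, 'ops')
})
```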
diff --git a/services/track-changes/app/js/MongoManager.js b/services/track-changes/app/js/MongoManager.js
new file mode 100644
index 0000000000..6dfdd46ce3
--- /dev/null
+++ b/services/track-changes/app/js/MongoManager.js
@@ -0,0 +1,202 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let MongoManager
+const { db, ObjectId } = require('./mongodb')
+const PackManager = require('./PackManager')
+const async = require('async')
+const _ = require('underscore')
+const metrics = require('@overleaf/metrics')
+const logger = require('logger-sharelatex')
+
+module.exports = MongoManager = {
+ getLastCompressedUpdate(doc_id, callback) {
+ if (callback == null) {
+ callback = function (error, update) {}
+ }
+ return db.docHistory
+ .find(
+ { doc_id: ObjectId(doc_id.toString()) },
+ // only return the last entry in a pack
+ { projection: { pack: { $slice: -1 } } }
+ )
+ .sort({ v: -1 })
+ .limit(1)
+ .toArray(function (error, compressedUpdates) {
+ if (error != null) {
+ return callback(error)
+ }
+ return callback(null, compressedUpdates[0] || null)
+ })
+ },
+
+ peekLastCompressedUpdate(doc_id, callback) {
+ // under normal use we pass back the last update as
+ // callback(null,update,version).
+ //
+ // when we have an existing last update but want to force a new one
+ // to start, we pass it back as callback(null,null,version), just
+ // giving the version so we can check consistency.
+ if (callback == null) {
+ callback = function (error, update, version) {}
+ }
+ return MongoManager.getLastCompressedUpdate(
+ doc_id,
+ function (error, update) {
+ if (error != null) {
+ return callback(error)
+ }
+ if (update != null) {
+ if (update.broken) {
+ // marked as broken so we will force a new op
+ return callback(null, null)
+ } else if (update.pack != null) {
+ if (update.finalised) {
+ // no more ops can be appended
+ return callback(
+ null,
+ null,
+ update.pack[0] != null ? update.pack[0].v : undefined
+ )
+ } else {
+ return callback(
+ null,
+ update,
+ update.pack[0] != null ? update.pack[0].v : undefined
+ )
+ }
+ } else {
+ return callback(null, update, update.v)
+ }
+ } else {
+ return PackManager.getLastPackFromIndex(
+ doc_id,
+ function (error, pack) {
+ if (error != null) {
+ return callback(error)
+ }
+ if (
+ (pack != null ? pack.inS3 : undefined) != null &&
+ (pack != null ? pack.v_end : undefined) != null
+ ) {
+ return callback(null, null, pack.v_end)
+ }
+ return callback(null, null)
+ }
+ )
+ }
+ }
+ )
+ },
+
+ backportProjectId(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return db.docHistory.updateMany(
+ {
+ doc_id: ObjectId(doc_id.toString()),
+ project_id: { $exists: false },
+ },
+ {
+ $set: { project_id: ObjectId(project_id.toString()) },
+ },
+ callback
+ )
+ },
+
+ getProjectMetaData(project_id, callback) {
+ if (callback == null) {
+ callback = function (error, metadata) {}
+ }
+ return db.projectHistoryMetaData.findOne(
+ {
+ project_id: ObjectId(project_id.toString()),
+ },
+ callback
+ )
+ },
+
+ setProjectMetaData(project_id, metadata, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return db.projectHistoryMetaData.updateOne(
+ {
+ project_id: ObjectId(project_id),
+ },
+ {
+ $set: metadata,
+ },
+ {
+ upsert: true,
+ },
+ callback
+ )
+ },
+
+ upgradeHistory(project_id, callback) {
+ // preserve the project's existing history
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return db.docHistory.updateMany(
+ {
+ project_id: ObjectId(project_id),
+ temporary: true,
+ expiresAt: { $exists: true },
+ },
+ {
+ $set: { temporary: false },
+ $unset: { expiresAt: '' },
+ },
+ callback
+ )
+ },
+
+ ensureIndices() {
+ // For finding all updates that go into a diff for a doc
+ db.docHistory.ensureIndex({ doc_id: 1, v: 1 }, { background: true })
+ // For finding all updates that affect a project
+ db.docHistory.ensureIndex(
+ { project_id: 1, 'meta.end_ts': 1 },
+ { background: true }
+ )
+ // For finding updates that don't yet have a project_id and need one inserted
+ db.docHistory.ensureIndex(
+ { doc_id: 1, project_id: 1 },
+ { background: true }
+ )
+ // For finding project meta-data
+ db.projectHistoryMetaData.ensureIndex(
+ { project_id: 1 },
+ { background: true }
+ )
+ // TTL index for auto deleting week old temporary ops
+ db.docHistory.ensureIndex(
+ { expiresAt: 1 },
+ { expireAfterSeconds: 0, background: true }
+ )
+ // For finding packs to be checked for archiving
+ db.docHistory.ensureIndex({ last_checked: 1 }, { background: true })
+ // For finding archived packs
+ return db.docHistoryIndex.ensureIndex(
+ { project_id: 1 },
+ { background: true }
+ )
+ },
+}
+;['getLastCompressedUpdate', 'getProjectMetaData', 'setProjectMetaData'].map(
+ method =>
+ metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger)
+)
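The three callback shapes of `peekLastCompressedUpdate` described in its comment, as a caller would see them (placeholder doc id):

```js
const MongoManager = require('./app/js/MongoManager')

MongoManager.peekLastCompressedUpdate('doc-id', (error, update, version) => {
  if (error) return console.error(error)
  if (update != null) {
    // an open pack (or single update) that new ops may still be appended to
  } else if (version != null) {
    // nothing appendable, but the last known version for consistency checks
  } else {
    // no history recorded for this doc yet
  }
})
```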
diff --git a/services/track-changes/app/js/PackManager.js b/services/track-changes/app/js/PackManager.js
new file mode 100644
index 0000000000..f8efb77f11
--- /dev/null
+++ b/services/track-changes/app/js/PackManager.js
@@ -0,0 +1,1171 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let PackManager
+const async = require('async')
+const _ = require('underscore')
+const Bson = require('bson')
+const BSON = new Bson()
+const { db, ObjectId } = require('./mongodb')
+const logger = require('logger-sharelatex')
+const LockManager = require('./LockManager')
+const MongoAWS = require('./MongoAWS')
+const Metrics = require('@overleaf/metrics')
+const ProjectIterator = require('./ProjectIterator')
+const Settings = require('@overleaf/settings')
+const keys = Settings.redis.lock.key_schema
+
+// Sharejs operations are stored in a 'pack' object
+//
+// e.g. a single sharejs update looks like
+//
+// {
+// "doc_id" : 549dae9e0a2a615c0c7f0c98,
+// "project_id" : 549dae9c0a2a615c0c7f0c8c,
+// "op" : [ {"p" : 6981, "d" : "?" } ],
+// "meta" : { "user_id" : 52933..., "start_ts" : 1422310693931, "end_ts" : 1422310693931 },
+// "v" : 17082
+// }
+//
+// and a pack looks like this
+//
+// {
+// "doc_id" : 549dae9e0a2a615c0c7f0c98,
+// "project_id" : 549dae9c0a2a615c0c7f0c8c,
+// "pack" : [ U1, U2, U3, ...., UN],
+// "meta" : { "user_id" : 52933..., "start_ts" : 1422310693931, "end_ts" : 1422310693931 },
+ // "v" : 17082,
+// "v_end" : ...
+// }
+//
+// where U1, U2, U3, .... are single updates stripped of their
+// doc_id and project_id fields (which are the same for all the
+// updates in the pack).
+//
+// The pack itself has v and meta fields, this makes it possible to
+// treat packs and single updates in a similar way.
+//
+// The v field of the pack itself is from the first entry U1, the
+// v_end field from UN. The meta.end_ts field of the pack itself is
+// from the last entry UN, the meta.start_ts field from U1.
+
+const DAYS = 24 * 3600 * 1000 // one day in milliseconds
+
+module.exports = PackManager = {
+ MAX_SIZE: 1024 * 1024, // make these configurable parameters
+ MAX_COUNT: 1024,
+
+ insertCompressedUpdates(
+ project_id,
+ doc_id,
+ lastUpdate,
+ newUpdates,
+ temporary,
+ callback
+ ) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ if (newUpdates.length === 0) {
+ return callback()
+ }
+
+ // never append permanent ops to a pack that will expire
+ if (
+ (lastUpdate != null ? lastUpdate.expiresAt : undefined) != null &&
+ !temporary
+ ) {
+ lastUpdate = null
+ }
+
+ const updatesToFlush = []
+ const updatesRemaining = newUpdates.slice()
+
+ let n = (lastUpdate != null ? lastUpdate.n : undefined) || 0
+ let sz = (lastUpdate != null ? lastUpdate.sz : undefined) || 0
+
+ while (
+ updatesRemaining.length &&
+ n < PackManager.MAX_COUNT &&
+ sz < PackManager.MAX_SIZE
+ ) {
+ const nextUpdate = updatesRemaining[0]
+ const nextUpdateSize = BSON.calculateObjectSize(nextUpdate)
+ if (nextUpdateSize + sz > PackManager.MAX_SIZE && n > 0) {
+ break
+ }
+ n++
+ sz += nextUpdateSize
+ updatesToFlush.push(updatesRemaining.shift())
+ }
+
+ return PackManager.flushCompressedUpdates(
+ project_id,
+ doc_id,
+ lastUpdate,
+ updatesToFlush,
+ temporary,
+ function (error) {
+ if (error != null) {
+ return callback(error)
+ }
+ return PackManager.insertCompressedUpdates(
+ project_id,
+ doc_id,
+ null,
+ updatesRemaining,
+ temporary,
+ callback
+ )
+ }
+ )
+ },
+
+ flushCompressedUpdates(
+ project_id,
+ doc_id,
+ lastUpdate,
+ newUpdates,
+ temporary,
+ callback
+ ) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ if (newUpdates.length === 0) {
+ return callback()
+ }
+
+ let canAppend = false
+ // check if it is safe to append to an existing pack
+ if (lastUpdate != null) {
+ if (!temporary && lastUpdate.expiresAt == null) {
+ // permanent pack appends to permanent pack
+ canAppend = true
+ }
+ const age =
+ Date.now() -
+ (lastUpdate.meta != null ? lastUpdate.meta.start_ts : undefined)
+ if (temporary && lastUpdate.expiresAt != null && age < 1 * DAYS) {
+ // temporary pack appends to temporary pack if same day
+ canAppend = true
+ }
+ }
+
+ if (canAppend) {
+ return PackManager.appendUpdatesToExistingPack(
+ project_id,
+ doc_id,
+ lastUpdate,
+ newUpdates,
+ temporary,
+ callback
+ )
+ } else {
+ return PackManager.insertUpdatesIntoNewPack(
+ project_id,
+ doc_id,
+ newUpdates,
+ temporary,
+ callback
+ )
+ }
+ },
+
+ insertUpdatesIntoNewPack(
+ project_id,
+ doc_id,
+ newUpdates,
+ temporary,
+ callback
+ ) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const first = newUpdates[0]
+ const last = newUpdates[newUpdates.length - 1]
+ const n = newUpdates.length
+ const sz = BSON.calculateObjectSize(newUpdates)
+ const newPack = {
+ project_id: ObjectId(project_id.toString()),
+ doc_id: ObjectId(doc_id.toString()),
+ pack: newUpdates,
+ n,
+ sz,
+ meta: {
+ start_ts: first.meta.start_ts,
+ end_ts: last.meta.end_ts,
+ },
+ v: first.v,
+ v_end: last.v,
+ temporary,
+ }
+ if (temporary) {
+ newPack.expiresAt = new Date(Date.now() + 7 * DAYS)
+ newPack.last_checked = new Date(Date.now() + 30 * DAYS) // never check temporary packs
+ }
+ logger.log(
+ { project_id, doc_id, newUpdates },
+ 'inserting updates into new pack'
+ )
+ return db.docHistory.insertOne(newPack, function (err) {
+ if (err != null) {
+ return callback(err)
+ }
+ Metrics.inc(`insert-pack-${temporary ? 'temporary' : 'permanent'}`)
+ if (temporary) {
+ return callback()
+ } else {
+ return PackManager.updateIndex(project_id, doc_id, callback)
+ }
+ })
+ },
+
+ appendUpdatesToExistingPack(
+ project_id,
+ doc_id,
+ lastUpdate,
+ newUpdates,
+ temporary,
+ callback
+ ) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const first = newUpdates[0]
+ const last = newUpdates[newUpdates.length - 1]
+ const n = newUpdates.length
+ const sz = BSON.calculateObjectSize(newUpdates)
+ const query = {
+ _id: lastUpdate._id,
+ project_id: ObjectId(project_id.toString()),
+ doc_id: ObjectId(doc_id.toString()),
+ pack: { $exists: true },
+ }
+ const update = {
+ $push: {
+ pack: { $each: newUpdates },
+ },
+ $inc: {
+ n: n,
+ sz: sz,
+ },
+ $set: {
+ 'meta.end_ts': last.meta.end_ts,
+ v_end: last.v,
+ },
+ }
+ if (lastUpdate.expiresAt && temporary) {
+ update.$set.expiresAt = new Date(Date.now() + 7 * DAYS)
+ }
+ logger.log(
+ { project_id, doc_id, lastUpdate, newUpdates },
+ 'appending updates to existing pack'
+ )
+ Metrics.inc(`append-pack-${temporary ? 'temporary' : 'permanent'}`)
+ return db.docHistory.updateOne(query, update, callback)
+ },
+
+ // Retrieve all changes for a document
+
+ getOpsByVersionRange(project_id, doc_id, fromVersion, toVersion, callback) {
+ if (callback == null) {
+ callback = function (error, updates) {}
+ }
+ return PackManager.loadPacksByVersionRange(
+ project_id,
+ doc_id,
+ fromVersion,
+ toVersion,
+ function (error) {
+ const query = { doc_id: ObjectId(doc_id.toString()) }
+ if (toVersion != null) {
+ query.v = { $lte: toVersion }
+ }
+ if (fromVersion != null) {
+ query.v_end = { $gte: fromVersion }
+ }
+ // console.log "query:", query
+ return db.docHistory
+ .find(query)
+ .sort({ v: -1 })
+ .toArray(function (err, result) {
+ if (err != null) {
+ return callback(err)
+ }
+ // console.log "getOpsByVersionRange:", err, result
+ const updates = []
+ const opInRange = function (op, from, to) {
+ if (fromVersion != null && op.v < fromVersion) {
+ return false
+ }
+ if (toVersion != null && op.v > toVersion) {
+ return false
+ }
+ return true
+ }
+ for (const docHistory of Array.from(result)) {
+ // console.log 'adding', docHistory.pack
+ for (const op of Array.from(docHistory.pack.reverse())) {
+ if (opInRange(op, fromVersion, toVersion)) {
+ op.project_id = docHistory.project_id
+ op.doc_id = docHistory.doc_id
+ // console.log "added op", op.v, fromVersion, toVersion
+ updates.push(op)
+ }
+ }
+ }
+ return callback(null, updates)
+ })
+ }
+ )
+ },
+
+ loadPacksByVersionRange(
+ project_id,
+ doc_id,
+ fromVersion,
+ toVersion,
+ callback
+ ) {
+ return PackManager.getIndex(doc_id, function (err, indexResult) {
+ let pack
+ if (err != null) {
+ return callback(err)
+ }
+ const indexPacks =
+ (indexResult != null ? indexResult.packs : undefined) || []
+ const packInRange = function (pack, from, to) {
+ if (fromVersion != null && pack.v_end < fromVersion) {
+ return false
+ }
+ if (toVersion != null && pack.v > toVersion) {
+ return false
+ }
+ return true
+ }
+ const neededIds = (() => {
+ const result = []
+ for (pack of Array.from(indexPacks)) {
+ if (packInRange(pack, fromVersion, toVersion)) {
+ result.push(pack._id)
+ }
+ }
+ return result
+ })()
+ if (neededIds.length) {
+ return PackManager.fetchPacksIfNeeded(
+ project_id,
+ doc_id,
+ neededIds,
+ callback
+ )
+ } else {
+ return callback()
+ }
+ })
+ },
+
+ fetchPacksIfNeeded(project_id, doc_id, pack_ids, callback) {
+ let id
+ return db.docHistory
+ .find(
+ { _id: { $in: pack_ids.map(ObjectId) } },
+ { projection: { _id: 1 } }
+ )
+ .toArray(function (err, loadedPacks) {
+ if (err != null) {
+ return callback(err)
+ }
+ const allPackIds = (() => {
+ const result1 = []
+ for (id of Array.from(pack_ids)) {
+ result1.push(id.toString())
+ }
+ return result1
+ })()
+ const loadedPackIds = Array.from(loadedPacks).map(pack =>
+ pack._id.toString()
+ )
+ const packIdsToFetch = _.difference(allPackIds, loadedPackIds)
+ logger.log(
+ { project_id, doc_id, loadedPackIds, allPackIds, packIdsToFetch },
+ 'analysed packs'
+ )
+ if (packIdsToFetch.length === 0) {
+ return callback()
+ }
+ return async.eachLimit(
+ packIdsToFetch,
+ 4,
+ (pack_id, cb) =>
+ MongoAWS.unArchivePack(project_id, doc_id, pack_id, cb),
+ function (err) {
+ if (err != null) {
+ return callback(err)
+ }
+ logger.log({ project_id, doc_id }, 'done unarchiving')
+ return callback()
+ }
+ )
+ })
+ },
+
+ // Retrieve all changes across a project
+
+ makeProjectIterator(project_id, before, callback) {
+ // get all the docHistory Entries
+ return db.docHistory
+ .find(
+ { project_id: ObjectId(project_id) },
+ { projection: { pack: false } }
+ )
+ .sort({ 'meta.end_ts': -1 })
+ .toArray(function (err, packs) {
+ let pack
+ if (err != null) {
+ return callback(err)
+ }
+ const allPacks = []
+ const seenIds = {}
+ for (pack of Array.from(packs)) {
+ allPacks.push(pack)
+ seenIds[pack._id] = true
+ }
+ return db.docHistoryIndex
+ .find({ project_id: ObjectId(project_id) })
+ .toArray(function (err, indexes) {
+ if (err != null) {
+ return callback(err)
+ }
+ for (const index of Array.from(indexes)) {
+ for (pack of Array.from(index.packs)) {
+ if (!seenIds[pack._id]) {
+ pack.project_id = index.project_id
+ pack.doc_id = index._id
+ pack.fromIndex = true
+ allPacks.push(pack)
+ seenIds[pack._id] = true
+ }
+ }
+ }
+ return callback(
+ null,
+ new ProjectIterator(allPacks, before, PackManager.getPackById)
+ )
+ })
+ })
+ },
+
+ getPackById(project_id, doc_id, pack_id, callback) {
+ return db.docHistory.findOne({ _id: pack_id }, function (err, pack) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (pack == null) {
+ return MongoAWS.unArchivePack(project_id, doc_id, pack_id, callback)
+ } else if (pack.expiresAt != null && pack.temporary === false) {
+ // we only need to touch the TTL when listing the changes in the project
+ // because diffs on individual documents are always done after that
+ return PackManager.increaseTTL(pack, callback)
+ // only do this for cached packs, not temporary ones to avoid older packs
+ // being kept longer than newer ones (which messes up the last update version)
+ } else {
+ return callback(null, pack)
+ }
+ })
+ },
+
+ increaseTTL(pack, callback) {
+ if (pack.expiresAt < new Date(Date.now() + 6 * DAYS)) {
+ // update cache expiry since we are using this pack
+ return db.docHistory.updateOne(
+ { _id: pack._id },
+ { $set: { expiresAt: new Date(Date.now() + 7 * DAYS) } },
+ err => callback(err, pack)
+ )
+ } else {
+ return callback(null, pack)
+ }
+ },
+
+ // Manage docHistoryIndex collection
+
+ getIndex(doc_id, callback) {
+ return db.docHistoryIndex.findOne(
+ { _id: ObjectId(doc_id.toString()) },
+ callback
+ )
+ },
+
+ getPackFromIndex(doc_id, pack_id, callback) {
+ return db.docHistoryIndex.findOne(
+ { _id: ObjectId(doc_id.toString()), 'packs._id': pack_id },
+ { projection: { 'packs.$': 1 } },
+ callback
+ )
+ },
+
+ getLastPackFromIndex(doc_id, callback) {
+ return db.docHistoryIndex.findOne(
+ { _id: ObjectId(doc_id.toString()) },
+ { projection: { packs: { $slice: -1 } } },
+ function (err, indexPack) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (indexPack == null) {
+ return callback()
+ }
+ // the $slice projection leaves a one-element packs array; return that pack
+ return callback(null, indexPack.packs != null ? indexPack.packs[0] : undefined)
+ }
+ )
+ },
+
+ getIndexWithKeys(doc_id, callback) {
+ return PackManager.getIndex(doc_id, function (err, index) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (index == null) {
+ return callback()
+ }
+ for (const pack of Array.from(
+ (index != null ? index.packs : undefined) || []
+ )) {
+ index[pack._id] = pack
+ }
+ return callback(null, index)
+ })
+ },
+
+ initialiseIndex(project_id, doc_id, callback) {
+ return PackManager.findCompletedPacks(
+ project_id,
+ doc_id,
+ function (err, packs) {
+ // console.log 'err', err, 'packs', packs, packs?.length
+ if (err != null) {
+ return callback(err)
+ }
+ if (packs == null) {
+ return callback()
+ }
+ return PackManager.insertPacksIntoIndexWithLock(
+ project_id,
+ doc_id,
+ packs,
+ callback
+ )
+ }
+ )
+ },
+
+ updateIndex(project_id, doc_id, callback) {
+ // find all packs prior to current pack
+ return PackManager.findUnindexedPacks(
+ project_id,
+ doc_id,
+ function (err, newPacks) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (newPacks == null || newPacks.length === 0) {
+ return callback()
+ }
+ return PackManager.insertPacksIntoIndexWithLock(
+ project_id,
+ doc_id,
+ newPacks,
+ function (err) {
+ if (err != null) {
+ return callback(err)
+ }
+ logger.log(
+ { project_id, doc_id, newPacks },
+ 'added new packs to index'
+ )
+ return callback()
+ }
+ )
+ }
+ )
+ },
+
+ findCompletedPacks(project_id, doc_id, callback) {
+ const query = {
+ doc_id: ObjectId(doc_id.toString()),
+ expiresAt: { $exists: false },
+ }
+ return db.docHistory
+ .find(query, { projection: { pack: false } })
+ .sort({ v: 1 })
+ .toArray(function (err, packs) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (packs == null) {
+ return callback()
+ }
+ if (!(packs != null ? packs.length : undefined)) {
+ return callback()
+ }
+ const last = packs.pop() // discard the last pack, if it's still in progress
+ if (last.finalised) {
+ packs.push(last)
+ } // it's finalised so we push it back to archive it
+ return callback(null, packs)
+ })
+ },
+
+ findPacks(project_id, doc_id, callback) {
+ const query = {
+ doc_id: ObjectId(doc_id.toString()),
+ expiresAt: { $exists: false },
+ }
+ return db.docHistory
+ .find(query, { projection: { pack: false } })
+ .sort({ v: 1 })
+ .toArray(function (err, packs) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (packs == null) {
+ return callback()
+ }
+ if (!(packs != null ? packs.length : undefined)) {
+ return callback()
+ }
+ return callback(null, packs)
+ })
+ },
+
+ findUnindexedPacks(project_id, doc_id, callback) {
+ return PackManager.getIndexWithKeys(doc_id, function (err, indexResult) {
+ if (err != null) {
+ return callback(err)
+ }
+ return PackManager.findCompletedPacks(
+ project_id,
+ doc_id,
+ function (err, historyPacks) {
+ let pack
+ if (err != null) {
+ return callback(err)
+ }
+ if (historyPacks == null) {
+ return callback()
+ }
+ // select only the new packs not already in the index
+ let newPacks = (() => {
+ const result = []
+ for (pack of Array.from(historyPacks)) {
+ if (
+ (indexResult != null ? indexResult[pack._id] : undefined) ==
+ null
+ ) {
+ result.push(pack)
+ }
+ }
+ return result
+ })()
+ newPacks = (() => {
+ const result1 = []
+ for (pack of Array.from(newPacks)) {
+ result1.push(
+ _.omit(
+ pack,
+ 'doc_id',
+ 'project_id',
+ 'n',
+ 'sz',
+ 'last_checked',
+ 'finalised'
+ )
+ )
+ }
+ return result1
+ })()
+ if (newPacks.length) {
+ logger.log(
+ { project_id, doc_id, n: newPacks.length },
+ 'found new packs'
+ )
+ }
+ return callback(null, newPacks)
+ }
+ )
+ })
+ },
+
+ insertPacksIntoIndexWithLock(project_id, doc_id, newPacks, callback) {
+ return LockManager.runWithLock(
+ keys.historyIndexLock({ doc_id }),
+ releaseLock =>
+ PackManager._insertPacksIntoIndex(
+ project_id,
+ doc_id,
+ newPacks,
+ releaseLock
+ ),
+ callback
+ )
+ },
+
+ _insertPacksIntoIndex(project_id, doc_id, newPacks, callback) {
+ return db.docHistoryIndex.updateOne(
+ { _id: ObjectId(doc_id.toString()) },
+ {
+ $setOnInsert: { project_id: ObjectId(project_id.toString()) },
+ $push: {
+ packs: { $each: newPacks, $sort: { v: 1 } },
+ },
+ },
+ {
+ upsert: true,
+ },
+ callback
+ )
+ },
+
+ // Archiving packs to S3
+
+ archivePack(project_id, doc_id, pack_id, callback) {
+ const clearFlagOnError = function (err, cb) {
+ if (err != null) {
+ // clear the inS3 flag on error
+ return PackManager.clearPackAsArchiveInProgress(
+ project_id,
+ doc_id,
+ pack_id,
+ function (err2) {
+ if (err2 != null) {
+ return cb(err2)
+ }
+ return cb(err)
+ }
+ )
+ } else {
+ return cb()
+ }
+ }
+ return async.series(
+ [
+ cb =>
+ PackManager.checkArchiveNotInProgress(
+ project_id,
+ doc_id,
+ pack_id,
+ cb
+ ),
+ cb =>
+ PackManager.markPackAsArchiveInProgress(
+ project_id,
+ doc_id,
+ pack_id,
+ cb
+ ),
+ cb =>
+ MongoAWS.archivePack(project_id, doc_id, pack_id, err =>
+ clearFlagOnError(err, cb)
+ ),
+ cb =>
+ PackManager.checkArchivedPack(project_id, doc_id, pack_id, err =>
+ clearFlagOnError(err, cb)
+ ),
+ cb => PackManager.markPackAsArchived(project_id, doc_id, pack_id, cb),
+ cb =>
+ PackManager.setTTLOnArchivedPack(
+ project_id,
+ doc_id,
+ pack_id,
+ callback
+ ),
+ ],
+ callback
+ )
+ },
+
+ checkArchivedPack(project_id, doc_id, pack_id, callback) {
+ return db.docHistory.findOne({ _id: pack_id }, function (err, pack) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (pack == null) {
+ return callback(new Error('pack not found'))
+ }
+ return MongoAWS.readArchivedPack(
+ project_id,
+ doc_id,
+ pack_id,
+ function (err, result) {
+ delete result.last_checked
+ delete pack.last_checked
+ // need to compare ids as ObjectIds with .equals()
+ for (const key of ['_id', 'project_id', 'doc_id']) {
+ if (result[key].equals(pack[key])) {
+ result[key] = pack[key]
+ }
+ }
+ for (let i = 0; i < result.pack.length; i++) {
+ const op = result.pack[i]
+ if (op._id != null && op._id.equals(pack.pack[i]._id)) {
+ op._id = pack.pack[i]._id
+ }
+ }
+ if (_.isEqual(pack, result)) {
+ return callback()
+ } else {
+ logger.err(
+ {
+ pack,
+ result,
+ jsondiff: JSON.stringify(pack) === JSON.stringify(result),
+ },
+ 'difference when comparing packs'
+ )
+ return callback(
+ new Error('pack retrieved from s3 does not match pack in mongo')
+ )
+ }
+ }
+ )
+ })
+ },
+ // Extra methods to test archive/unarchive for a doc_id
+
+ pushOldPacks(project_id, doc_id, callback) {
+ return PackManager.findPacks(project_id, doc_id, function (err, packs) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (!(packs != null ? packs.length : undefined)) {
+ return callback()
+ }
+ return PackManager.processOldPack(
+ project_id,
+ doc_id,
+ packs[0]._id,
+ callback
+ )
+ })
+ },
+
+ pullOldPacks(project_id, doc_id, callback) {
+ return PackManager.loadPacksByVersionRange(
+ project_id,
+ doc_id,
+ null,
+ null,
+ callback
+ )
+ },
+
+ // Processing old packs via worker
+
+ processOldPack(project_id, doc_id, pack_id, callback) {
+ const markAsChecked = err =>
+ PackManager.markPackAsChecked(
+ project_id,
+ doc_id,
+ pack_id,
+ function (err2) {
+ if (err2 != null) {
+ return callback(err2)
+ }
+ return callback(err)
+ }
+ )
+ logger.log({ project_id, doc_id }, 'processing old packs')
+ return db.docHistory.findOne({ _id: pack_id }, function (err, pack) {
+ if (err != null) {
+ return markAsChecked(err)
+ }
+ if (pack == null) {
+ return markAsChecked()
+ }
+ if (pack.expiresAt != null) {
+ return callback()
+ } // return directly
+ return PackManager.finaliseIfNeeded(
+ project_id,
+ doc_id,
+ pack._id,
+ pack,
+ function (err) {
+ if (err != null) {
+ return markAsChecked(err)
+ }
+ return PackManager.updateIndexIfNeeded(
+ project_id,
+ doc_id,
+ function (err) {
+ if (err != null) {
+ return markAsChecked(err)
+ }
+ return PackManager.findUnarchivedPacks(
+ project_id,
+ doc_id,
+ function (err, unarchivedPacks) {
+ if (err != null) {
+ return markAsChecked(err)
+ }
+ if (
+ !(unarchivedPacks != null
+ ? unarchivedPacks.length
+ : undefined)
+ ) {
+ logger.log(
+ { project_id, doc_id },
+ 'no packs need archiving'
+ )
+ return markAsChecked()
+ }
+ return async.eachSeries(
+ unarchivedPacks,
+ (pack, cb) =>
+ PackManager.archivePack(project_id, doc_id, pack._id, cb),
+ function (err) {
+ if (err != null) {
+ return markAsChecked(err)
+ }
+ logger.log({ project_id, doc_id }, 'done processing')
+ return markAsChecked()
+ }
+ )
+ }
+ )
+ }
+ )
+ }
+ )
+ })
+ },
+
+ finaliseIfNeeded(project_id, doc_id, pack_id, pack, callback) {
+ const sz = pack.sz / (1024 * 1024) // size in megabytes
+ const n = pack.n / 1024 // number of ops in units of 1024
+ const age = (Date.now() - pack.meta.end_ts) / DAYS
+ if (age < 30) {
+ // always keep if less than 1 month old
+ logger.log({ project_id, doc_id, pack_id, age }, 'less than 30 days old')
+ return callback()
+ }
+ // compute an archiving threshold which decreases for each month of age
+ const archive_threshold = 30 / age
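+ // Illustrative example: a 60-day-old pack has archive_threshold = 0.5, so it
+ // is archived once it exceeds ~0.5 MB or ~512 ops; anything older than 90
+ // days is archived regardless of size.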
+ if (sz > archive_threshold || n > archive_threshold || age > 90) {
+ logger.log(
+ { project_id, doc_id, pack_id, age, archive_threshold, sz, n },
+ 'meets archive threshold'
+ )
+ return PackManager.markPackAsFinalisedWithLock(
+ project_id,
+ doc_id,
+ pack_id,
+ callback
+ )
+ } else {
+ logger.log(
+ { project_id, doc_id, pack_id, age, archive_threshold, sz, n },
+ 'does not meet archive threshold'
+ )
+ return callback()
+ }
+ },
+
+ markPackAsFinalisedWithLock(project_id, doc_id, pack_id, callback) {
+ return LockManager.runWithLock(
+ keys.historyLock({ doc_id }),
+ releaseLock =>
+ PackManager._markPackAsFinalised(
+ project_id,
+ doc_id,
+ pack_id,
+ releaseLock
+ ),
+ callback
+ )
+ },
+
+ _markPackAsFinalised(project_id, doc_id, pack_id, callback) {
+ logger.log({ project_id, doc_id, pack_id }, 'marking pack as finalised')
+ return db.docHistory.updateOne(
+ { _id: pack_id },
+ { $set: { finalised: true } },
+ callback
+ )
+ },
+
+ updateIndexIfNeeded(project_id, doc_id, callback) {
+ logger.log({ project_id, doc_id }, 'updating doc history index')
+ return PackManager.getIndexWithKeys(doc_id, function (err, index) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (index == null) {
+ return PackManager.initialiseIndex(project_id, doc_id, callback)
+ } else {
+ return PackManager.updateIndex(project_id, doc_id, callback)
+ }
+ })
+ },
+
+ markPackAsChecked(project_id, doc_id, pack_id, callback) {
+ logger.log({ project_id, doc_id, pack_id }, 'marking pack as checked')
+ return db.docHistory.updateOne(
+ { _id: pack_id },
+ { $currentDate: { last_checked: true } },
+ callback
+ )
+ },
+
+ findUnarchivedPacks(project_id, doc_id, callback) {
+ return PackManager.getIndex(doc_id, function (err, indexResult) {
+ if (err != null) {
+ return callback(err)
+ }
+ const indexPacks =
+ (indexResult != null ? indexResult.packs : undefined) || []
+ const unArchivedPacks = (() => {
+ const result = []
+ for (const pack of Array.from(indexPacks)) {
+ if (pack.inS3 == null) {
+ result.push(pack)
+ }
+ }
+ return result
+ })()
+ if (unArchivedPacks.length) {
+ logger.log(
+ { project_id, doc_id, n: unArchivedPacks.length },
+ 'find unarchived packs'
+ )
+ }
+ return callback(null, unArchivedPacks)
+ })
+ },
+
+ // Archive locking flags
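+ // The `inS3` flag on an index entry acts as a three-state marker:
+ // missing = not archived, false = archive in progress, true = archived.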
+
+ checkArchiveNotInProgress(project_id, doc_id, pack_id, callback) {
+ logger.log(
+ { project_id, doc_id, pack_id },
+ 'checking if archive in progress'
+ )
+ return PackManager.getPackFromIndex(
+ doc_id,
+ pack_id,
+ function (err, result) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (result == null) {
+ return callback(new Error('pack not found in index'))
+ }
+ if (result.inS3) {
+ return callback(new Error('pack archiving already done'))
+ } else if (result.inS3 != null) {
+ return callback(new Error('pack archiving already in progress'))
+ } else {
+ return callback()
+ }
+ }
+ )
+ },
+
+ markPackAsArchiveInProgress(project_id, doc_id, pack_id, callback) {
+ logger.log(
+ { project_id, doc_id },
+ 'marking pack as archive in progress'
+ )
+ return db.docHistoryIndex.findOneAndUpdate(
+ {
+ _id: ObjectId(doc_id.toString()),
+ packs: { $elemMatch: { _id: pack_id, inS3: { $exists: false } } },
+ },
+ { $set: { 'packs.$.inS3': false } },
+ { projection: { 'packs.$': 1 } },
+ function (err, result) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (!result.value) {
+ return callback(new Error('archive is already in progress'))
+ }
+ logger.log(
+ { project_id, doc_id, pack_id },
+ 'marked as archive in progress'
+ )
+ return callback()
+ }
+ )
+ },
+
+ clearPackAsArchiveInProgress(project_id, doc_id, pack_id, callback) {
+ logger.log(
+ { project_id, doc_id, pack_id },
+ 'clearing as archive in progress'
+ )
+ return db.docHistoryIndex.updateOne(
+ {
+ _id: ObjectId(doc_id.toString()),
+ packs: { $elemMatch: { _id: pack_id, inS3: false } },
+ },
+ { $unset: { 'packs.$.inS3': true } },
+ callback
+ )
+ },
+
+ markPackAsArchived(project_id, doc_id, pack_id, callback) {
+ logger.log({ project_id, doc_id, pack_id }, 'marking pack as archived')
+ return db.docHistoryIndex.findOneAndUpdate(
+ {
+ _id: ObjectId(doc_id.toString()),
+ packs: { $elemMatch: { _id: pack_id, inS3: false } },
+ },
+ { $set: { 'packs.$.inS3': true } },
+ { projection: { 'packs.$': 1 } },
+ function (err, result) {
+ if (err != null) {
+ return callback(err)
+ }
+ if (!result.value) {
+ return callback(new Error('archive is not marked as in progress'))
+ }
+ logger.log({ project_id, doc_id, pack_id }, 'marked as archived')
+ return callback()
+ }
+ )
+ },
+
+ setTTLOnArchivedPack(project_id, doc_id, pack_id, callback) {
+ return db.docHistory.updateOne(
+ { _id: pack_id },
+ { $set: { expiresAt: new Date(Date.now() + 1 * DAYS) } },
+ function (err) {
+ if (err) {
+ return callback(err)
+ }
+ logger.log({ project_id, doc_id, pack_id }, 'set expiry on pack')
+ return callback()
+ }
+ )
+ },
+}
+
+// _getOneDayInFutureWithRandomDelay: ->
+// thirtyMins = 1000 * 60 * 30
+// randomThirtyMinMax = Math.ceil(Math.random() * thirtyMins)
+// return new Date(Date.now() + randomThirtyMinMax + 1*DAYS)
diff --git a/services/track-changes/app/js/PackWorker.js b/services/track-changes/app/js/PackWorker.js
new file mode 100644
index 0000000000..38e99101de
--- /dev/null
+++ b/services/track-changes/app/js/PackWorker.js
@@ -0,0 +1,212 @@
+/* eslint-disable
+ camelcase,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let LIMIT, pending
+let project_id, doc_id
+const { callbackify } = require('util')
+const Settings = require('@overleaf/settings')
+const async = require('async')
+const _ = require('underscore')
+const { db, ObjectId, waitForDb, closeDb } = require('./mongodb')
+const fs = require('fs')
+const Metrics = require('@overleaf/metrics')
+Metrics.initialize('track-changes')
+const logger = require('logger-sharelatex')
+logger.initialize('track-changes-packworker')
+if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
+ logger.initializeErrorReporting(Settings.sentry.dsn)
+}
+
+const DAYS = 24 * 3600 * 1000
+
+const LockManager = require('./LockManager')
+const PackManager = require('./PackManager')
+
+// this worker script is forked by the main process to look for
+// document histories which can be archived
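+// Invocation (illustrative): `node PackWorker.js 1000 1000 1800000` to process
+// up to 1000 packs with a 1s delay and a 30 minute timeout, or
+// `node PackWorker.js /tmp/docs.txt` where each line of the file is
+// "<project_id> <doc_id>".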
+
+const source = process.argv[2]
+const DOCUMENT_PACK_DELAY = Number(process.argv[3]) || 1000
+const TIMEOUT = Number(process.argv[4]) || 30 * 60 * 1000
+let COUNT = 0 // number processed
+let TOTAL = 0 // total number to process
+
+if (!source.match(/^[0-9]+$/)) {
+ const file = fs.readFileSync(source)
+ const result = (() => {
+ const result1 = []
+ for (const line of Array.from(file.toString().split('\n'))) {
+ ;[project_id, doc_id] = Array.from(line.split(' '))
+ result1.push({ doc_id, project_id })
+ }
+ return result1
+ })()
+ pending = _.filter(result, row =>
+ __guard__(row != null ? row.doc_id : undefined, x =>
+ x.match(/^[a-f0-9]{24}$/)
+ )
+ )
+} else {
+ LIMIT = Number(process.argv[2]) || 1000
+}
+
+let shutDownRequested = false
+const shutDownTimer = setTimeout(function () {
+ logger.log('pack timed out, requesting shutdown')
+ // start the shutdown on the next pack
+ shutDownRequested = true
+ // do a hard shutdown after a further 5 minutes
+ const hardTimeout = setTimeout(function () {
+ logger.error('HARD TIMEOUT in pack archive worker')
+ return process.exit()
+ }, 5 * 60 * 1000)
+ return hardTimeout.unref()
+}, TIMEOUT)
+
+logger.log(
+ `checking for updates, limit=${LIMIT}, delay=${DOCUMENT_PACK_DELAY}, timeout=${TIMEOUT}`
+)
+
+const finish = function () {
+ if (shutDownTimer != null) {
+ logger.log('cancelling timeout')
+ clearTimeout(shutDownTimer)
+ }
+ logger.log('closing db')
+ callbackify(closeDb)(function () {
+ logger.log('closing LockManager Redis Connection')
+ return LockManager.close(function () {
+ logger.log(
+ { processedCount: COUNT, allCount: TOTAL },
+ 'ready to exit from pack archive worker'
+ )
+ const hardTimeout = setTimeout(function () {
+ logger.error('hard exit from pack archive worker')
+ return process.exit(1)
+ }, 5 * 1000)
+ return hardTimeout.unref()
+ })
+ })
+}
+
+process.on('exit', code => logger.log({ code }, 'pack archive worker exited'))
+
+const processUpdates = pending =>
+ async.eachSeries(
+ pending,
+ function (result, callback) {
+ let _id
+ ;({ _id, project_id, doc_id } = result)
+ COUNT++
+ logger.log({ project_id, doc_id }, `processing ${COUNT}/${TOTAL}`)
+ if (project_id == null || doc_id == null) {
+ logger.log(
+ { project_id, doc_id },
+ 'skipping pack, missing project/doc id'
+ )
+ return callback()
+ }
+ const handler = function (err, result) {
+ if (err != null && err.code === 'InternalError' && err.retryable) {
+ logger.warn(
+ { err, result },
+ 'ignoring S3 error in pack archive worker'
+ )
+ // Ignore any s3 errors due to random problems
+ err = null
+ }
+ if (err != null) {
+ logger.error({ err, result }, 'error in pack archive worker')
+ return callback(err)
+ }
+ if (shutDownRequested) {
+ logger.warn('shutting down pack archive worker')
+ return callback(new Error('shutdown'))
+ }
+ return setTimeout(() => callback(err, result), DOCUMENT_PACK_DELAY)
+ }
+ if (_id == null) {
+ return PackManager.pushOldPacks(project_id, doc_id, handler)
+ } else {
+ return PackManager.processOldPack(project_id, doc_id, _id, handler)
+ }
+ },
+ function (err, results) {
+ if (err != null && err.message !== 'shutdown') {
+ logger.error({ err }, 'error in pack archive worker processUpdates')
+ }
+ return finish()
+ }
+ )
+// find the packs which can be archived
+
+const ObjectIdFromDate = function (date) {
+ const id = Math.floor(date.getTime() / 1000).toString(16) + '0000000000000000'
+ return ObjectId(id)
+}
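+// e.g. new Date('2021-01-01T00:00:00Z') (epoch 1609459200 = 0x5fee6600) maps to
+// ObjectId('5fee66000000000000000000'), which sorts before any _id created later.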
+
+// new approach, two passes
+// find packs to be marked as finalised:true, those which have a newer pack present
+// then only consider finalised:true packs for archiving
+
+waitForDb()
+ .then(() => {
+ if (pending != null) {
+ logger.log(`got ${pending.length} entries from ${source}`)
+ processUpdates(pending)
+ } else {
+ processFromOneWeekAgo()
+ }
+ })
+ .catch(err => {
+ logger.fatal({ err }, 'cannot connect to mongo, exiting')
+ process.exit(1)
+ })
+
+function processFromOneWeekAgo() {
+ const oneWeekAgo = new Date(Date.now() - 7 * DAYS)
+ db.docHistory
+ .find(
+ {
+ expiresAt: { $exists: false },
+ project_id: { $exists: true },
+ v_end: { $exists: true },
+ _id: { $lt: ObjectIdFromDate(oneWeekAgo) },
+ last_checked: { $lt: oneWeekAgo },
+ },
+ { projection: { _id: 1, doc_id: 1, project_id: 1 } }
+ )
+ .sort({
+ last_checked: 1,
+ })
+ .limit(LIMIT)
+ .toArray(function (err, results) {
+ if (err != null) {
+ logger.log({ err }, 'error checking for updates')
+ finish()
+ return
+ }
+ pending = _.uniq(results, false, result => result.doc_id.toString())
+ TOTAL = pending.length
+ logger.log(`found ${TOTAL} documents to archive`)
+ return processUpdates(pending)
+ })
+}
+
+function __guard__(value, transform) {
+ return typeof value !== 'undefined' && value !== null
+ ? transform(value)
+ : undefined
+}
diff --git a/services/track-changes/app/js/ProjectIterator.js b/services/track-changes/app/js/ProjectIterator.js
new file mode 100644
index 0000000000..2b9da401b3
--- /dev/null
+++ b/services/track-changes/app/js/ProjectIterator.js
@@ -0,0 +1,113 @@
+/* eslint-disable
+ no-unmodified-loop-condition,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let ProjectIterator
+const Heap = require('heap')
+
+module.exports =
+ ProjectIterator =
+ ProjectIterator =
+ class ProjectIterator {
+ constructor(packs, before, getPackByIdFn) {
+ this.before = before
+ this.getPackByIdFn = getPackByIdFn
+ const byEndTs = (a, b) =>
+ b.meta.end_ts - a.meta.end_ts || a.fromIndex - b.fromIndex
+ this.packs = packs.slice().sort(byEndTs)
+ this.queue = new Heap(byEndTs)
+ }
+
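+ // next() walks the project history from newest to oldest: packs are kept
+ // sorted by descending end_ts, ops from retrieved packs sit in a heap, and
+ // each call returns the queued ops newer than the most recent pack not yet
+ // fetched (the low-water mark), fetching further packs on demand.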
+ next(callback) {
+ // what's up next
+ // console.log ">>> top item", iterator.packs[0]
+ const iterator = this
+ const { before } = this
+ const { queue } = iterator
+ const opsToReturn = []
+ let nextPack = iterator.packs[0]
+ let lowWaterMark =
+ (nextPack != null ? nextPack.meta.end_ts : undefined) || 0
+ let nextItem = queue.peek()
+
+ // console.log "queue empty?", queue.empty()
+ // console.log "nextItem", nextItem
+ // console.log "nextItem.meta.end_ts", nextItem?.meta.end_ts
+ // console.log "lowWaterMark", lowWaterMark
+
+ while (
+ before != null &&
+ (nextPack != null ? nextPack.meta.start_ts : undefined) > before
+ ) {
+ // discard pack that is outside range
+ iterator.packs.shift()
+ nextPack = iterator.packs[0]
+ lowWaterMark =
+ (nextPack != null ? nextPack.meta.end_ts : undefined) || 0
+ }
+
+ if (
+ (queue.empty() ||
+ (nextItem != null ? nextItem.meta.end_ts : undefined) <=
+ lowWaterMark) &&
+ nextPack != null
+ ) {
+ // retrieve the next pack and populate the queue
+ return this.getPackByIdFn(
+ nextPack.project_id,
+ nextPack.doc_id,
+ nextPack._id,
+ function (err, pack) {
+ if (err != null) {
+ return callback(err)
+ }
+ iterator.packs.shift() // have now retrieved this pack, remove it
+ // console.log "got pack", pack
+ for (const op of Array.from(pack.pack)) {
+ // console.log "adding op", op
+ if (before == null || op.meta.end_ts < before) {
+ op.doc_id = nextPack.doc_id
+ op.project_id = nextPack.project_id
+ queue.push(op)
+ }
+ }
+ // now try again
+ return iterator.next(callback)
+ }
+ )
+ }
+
+ // console.log "nextItem", nextItem, "lowWaterMark", lowWaterMark
+ while (
+ nextItem != null &&
+ (nextItem != null ? nextItem.meta.end_ts : undefined) > lowWaterMark
+ ) {
+ opsToReturn.push(nextItem)
+ queue.pop()
+ nextItem = queue.peek()
+ }
+
+ // console.log "queue empty?", queue.empty()
+ // console.log "nextPack", nextPack?
+
+ if (queue.empty() && nextPack == null) {
+ // got everything
+ iterator._done = true
+ }
+
+ return callback(null, opsToReturn)
+ }
+
+ done() {
+ return this._done
+ }
+ }
diff --git a/services/track-changes/app/js/RedisManager.js b/services/track-changes/app/js/RedisManager.js
new file mode 100644
index 0000000000..4998c4757f
--- /dev/null
+++ b/services/track-changes/app/js/RedisManager.js
@@ -0,0 +1,170 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let RedisManager
+const Settings = require('@overleaf/settings')
+const redis = require('@overleaf/redis-wrapper')
+const rclient = redis.createClient(Settings.redis.history)
+const Keys = Settings.redis.history.key_schema
+const async = require('async')
+
+module.exports = RedisManager = {
+ getOldestDocUpdates(doc_id, batchSize, callback) {
+ if (callback == null) {
+ callback = function (error, jsonUpdates) {}
+ }
+ const key = Keys.uncompressedHistoryOps({ doc_id })
+ return rclient.lrange(key, 0, batchSize - 1, callback)
+ },
+
+ expandDocUpdates(jsonUpdates, callback) {
+ let rawUpdates
+ if (callback == null) {
+ callback = function (error, rawUpdates) {}
+ }
+ try {
+ rawUpdates = Array.from(jsonUpdates || []).map(update =>
+ JSON.parse(update)
+ )
+ } catch (e) {
+ return callback(e)
+ }
+ return callback(null, rawUpdates)
+ },
+
+ deleteAppliedDocUpdates(project_id, doc_id, docUpdates, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const multi = rclient.multi()
+ // Delete all the updates which have been applied (exact match)
+ for (const update of Array.from(docUpdates || [])) {
+ multi.lrem(Keys.uncompressedHistoryOps({ doc_id }), 1, update)
+ }
+ return multi.exec(function (error, results) {
+ if (error != null) {
+ return callback(error)
+ }
+ // It's ok to delete the doc_id from the set here. Even though the list
+ // of updates may not be empty, we will continue to process it until it is.
+ return rclient.srem(
+ Keys.docsWithHistoryOps({ project_id }),
+ doc_id,
+ function (error) {
+ if (error != null) {
+ return callback(error)
+ }
+ return callback(null)
+ }
+ )
+ })
+ },
+
+ getDocIdsWithHistoryOps(project_id, callback) {
+ if (callback == null) {
+ callback = function (error, doc_ids) {}
+ }
+ return rclient.smembers(Keys.docsWithHistoryOps({ project_id }), callback)
+ },
+
+ // iterate over keys asynchronously using redis scan (non-blocking)
+ // handle all the cluster nodes or single redis server
+ _getKeys(pattern, callback) {
+ const nodes = (typeof rclient.nodes === 'function'
+ ? rclient.nodes('master')
+ : undefined) || [rclient]
+ const doKeyLookupForNode = (node, cb) =>
+ RedisManager._getKeysFromNode(node, pattern, cb)
+ return async.concatSeries(nodes, doKeyLookupForNode, callback)
+ },
+
+ _getKeysFromNode(node, pattern, callback) {
+ let cursor = 0 // redis iterator
+ const keySet = {} // use hash to avoid duplicate results
+ // scan over all keys looking for pattern
+ var doIteration = cb =>
+ node.scan(
+ cursor,
+ 'MATCH',
+ pattern,
+ 'COUNT',
+ 1000,
+ function (error, reply) {
+ let keys
+ if (error != null) {
+ return callback(error)
+ }
+ ;[cursor, keys] = Array.from(reply)
+ for (const key of Array.from(keys)) {
+ keySet[key] = true
+ }
+ if (cursor === '0') {
+ // note redis returns string result not numeric
+ return callback(null, Object.keys(keySet))
+ } else {
+ return doIteration()
+ }
+ }
+ )
+ return doIteration()
+ },
+
+ // extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b
+ // or DocsWithHistoryOps:{57fd0b1f53a8396d22b2c24b} (for redis cluster)
+ _extractIds(keyList) {
+ const ids = (() => {
+ const result = []
+ for (const key of Array.from(keyList)) {
+ const m = key.match(/:\{?([0-9a-f]{24})\}?/) // extract object id
+ result.push(m[1])
+ }
+ return result
+ })()
+ return ids
+ },
+
+ getProjectIdsWithHistoryOps(callback) {
+ if (callback == null) {
+ callback = function (error, project_ids) {}
+ }
+ return RedisManager._getKeys(
+ Keys.docsWithHistoryOps({ project_id: '*' }),
+ function (error, project_keys) {
+ if (error != null) {
+ return callback(error)
+ }
+ const project_ids = RedisManager._extractIds(project_keys)
+ return callback(error, project_ids)
+ }
+ )
+ },
+
+ getAllDocIdsWithHistoryOps(callback) {
+ // return all the docids, to find dangling history entries after
+ // everything is flushed.
+ if (callback == null) {
+ callback = function (error, doc_ids) {}
+ }
+ return RedisManager._getKeys(
+ Keys.uncompressedHistoryOps({ doc_id: '*' }),
+ function (error, doc_keys) {
+ if (error != null) {
+ return callback(error)
+ }
+ const doc_ids = RedisManager._extractIds(doc_keys)
+ return callback(error, doc_ids)
+ }
+ )
+ },
+}
diff --git a/services/track-changes/app/js/RestoreManager.js b/services/track-changes/app/js/RestoreManager.js
new file mode 100644
index 0000000000..84ddaf5236
--- /dev/null
+++ b/services/track-changes/app/js/RestoreManager.js
@@ -0,0 +1,48 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let RestoreManager
+const DocumentUpdaterManager = require('./DocumentUpdaterManager')
+const DiffManager = require('./DiffManager')
+const logger = require('logger-sharelatex')
+
+module.exports = RestoreManager = {
+ restoreToBeforeVersion(project_id, doc_id, version, user_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ logger.log({ project_id, doc_id, version, user_id }, 'restoring document')
+ return DiffManager.getDocumentBeforeVersion(
+ project_id,
+ doc_id,
+ version,
+ function (error, content) {
+ if (error != null) {
+ return callback(error)
+ }
+ return DocumentUpdaterManager.setDocument(
+ project_id,
+ doc_id,
+ content,
+ user_id,
+ function (error) {
+ if (error != null) {
+ return callback(error)
+ }
+ return callback()
+ }
+ )
+ }
+ )
+ },
+}
diff --git a/services/track-changes/app/js/UpdateCompressor.js b/services/track-changes/app/js/UpdateCompressor.js
new file mode 100644
index 0000000000..8e447f95c0
--- /dev/null
+++ b/services/track-changes/app/js/UpdateCompressor.js
@@ -0,0 +1,340 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ new-cap,
+ no-throw-literal,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS103: Rewrite code to no longer use __guard__
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let oneMinute, twoMegabytes, UpdateCompressor
+const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos)
+const strRemove = (s1, pos, length) => s1.slice(0, pos) + s1.slice(pos + length)
+
+const { diff_match_patch } = require('../lib/diff_match_patch')
+const dmp = new diff_match_patch()
+
+module.exports = UpdateCompressor = {
+ NOOP: 'noop',
+
+ // Updates come from the doc updater in format
+ // {
+ // op: [ { ... op1 ... }, { ... op2 ... } ]
+ // meta: { ts: ..., user_id: ... }
+ // }
+ // but it's easier to work with one op per update, so convert these updates to
+ // our compressed format
+ // [{
+ // op: op1
+ // meta: { start_ts: ... , end_ts: ..., user_id: ... }
+ // }, {
+ // op: op2
+ // meta: { start_ts: ... , end_ts: ..., user_id: ... }
+ // }]
+ convertToSingleOpUpdates(updates) {
+ const splitUpdates = []
+ for (const update of Array.from(updates)) {
+ // Reject any ops that are not inserts or deletes, e.g. comments
+ const ops = update.op.filter(o => o.i != null || o.d != null)
+ if (ops.length === 0) {
+ splitUpdates.push({
+ op: UpdateCompressor.NOOP,
+ meta: {
+ start_ts: update.meta.start_ts || update.meta.ts,
+ end_ts: update.meta.end_ts || update.meta.ts,
+ user_id: update.meta.user_id,
+ },
+ v: update.v,
+ })
+ } else {
+ for (const op of Array.from(ops)) {
+ splitUpdates.push({
+ op,
+ meta: {
+ start_ts: update.meta.start_ts || update.meta.ts,
+ end_ts: update.meta.end_ts || update.meta.ts,
+ user_id: update.meta.user_id,
+ },
+ v: update.v,
+ })
+ }
+ }
+ }
+ return splitUpdates
+ },
+
+ concatUpdatesWithSameVersion(updates) {
+ const concattedUpdates = []
+ for (const update of Array.from(updates)) {
+ const lastUpdate = concattedUpdates[concattedUpdates.length - 1]
+ if (lastUpdate != null && lastUpdate.v === update.v) {
+ if (update.op !== UpdateCompressor.NOOP) {
+ lastUpdate.op.push(update.op)
+ }
+ } else {
+ const nextUpdate = {
+ op: [],
+ meta: update.meta,
+ v: update.v,
+ }
+ if (update.op !== UpdateCompressor.NOOP) {
+ nextUpdate.op.push(update.op)
+ }
+ concattedUpdates.push(nextUpdate)
+ }
+ }
+ return concattedUpdates
+ },
+
+ compressRawUpdates(lastPreviousUpdate, rawUpdates) {
+ if (
+ __guard__(
+ lastPreviousUpdate != null ? lastPreviousUpdate.op : undefined,
+ x => x.length
+ ) > 1
+ ) {
+ // if the last previous update was an array op, don't compress onto it.
+ // This avoids cases where array length changes but version number doesn't
+ return [lastPreviousUpdate].concat(
+ UpdateCompressor.compressRawUpdates(null, rawUpdates)
+ )
+ }
+ if (lastPreviousUpdate != null) {
+ rawUpdates = [lastPreviousUpdate].concat(rawUpdates)
+ }
+ let updates = UpdateCompressor.convertToSingleOpUpdates(rawUpdates)
+ updates = UpdateCompressor.compressUpdates(updates)
+ return UpdateCompressor.concatUpdatesWithSameVersion(updates)
+ },
+
+ compressUpdates(updates) {
+ if (updates.length === 0) {
+ return []
+ }
+
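+ // Fold each update into the last compressed one where possible:
+ // _concatTwoUpdates returns either one merged update, the two updates
+ // unchanged, or (for a delete+insert pair) the minimal diff between them.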
+ let compressedUpdates = [updates.shift()]
+ for (const update of Array.from(updates)) {
+ const lastCompressedUpdate = compressedUpdates.pop()
+ if (lastCompressedUpdate != null) {
+ compressedUpdates = compressedUpdates.concat(
+ UpdateCompressor._concatTwoUpdates(lastCompressedUpdate, update)
+ )
+ } else {
+ compressedUpdates.push(update)
+ }
+ }
+
+ return compressedUpdates
+ },
+
+ MAX_TIME_BETWEEN_UPDATES: (oneMinute = 60 * 1000),
+ MAX_UPDATE_SIZE: (twoMegabytes = 2 * 1024 * 1024),
+
+ _concatTwoUpdates(firstUpdate, secondUpdate) {
+ let offset
+ firstUpdate = {
+ op: firstUpdate.op,
+ meta: {
+ user_id: firstUpdate.meta.user_id || null,
+ start_ts: firstUpdate.meta.start_ts || firstUpdate.meta.ts,
+ end_ts: firstUpdate.meta.end_ts || firstUpdate.meta.ts,
+ },
+ v: firstUpdate.v,
+ }
+ secondUpdate = {
+ op: secondUpdate.op,
+ meta: {
+ user_id: secondUpdate.meta.user_id || null,
+ start_ts: secondUpdate.meta.start_ts || secondUpdate.meta.ts,
+ end_ts: secondUpdate.meta.end_ts || secondUpdate.meta.ts,
+ },
+ v: secondUpdate.v,
+ }
+
+ if (firstUpdate.meta.user_id !== secondUpdate.meta.user_id) {
+ return [firstUpdate, secondUpdate]
+ }
+
+ if (
+ secondUpdate.meta.start_ts - firstUpdate.meta.end_ts >
+ UpdateCompressor.MAX_TIME_BETWEEN_UPDATES
+ ) {
+ return [firstUpdate, secondUpdate]
+ }
+
+ const firstOp = firstUpdate.op
+ const secondOp = secondUpdate.op
+
+ const firstSize =
+ (firstOp.i != null ? firstOp.i.length : undefined) ||
+ (firstOp.d != null ? firstOp.d.length : undefined)
+ const secondSize =
+ (secondOp.i != null ? secondOp.i.length : undefined) ||
+ (secondOp.d != null ? secondOp.d.length : undefined)
+
+ // Two inserts
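+ // e.g. (illustrative) { i: 'foo', p: 3 } followed by { i: 'bar', p: 5 }
+ // merges into { i: 'fobaro', p: 3 }.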
+ if (
+ firstOp.i != null &&
+ secondOp.i != null &&
+ firstOp.p <= secondOp.p &&
+ secondOp.p <= firstOp.p + firstOp.i.length &&
+ firstSize + secondSize < UpdateCompressor.MAX_UPDATE_SIZE
+ ) {
+ return [
+ {
+ meta: {
+ start_ts: firstUpdate.meta.start_ts,
+ end_ts: secondUpdate.meta.end_ts,
+ user_id: firstUpdate.meta.user_id,
+ },
+ op: {
+ p: firstOp.p,
+ i: strInject(firstOp.i, secondOp.p - firstOp.p, secondOp.i),
+ },
+ v: secondUpdate.v,
+ },
+ ]
+ // Two deletes
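+ // e.g. (illustrative) { d: 'foo', p: 5 } followed by { d: 'bar', p: 3 }
+ // merges into { d: 'bafoor', p: 3 }.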
+ } else if (
+ firstOp.d != null &&
+ secondOp.d != null &&
+ secondOp.p <= firstOp.p &&
+ firstOp.p <= secondOp.p + secondOp.d.length &&
+ firstSize + secondSize < UpdateCompressor.MAX_UPDATE_SIZE
+ ) {
+ return [
+ {
+ meta: {
+ start_ts: firstUpdate.meta.start_ts,
+ end_ts: secondUpdate.meta.end_ts,
+ user_id: firstUpdate.meta.user_id,
+ },
+ op: {
+ p: secondOp.p,
+ d: strInject(secondOp.d, firstOp.p - secondOp.p, firstOp.d),
+ },
+ v: secondUpdate.v,
+ },
+ ]
+ // An insert and then a delete
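+ // e.g. (illustrative) { i: 'abc', p: 3 } followed by { d: 'b', p: 4 }
+ // reduces to { i: 'ac', p: 3 }, since the delete falls inside the insert.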
+ } else if (
+ firstOp.i != null &&
+ secondOp.d != null &&
+ firstOp.p <= secondOp.p &&
+ secondOp.p <= firstOp.p + firstOp.i.length
+ ) {
+ offset = secondOp.p - firstOp.p
+ const insertedText = firstOp.i.slice(offset, offset + secondOp.d.length)
+ // Only trim the insert when the delete is fully contained within it
+ if (insertedText === secondOp.d) {
+ const insert = strRemove(firstOp.i, offset, secondOp.d.length)
+ return [
+ {
+ meta: {
+ start_ts: firstUpdate.meta.start_ts,
+ end_ts: secondUpdate.meta.end_ts,
+ user_id: firstUpdate.meta.user_id,
+ },
+ op: {
+ p: firstOp.p,
+ i: insert,
+ },
+ v: secondUpdate.v,
+ },
+ ]
+ } else {
+ // This will only happen if the delete extends outside the insert
+ return [firstUpdate, secondUpdate]
+ }
+
+ // A delete then an insert at the same place, likely a copy-paste of a chunk of content
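+ // e.g. (illustrative) deleting 'hello' at p and re-inserting 'hallo' at p is
+ // reduced to the minimal diff between the two strings (here, roughly a delete
+ // of 'e' and an insert of 'a'), with positions offset back to p.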
+ } else if (
+ firstOp.d != null &&
+ secondOp.i != null &&
+ firstOp.p === secondOp.p
+ ) {
+ offset = firstOp.p
+ const diff_ops = this.diffAsShareJsOps(firstOp.d, secondOp.i)
+ if (diff_ops.length === 0) {
+ return [
+ {
+ // Noop
+ meta: {
+ start_ts: firstUpdate.meta.start_ts,
+ end_ts: secondUpdate.meta.end_ts,
+ user_id: firstUpdate.meta.user_id,
+ },
+ op: {
+ p: firstOp.p,
+ i: '',
+ },
+ v: secondUpdate.v,
+ },
+ ]
+ } else {
+ return diff_ops.map(function (op) {
+ op.p += offset
+ return {
+ meta: {
+ start_ts: firstUpdate.meta.start_ts,
+ end_ts: secondUpdate.meta.end_ts,
+ user_id: firstUpdate.meta.user_id,
+ },
+ op,
+ v: secondUpdate.v,
+ }
+ })
+ }
+ } else {
+ return [firstUpdate, secondUpdate]
+ }
+ },
+
+ ADDED: 1,
+ REMOVED: -1,
+ UNCHANGED: 0,
+ diffAsShareJsOps(before, after, callback) {
+ if (callback == null) {
+ callback = function (error, ops) {}
+ }
+ const diffs = dmp.diff_main(before, after)
+ dmp.diff_cleanupSemantic(diffs)
+
+ const ops = []
+ let position = 0
+ for (const diff of Array.from(diffs)) {
+ const type = diff[0]
+ const content = diff[1]
+ if (type === this.ADDED) {
+ ops.push({
+ i: content,
+ p: position,
+ })
+ position += content.length
+ } else if (type === this.REMOVED) {
+ ops.push({
+ d: content,
+ p: position,
+ })
+ } else if (type === this.UNCHANGED) {
+ position += content.length
+ } else {
+ throw 'Unknown type'
+ }
+ }
+ return ops
+ },
+}
+
+function __guard__(value, transform) {
+ return typeof value !== 'undefined' && value !== null
+ ? transform(value)
+ : undefined
+}
diff --git a/services/track-changes/app/js/UpdateTrimmer.js b/services/track-changes/app/js/UpdateTrimmer.js
new file mode 100644
index 0000000000..b37bf545f2
--- /dev/null
+++ b/services/track-changes/app/js/UpdateTrimmer.js
@@ -0,0 +1,80 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let UpdateTrimmer
+const MongoManager = require('./MongoManager')
+const WebApiManager = require('./WebApiManager')
+const logger = require('logger-sharelatex')
+
+module.exports = UpdateTrimmer = {
+ shouldTrimUpdates(project_id, callback) {
+ if (callback == null) {
+ callback = function (error, shouldTrim) {}
+ }
+ return MongoManager.getProjectMetaData(
+ project_id,
+ function (error, metadata) {
+ if (error != null) {
+ return callback(error)
+ }
+ if (metadata != null ? metadata.preserveHistory : undefined) {
+ return callback(null, false)
+ } else {
+ return WebApiManager.getProjectDetails(
+ project_id,
+ function (error, details) {
+ if (error != null) {
+ return callback(error)
+ }
+ logger.log({ project_id, details }, 'got details')
+ if (
+ __guard__(
+ details != null ? details.features : undefined,
+ x => x.versioning
+ )
+ ) {
+ return MongoManager.setProjectMetaData(
+ project_id,
+ { preserveHistory: true },
+ function (error) {
+ if (error != null) {
+ return callback(error)
+ }
+ return MongoManager.upgradeHistory(
+ project_id,
+ function (error) {
+ if (error != null) {
+ return callback(error)
+ }
+ return callback(null, false)
+ }
+ )
+ }
+ )
+ } else {
+ return callback(null, true)
+ }
+ }
+ )
+ }
+ }
+ )
+ },
+}
+
+function __guard__(value, transform) {
+ return typeof value !== 'undefined' && value !== null
+ ? transform(value)
+ : undefined
+}
diff --git a/services/track-changes/app/js/UpdatesManager.js b/services/track-changes/app/js/UpdatesManager.js
new file mode 100644
index 0000000000..26c1104569
--- /dev/null
+++ b/services/track-changes/app/js/UpdatesManager.js
@@ -0,0 +1,888 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let fiveMinutes, UpdatesManager
+const MongoManager = require('./MongoManager')
+const PackManager = require('./PackManager')
+const RedisManager = require('./RedisManager')
+const UpdateCompressor = require('./UpdateCompressor')
+const LockManager = require('./LockManager')
+const WebApiManager = require('./WebApiManager')
+const UpdateTrimmer = require('./UpdateTrimmer')
+const logger = require('logger-sharelatex')
+const async = require('async')
+const _ = require('underscore')
+const Settings = require('@overleaf/settings')
+const keys = Settings.redis.lock.key_schema
+
+module.exports = UpdatesManager = {
+ compressAndSaveRawUpdates(
+ project_id,
+ doc_id,
+ rawUpdates,
+ temporary,
+ callback
+ ) {
+ let i
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ const { length } = rawUpdates
+ if (length === 0) {
+ return callback()
+ }
+
+ // check that ops are in the correct order
+ for (i = 0; i < rawUpdates.length; i++) {
+ const op = rawUpdates[i]
+ if (i > 0) {
+ const thisVersion = op != null ? op.v : undefined
+ const prevVersion = __guard__(rawUpdates[i - 1], x => x.v)
+ if (!(prevVersion < thisVersion)) {
+ logger.error(
+ {
+ project_id,
+ doc_id,
+ rawUpdates,
+ temporary,
+ thisVersion,
+ prevVersion,
+ },
+ 'op versions out of order'
+ )
+ }
+ }
+ }
+
+ // FIXME: we no longer need the lastCompressedUpdate, so change functions not to need it
+ // CORRECTION: we do use it to log the time in case of error
+ return MongoManager.peekLastCompressedUpdate(
+ doc_id,
+ function (error, lastCompressedUpdate, lastVersion) {
+ // lastCompressedUpdate is the most recent update in Mongo, and
+ // lastVersion is its sharejs version number.
+ //
+ // The peekLastCompressedUpdate method may pass the update back
+ // as 'null' (for example if the previous compressed update has
+ // been archived). In this case it can still pass back the
+ // lastVersion from the update to allow us to check consistency.
+ let op
+ if (error != null) {
+ return callback(error)
+ }
+
+ // Ensure that raw updates start where lastVersion left off
+ if (lastVersion != null) {
+ const discardedUpdates = []
+ rawUpdates = rawUpdates.slice(0)
+ while (rawUpdates[0] != null && rawUpdates[0].v <= lastVersion) {
+ discardedUpdates.push(rawUpdates.shift())
+ }
+ if (discardedUpdates.length) {
+ logger.error(
+ { project_id, doc_id, discardedUpdates, temporary, lastVersion },
+ 'discarded updates already present'
+ )
+ }
+
+ if (rawUpdates[0] != null && rawUpdates[0].v !== lastVersion + 1) {
+ const ts = __guard__(
+ lastCompressedUpdate != null
+ ? lastCompressedUpdate.meta
+ : undefined,
+ x1 => x1.end_ts
+ )
+ const last_timestamp = ts != null ? new Date(ts) : 'unknown time'
+ error = new Error(
+ `Tried to apply raw op at version ${rawUpdates[0].v} to last compressed update with version ${lastVersion} from ${last_timestamp}`
+ )
+ logger.error(
+ {
+ err: error,
+ doc_id,
+ project_id,
+ prev_end_ts: ts,
+ temporary,
+ lastCompressedUpdate,
+ },
+ 'inconsistent doc versions'
+ )
+ if (
+ (Settings.trackchanges != null
+ ? Settings.trackchanges.continueOnError
+ : undefined) &&
+ rawUpdates[0].v > lastVersion + 1
+ ) {
+ // we have lost some ops - continue to write into the database, we can't recover at this point
+ lastCompressedUpdate = null
+ } else {
+ return callback(error)
+ }
+ }
+ }
+
+ if (rawUpdates.length === 0) {
+ return callback()
+ }
+
+ // some old large ops in redis need to be rejected, they predate
+ // the size limit that now prevents them going through the system
+ const REJECT_LARGE_OP_SIZE = 4 * 1024 * 1024
+ for (var rawUpdate of Array.from(rawUpdates)) {
+ const opSizes = (() => {
+ const result = []
+ for (op of Array.from(
+ (rawUpdate != null ? rawUpdate.op : undefined) || []
+ )) {
+ result.push(
+ (op.i != null ? op.i.length : undefined) ||
+ (op.d != null ? op.d.length : undefined)
+ )
+ }
+ return result
+ })()
+ const size = _.max(opSizes)
+ if (size > REJECT_LARGE_OP_SIZE) {
+ error = new Error(
+ `dropped op exceeding maximum allowed size of ${REJECT_LARGE_OP_SIZE}`
+ )
+ logger.error(
+ { err: error, doc_id, project_id, size, rawUpdate },
+ 'dropped op - too big'
+ )
+ rawUpdate.op = []
+ }
+ }
+
+ const compressedUpdates = UpdateCompressor.compressRawUpdates(
+ null,
+ rawUpdates
+ )
+ return PackManager.insertCompressedUpdates(
+ project_id,
+ doc_id,
+ lastCompressedUpdate,
+ compressedUpdates,
+ temporary,
+ function (error, result) {
+ if (error != null) {
+ return callback(error)
+ }
+ if (result != null) {
+ logger.log(
+ {
+ project_id,
+ doc_id,
+ orig_v:
+ lastCompressedUpdate != null
+ ? lastCompressedUpdate.v
+ : undefined,
+ new_v: result.v,
+ },
+ 'inserted updates into pack'
+ )
+ }
+ return callback()
+ }
+ )
+ }
+ )
+ },
+
+ // Check whether the updates are temporary (per-project property)
+ _prepareProjectForUpdates(project_id, callback) {
+ if (callback == null) {
+ callback = function (error, temporary) {}
+ }
+ return UpdateTrimmer.shouldTrimUpdates(
+ project_id,
+ function (error, temporary) {
+ if (error != null) {
+ return callback(error)
+ }
+ return callback(null, temporary)
+ }
+ )
+ },
+
+ // Check for project id on document history (per-document property)
+ _prepareDocForUpdates(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return MongoManager.backportProjectId(project_id, doc_id, function (error) {
+ if (error != null) {
+ return callback(error)
+ }
+ return callback(null)
+ })
+ },
+
+ // Apply updates for specific project/doc after preparing at project and doc level
+ REDIS_READ_BATCH_SIZE: 100,
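+ // Updates are pulled from redis in batches of REDIS_READ_BATCH_SIZE; after a
+ // full batch has been compressed, saved and deleted, processUncompressedUpdates
+ // re-schedules itself (via setTimeout) until a short batch shows the backlog
+ // for the doc has been drained.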
+ processUncompressedUpdates(project_id, doc_id, temporary, callback) {
+ // get the updates as strings from redis (so we can delete them after they are applied)
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return RedisManager.getOldestDocUpdates(
+ doc_id,
+ UpdatesManager.REDIS_READ_BATCH_SIZE,
+ function (error, docUpdates) {
+ if (error != null) {
+ return callback(error)
+ }
+ const { length } = docUpdates
+ // parse the redis strings into ShareJs updates
+ return RedisManager.expandDocUpdates(
+ docUpdates,
+ function (error, rawUpdates) {
+ if (error != null) {
+ logger.err(
+ { project_id, doc_id, docUpdates },
+ 'failed to parse docUpdates'
+ )
+ return callback(error)
+ }
+ logger.log(
+ { project_id, doc_id, rawUpdates },
+ 'retrieved raw updates from redis'
+ )
+ return UpdatesManager.compressAndSaveRawUpdates(
+ project_id,
+ doc_id,
+ rawUpdates,
+ temporary,
+ function (error) {
+ if (error != null) {
+ return callback(error)
+ }
+ logger.log(
+ { project_id, doc_id },
+ 'compressed and saved doc updates'
+ )
+ // delete the applied updates from redis
+ return RedisManager.deleteAppliedDocUpdates(
+ project_id,
+ doc_id,
+ docUpdates,
+ function (error) {
+ if (error != null) {
+ return callback(error)
+ }
+ if (length === UpdatesManager.REDIS_READ_BATCH_SIZE) {
+ // There might be more updates
+ logger.log(
+ { project_id, doc_id },
+ 'continuing processing updates'
+ )
+ return setTimeout(
+ () =>
+ UpdatesManager.processUncompressedUpdates(
+ project_id,
+ doc_id,
+ temporary,
+ callback
+ ),
+ 0
+ )
+ } else {
+ logger.log(
+ { project_id, doc_id },
+ 'all raw updates processed'
+ )
+ return callback()
+ }
+ }
+ )
+ }
+ )
+ }
+ )
+ }
+ )
+ },
+
+ // Process updates for a doc when we flush it individually
+ processUncompressedUpdatesWithLock(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return UpdatesManager._prepareProjectForUpdates(
+ project_id,
+ function (error, temporary) {
+ if (error != null) {
+ return callback(error)
+ }
+ return UpdatesManager._processUncompressedUpdatesForDocWithLock(
+ project_id,
+ doc_id,
+ temporary,
+ callback
+ )
+ }
+ )
+ },
+
+ // Process updates for a doc when the whole project is flushed (internal method)
+ _processUncompressedUpdatesForDocWithLock(
+ project_id,
+ doc_id,
+ temporary,
+ callback
+ ) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return UpdatesManager._prepareDocForUpdates(
+ project_id,
+ doc_id,
+ function (error) {
+ if (error != null) {
+ return callback(error)
+ }
+ return LockManager.runWithLock(
+ keys.historyLock({ doc_id }),
+ releaseLock =>
+ UpdatesManager.processUncompressedUpdates(
+ project_id,
+ doc_id,
+ temporary,
+ releaseLock
+ ),
+ callback
+ )
+ }
+ )
+ },
+
+ // Process all updates for a project, only check project-level information once
+ processUncompressedUpdatesForProject(project_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return RedisManager.getDocIdsWithHistoryOps(
+ project_id,
+ function (error, doc_ids) {
+ if (error != null) {
+ return callback(error)
+ }
+ return UpdatesManager._prepareProjectForUpdates(
+ project_id,
+ function (error, temporary) {
+ const jobs = []
+ for (const doc_id of Array.from(doc_ids)) {
+ ;(doc_id =>
+ jobs.push(cb =>
+ UpdatesManager._processUncompressedUpdatesForDocWithLock(
+ project_id,
+ doc_id,
+ temporary,
+ cb
+ )
+ ))(doc_id)
+ }
+ return async.parallelLimit(jobs, 5, callback)
+ }
+ )
+ }
+ )
+ },
+
+ // flush all outstanding changes
+ flushAll(limit, callback) {
+ if (callback == null) {
+ callback = function (error, result) {}
+ }
+ return RedisManager.getProjectIdsWithHistoryOps(function (
+ error,
+ project_ids
+ ) {
+ let project_id
+ if (error != null) {
+ return callback(error)
+ }
+ logger.log(
+ {
+ count: project_ids != null ? project_ids.length : undefined,
+ project_ids,
+ },
+ 'found projects'
+ )
+ const jobs = []
+ project_ids = _.shuffle(project_ids) // randomise to avoid hitting same projects each time
+ const selectedProjects =
+ limit < 0 ? project_ids : project_ids.slice(0, limit)
+ for (project_id of Array.from(selectedProjects)) {
+ ;(project_id =>
+ jobs.push(cb =>
+ UpdatesManager.processUncompressedUpdatesForProject(
+ project_id,
+ err => cb(null, { failed: err != null, project_id })
+ )
+ ))(project_id)
+ }
+ return async.series(jobs, function (error, result) {
+ let x
+ if (error != null) {
+ return callback(error)
+ }
+ const failedProjects = (() => {
+ const result1 = []
+ for (x of Array.from(result)) {
+ if (x.failed) {
+ result1.push(x.project_id)
+ }
+ }
+ return result1
+ })()
+ const succeededProjects = (() => {
+ const result2 = []
+ for (x of Array.from(result)) {
+ if (!x.failed) {
+ result2.push(x.project_id)
+ }
+ }
+ return result2
+ })()
+ return callback(null, {
+ failed: failedProjects,
+ succeeded: succeededProjects,
+ all: project_ids,
+ })
+ })
+ })
+ },
+
+ getDanglingUpdates(callback) {
+ if (callback == null) {
+ callback = function (error, doc_ids) {}
+ }
+ return RedisManager.getAllDocIdsWithHistoryOps(function (
+ error,
+ all_doc_ids
+ ) {
+ if (error != null) {
+ return callback(error)
+ }
+ return RedisManager.getProjectIdsWithHistoryOps(function (
+ error,
+ all_project_ids
+ ) {
+ if (error != null) {
+ return callback(error)
+ }
+ // function to get doc_ids for each project
+ const task = cb =>
+ async.concatSeries(
+ all_project_ids,
+ RedisManager.getDocIdsWithHistoryOps,
+ cb
+ )
+ // find the dangling doc ids
+ return task(function (error, project_doc_ids) {
+ const dangling_doc_ids = _.difference(all_doc_ids, project_doc_ids)
+ logger.log(
+ { all_doc_ids, all_project_ids, project_doc_ids, dangling_doc_ids },
+ 'checking for dangling doc ids'
+ )
+ return callback(null, dangling_doc_ids)
+ })
+ })
+ })
+ },
+
+ getDocUpdates(project_id, doc_id, options, callback) {
+ if (options == null) {
+ options = {}
+ }
+ if (callback == null) {
+ callback = function (error, updates) {}
+ }
+ return UpdatesManager.processUncompressedUpdatesWithLock(
+ project_id,
+ doc_id,
+ function (error) {
+ if (error != null) {
+ return callback(error)
+ }
+ // console.log "options", options
+ return PackManager.getOpsByVersionRange(
+ project_id,
+ doc_id,
+ options.from,
+ options.to,
+ function (error, updates) {
+ if (error != null) {
+ return callback(error)
+ }
+ return callback(null, updates)
+ }
+ )
+ }
+ )
+ },
+
+ getDocUpdatesWithUserInfo(project_id, doc_id, options, callback) {
+ if (options == null) {
+ options = {}
+ }
+ if (callback == null) {
+ callback = function (error, updates) {}
+ }
+ return UpdatesManager.getDocUpdates(
+ project_id,
+ doc_id,
+ options,
+ function (error, updates) {
+ if (error != null) {
+ return callback(error)
+ }
+ return UpdatesManager.fillUserInfo(updates, function (error, updates) {
+ if (error != null) {
+ return callback(error)
+ }
+ return callback(null, updates)
+ })
+ }
+ )
+ },
+
+ getSummarizedProjectUpdates(project_id, options, callback) {
+ if (options == null) {
+ options = {}
+ }
+ if (callback == null) {
+ callback = function (error, updates) {}
+ }
+ if (!options.min_count) {
+ options.min_count = 25
+ }
+ let summarizedUpdates = []
+ const { before } = options
+ let nextBeforeTimestamp = null
+ return UpdatesManager.processUncompressedUpdatesForProject(
+ project_id,
+ function (error) {
+ if (error != null) {
+ return callback(error)
+ }
+ return PackManager.makeProjectIterator(
+ project_id,
+ before,
+ function (err, iterator) {
+ if (err != null) {
+ return callback(err)
+ }
+ // repeatedly get updates and pass them through the summariser to get a final output with user info
+ return async.whilst(
+ () =>
+ // console.log "checking iterator.done", iterator.done()
+ summarizedUpdates.length < options.min_count &&
+ !iterator.done(),
+
+ cb =>
+ iterator.next(function (err, partialUpdates) {
+ if (err != null) {
+ return callback(err)
+ }
+ // logger.log {partialUpdates}, 'got partialUpdates'
+ if (partialUpdates.length === 0) {
+ return cb()
+ } // FIXME: should try to avoid this happening
+ nextBeforeTimestamp =
+ partialUpdates[partialUpdates.length - 1].meta.end_ts
+ // add the updates to the summary list
+ summarizedUpdates = UpdatesManager._summarizeUpdates(
+ partialUpdates,
+ summarizedUpdates
+ )
+ return cb()
+ }),
+
+ () =>
+ // finally done all updates
+ // console.log 'summarized Updates', summarizedUpdates
+ UpdatesManager.fillSummarizedUserInfo(
+ summarizedUpdates,
+ function (err, results) {
+ if (err != null) {
+ return callback(err)
+ }
+ return callback(
+ null,
+ results,
+ !iterator.done() ? nextBeforeTimestamp : undefined
+ )
+ }
+ )
+ )
+ }
+ )
+ }
+ )
+ },
+
+ exportProject(projectId, consumer) {
+ // Flush anything before collecting updates.
+ UpdatesManager.processUncompressedUpdatesForProject(projectId, err => {
+ if (err) return consumer(err)
+
+ // Fetch all the packs.
+ const before = undefined
+ PackManager.makeProjectIterator(projectId, before, (err, iterator) => {
+ if (err) return consumer(err)
+
+ const accumulatedUserIds = new Set()
+
+ async.whilst(
+ () => !iterator.done(),
+
+ cb =>
+ iterator.next((err, updatesFromASinglePack) => {
+ if (err) return cb(err)
+
+ if (updatesFromASinglePack.length === 0) {
+ // This should not happen when `iterator.done() == false`.
+ // Emitting an empty array would signal the final call to the
+ // consumer, so skip this batch instead.
+ return cb()
+ }
+ updatesFromASinglePack.forEach(update => {
+ accumulatedUserIds.add(
+ // Super defensive access on update details.
+ String(update && update.meta && update.meta.user_id)
+ )
+ })
+ // Emit updates and wait for the consumer.
+ consumer(null, { updates: updatesFromASinglePack }, cb)
+ }),
+
+ err => {
+ if (err) return consumer(err)
+
+ // Adding undefined can happen for broken updates.
+ accumulatedUserIds.delete('undefined')
+
+ consumer(null, {
+ updates: [],
+ userIds: Array.from(accumulatedUserIds).sort(),
+ })
+ }
+ )
+ })
+ })
+ },
+
+ fetchUserInfo(users, callback) {
+ if (callback == null) {
+ callback = function (error, fetchedUserInfo) {}
+ }
+ const jobs = []
+ const fetchedUserInfo = {}
+ for (const user_id in users) {
+ ;(user_id =>
+ jobs.push(callback =>
+ WebApiManager.getUserInfo(user_id, function (error, userInfo) {
+ if (error != null) {
+ return callback(error)
+ }
+ fetchedUserInfo[user_id] = userInfo
+ return callback()
+ })
+ ))(user_id)
+ }
+
+ return async.series(jobs, function (err) {
+ if (err != null) {
+ return callback(err)
+ }
+ return callback(null, fetchedUserInfo)
+ })
+ },
+
+ fillUserInfo(updates, callback) {
+ let update, user_id
+ if (callback == null) {
+ callback = function (error, updates) {}
+ }
+ const users = {}
+ for (update of Array.from(updates)) {
+ ;({ user_id } = update.meta)
+ if (UpdatesManager._validUserId(user_id)) {
+ users[user_id] = true
+ }
+ }
+
+ return UpdatesManager.fetchUserInfo(
+ users,
+ function (error, fetchedUserInfo) {
+ if (error != null) {
+ return callback(error)
+ }
+ for (update of Array.from(updates)) {
+ ;({ user_id } = update.meta)
+ delete update.meta.user_id
+ if (UpdatesManager._validUserId(user_id)) {
+ update.meta.user = fetchedUserInfo[user_id]
+ }
+ }
+ return callback(null, updates)
+ }
+ )
+ },
+
+ fillSummarizedUserInfo(updates, callback) {
+ let update, user_id, user_ids
+ if (callback == null) {
+ callback = function (error, updates) {}
+ }
+ const users = {}
+ for (update of Array.from(updates)) {
+ user_ids = update.meta.user_ids || []
+ for (user_id of Array.from(user_ids)) {
+ if (UpdatesManager._validUserId(user_id)) {
+ users[user_id] = true
+ }
+ }
+ }
+
+ return UpdatesManager.fetchUserInfo(
+ users,
+ function (error, fetchedUserInfo) {
+ if (error != null) {
+ return callback(error)
+ }
+ for (update of Array.from(updates)) {
+ user_ids = update.meta.user_ids || []
+ update.meta.users = []
+ delete update.meta.user_ids
+ for (user_id of Array.from(user_ids)) {
+ if (UpdatesManager._validUserId(user_id)) {
+ update.meta.users.push(fetchedUserInfo[user_id])
+ } else {
+ update.meta.users.push(null)
+ }
+ }
+ }
+ return callback(null, updates)
+ }
+ )
+ },
+
+ _validUserId(user_id) {
+ if (user_id == null) {
+ return false
+ } else {
+ return !!user_id.match(/^[a-f0-9]{24}$/)
+ }
+ },
+
+ TIME_BETWEEN_DISTINCT_UPDATES: (fiveMinutes = 5 * 60 * 1000),
+ SPLIT_ON_DELETE_SIZE: 16, // characters
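+ // Each summarized update has the shape (illustrative):
+ //   { meta: { user_ids: [...], start_ts, end_ts }, docs: { <doc_id>: { fromV, toV } } }
+ // built by merging consecutive raw updates that fall within
+ // TIME_BETWEEN_DISTINCT_UPDATES of each other.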
+ _summarizeUpdates(updates, existingSummarizedUpdates) {
+ if (existingSummarizedUpdates == null) {
+ existingSummarizedUpdates = []
+ }
+ const summarizedUpdates = existingSummarizedUpdates.slice()
+ let previousUpdateWasBigDelete = false
+ for (const update of Array.from(updates)) {
+ var doc_id
+ const earliestUpdate = summarizedUpdates[summarizedUpdates.length - 1]
+ let shouldConcat = false
+
+ // If a user inserts some text, then deletes a big chunk including that text,
+ // the update we show might concat the insert and delete, and there would be no sign
+ // of that insert having happened, nor any way to restore to it (restoring after a big delete is common).
+ // So, we split the summary on 'big' deletes. However, we're stepping backwards in time with
+ // the most recent changes considered first, so if this update is a big delete, we want to start
+ // a new summarized update next time, hence we monitor the previous update.
+ if (previousUpdateWasBigDelete) {
+ shouldConcat = false
+ } else if (
+ earliestUpdate &&
+ earliestUpdate.meta.end_ts - update.meta.start_ts <
+ this.TIME_BETWEEN_DISTINCT_UPDATES
+ ) {
+ // We're going backwards in time through the updates, so only combine if this update starts less than 5 minutes before
+ // the end of the current summarized block, so no block spans more than 5 minutes.
+ shouldConcat = true
+ }
+
+ let isBigDelete = false
+ for (const op of Array.from(update.op || [])) {
+ if (op.d != null && op.d.length > this.SPLIT_ON_DELETE_SIZE) {
+ isBigDelete = true
+ }
+ }
+
+ previousUpdateWasBigDelete = isBigDelete
+
+ if (shouldConcat) {
+ // check if the user in this update is already present in the earliest update,
+ // if not, add them to the users list of the earliest update
+ earliestUpdate.meta.user_ids = _.union(earliestUpdate.meta.user_ids, [
+ update.meta.user_id,
+ ])
+
+ doc_id = update.doc_id.toString()
+ const doc = earliestUpdate.docs[doc_id]
+ if (doc != null) {
+ doc.fromV = Math.min(doc.fromV, update.v)
+ doc.toV = Math.max(doc.toV, update.v)
+ } else {
+ earliestUpdate.docs[doc_id] = {
+ fromV: update.v,
+ toV: update.v,
+ }
+ }
+
+ earliestUpdate.meta.start_ts = Math.min(
+ earliestUpdate.meta.start_ts,
+ update.meta.start_ts
+ )
+ earliestUpdate.meta.end_ts = Math.max(
+ earliestUpdate.meta.end_ts,
+ update.meta.end_ts
+ )
+ } else {
+ const newUpdate = {
+ meta: {
+ user_ids: [],
+ start_ts: update.meta.start_ts,
+ end_ts: update.meta.end_ts,
+ },
+ docs: {},
+ }
+
+ newUpdate.docs[update.doc_id.toString()] = {
+ fromV: update.v,
+ toV: update.v,
+ }
+ newUpdate.meta.user_ids.push(update.meta.user_id)
+ summarizedUpdates.push(newUpdate)
+ }
+ }
+
+ return summarizedUpdates
+ },
+}
+
+function __guard__(value, transform) {
+ return typeof value !== 'undefined' && value !== null
+ ? transform(value)
+ : undefined
+}
diff --git a/services/track-changes/app/js/WebApiManager.js b/services/track-changes/app/js/WebApiManager.js
new file mode 100644
index 0000000000..b0706d8e71
--- /dev/null
+++ b/services/track-changes/app/js/WebApiManager.js
@@ -0,0 +1,116 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let WebApiManager
+const request = require('requestretry') // allow retry on error https://github.com/FGRibreau/node-request-retry
+const logger = require('logger-sharelatex')
+const Settings = require('@overleaf/settings')
+
+// Don't let HTTP calls hang for a long time
+const MAX_HTTP_REQUEST_LENGTH = 15000 // 15 seconds
+
+// DEPRECATED! This method of getting user details via track-changes is deprecated
+// in the way we lay out our services.
+// Instead, web should be responsible for collecting the raw data (user_ids) and
+// filling it out with calls to other services. All API calls should create a
+// tree-like structure as much as possible, with web as the root.
+module.exports = WebApiManager = {
+ sendRequest(url, callback) {
+ if (callback == null) {
+ callback = function (error, body) {}
+ }
+ return request.get(
+ {
+ url: `${Settings.apis.web.url}${url}`,
+ timeout: MAX_HTTP_REQUEST_LENGTH,
+ maxAttempts: 2, // for node-request-retry
+ auth: {
+ user: Settings.apis.web.user,
+ pass: Settings.apis.web.pass,
+ sendImmediately: true,
+ },
+ },
+ function (error, res, body) {
+ if (error != null) {
+ return callback(error)
+ }
+ if (res.statusCode === 404) {
+ logger.log({ url }, 'got 404 from web api')
+ return callback(null, null)
+ }
+ if (res.statusCode >= 200 && res.statusCode < 300) {
+ return callback(null, body)
+ } else {
+ error = new Error(
+ `web returned a non-success status code: ${res.statusCode} (attempts: ${res.attempts})`
+ )
+ return callback(error)
+ }
+ }
+ )
+ },
+
+ getUserInfo(user_id, callback) {
+ if (callback == null) {
+ callback = function (error, userInfo) {}
+ }
+ const url = `/user/${user_id}/personal_info`
+ logger.log({ user_id }, 'getting user info from web')
+ return WebApiManager.sendRequest(url, function (error, body) {
+ let user
+ if (error != null) {
+ logger.error({ err: error, user_id, url }, 'error accessing web')
+ return callback(error)
+ }
+
+ if (body === null) {
+ logger.error({ user_id, url }, 'no user found')
+ return callback(null, null)
+ }
+ try {
+ user = JSON.parse(body)
+ } catch (error1) {
+ error = error1
+ return callback(error)
+ }
+ return callback(null, {
+ id: user.id,
+ email: user.email,
+ first_name: user.first_name,
+ last_name: user.last_name,
+ })
+ })
+ },
+
+ getProjectDetails(project_id, callback) {
+ if (callback == null) {
+ callback = function (error, details) {}
+ }
+ const url = `/project/${project_id}/details`
+ logger.log({ project_id }, 'getting project details from web')
+ return WebApiManager.sendRequest(url, function (error, body) {
+ let project
+ if (error != null) {
+ logger.error({ err: error, project_id, url }, 'error accessing web')
+ return callback(error)
+ }
+
+ try {
+ project = JSON.parse(body)
+ } catch (error1) {
+ error = error1
+ return callback(error)
+ }
+ return callback(null, project)
+ })
+ },
+}
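+
+// Usage sketch (illustrative; `user_id` and `handleError` are placeholders):
+//
+//   const WebApiManager = require('./WebApiManager')
+//   WebApiManager.getUserInfo(user_id, (error, userInfo) => {
+//     if (error) return handleError(error)
+//     // userInfo is null when web returns a 404, otherwise
+//     // { id, email, first_name, last_name }
+//   })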
diff --git a/services/track-changes/app/js/mongodb.js b/services/track-changes/app/js/mongodb.js
new file mode 100644
index 0000000000..a345d5ce70
--- /dev/null
+++ b/services/track-changes/app/js/mongodb.js
@@ -0,0 +1,42 @@
+const Settings = require('@overleaf/settings')
+const { MongoClient, ObjectId } = require('mongodb')
+
+const clientPromise = MongoClient.connect(
+ Settings.mongo.url,
+ Settings.mongo.options
+)
+
+let setupDbPromise
+async function waitForDb() {
+ if (!setupDbPromise) {
+ setupDbPromise = setupDb()
+ }
+ await setupDbPromise
+}
+
+const db = {}
+async function setupDb() {
+ const internalDb = (await clientPromise).db()
+
+ db.docHistory = internalDb.collection('docHistory')
+ db.docHistoryIndex = internalDb.collection('docHistoryIndex')
+ db.projectHistoryMetaData = internalDb.collection('projectHistoryMetaData')
+}
+
+async function closeDb() {
+ let client
+ try {
+ client = await clientPromise
+ } catch (e) {
+ // there is nothing to close
+ return
+ }
+ return client.close()
+}
+
+module.exports = {
+ db,
+ ObjectId,
+ closeDb,
+ waitForDb,
+}
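+
+// Usage sketch (illustrative): callers await waitForDb() once before touching
+// the lazily populated `db` collections, e.g.
+//
+//   const { db, waitForDb, ObjectId } = require('./mongodb')
+//   await waitForDb()
+//   const record = await db.docHistory.findOne({ doc_id: ObjectId(someDocId) })
+//
+// where `someDocId` and the query shape are placeholders for illustration.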
diff --git a/services/track-changes/app/lib/diff_match_patch.js b/services/track-changes/app/lib/diff_match_patch.js
new file mode 100644
index 0000000000..aeb2b1c570
--- /dev/null
+++ b/services/track-changes/app/lib/diff_match_patch.js
@@ -0,0 +1,2339 @@
+/**
+ * Diff Match and Patch
+ *
+ * Copyright 2006 Google Inc.
+ * http://code.google.com/p/google-diff-match-patch/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @fileoverview Computes the difference between two texts to create a patch.
+ * Applies the patch onto another text, allowing for errors.
+ * @author fraser@google.com (Neil Fraser)
+ */
+
+/**
+ * Class containing the diff, match and patch methods.
+ * @constructor
+ */
+function diff_match_patch() {
+ // Defaults.
+ // Redefine these in your program to override the defaults.
+
+ // Number of seconds to map a diff before giving up (0 for infinity).
+ this.Diff_Timeout = 1.0
+ // Cost of an empty edit operation in terms of edit characters.
+ this.Diff_EditCost = 4
+ // At what point is no match declared (0.0 = perfection, 1.0 = very loose).
+ this.Match_Threshold = 0.5
+ // How far to search for a match (0 = exact location, 1000+ = broad match).
+ // A match this many characters away from the expected location will add
+ // 1.0 to the score (0.0 is a perfect match).
+ this.Match_Distance = 1000
+ // When deleting a large block of text (over ~64 characters), how close do
+ // the contents have to be to match the expected contents. (0.0 = perfection,
+ // 1.0 = very loose). Note that Match_Threshold controls how closely the
+ // end points of a delete need to match.
+ this.Patch_DeleteThreshold = 0.5
+ // Chunk size for context length.
+ this.Patch_Margin = 4
+
+ // The number of bits in an int.
+ this.Match_MaxBits = 32
+}
+
+// DIFF FUNCTIONS
+
+/**
+ * The data structure representing a diff is an array of tuples:
+ * [[DIFF_DELETE, 'Hello'], [DIFF_INSERT, 'Goodbye'], [DIFF_EQUAL, ' world.']]
+ * which means: delete 'Hello', add 'Goodbye' and keep ' world.'
+ */
+var DIFF_DELETE = -1
+var DIFF_INSERT = 1
+var DIFF_EQUAL = 0
+
+/** @typedef {{0: number, 1: string}} */
+diff_match_patch.Diff
+
+/**
+ * Find the differences between two texts. Simplifies the problem by stripping
+ * any common prefix or suffix off the texts before diffing.
+ * @param {string} text1 Old string to be diffed.
+ * @param {string} text2 New string to be diffed.
+ * @param {boolean=} opt_checklines Optional speedup flag. If present and false,
+ * then don't run a line-level diff first to identify the changed areas.
+ * Defaults to true, which does a faster, slightly less optimal diff.
+ * @param {number} opt_deadline Optional time when the diff should be complete
+ * by. Used internally for recursive calls. Users should set DiffTimeout
+ * instead.
+ * @return {!Array.<!diff_match_patch.Diff>} Array of diff tuples.
+ */
+diff_match_patch.prototype.diff_main = function (
+ text1,
+ text2,
+ opt_checklines,
+ opt_deadline
+) {
+ // Set a deadline by which time the diff must be complete.
+ if (typeof opt_deadline === 'undefined') {
+ if (this.Diff_Timeout <= 0) {
+ opt_deadline = Number.MAX_VALUE
+ } else {
+ opt_deadline = new Date().getTime() + this.Diff_Timeout * 1000
+ }
+ }
+ var deadline = opt_deadline
+
+ // Check for null inputs.
+ if (text1 == null || text2 == null) {
+ throw new Error('Null input. (diff_main)')
+ }
+
+ // Check for equality (speedup).
+ if (text1 == text2) {
+ if (text1) {
+ return [[DIFF_EQUAL, text1]]
+ }
+ return []
+ }
+
+ if (typeof opt_checklines === 'undefined') {
+ opt_checklines = true
+ }
+ var checklines = opt_checklines
+
+ // Trim off common prefix (speedup).
+ var commonlength = this.diff_commonPrefix(text1, text2)
+ var commonprefix = text1.substring(0, commonlength)
+ text1 = text1.substring(commonlength)
+ text2 = text2.substring(commonlength)
+
+ // Trim off common suffix (speedup).
+ commonlength = this.diff_commonSuffix(text1, text2)
+ var commonsuffix = text1.substring(text1.length - commonlength)
+ text1 = text1.substring(0, text1.length - commonlength)
+ text2 = text2.substring(0, text2.length - commonlength)
+
+ // Compute the diff on the middle block.
+ var diffs = this.diff_compute_(text1, text2, checklines, deadline)
+
+ // Restore the prefix and suffix.
+ if (commonprefix) {
+ diffs.unshift([DIFF_EQUAL, commonprefix])
+ }
+ if (commonsuffix) {
+ diffs.push([DIFF_EQUAL, commonsuffix])
+ }
+ this.diff_cleanupMerge(diffs)
+ return diffs
+}
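+
+// Illustrative usage of diff_main (a sketch, mirroring the tuple example above):
+//
+//   var dmp = new diff_match_patch()
+//   var diffs = dmp.diff_main('Hello world.', 'Goodbye world.')
+//   dmp.diff_cleanupSemantic(diffs)
+//   // diffs is now roughly:
+//   //   [[DIFF_DELETE, 'Hello'], [DIFF_INSERT, 'Goodbye'], [DIFF_EQUAL, ' world.']]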
+
+/**
+ * Find the differences between two texts. Assumes that the texts do not
+ * have any common prefix or suffix.
+ * @param {string} text1 Old string to be diffed.
+ * @param {string} text2 New string to be diffed.
+ * @param {boolean} checklines Speedup flag. If false, then don't run a
+ * line-level diff first to identify the changed areas.
+ * If true, then run a faster, slightly less optimal diff.
+ * @param {number} deadline Time when the diff should be complete by.
+ * @return {!Array.<!diff_match_patch.Diff>} Array of diff tuples.
+ * @private
+ */
+diff_match_patch.prototype.diff_compute_ = function (
+ text1,
+ text2,
+ checklines,
+ deadline
+) {
+ var diffs
+
+ if (!text1) {
+ // Just add some text (speedup).
+ return [[DIFF_INSERT, text2]]
+ }
+
+ if (!text2) {
+ // Just delete some text (speedup).
+ return [[DIFF_DELETE, text1]]
+ }
+
+ var longtext = text1.length > text2.length ? text1 : text2
+ var shorttext = text1.length > text2.length ? text2 : text1
+ var i = longtext.indexOf(shorttext)
+ if (i != -1) {
+ // Shorter text is inside the longer text (speedup).
+ diffs = [
+ [DIFF_INSERT, longtext.substring(0, i)],
+ [DIFF_EQUAL, shorttext],
+ [DIFF_INSERT, longtext.substring(i + shorttext.length)],
+ ]
+ // Swap insertions for deletions if diff is reversed.
+ if (text1.length > text2.length) {
+ diffs[0][0] = diffs[2][0] = DIFF_DELETE
+ }
+ return diffs
+ }
+
+ if (shorttext.length == 1) {
+ // Single character string.
+ // After the previous speedup, the character can't be an equality.
+ return [
+ [DIFF_DELETE, text1],
+ [DIFF_INSERT, text2],
+ ]
+ }
+
+ // Check to see if the problem can be split in two.
+ var hm = this.diff_halfMatch_(text1, text2)
+ if (hm) {
+ // A half-match was found, sort out the return data.
+ var text1_a = hm[0]
+ var text1_b = hm[1]
+ var text2_a = hm[2]
+ var text2_b = hm[3]
+ var mid_common = hm[4]
+ // Send both pairs off for separate processing.
+ var diffs_a = this.diff_main(text1_a, text2_a, checklines, deadline)
+ var diffs_b = this.diff_main(text1_b, text2_b, checklines, deadline)
+ // Merge the results.
+ return diffs_a.concat([[DIFF_EQUAL, mid_common]], diffs_b)
+ }
+
+ if (checklines && text1.length > 100 && text2.length > 100) {
+ return this.diff_lineMode_(text1, text2, deadline)
+ }
+
+ return this.diff_bisect_(text1, text2, deadline)
+}
+
+/**
+ * Do a quick line-level diff on both strings, then rediff the parts for
+ * greater accuracy.
+ * This speedup can produce non-minimal diffs.
+ * @param {string} text1 Old string to be diffed.
+ * @param {string} text2 New string to be diffed.
+ * @param {number} deadline Time when the diff should be complete by.
+ * @return {!Array.<!diff_match_patch.Diff>} Array of diff tuples.
+ * @private
+ */
+diff_match_patch.prototype.diff_lineMode_ = function (text1, text2, deadline) {
+ // Scan the text on a line-by-line basis first.
+ var a = this.diff_linesToChars_(text1, text2)
+ text1 = a.chars1
+ text2 = a.chars2
+ var linearray = a.lineArray
+
+ var diffs = this.diff_main(text1, text2, false, deadline)
+
+ // Convert the diff back to original text.
+ this.diff_charsToLines_(diffs, linearray)
+ // Eliminate freak matches (e.g. blank lines)
+ this.diff_cleanupSemantic(diffs)
+
+ // Rediff any replacement blocks, this time character-by-character.
+ // Add a dummy entry at the end.
+ diffs.push([DIFF_EQUAL, ''])
+ var pointer = 0
+ var count_delete = 0
+ var count_insert = 0
+ var text_delete = ''
+ var text_insert = ''
+ while (pointer < diffs.length) {
+ switch (diffs[pointer][0]) {
+ case DIFF_INSERT:
+ count_insert++
+ text_insert += diffs[pointer][1]
+ break
+ case DIFF_DELETE:
+ count_delete++
+ text_delete += diffs[pointer][1]
+ break
+ case DIFF_EQUAL:
+ // Upon reaching an equality, check for prior redundancies.
+ if (count_delete >= 1 && count_insert >= 1) {
+ // Delete the offending records and add the merged ones.
+ diffs.splice(
+ pointer - count_delete - count_insert,
+ count_delete + count_insert
+ )
+ pointer = pointer - count_delete - count_insert
+ var a = this.diff_main(text_delete, text_insert, false, deadline)
+ for (var j = a.length - 1; j >= 0; j--) {
+ diffs.splice(pointer, 0, a[j])
+ }
+ pointer = pointer + a.length
+ }
+ count_insert = 0
+ count_delete = 0
+ text_delete = ''
+ text_insert = ''
+ break
+ }
+ pointer++
+ }
+ diffs.pop() // Remove the dummy entry at the end.
+
+ return diffs
+}
+
+/**
+ * Find the 'middle snake' of a diff, split the problem in two
+ * and return the recursively constructed diff.
+ * See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations.
+ * @param {string} text1 Old string to be diffed.
+ * @param {string} text2 New string to be diffed.
+ * @param {number} deadline Time at which to bail if not yet complete.
+ * @return {!Array.<!diff_match_patch.Diff>} Array of diff tuples.
+ * @private
+ */
+diff_match_patch.prototype.diff_bisect_ = function (text1, text2, deadline) {
+ // Cache the text lengths to prevent multiple calls.
+ var text1_length = text1.length
+ var text2_length = text2.length
+ var max_d = Math.ceil((text1_length + text2_length) / 2)
+ var v_offset = max_d
+ var v_length = 2 * max_d
+ var v1 = new Array(v_length)
+ var v2 = new Array(v_length)
+ // Setting all elements to -1 is faster in Chrome & Firefox than mixing
+ // integers and undefined.
+ for (var x = 0; x < v_length; x++) {
+ v1[x] = -1
+ v2[x] = -1
+ }
+ v1[v_offset + 1] = 0
+ v2[v_offset + 1] = 0
+ var delta = text1_length - text2_length
+ // If the total number of characters is odd, then the front path will collide
+ // with the reverse path.
+ var front = delta % 2 != 0
+ // Offsets for start and end of k loop.
+ // Prevents mapping of space beyond the grid.
+ var k1start = 0
+ var k1end = 0
+ var k2start = 0
+ var k2end = 0
+ for (var d = 0; d < max_d; d++) {
+ // Bail out if deadline is reached.
+ if (new Date().getTime() > deadline) {
+ break
+ }
+
+ // Walk the front path one step.
+ for (var k1 = -d + k1start; k1 <= d - k1end; k1 += 2) {
+ var k1_offset = v_offset + k1
+ var x1
+ if (k1 == -d || (k1 != d && v1[k1_offset - 1] < v1[k1_offset + 1])) {
+ x1 = v1[k1_offset + 1]
+ } else {
+ x1 = v1[k1_offset - 1] + 1
+ }
+ var y1 = x1 - k1
+ while (
+ x1 < text1_length &&
+ y1 < text2_length &&
+ text1.charAt(x1) == text2.charAt(y1)
+ ) {
+ x1++
+ y1++
+ }
+ v1[k1_offset] = x1
+ if (x1 > text1_length) {
+ // Ran off the right of the graph.
+ k1end += 2
+ } else if (y1 > text2_length) {
+ // Ran off the bottom of the graph.
+ k1start += 2
+ } else if (front) {
+ var k2_offset = v_offset + delta - k1
+ if (k2_offset >= 0 && k2_offset < v_length && v2[k2_offset] != -1) {
+ // Mirror x2 onto top-left coordinate system.
+ var x2 = text1_length - v2[k2_offset]
+ if (x1 >= x2) {
+ // Overlap detected.
+ return this.diff_bisectSplit_(text1, text2, x1, y1, deadline)
+ }
+ }
+ }
+ }
+
+ // Walk the reverse path one step.
+ for (var k2 = -d + k2start; k2 <= d - k2end; k2 += 2) {
+ var k2_offset = v_offset + k2
+ var x2
+ if (k2 == -d || (k2 != d && v2[k2_offset - 1] < v2[k2_offset + 1])) {
+ x2 = v2[k2_offset + 1]
+ } else {
+ x2 = v2[k2_offset - 1] + 1
+ }
+ var y2 = x2 - k2
+ while (
+ x2 < text1_length &&
+ y2 < text2_length &&
+ text1.charAt(text1_length - x2 - 1) ==
+ text2.charAt(text2_length - y2 - 1)
+ ) {
+ x2++
+ y2++
+ }
+ v2[k2_offset] = x2
+ if (x2 > text1_length) {
+ // Ran off the left of the graph.
+ k2end += 2
+ } else if (y2 > text2_length) {
+ // Ran off the top of the graph.
+ k2start += 2
+ } else if (!front) {
+ var k1_offset = v_offset + delta - k2
+ if (k1_offset >= 0 && k1_offset < v_length && v1[k1_offset] != -1) {
+ var x1 = v1[k1_offset]
+ var y1 = v_offset + x1 - k1_offset
+ // Mirror x2 onto top-left coordinate system.
+ x2 = text1_length - x2
+ if (x1 >= x2) {
+ // Overlap detected.
+ return this.diff_bisectSplit_(text1, text2, x1, y1, deadline)
+ }
+ }
+ }
+ }
+ }
+ // Diff took too long and hit the deadline or
+ // number of diffs equals number of characters, no commonality at all.
+ return [
+ [DIFF_DELETE, text1],
+ [DIFF_INSERT, text2],
+ ]
+}
+
+/**
+ * Given the location of the 'middle snake', split the diff in two parts
+ * and recurse.
+ * @param {string} text1 Old string to be diffed.
+ * @param {string} text2 New string to be diffed.
+ * @param {number} x Index of split point in text1.
+ * @param {number} y Index of split point in text2.
+ * @param {number} deadline Time at which to bail if not yet complete.
+ * @return {!Array.<!diff_match_patch.Diff>} Array of diff tuples.
+ * @private
+ */
+diff_match_patch.prototype.diff_bisectSplit_ = function (
+ text1,
+ text2,
+ x,
+ y,
+ deadline
+) {
+ var text1a = text1.substring(0, x)
+ var text2a = text2.substring(0, y)
+ var text1b = text1.substring(x)
+ var text2b = text2.substring(y)
+
+ // Compute both diffs serially.
+ var diffs = this.diff_main(text1a, text2a, false, deadline)
+ var diffsb = this.diff_main(text1b, text2b, false, deadline)
+
+ return diffs.concat(diffsb)
+}
+
+/**
+ * Split two texts into an array of strings. Reduce the texts to a string of
+ * hashes where each Unicode character represents one line.
+ * @param {string} text1 First string.
+ * @param {string} text2 Second string.
+ * @return {{chars1: string, chars2: string, lineArray: !Array.<string>}}
+ * An object containing the encoded text1, the encoded text2 and
+ * the array of unique strings.
+ * The zeroth element of the array of unique strings is intentionally blank.
+ * @private
+ */
+diff_match_patch.prototype.diff_linesToChars_ = function (text1, text2) {
+ var lineArray = [] // e.g. lineArray[4] == 'Hello\n'
+ var lineHash = {} // e.g. lineHash['Hello\n'] == 4
+
+ // '\x00' is a valid character, but various debuggers don't like it.
+ // So we'll insert a junk entry to avoid generating a null character.
+ lineArray[0] = ''
+
+ /**
+ * Split a text into an array of strings. Reduce the texts to a string of
+ * hashes where each Unicode character represents one line.
+ * Modifies linearray and linehash through being a closure.
+ * @param {string} text String to encode.
+ * @return {string} Encoded string.
+ * @private
+ */
+ function diff_linesToCharsMunge_(text) {
+ var chars = ''
+ // Walk the text, pulling out a substring for each line.
+ // text.split('\n') would temporarily double our memory footprint.
+ // Modifying text would create many large strings to garbage collect.
+ var lineStart = 0
+ var lineEnd = -1
+ // Keeping our own length variable is faster than looking it up.
+ var lineArrayLength = lineArray.length
+ while (lineEnd < text.length - 1) {
+ lineEnd = text.indexOf('\n', lineStart)
+ if (lineEnd == -1) {
+ lineEnd = text.length - 1
+ }
+ var line = text.substring(lineStart, lineEnd + 1)
+ lineStart = lineEnd + 1
+
+ if (
+ lineHash.hasOwnProperty
+ ? lineHash.hasOwnProperty(line)
+ : lineHash[line] !== undefined
+ ) {
+ chars += String.fromCharCode(lineHash[line])
+ } else {
+ chars += String.fromCharCode(lineArrayLength)
+ lineHash[line] = lineArrayLength
+ lineArray[lineArrayLength++] = line
+ }
+ }
+ return chars
+ }
+
+ var chars1 = diff_linesToCharsMunge_(text1)
+ var chars2 = diff_linesToCharsMunge_(text2)
+ return { chars1: chars1, chars2: chars2, lineArray: lineArray }
+}
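+
+// Example (illustrative): diff_linesToChars_('alpha\nbeta\n', 'beta\nalpha\n')
+// returns roughly { chars1: '\x01\x02', chars2: '\x02\x01',
+// lineArray: ['', 'alpha\n', 'beta\n'] }, letting diff_lineMode_ treat whole
+// lines as single characters and map them back with diff_charsToLines_.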
+
+/**
+ * Rehydrate the text in a diff from a string of line hashes to real lines of
+ * text.
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @param {!Array.<string>} lineArray Array of unique strings.
+ * @private
+ */
+diff_match_patch.prototype.diff_charsToLines_ = function (diffs, lineArray) {
+ for (var x = 0; x < diffs.length; x++) {
+ var chars = diffs[x][1]
+ var text = []
+ for (var y = 0; y < chars.length; y++) {
+ text[y] = lineArray[chars.charCodeAt(y)]
+ }
+ diffs[x][1] = text.join('')
+ }
+}
+
+/**
+ * Determine the common prefix of two strings.
+ * @param {string} text1 First string.
+ * @param {string} text2 Second string.
+ * @return {number} The number of characters common to the start of each
+ * string.
+ */
+diff_match_patch.prototype.diff_commonPrefix = function (text1, text2) {
+ // Quick check for common null cases.
+ if (!text1 || !text2 || text1.charAt(0) != text2.charAt(0)) {
+ return 0
+ }
+ // Binary search.
+ // Performance analysis: http://neil.fraser.name/news/2007/10/09/
+ var pointermin = 0
+ var pointermax = Math.min(text1.length, text2.length)
+ var pointermid = pointermax
+ var pointerstart = 0
+ while (pointermin < pointermid) {
+ if (
+ text1.substring(pointerstart, pointermid) ==
+ text2.substring(pointerstart, pointermid)
+ ) {
+ pointermin = pointermid
+ pointerstart = pointermin
+ } else {
+ pointermax = pointermid
+ }
+ pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin)
+ }
+ return pointermid
+}
+
+/**
+ * Determine the common suffix of two strings.
+ * @param {string} text1 First string.
+ * @param {string} text2 Second string.
+ * @return {number} The number of characters common to the end of each string.
+ */
+diff_match_patch.prototype.diff_commonSuffix = function (text1, text2) {
+ // Quick check for common null cases.
+ if (
+ !text1 ||
+ !text2 ||
+ text1.charAt(text1.length - 1) != text2.charAt(text2.length - 1)
+ ) {
+ return 0
+ }
+ // Binary search.
+ // Performance analysis: http://neil.fraser.name/news/2007/10/09/
+ var pointermin = 0
+ var pointermax = Math.min(text1.length, text2.length)
+ var pointermid = pointermax
+ var pointerend = 0
+ while (pointermin < pointermid) {
+ if (
+ text1.substring(text1.length - pointermid, text1.length - pointerend) ==
+ text2.substring(text2.length - pointermid, text2.length - pointerend)
+ ) {
+ pointermin = pointermid
+ pointerend = pointermin
+ } else {
+ pointermax = pointermid
+ }
+ pointermid = Math.floor((pointermax - pointermin) / 2 + pointermin)
+ }
+ return pointermid
+}
+
+/**
+ * Determine if the suffix of one string is the prefix of another.
+ * @param {string} text1 First string.
+ * @param {string} text2 Second string.
+ * @return {number} The number of characters common to the end of the first
+ * string and the start of the second string.
+ * @private
+ */
+diff_match_patch.prototype.diff_commonOverlap_ = function (text1, text2) {
+ // Cache the text lengths to prevent multiple calls.
+ var text1_length = text1.length
+ var text2_length = text2.length
+ // Eliminate the null case.
+ if (text1_length == 0 || text2_length == 0) {
+ return 0
+ }
+ // Truncate the longer string.
+ if (text1_length > text2_length) {
+ text1 = text1.substring(text1_length - text2_length)
+ } else if (text1_length < text2_length) {
+ text2 = text2.substring(0, text1_length)
+ }
+ var text_length = Math.min(text1_length, text2_length)
+ // Quick check for the worst case.
+ if (text1 == text2) {
+ return text_length
+ }
+
+ // Start by looking for a single character match
+ // and increase length until no match is found.
+ // Performance analysis: http://neil.fraser.name/news/2010/11/04/
+ var best = 0
+ var length = 1
+ while (true) {
+ var pattern = text1.substring(text_length - length)
+ var found = text2.indexOf(pattern)
+ if (found == -1) {
+ return best
+ }
+ length += found
+ if (
+ found == 0 ||
+ text1.substring(text_length - length) == text2.substring(0, length)
+ ) {
+ best = length
+ length++
+ }
+ }
+}
+
+/**
+ * Do the two texts share a substring which is at least half the length of the
+ * longer text?
+ * This speedup can produce non-minimal diffs.
+ * @param {string} text1 First string.
+ * @param {string} text2 Second string.
+ * @return {Array.<string>} Five element Array, containing the prefix of
+ * text1, the suffix of text1, the prefix of text2, the suffix of
+ * text2 and the common middle. Or null if there was no match.
+ * @private
+ */
+diff_match_patch.prototype.diff_halfMatch_ = function (text1, text2) {
+ if (this.Diff_Timeout <= 0) {
+ // Don't risk returning a non-optimal diff if we have unlimited time.
+ return null
+ }
+ var longtext = text1.length > text2.length ? text1 : text2
+ var shorttext = text1.length > text2.length ? text2 : text1
+ if (longtext.length < 4 || shorttext.length * 2 < longtext.length) {
+ return null // Pointless.
+ }
+ var dmp = this // 'this' becomes 'window' in a closure.
+
+ /**
+ * Does a substring of shorttext exist within longtext such that the substring
+ * is at least half the length of longtext?
+ * Closure, but does not reference any external variables.
+ * @param {string} longtext Longer string.
+ * @param {string} shorttext Shorter string.
+ * @param {number} i Start index of quarter length substring within longtext.
+ * @return {Array.<string>} Five element Array, containing the prefix of
+ * longtext, the suffix of longtext, the prefix of shorttext, the suffix
+ * of shorttext and the common middle. Or null if there was no match.
+ * @private
+ */
+ function diff_halfMatchI_(longtext, shorttext, i) {
+ // Start with a 1/4 length substring at position i as a seed.
+ var seed = longtext.substring(i, i + Math.floor(longtext.length / 4))
+ var j = -1
+ var best_common = ''
+ var best_longtext_a, best_longtext_b, best_shorttext_a, best_shorttext_b
+ while ((j = shorttext.indexOf(seed, j + 1)) != -1) {
+ var prefixLength = dmp.diff_commonPrefix(
+ longtext.substring(i),
+ shorttext.substring(j)
+ )
+ var suffixLength = dmp.diff_commonSuffix(
+ longtext.substring(0, i),
+ shorttext.substring(0, j)
+ )
+ if (best_common.length < suffixLength + prefixLength) {
+ best_common =
+ shorttext.substring(j - suffixLength, j) +
+ shorttext.substring(j, j + prefixLength)
+ best_longtext_a = longtext.substring(0, i - suffixLength)
+ best_longtext_b = longtext.substring(i + prefixLength)
+ best_shorttext_a = shorttext.substring(0, j - suffixLength)
+ best_shorttext_b = shorttext.substring(j + prefixLength)
+ }
+ }
+ if (best_common.length * 2 >= longtext.length) {
+ return [
+ best_longtext_a,
+ best_longtext_b,
+ best_shorttext_a,
+ best_shorttext_b,
+ best_common,
+ ]
+ } else {
+ return null
+ }
+ }
+
+ // First check if the second quarter is the seed for a half-match.
+ var hm1 = diff_halfMatchI_(
+ longtext,
+ shorttext,
+ Math.ceil(longtext.length / 4)
+ )
+ // Check again based on the third quarter.
+ var hm2 = diff_halfMatchI_(
+ longtext,
+ shorttext,
+ Math.ceil(longtext.length / 2)
+ )
+ var hm
+ if (!hm1 && !hm2) {
+ return null
+ } else if (!hm2) {
+ hm = hm1
+ } else if (!hm1) {
+ hm = hm2
+ } else {
+ // Both matched. Select the longest.
+ hm = hm1[4].length > hm2[4].length ? hm1 : hm2
+ }
+
+ // A half-match was found, sort out the return data.
+ var text1_a, text1_b, text2_a, text2_b
+ if (text1.length > text2.length) {
+ text1_a = hm[0]
+ text1_b = hm[1]
+ text2_a = hm[2]
+ text2_b = hm[3]
+ } else {
+ text2_a = hm[0]
+ text2_b = hm[1]
+ text1_a = hm[2]
+ text1_b = hm[3]
+ }
+ var mid_common = hm[4]
+ return [text1_a, text1_b, text2_a, text2_b, mid_common]
+}
+
+/**
+ * Reduce the number of edits by eliminating semantically trivial equalities.
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ */
+diff_match_patch.prototype.diff_cleanupSemantic = function (diffs) {
+ var changes = false
+ var equalities = [] // Stack of indices where equalities are found.
+ var equalitiesLength = 0 // Keeping our own length var is faster in JS.
+ /** @type {?string} */
+ var lastequality = null
+ // Always equal to diffs[equalities[equalitiesLength - 1]][1]
+ var pointer = 0 // Index of current position.
+ // Number of characters that changed prior to the equality.
+ var length_insertions1 = 0
+ var length_deletions1 = 0
+ // Number of characters that changed after the equality.
+ var length_insertions2 = 0
+ var length_deletions2 = 0
+ while (pointer < diffs.length) {
+ if (diffs[pointer][0] == DIFF_EQUAL) {
+ // Equality found.
+ equalities[equalitiesLength++] = pointer
+ length_insertions1 = length_insertions2
+ length_deletions1 = length_deletions2
+ length_insertions2 = 0
+ length_deletions2 = 0
+ lastequality = diffs[pointer][1]
+ } else {
+ // An insertion or deletion.
+ if (diffs[pointer][0] == DIFF_INSERT) {
+ length_insertions2 += diffs[pointer][1].length
+ } else {
+ length_deletions2 += diffs[pointer][1].length
+ }
+ // Eliminate an equality that is smaller or equal to the edits on both
+ // sides of it.
+ if (
+ lastequality &&
+ lastequality.length <=
+ Math.max(length_insertions1, length_deletions1) &&
+ lastequality.length <= Math.max(length_insertions2, length_deletions2)
+ ) {
+ // Duplicate record.
+ diffs.splice(equalities[equalitiesLength - 1], 0, [
+ DIFF_DELETE,
+ lastequality,
+ ])
+ // Change second copy to insert.
+ diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT
+ // Throw away the equality we just deleted.
+ equalitiesLength--
+ // Throw away the previous equality (it needs to be reevaluated).
+ equalitiesLength--
+ pointer = equalitiesLength > 0 ? equalities[equalitiesLength - 1] : -1
+ length_insertions1 = 0 // Reset the counters.
+ length_deletions1 = 0
+ length_insertions2 = 0
+ length_deletions2 = 0
+ lastequality = null
+ changes = true
+ }
+ }
+ pointer++
+ }
+
+ // Normalize the diff.
+ if (changes) {
+ this.diff_cleanupMerge(diffs)
+ }
+ this.diff_cleanupSemanticLossless(diffs)
+
+ // Find any overlaps between deletions and insertions.
+ // e.g: <del>abcxxx</del><ins>xxxdef</ins>
+ //   -> <del>abc</del>xxx<ins>def</ins>
+ // e.g: <del>xxxabc</del><ins>defxxx</ins>
+ //   -> <ins>def</ins>xxx<del>abc</del>
+ // Only extract an overlap if it is as big as the edit ahead or behind it.
+ pointer = 1
+ while (pointer < diffs.length) {
+ if (
+ diffs[pointer - 1][0] == DIFF_DELETE &&
+ diffs[pointer][0] == DIFF_INSERT
+ ) {
+ var deletion = diffs[pointer - 1][1]
+ var insertion = diffs[pointer][1]
+ var overlap_length1 = this.diff_commonOverlap_(deletion, insertion)
+ var overlap_length2 = this.diff_commonOverlap_(insertion, deletion)
+ if (overlap_length1 >= overlap_length2) {
+ if (
+ overlap_length1 >= deletion.length / 2 ||
+ overlap_length1 >= insertion.length / 2
+ ) {
+ // Overlap found. Insert an equality and trim the surrounding edits.
+ diffs.splice(pointer, 0, [
+ DIFF_EQUAL,
+ insertion.substring(0, overlap_length1),
+ ])
+ diffs[pointer - 1][1] = deletion.substring(
+ 0,
+ deletion.length - overlap_length1
+ )
+ diffs[pointer + 1][1] = insertion.substring(overlap_length1)
+ pointer++
+ }
+ } else {
+ if (
+ overlap_length2 >= deletion.length / 2 ||
+ overlap_length2 >= insertion.length / 2
+ ) {
+ // Reverse overlap found.
+ // Insert an equality and swap and trim the surrounding edits.
+ diffs.splice(pointer, 0, [
+ DIFF_EQUAL,
+ deletion.substring(0, overlap_length2),
+ ])
+ diffs[pointer - 1][0] = DIFF_INSERT
+ diffs[pointer - 1][1] = insertion.substring(
+ 0,
+ insertion.length - overlap_length2
+ )
+ diffs[pointer + 1][0] = DIFF_DELETE
+ diffs[pointer + 1][1] = deletion.substring(overlap_length2)
+ pointer++
+ }
+ }
+ pointer++
+ }
+ pointer++
+ }
+}
+
+/**
+ * Look for single edits surrounded on both sides by equalities
+ * which can be shifted sideways to align the edit to a word boundary.
+ * e.g: The c<ins>at c</ins>ame. -> The <ins>cat </ins>came.
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ */
+diff_match_patch.prototype.diff_cleanupSemanticLossless = function (diffs) {
+ /**
+ * Given two strings, compute a score representing whether the internal
+ * boundary falls on logical boundaries.
+ * Scores range from 6 (best) to 0 (worst).
+ * Closure, but does not reference any external variables.
+ * @param {string} one First string.
+ * @param {string} two Second string.
+ * @return {number} The score.
+ * @private
+ */
+ function diff_cleanupSemanticScore_(one, two) {
+ if (!one || !two) {
+ // Edges are the best.
+ return 6
+ }
+
+ // Each port of this function behaves slightly differently due to
+ // subtle differences in each language's definition of things like
+ // 'whitespace'. Since this function's purpose is largely cosmetic,
+ // the choice has been made to use each language's native features
+ // rather than force total conformity.
+ var char1 = one.charAt(one.length - 1)
+ var char2 = two.charAt(0)
+ var nonAlphaNumeric1 = char1.match(diff_match_patch.nonAlphaNumericRegex_)
+ var nonAlphaNumeric2 = char2.match(diff_match_patch.nonAlphaNumericRegex_)
+ var whitespace1 =
+ nonAlphaNumeric1 && char1.match(diff_match_patch.whitespaceRegex_)
+ var whitespace2 =
+ nonAlphaNumeric2 && char2.match(diff_match_patch.whitespaceRegex_)
+ var lineBreak1 =
+ whitespace1 && char1.match(diff_match_patch.linebreakRegex_)
+ var lineBreak2 =
+ whitespace2 && char2.match(diff_match_patch.linebreakRegex_)
+ var blankLine1 =
+ lineBreak1 && one.match(diff_match_patch.blanklineEndRegex_)
+ var blankLine2 =
+ lineBreak2 && two.match(diff_match_patch.blanklineStartRegex_)
+
+ if (blankLine1 || blankLine2) {
+ // Five points for blank lines.
+ return 5
+ } else if (lineBreak1 || lineBreak2) {
+ // Four points for line breaks.
+ return 4
+ } else if (nonAlphaNumeric1 && !whitespace1 && whitespace2) {
+ // Three points for end of sentences.
+ return 3
+ } else if (whitespace1 || whitespace2) {
+ // Two points for whitespace.
+ return 2
+ } else if (nonAlphaNumeric1 || nonAlphaNumeric2) {
+ // One point for non-alphanumeric.
+ return 1
+ }
+ return 0
+ }
+
+ var pointer = 1
+ // Intentionally ignore the first and last element (don't need checking).
+ while (pointer < diffs.length - 1) {
+ if (
+ diffs[pointer - 1][0] == DIFF_EQUAL &&
+ diffs[pointer + 1][0] == DIFF_EQUAL
+ ) {
+ // This is a single edit surrounded by equalities.
+ var equality1 = diffs[pointer - 1][1]
+ var edit = diffs[pointer][1]
+ var equality2 = diffs[pointer + 1][1]
+
+ // First, shift the edit as far left as possible.
+ var commonOffset = this.diff_commonSuffix(equality1, edit)
+ if (commonOffset) {
+ var commonString = edit.substring(edit.length - commonOffset)
+ equality1 = equality1.substring(0, equality1.length - commonOffset)
+ edit = commonString + edit.substring(0, edit.length - commonOffset)
+ equality2 = commonString + equality2
+ }
+
+ // Second, step character by character right, looking for the best fit.
+ var bestEquality1 = equality1
+ var bestEdit = edit
+ var bestEquality2 = equality2
+ var bestScore =
+ diff_cleanupSemanticScore_(equality1, edit) +
+ diff_cleanupSemanticScore_(edit, equality2)
+ while (edit.charAt(0) === equality2.charAt(0)) {
+ equality1 += edit.charAt(0)
+ edit = edit.substring(1) + equality2.charAt(0)
+ equality2 = equality2.substring(1)
+ var score =
+ diff_cleanupSemanticScore_(equality1, edit) +
+ diff_cleanupSemanticScore_(edit, equality2)
+ // The >= encourages trailing rather than leading whitespace on edits.
+ if (score >= bestScore) {
+ bestScore = score
+ bestEquality1 = equality1
+ bestEdit = edit
+ bestEquality2 = equality2
+ }
+ }
+
+ if (diffs[pointer - 1][1] != bestEquality1) {
+ // We have an improvement, save it back to the diff.
+ if (bestEquality1) {
+ diffs[pointer - 1][1] = bestEquality1
+ } else {
+ diffs.splice(pointer - 1, 1)
+ pointer--
+ }
+ diffs[pointer][1] = bestEdit
+ if (bestEquality2) {
+ diffs[pointer + 1][1] = bestEquality2
+ } else {
+ diffs.splice(pointer + 1, 1)
+ pointer--
+ }
+ }
+ }
+ pointer++
+ }
+}
+
+// Define some regex patterns for matching boundaries.
+diff_match_patch.nonAlphaNumericRegex_ = /[^a-zA-Z0-9]/
+diff_match_patch.whitespaceRegex_ = /\s/
+diff_match_patch.linebreakRegex_ = /[\r\n]/
+diff_match_patch.blanklineEndRegex_ = /\n\r?\n$/
+diff_match_patch.blanklineStartRegex_ = /^\r?\n\r?\n/
+
+/**
+ * Reduce the number of edits by eliminating operationally trivial equalities.
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ */
+diff_match_patch.prototype.diff_cleanupEfficiency = function (diffs) {
+ var changes = false
+ var equalities = [] // Stack of indices where equalities are found.
+ var equalitiesLength = 0 // Keeping our own length var is faster in JS.
+ /** @type {?string} */
+ var lastequality = null
+ // Always equal to diffs[equalities[equalitiesLength - 1]][1]
+ var pointer = 0 // Index of current position.
+ // Is there an insertion operation before the last equality.
+ var pre_ins = false
+ // Is there a deletion operation before the last equality.
+ var pre_del = false
+ // Is there an insertion operation after the last equality.
+ var post_ins = false
+ // Is there a deletion operation after the last equality.
+ var post_del = false
+ while (pointer < diffs.length) {
+ if (diffs[pointer][0] == DIFF_EQUAL) {
+ // Equality found.
+ if (
+ diffs[pointer][1].length < this.Diff_EditCost &&
+ (post_ins || post_del)
+ ) {
+ // Candidate found.
+ equalities[equalitiesLength++] = pointer
+ pre_ins = post_ins
+ pre_del = post_del
+ lastequality = diffs[pointer][1]
+ } else {
+ // Not a candidate, and can never become one.
+ equalitiesLength = 0
+ lastequality = null
+ }
+ post_ins = post_del = false
+ } else {
+ // An insertion or deletion.
+ if (diffs[pointer][0] == DIFF_DELETE) {
+ post_del = true
+ } else {
+ post_ins = true
+ }
+ /*
+ * Five types to be split:
+ * <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del>
+ * <ins>A</ins>X<ins>C</ins><del>D</del>
+ * <ins>A</ins><del>B</del>X<ins>C</ins>
+ * <ins>A</ins>X<ins>C</ins><del>D</del>
+ * <ins>A</ins><del>B</del>X<del>C</del>
+ */
+ if (
+ lastequality &&
+ ((pre_ins && pre_del && post_ins && post_del) ||
+ (lastequality.length < this.Diff_EditCost / 2 &&
+ pre_ins + pre_del + post_ins + post_del == 3))
+ ) {
+ // Duplicate record.
+ diffs.splice(equalities[equalitiesLength - 1], 0, [
+ DIFF_DELETE,
+ lastequality,
+ ])
+ // Change second copy to insert.
+ diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT
+ equalitiesLength-- // Throw away the equality we just deleted;
+ lastequality = null
+ if (pre_ins && pre_del) {
+ // No changes made which could affect previous entry, keep going.
+ post_ins = post_del = true
+ equalitiesLength = 0
+ } else {
+ equalitiesLength-- // Throw away the previous equality.
+ pointer = equalitiesLength > 0 ? equalities[equalitiesLength - 1] : -1
+ post_ins = post_del = false
+ }
+ changes = true
+ }
+ }
+ pointer++
+ }
+
+ if (changes) {
+ this.diff_cleanupMerge(diffs)
+ }
+}
+
+/**
+ * Reorder and merge like edit sections. Merge equalities.
+ * Any edit section can move as long as it doesn't cross an equality.
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ */
+diff_match_patch.prototype.diff_cleanupMerge = function (diffs) {
+ diffs.push([DIFF_EQUAL, '']) // Add a dummy entry at the end.
+ var pointer = 0
+ var count_delete = 0
+ var count_insert = 0
+ var text_delete = ''
+ var text_insert = ''
+ var commonlength
+ while (pointer < diffs.length) {
+ switch (diffs[pointer][0]) {
+ case DIFF_INSERT:
+ count_insert++
+ text_insert += diffs[pointer][1]
+ pointer++
+ break
+ case DIFF_DELETE:
+ count_delete++
+ text_delete += diffs[pointer][1]
+ pointer++
+ break
+ case DIFF_EQUAL:
+ // Upon reaching an equality, check for prior redundancies.
+ if (count_delete + count_insert > 1) {
+ if (count_delete !== 0 && count_insert !== 0) {
+ // Factor out any common prefixes.
+ commonlength = this.diff_commonPrefix(text_insert, text_delete)
+ if (commonlength !== 0) {
+ if (
+ pointer - count_delete - count_insert > 0 &&
+ diffs[pointer - count_delete - count_insert - 1][0] ==
+ DIFF_EQUAL
+ ) {
+ diffs[pointer - count_delete - count_insert - 1][1] +=
+ text_insert.substring(0, commonlength)
+ } else {
+ diffs.splice(0, 0, [
+ DIFF_EQUAL,
+ text_insert.substring(0, commonlength),
+ ])
+ pointer++
+ }
+ text_insert = text_insert.substring(commonlength)
+ text_delete = text_delete.substring(commonlength)
+ }
+ // Factor out any common suffixes.
+ commonlength = this.diff_commonSuffix(text_insert, text_delete)
+ if (commonlength !== 0) {
+ diffs[pointer][1] =
+ text_insert.substring(text_insert.length - commonlength) +
+ diffs[pointer][1]
+ text_insert = text_insert.substring(
+ 0,
+ text_insert.length - commonlength
+ )
+ text_delete = text_delete.substring(
+ 0,
+ text_delete.length - commonlength
+ )
+ }
+ }
+ // Delete the offending records and add the merged ones.
+ if (count_delete === 0) {
+ diffs.splice(pointer - count_insert, count_delete + count_insert, [
+ DIFF_INSERT,
+ text_insert,
+ ])
+ } else if (count_insert === 0) {
+ diffs.splice(pointer - count_delete, count_delete + count_insert, [
+ DIFF_DELETE,
+ text_delete,
+ ])
+ } else {
+ diffs.splice(
+ pointer - count_delete - count_insert,
+ count_delete + count_insert,
+ [DIFF_DELETE, text_delete],
+ [DIFF_INSERT, text_insert]
+ )
+ }
+ pointer =
+ pointer -
+ count_delete -
+ count_insert +
+ (count_delete ? 1 : 0) +
+ (count_insert ? 1 : 0) +
+ 1
+ } else if (pointer !== 0 && diffs[pointer - 1][0] == DIFF_EQUAL) {
+ // Merge this equality with the previous one.
+ diffs[pointer - 1][1] += diffs[pointer][1]
+ diffs.splice(pointer, 1)
+ } else {
+ pointer++
+ }
+ count_insert = 0
+ count_delete = 0
+ text_delete = ''
+ text_insert = ''
+ break
+ }
+ }
+ if (diffs[diffs.length - 1][1] === '') {
+ diffs.pop() // Remove the dummy entry at the end.
+ }
+
+ // Second pass: look for single edits surrounded on both sides by equalities
+ // which can be shifted sideways to eliminate an equality.
+ // e.g: A<ins>BA</ins>C -> <ins>AB</ins>AC
+ var changes = false
+ pointer = 1
+ // Intentionally ignore the first and last element (don't need checking).
+ while (pointer < diffs.length - 1) {
+ if (
+ diffs[pointer - 1][0] == DIFF_EQUAL &&
+ diffs[pointer + 1][0] == DIFF_EQUAL
+ ) {
+ // This is a single edit surrounded by equalities.
+ if (
+ diffs[pointer][1].substring(
+ diffs[pointer][1].length - diffs[pointer - 1][1].length
+ ) == diffs[pointer - 1][1]
+ ) {
+ // Shift the edit over the previous equality.
+ diffs[pointer][1] =
+ diffs[pointer - 1][1] +
+ diffs[pointer][1].substring(
+ 0,
+ diffs[pointer][1].length - diffs[pointer - 1][1].length
+ )
+ diffs[pointer + 1][1] = diffs[pointer - 1][1] + diffs[pointer + 1][1]
+ diffs.splice(pointer - 1, 1)
+ changes = true
+ } else if (
+ diffs[pointer][1].substring(0, diffs[pointer + 1][1].length) ==
+ diffs[pointer + 1][1]
+ ) {
+ // Shift the edit over the next equality.
+ diffs[pointer - 1][1] += diffs[pointer + 1][1]
+ diffs[pointer][1] =
+ diffs[pointer][1].substring(diffs[pointer + 1][1].length) +
+ diffs[pointer + 1][1]
+ diffs.splice(pointer + 1, 1)
+ changes = true
+ }
+ }
+ pointer++
+ }
+ // If shifts were made, the diff needs reordering and another shift sweep.
+ if (changes) {
+ this.diff_cleanupMerge(diffs)
+ }
+}
+
+/**
+ * loc is a location in text1, compute and return the equivalent location in
+ * text2.
+ * e.g. 'The cat' vs 'The big cat', 1->1, 5->8
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @param {number} loc Location within text1.
+ * @return {number} Location within text2.
+ */
+diff_match_patch.prototype.diff_xIndex = function (diffs, loc) {
+ var chars1 = 0
+ var chars2 = 0
+ var last_chars1 = 0
+ var last_chars2 = 0
+ var x
+ for (x = 0; x < diffs.length; x++) {
+ if (diffs[x][0] !== DIFF_INSERT) {
+ // Equality or deletion.
+ chars1 += diffs[x][1].length
+ }
+ if (diffs[x][0] !== DIFF_DELETE) {
+ // Equality or insertion.
+ chars2 += diffs[x][1].length
+ }
+ if (chars1 > loc) {
+ // Overshot the location.
+ break
+ }
+ last_chars1 = chars1
+ last_chars2 = chars2
+ }
+ // Was the location deleted?
+ if (diffs.length != x && diffs[x][0] === DIFF_DELETE) {
+ return last_chars2
+ }
+ // Add the remaining character length.
+ return last_chars2 + (loc - last_chars1)
+}
+
+/**
+ * Convert a diff array into a pretty HTML report.
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @return {string} HTML representation.
+ */
+diff_match_patch.prototype.diff_prettyHtml = function (diffs) {
+ var html = []
+ var pattern_amp = /&/g
+ var pattern_lt = /</g
+ var pattern_gt = />/g
+ var pattern_para = /\n/g
+ for (var x = 0; x < diffs.length; x++) {
+ var op = diffs[x][0] // Operation (insert, delete, equal)
+ var data = diffs[x][1] // Text of change.
+ var text = data
+ .replace(pattern_amp, '&amp;')
+ .replace(pattern_lt, '&lt;')
+ .replace(pattern_gt, '&gt;')
+ .replace(pattern_para, '&para;<br>')
+ switch (op) {
+ case DIFF_INSERT:
+ html[x] = '<ins style="background:#e6ffe6;">' + text + '</ins>'
+ break
+ case DIFF_DELETE:
+ html[x] = '<del style="background:#ffe6e6;">' + text + '</del>'
+ break
+ case DIFF_EQUAL:
+ html[x] = '<span>' + text + '</span>'
+ break
+ }
+ }
+ return html.join('')
+}
+
+/**
+ * Compute and return the source text (all equalities and deletions).
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @return {string} Source text.
+ */
+diff_match_patch.prototype.diff_text1 = function (diffs) {
+ var text = []
+ for (var x = 0; x < diffs.length; x++) {
+ if (diffs[x][0] !== DIFF_INSERT) {
+ text[x] = diffs[x][1]
+ }
+ }
+ return text.join('')
+}
+
+/**
+ * Compute and return the destination text (all equalities and insertions).
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @return {string} Destination text.
+ */
+diff_match_patch.prototype.diff_text2 = function (diffs) {
+ var text = []
+ for (var x = 0; x < diffs.length; x++) {
+ if (diffs[x][0] !== DIFF_DELETE) {
+ text[x] = diffs[x][1]
+ }
+ }
+ return text.join('')
+}
+
+/**
+ * Compute the Levenshtein distance; the number of inserted, deleted or
+ * substituted characters.
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @return {number} Number of changes.
+ */
+diff_match_patch.prototype.diff_levenshtein = function (diffs) {
+ var levenshtein = 0
+ var insertions = 0
+ var deletions = 0
+ for (var x = 0; x < diffs.length; x++) {
+ var op = diffs[x][0]
+ var data = diffs[x][1]
+ switch (op) {
+ case DIFF_INSERT:
+ insertions += data.length
+ break
+ case DIFF_DELETE:
+ deletions += data.length
+ break
+ case DIFF_EQUAL:
+ // A deletion and an insertion is one substitution.
+ levenshtein += Math.max(insertions, deletions)
+ insertions = 0
+ deletions = 0
+ break
+ }
+ }
+ levenshtein += Math.max(insertions, deletions)
+ return levenshtein
+}
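+
+// Example (illustrative): diff_levenshtein([[DIFF_DELETE, 'abc'], [DIFF_INSERT, '12'],
+// [DIFF_EQUAL, 'xyz']]) returns Math.max(3, 2) = 3, since an adjacent deletion
+// and insertion are counted as substitutions rather than summed.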
+
+/**
+ * Crush the diff into an encoded string which describes the operations
+ * required to transform text1 into text2.
+ * E.g. =3\t-2\t+ing -> Keep 3 chars, delete 2 chars, insert 'ing'.
+ * Operations are tab-separated. Inserted text is escaped using %xx notation.
+ * @param {!Array.<!diff_match_patch.Diff>} diffs Array of diff tuples.
+ * @return {string} Delta text.
+ */
+diff_match_patch.prototype.diff_toDelta = function (diffs) {
+ var text = []
+ for (var x = 0; x < diffs.length; x++) {
+ switch (diffs[x][0]) {
+ case DIFF_INSERT:
+ text[x] = '+' + encodeURI(diffs[x][1])
+ break
+ case DIFF_DELETE:
+ text[x] = '-' + diffs[x][1].length
+ break
+ case DIFF_EQUAL:
+ text[x] = '=' + diffs[x][1].length
+ break
+ }
+ }
+ return text.join('\t').replace(/%20/g, ' ')
+}
+
+/**
+ * Given the original text1, and an encoded string which describes the
+ * operations required to transform text1 into text2, compute the full diff.
+ * @param {string} text1 Source string for the diff.
+ * @param {string} delta Delta text.
+ * @return {!Array.<!diff_match_patch.Diff>} Array of diff tuples.
+ * @throws {!Error} If invalid input.
+ */
+diff_match_patch.prototype.diff_fromDelta = function (text1, delta) {
+ var diffs = []
+ var diffsLength = 0 // Keeping our own length var is faster in JS.
+ var pointer = 0 // Cursor in text1
+ var tokens = delta.split(/\t/g)
+ for (var x = 0; x < tokens.length; x++) {
+ // Each token begins with a one character parameter which specifies the
+ // operation of this token (delete, insert, equality).
+ var param = tokens[x].substring(1)
+ switch (tokens[x].charAt(0)) {
+ case '+':
+ try {
+ diffs[diffsLength++] = [DIFF_INSERT, decodeURI(param)]
+ } catch (ex) {
+ // Malformed URI sequence.
+ throw new Error('Illegal escape in diff_fromDelta: ' + param)
+ }
+ break
+ case '-':
+ // Fall through.
+ case '=':
+ var n = parseInt(param, 10)
+ if (isNaN(n) || n < 0) {
+ throw new Error('Invalid number in diff_fromDelta: ' + param)
+ }
+ var text = text1.substring(pointer, (pointer += n))
+ if (tokens[x].charAt(0) == '=') {
+ diffs[diffsLength++] = [DIFF_EQUAL, text]
+ } else {
+ diffs[diffsLength++] = [DIFF_DELETE, text]
+ }
+ break
+ default:
+ // Blank tokens are ok (from a trailing \t).
+ // Anything else is an error.
+ if (tokens[x]) {
+ throw new Error(
+ 'Invalid diff operation in diff_fromDelta: ' + tokens[x]
+ )
+ }
+ }
+ }
+ if (pointer != text1.length) {
+ throw new Error(
+ 'Delta length (' +
+ pointer +
+ ') does not equal source text length (' +
+ text1.length +
+ ').'
+ )
+ }
+ return diffs
+}
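+
+// Example (illustrative): diff_toDelta([[DIFF_EQUAL, 'jump'], [DIFF_DELETE, 's'],
+// [DIFF_INSERT, 'ed']]) yields '=4\t-1\t+ed', and diff_fromDelta('jumps', '=4\t-1\t+ed')
+// reconstructs the same tuples, so only the source text plus the compact delta
+// need to be stored.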
+
+// MATCH FUNCTIONS
+
+/**
+ * Locate the best instance of 'pattern' in 'text' near 'loc'.
+ * @param {string} text The text to search.
+ * @param {string} pattern The pattern to search for.
+ * @param {number} loc The location to search around.
+ * @return {number} Best match index or -1.
+ */
+diff_match_patch.prototype.match_main = function (text, pattern, loc) {
+ // Check for null inputs.
+ if (text == null || pattern == null || loc == null) {
+ throw new Error('Null input. (match_main)')
+ }
+
+ loc = Math.max(0, Math.min(loc, text.length))
+ if (text == pattern) {
+ // Shortcut (potentially not guaranteed by the algorithm)
+ return 0
+ } else if (!text.length) {
+ // Nothing to match.
+ return -1
+ } else if (text.substring(loc, loc + pattern.length) == pattern) {
+ // Perfect match at the perfect spot! (Includes case of null pattern)
+ return loc
+ } else {
+ // Do a fuzzy compare.
+ return this.match_bitap_(text, pattern, loc)
+ }
+}
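+
+// Example (illustrative): match_main('abcdef', 'de', 3) hits the exact-match
+// shortcut above and returns 3; inexact patterns fall through to the fuzzy
+// Bitap search, whose tolerance is governed by Match_Threshold and Match_Distance.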
+
+/**
+ * Locate the best instance of 'pattern' in 'text' near 'loc' using the
+ * Bitap algorithm.
+ * @param {string} text The text to search.
+ * @param {string} pattern The pattern to search for.
+ * @param {number} loc The location to search around.
+ * @return {number} Best match index or -1.
+ * @private
+ */
+diff_match_patch.prototype.match_bitap_ = function (text, pattern, loc) {
+ if (pattern.length > this.Match_MaxBits) {
+ throw new Error('Pattern too long for this browser.')
+ }
+
+ // Initialise the alphabet.
+ var s = this.match_alphabet_(pattern)
+
+ var dmp = this // 'this' becomes 'window' in a closure.
+
+ /**
+ * Compute and return the score for a match with e errors and x location.
+ * Accesses loc and pattern through being a closure.
+ * @param {number} e Number of errors in match.
+ * @param {number} x Location of match.
+ * @return {number} Overall score for match (0.0 = good, 1.0 = bad).
+ * @private
+ */
+ function match_bitapScore_(e, x) {
+ var accuracy = e / pattern.length
+ var proximity = Math.abs(loc - x)
+ if (!dmp.Match_Distance) {
+ // Dodge divide by zero error.
+ return proximity ? 1.0 : accuracy
+ }
+ return accuracy + proximity / dmp.Match_Distance
+ }
+
+ // Highest score beyond which we give up.
+ var score_threshold = this.Match_Threshold
+ // Is there a nearby exact match? (speedup)
+ var best_loc = text.indexOf(pattern, loc)
+ if (best_loc != -1) {
+ score_threshold = Math.min(match_bitapScore_(0, best_loc), score_threshold)
+ // What about in the other direction? (speedup)
+ best_loc = text.lastIndexOf(pattern, loc + pattern.length)
+ if (best_loc != -1) {
+ score_threshold = Math.min(
+ match_bitapScore_(0, best_loc),
+ score_threshold
+ )
+ }
+ }
+
+ // Initialise the bit arrays.
+ var matchmask = 1 << (pattern.length - 1)
+ best_loc = -1
+
+ var bin_min, bin_mid
+ var bin_max = pattern.length + text.length
+ var last_rd
+ for (var d = 0; d < pattern.length; d++) {
+ // Scan for the best match; each iteration allows for one more error.
+ // Run a binary search to determine how far from 'loc' we can stray at this
+ // error level.
+ bin_min = 0
+ bin_mid = bin_max
+ while (bin_min < bin_mid) {
+ if (match_bitapScore_(d, loc + bin_mid) <= score_threshold) {
+ bin_min = bin_mid
+ } else {
+ bin_max = bin_mid
+ }
+ bin_mid = Math.floor((bin_max - bin_min) / 2 + bin_min)
+ }
+ // Use the result from this iteration as the maximum for the next.
+ bin_max = bin_mid
+ var start = Math.max(1, loc - bin_mid + 1)
+ var finish = Math.min(loc + bin_mid, text.length) + pattern.length
+
+ var rd = Array(finish + 2)
+ rd[finish + 1] = (1 << d) - 1
+ for (var j = finish; j >= start; j--) {
+ // The alphabet (s) is a sparse hash, so the following line generates
+ // warnings.
+ var charMatch = s[text.charAt(j - 1)]
+ if (d === 0) {
+ // First pass: exact match.
+ rd[j] = ((rd[j + 1] << 1) | 1) & charMatch
+ } else {
+ // Subsequent passes: fuzzy match.
+ rd[j] =
+ (((rd[j + 1] << 1) | 1) & charMatch) |
+ (((last_rd[j + 1] | last_rd[j]) << 1) | 1) |
+ last_rd[j + 1]
+ }
+ if (rd[j] & matchmask) {
+ var score = match_bitapScore_(d, j - 1)
+ // This match will almost certainly be better than any existing match.
+ // But check anyway.
+ if (score <= score_threshold) {
+ // Told you so.
+ score_threshold = score
+ best_loc = j - 1
+ if (best_loc > loc) {
+ // When passing loc, don't exceed our current distance from loc.
+ start = Math.max(1, 2 * loc - best_loc)
+ } else {
+ // Already passed loc, downhill from here on in.
+ break
+ }
+ }
+ }
+ }
+ // No hope for a (better) match at greater error levels.
+ if (match_bitapScore_(d + 1, loc) > score_threshold) {
+ break
+ }
+ last_rd = rd
+ }
+ return best_loc
+}
+
+/**
+ * Initialise the alphabet for the Bitap algorithm.
+ * @param {string} pattern The text to encode.
+ * @return {!Object} Hash of character locations.
+ * @private
+ */
+diff_match_patch.prototype.match_alphabet_ = function (pattern) {
+ var s = {}
+ for (var i = 0; i < pattern.length; i++) {
+ s[pattern.charAt(i)] = 0
+ }
+ for (var i = 0; i < pattern.length; i++) {
+ s[pattern.charAt(i)] |= 1 << (pattern.length - i - 1)
+ }
+ return s
+}
+
+// PATCH FUNCTIONS
+
+/**
+ * Increase the context until it is unique,
+ * but don't let the pattern expand beyond Match_MaxBits.
+ * @param {!diff_match_patch.patch_obj} patch The patch to grow.
+ * @param {string} text Source text.
+ * @private
+ */
+diff_match_patch.prototype.patch_addContext_ = function (patch, text) {
+ if (text.length == 0) {
+ return
+ }
+ var pattern = text.substring(patch.start2, patch.start2 + patch.length1)
+ var padding = 0
+
+ // Look for the first and last matches of pattern in text. If two different
+ // matches are found, increase the pattern length.
+ while (
+ text.indexOf(pattern) != text.lastIndexOf(pattern) &&
+ pattern.length < this.Match_MaxBits - this.Patch_Margin - this.Patch_Margin
+ ) {
+ padding += this.Patch_Margin
+ pattern = text.substring(
+ patch.start2 - padding,
+ patch.start2 + patch.length1 + padding
+ )
+ }
+ // Add one chunk for good luck.
+ padding += this.Patch_Margin
+
+ // Add the prefix.
+ var prefix = text.substring(patch.start2 - padding, patch.start2)
+ if (prefix) {
+ patch.diffs.unshift([DIFF_EQUAL, prefix])
+ }
+ // Add the suffix.
+ var suffix = text.substring(
+ patch.start2 + patch.length1,
+ patch.start2 + patch.length1 + padding
+ )
+ if (suffix) {
+ patch.diffs.push([DIFF_EQUAL, suffix])
+ }
+
+ // Roll back the start points.
+ patch.start1 -= prefix.length
+ patch.start2 -= prefix.length
+ // Extend the lengths.
+ patch.length1 += prefix.length + suffix.length
+ patch.length2 += prefix.length + suffix.length
+}
+
+/**
+ * Compute a list of patches to turn text1 into text2.
+ * Use diffs if provided, otherwise compute it ourselves.
+ * There are four ways to call this function, depending on what data is
+ * available to the caller:
+ * Method 1:
+ * a = text1, b = text2
+ * Method 2:
+ * a = diffs
+ * Method 3 (optimal):
+ * a = text1, b = diffs
+ * Method 4 (deprecated, use method 3):
+ * a = text1, b = text2, c = diffs
+ *
+ * @param {string|!Array.<!diff_match_patch.Diff>} a text1 (methods 1,3,4) or
+ * Array of diff tuples for text1 to text2 (method 2).
+ * @param {string|!Array.<!diff_match_patch.Diff>} opt_b text2 (methods 1,4) or
+ * Array of diff tuples for text1 to text2 (method 3) or undefined (method 2).
+ * @param {string|!Array.<!diff_match_patch.Diff>} opt_c Array of diff tuples
+ * for text1 to text2 (method 4) or undefined (methods 1,2,3).
+ * @return {!Array.<!diff_match_patch.patch_obj>} Array of Patch objects.
+ */
+diff_match_patch.prototype.patch_make = function (a, opt_b, opt_c) {
+ var text1, diffs
+ if (
+ typeof a === 'string' &&
+ typeof opt_b === 'string' &&
+ typeof opt_c === 'undefined'
+ ) {
+ // Method 1: text1, text2
+ // Compute diffs from text1 and text2.
+ text1 = /** @type {string} */ (a)
+ diffs = this.diff_main(text1, /** @type {string} */ (opt_b), true)
+ if (diffs.length > 2) {
+ this.diff_cleanupSemantic(diffs)
+ this.diff_cleanupEfficiency(diffs)
+ }
+ } else if (
+ a &&
+ typeof a === 'object' &&
+ typeof opt_b === 'undefined' &&
+ typeof opt_c === 'undefined'
+ ) {
+ // Method 2: diffs
+ // Compute text1 from diffs.
+ diffs = /** @type {!Array.<!diff_match_patch.Diff>} */ (a)
+ text1 = this.diff_text1(diffs)
+ } else if (
+ typeof a === 'string' &&
+ opt_b &&
+ typeof opt_b === 'object' &&
+ typeof opt_c === 'undefined'
+ ) {
+ // Method 3: text1, diffs
+ text1 = /** @type {string} */ (a)
+ diffs = /** @type {!Array.<!diff_match_patch.Diff>} */ (opt_b)
+ } else if (
+ typeof a === 'string' &&
+ typeof opt_b === 'string' &&
+ opt_c &&
+ typeof opt_c === 'object'
+ ) {
+ // Method 4: text1, text2, diffs
+ // text2 is not used.
+ text1 = /** @type {string} */ (a)
+ diffs = /** @type {!Array.<!diff_match_patch.Diff>} */ (opt_c)
+ } else {
+ throw new Error('Unknown call format to patch_make.')
+ }
+
+ if (diffs.length === 0) {
+ return [] // Get rid of the null case.
+ }
+ var patches = []
+ var patch = new diff_match_patch.patch_obj()
+ var patchDiffLength = 0 // Keeping our own length var is faster in JS.
+ var char_count1 = 0 // Number of characters into the text1 string.
+ var char_count2 = 0 // Number of characters into the text2 string.
+ // Start with text1 (prepatch_text) and apply the diffs until we arrive at
+ // text2 (postpatch_text). We recreate the patches one by one to determine
+ // context info.
+ var prepatch_text = text1
+ var postpatch_text = text1
+ for (var x = 0; x < diffs.length; x++) {
+ var diff_type = diffs[x][0]
+ var diff_text = diffs[x][1]
+
+ if (!patchDiffLength && diff_type !== DIFF_EQUAL) {
+ // A new patch starts here.
+ patch.start1 = char_count1
+ patch.start2 = char_count2
+ }
+
+ switch (diff_type) {
+ case DIFF_INSERT:
+ patch.diffs[patchDiffLength++] = diffs[x]
+ patch.length2 += diff_text.length
+ postpatch_text =
+ postpatch_text.substring(0, char_count2) +
+ diff_text +
+ postpatch_text.substring(char_count2)
+ break
+ case DIFF_DELETE:
+ patch.length1 += diff_text.length
+ patch.diffs[patchDiffLength++] = diffs[x]
+ postpatch_text =
+ postpatch_text.substring(0, char_count2) +
+ postpatch_text.substring(char_count2 + diff_text.length)
+ break
+ case DIFF_EQUAL:
+ if (
+ diff_text.length <= 2 * this.Patch_Margin &&
+ patchDiffLength &&
+ diffs.length != x + 1
+ ) {
+ // Small equality inside a patch.
+ patch.diffs[patchDiffLength++] = diffs[x]
+ patch.length1 += diff_text.length
+ patch.length2 += diff_text.length
+ } else if (diff_text.length >= 2 * this.Patch_Margin) {
+ // Time for a new patch.
+ if (patchDiffLength) {
+ this.patch_addContext_(patch, prepatch_text)
+ patches.push(patch)
+ patch = new diff_match_patch.patch_obj()
+ patchDiffLength = 0
+ // Unlike Unidiff, our patch lists have a rolling context.
+ // http://code.google.com/p/google-diff-match-patch/wiki/Unidiff
+ // Update prepatch text & pos to reflect the application of the
+ // just completed patch.
+ prepatch_text = postpatch_text
+ char_count1 = char_count2
+ }
+ }
+ break
+ }
+
+ // Update the current character count.
+ if (diff_type !== DIFF_INSERT) {
+ char_count1 += diff_text.length
+ }
+ if (diff_type !== DIFF_DELETE) {
+ char_count2 += diff_text.length
+ }
+ }
+ // Pick up the leftover patch if not empty.
+ if (patchDiffLength) {
+ this.patch_addContext_(patch, prepatch_text)
+ patches.push(patch)
+ }
+
+ return patches
+}
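+
+// Editor's note (illustrative, not part of the upstream library): the common call
+// is Method 1 above, e.g.
+//   var dmp = new diff_match_patch()
+//   var patches = dmp.patch_make('The quick brown fox', 'The slow brown fox')
+// which returns an array of patch_obj instances describing the edit.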
+
+/**
+ * Given an array of patches, return another array that is identical.
+ * @param {!Array.<!diff_match_patch.patch_obj>} patches Array of Patch objects.
+ * @return {!Array.<!diff_match_patch.patch_obj>} Array of Patch objects.
+ */
+diff_match_patch.prototype.patch_deepCopy = function (patches) {
+ // Making deep copies is hard in JavaScript.
+ var patchesCopy = []
+ for (var x = 0; x < patches.length; x++) {
+ var patch = patches[x]
+ var patchCopy = new diff_match_patch.patch_obj()
+ patchCopy.diffs = []
+ for (var y = 0; y < patch.diffs.length; y++) {
+ patchCopy.diffs[y] = patch.diffs[y].slice()
+ }
+ patchCopy.start1 = patch.start1
+ patchCopy.start2 = patch.start2
+ patchCopy.length1 = patch.length1
+ patchCopy.length2 = patch.length2
+ patchesCopy[x] = patchCopy
+ }
+ return patchesCopy
+}
+
+/**
+ * Merge a set of patches onto the text. Return a patched text, as well
+ * as a list of true/false values indicating which patches were applied.
+ * @param {!Array.<!diff_match_patch.patch_obj>} patches Array of Patch objects.
+ * @param {string} text Old text.
+ * @return {!Array.<string|!Array.<boolean>>} Two element Array, containing the
+ * new text and an array of boolean values.
+ */
+diff_match_patch.prototype.patch_apply = function (patches, text) {
+ if (patches.length == 0) {
+ return [text, []]
+ }
+
+ // Deep copy the patches so that no changes are made to originals.
+ patches = this.patch_deepCopy(patches)
+
+ var nullPadding = this.patch_addPadding(patches)
+ text = nullPadding + text + nullPadding
+
+ this.patch_splitMax(patches)
+ // delta keeps track of the offset between the expected and actual location
+ // of the previous patch. If there are patches expected at positions 10 and
+ // 20, but the first patch was found at 12, delta is 2 and the second patch
+ // has an effective expected position of 22.
+ var delta = 0
+ var results = []
+ for (var x = 0; x < patches.length; x++) {
+ var expected_loc = patches[x].start2 + delta
+ var text1 = this.diff_text1(patches[x].diffs)
+ var start_loc
+ var end_loc = -1
+ if (text1.length > this.Match_MaxBits) {
+ // patch_splitMax will only provide an oversized pattern in the case of
+ // a monster delete.
+ start_loc = this.match_main(
+ text,
+ text1.substring(0, this.Match_MaxBits),
+ expected_loc
+ )
+ if (start_loc != -1) {
+ end_loc = this.match_main(
+ text,
+ text1.substring(text1.length - this.Match_MaxBits),
+ expected_loc + text1.length - this.Match_MaxBits
+ )
+ if (end_loc == -1 || start_loc >= end_loc) {
+ // Can't find valid trailing context. Drop this patch.
+ start_loc = -1
+ }
+ }
+ } else {
+ start_loc = this.match_main(text, text1, expected_loc)
+ }
+ if (start_loc == -1) {
+ // No match found. :(
+ results[x] = false
+ // Subtract the delta for this failed patch from subsequent patches.
+ delta -= patches[x].length2 - patches[x].length1
+ } else {
+ // Found a match. :)
+ results[x] = true
+ delta = start_loc - expected_loc
+ var text2
+ if (end_loc == -1) {
+ text2 = text.substring(start_loc, start_loc + text1.length)
+ } else {
+ text2 = text.substring(start_loc, end_loc + this.Match_MaxBits)
+ }
+ if (text1 == text2) {
+ // Perfect match, just shove the replacement text in.
+ text =
+ text.substring(0, start_loc) +
+ this.diff_text2(patches[x].diffs) +
+ text.substring(start_loc + text1.length)
+ } else {
+ // Imperfect match. Run a diff to get a framework of equivalent
+ // indices.
+ var diffs = this.diff_main(text1, text2, false)
+ if (
+ text1.length > this.Match_MaxBits &&
+ this.diff_levenshtein(diffs) / text1.length >
+ this.Patch_DeleteThreshold
+ ) {
+ // The end points match, but the content is unacceptably bad.
+ results[x] = false
+ } else {
+ this.diff_cleanupSemanticLossless(diffs)
+ var index1 = 0
+ var index2
+ for (var y = 0; y < patches[x].diffs.length; y++) {
+ var mod = patches[x].diffs[y]
+ if (mod[0] !== DIFF_EQUAL) {
+ index2 = this.diff_xIndex(diffs, index1)
+ }
+ if (mod[0] === DIFF_INSERT) {
+ // Insertion
+ text =
+ text.substring(0, start_loc + index2) +
+ mod[1] +
+ text.substring(start_loc + index2)
+ } else if (mod[0] === DIFF_DELETE) {
+ // Deletion
+ text =
+ text.substring(0, start_loc + index2) +
+ text.substring(
+ start_loc + this.diff_xIndex(diffs, index1 + mod[1].length)
+ )
+ }
+ if (mod[0] !== DIFF_DELETE) {
+ index1 += mod[1].length
+ }
+ }
+ }
+ }
+ }
+ }
+ // Strip the padding off.
+ text = text.substring(nullPadding.length, text.length - nullPadding.length)
+ return [text, results]
+}
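+
+// Editor's note (illustrative, not part of the upstream library): continuing the
+// patch_make example above,
+//   var [newText, results] = dmp.patch_apply(patches, 'The quick brown fox jumps')
+// is expected to yield newText === 'The slow brown fox jumps' with results[0] === true,
+// since the patch context still matches despite the extra trailing text.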
+
+/**
+ * Add some padding on text start and end so that edges can match something.
+ * Intended to be called only from within patch_apply.
+ * @param {!Array.<!diff_match_patch.patch_obj>} patches Array of Patch objects.
+ * @return {string} The padding string added to each side.
+ */
+diff_match_patch.prototype.patch_addPadding = function (patches) {
+ var paddingLength = this.Patch_Margin
+ var nullPadding = ''
+ for (var x = 1; x <= paddingLength; x++) {
+ nullPadding += String.fromCharCode(x)
+ }
+
+ // Bump all the patches forward.
+ for (var x = 0; x < patches.length; x++) {
+ patches[x].start1 += paddingLength
+ patches[x].start2 += paddingLength
+ }
+
+ // Add some padding on start of first diff.
+ var patch = patches[0]
+ var diffs = patch.diffs
+ if (diffs.length == 0 || diffs[0][0] != DIFF_EQUAL) {
+ // Add nullPadding equality.
+ diffs.unshift([DIFF_EQUAL, nullPadding])
+ patch.start1 -= paddingLength // Should be 0.
+ patch.start2 -= paddingLength // Should be 0.
+ patch.length1 += paddingLength
+ patch.length2 += paddingLength
+ } else if (paddingLength > diffs[0][1].length) {
+ // Grow first equality.
+ var extraLength = paddingLength - diffs[0][1].length
+ diffs[0][1] = nullPadding.substring(diffs[0][1].length) + diffs[0][1]
+ patch.start1 -= extraLength
+ patch.start2 -= extraLength
+ patch.length1 += extraLength
+ patch.length2 += extraLength
+ }
+
+ // Add some padding on end of last diff.
+ patch = patches[patches.length - 1]
+ diffs = patch.diffs
+ if (diffs.length == 0 || diffs[diffs.length - 1][0] != DIFF_EQUAL) {
+ // Add nullPadding equality.
+ diffs.push([DIFF_EQUAL, nullPadding])
+ patch.length1 += paddingLength
+ patch.length2 += paddingLength
+ } else if (paddingLength > diffs[diffs.length - 1][1].length) {
+ // Grow last equality.
+ var extraLength = paddingLength - diffs[diffs.length - 1][1].length
+ diffs[diffs.length - 1][1] += nullPadding.substring(0, extraLength)
+ patch.length1 += extraLength
+ patch.length2 += extraLength
+ }
+
+ return nullPadding
+}
+
+/**
+ * Look through the patches and break up any which are longer than the maximum
+ * limit of the match algorithm.
+ * Intended to be called only from within patch_apply.
+ * @param {!Array.<!diff_match_patch.patch_obj>} patches Array of Patch objects.
+ */
+diff_match_patch.prototype.patch_splitMax = function (patches) {
+ var patch_size = this.Match_MaxBits
+ for (var x = 0; x < patches.length; x++) {
+ if (patches[x].length1 <= patch_size) {
+ continue
+ }
+ var bigpatch = patches[x]
+ // Remove the big old patch.
+ patches.splice(x--, 1)
+ var start1 = bigpatch.start1
+ var start2 = bigpatch.start2
+ var precontext = ''
+ while (bigpatch.diffs.length !== 0) {
+ // Create one of several smaller patches.
+ var patch = new diff_match_patch.patch_obj()
+ var empty = true
+ patch.start1 = start1 - precontext.length
+ patch.start2 = start2 - precontext.length
+ if (precontext !== '') {
+ patch.length1 = patch.length2 = precontext.length
+ patch.diffs.push([DIFF_EQUAL, precontext])
+ }
+ while (
+ bigpatch.diffs.length !== 0 &&
+ patch.length1 < patch_size - this.Patch_Margin
+ ) {
+ var diff_type = bigpatch.diffs[0][0]
+ var diff_text = bigpatch.diffs[0][1]
+ if (diff_type === DIFF_INSERT) {
+ // Insertions are harmless.
+ patch.length2 += diff_text.length
+ start2 += diff_text.length
+ patch.diffs.push(bigpatch.diffs.shift())
+ empty = false
+ } else if (
+ diff_type === DIFF_DELETE &&
+ patch.diffs.length == 1 &&
+ patch.diffs[0][0] == DIFF_EQUAL &&
+ diff_text.length > 2 * patch_size
+ ) {
+ // This is a large deletion. Let it pass in one chunk.
+ patch.length1 += diff_text.length
+ start1 += diff_text.length
+ empty = false
+ patch.diffs.push([diff_type, diff_text])
+ bigpatch.diffs.shift()
+ } else {
+ // Deletion or equality. Only take as much as we can stomach.
+ diff_text = diff_text.substring(
+ 0,
+ patch_size - patch.length1 - this.Patch_Margin
+ )
+ patch.length1 += diff_text.length
+ start1 += diff_text.length
+ if (diff_type === DIFF_EQUAL) {
+ patch.length2 += diff_text.length
+ start2 += diff_text.length
+ } else {
+ empty = false
+ }
+ patch.diffs.push([diff_type, diff_text])
+ if (diff_text == bigpatch.diffs[0][1]) {
+ bigpatch.diffs.shift()
+ } else {
+ bigpatch.diffs[0][1] = bigpatch.diffs[0][1].substring(
+ diff_text.length
+ )
+ }
+ }
+ }
+ // Compute the head context for the next patch.
+ precontext = this.diff_text2(patch.diffs)
+ precontext = precontext.substring(precontext.length - this.Patch_Margin)
+ // Append the end context for this patch.
+ var postcontext = this.diff_text1(bigpatch.diffs).substring(
+ 0,
+ this.Patch_Margin
+ )
+ if (postcontext !== '') {
+ patch.length1 += postcontext.length
+ patch.length2 += postcontext.length
+ if (
+ patch.diffs.length !== 0 &&
+ patch.diffs[patch.diffs.length - 1][0] === DIFF_EQUAL
+ ) {
+ patch.diffs[patch.diffs.length - 1][1] += postcontext
+ } else {
+ patch.diffs.push([DIFF_EQUAL, postcontext])
+ }
+ }
+ if (!empty) {
+ patches.splice(++x, 0, patch)
+ }
+ }
+ }
+}
+
+/**
+ * Take a list of patches and return a textual representation.
+ * @param {!Array.<!diff_match_patch.patch_obj>} patches Array of Patch objects.
+ * @return {string} Text representation of patches.
+ */
+diff_match_patch.prototype.patch_toText = function (patches) {
+ var text = []
+ for (var x = 0; x < patches.length; x++) {
+ text[x] = patches[x]
+ }
+ return text.join('')
+}
+
+/**
+ * Parse a textual representation of patches and return a list of Patch objects.
+ * @param {string} textline Text representation of patches.
+ * @return {!Array.<!diff_match_patch.patch_obj>} Array of Patch objects.
+ * @throws {!Error} If invalid input.
+ */
+diff_match_patch.prototype.patch_fromText = function (textline) {
+ var patches = []
+ if (!textline) {
+ return patches
+ }
+ var text = textline.split('\n')
+ var textPointer = 0
+ var patchHeader = /^@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@$/
+ while (textPointer < text.length) {
+ var m = text[textPointer].match(patchHeader)
+ if (!m) {
+ throw new Error('Invalid patch string: ' + text[textPointer])
+ }
+ var patch = new diff_match_patch.patch_obj()
+ patches.push(patch)
+ patch.start1 = parseInt(m[1], 10)
+ if (m[2] === '') {
+ patch.start1--
+ patch.length1 = 1
+ } else if (m[2] == '0') {
+ patch.length1 = 0
+ } else {
+ patch.start1--
+ patch.length1 = parseInt(m[2], 10)
+ }
+
+ patch.start2 = parseInt(m[3], 10)
+ if (m[4] === '') {
+ patch.start2--
+ patch.length2 = 1
+ } else if (m[4] == '0') {
+ patch.length2 = 0
+ } else {
+ patch.start2--
+ patch.length2 = parseInt(m[4], 10)
+ }
+ textPointer++
+
+ while (textPointer < text.length) {
+ var sign = text[textPointer].charAt(0)
+ try {
+ var line = decodeURI(text[textPointer].substring(1))
+ } catch (ex) {
+ // Malformed URI sequence.
+ throw new Error('Illegal escape in patch_fromText: ' + line)
+ }
+ if (sign == '-') {
+ // Deletion.
+ patch.diffs.push([DIFF_DELETE, line])
+ } else if (sign == '+') {
+ // Insertion.
+ patch.diffs.push([DIFF_INSERT, line])
+ } else if (sign == ' ') {
+ // Minor equality.
+ patch.diffs.push([DIFF_EQUAL, line])
+ } else if (sign == '@') {
+ // Start of next patch.
+ break
+ } else if (sign === '') {
+ // Blank line? Whatever.
+ } else {
+ // WTF?
+ throw new Error('Invalid patch mode "' + sign + '" in: ' + line)
+ }
+ textPointer++
+ }
+ }
+ return patches
+}
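+
+// Editor's note (illustrative, not part of the upstream library): patch_toText and
+// patch_fromText round-trip, e.g.
+//   var serialized = dmp.patch_toText(patches) // GNU-diff-like text, %xx-escaped
+//   var restored = dmp.patch_fromText(serialized)
+// restored is structurally equivalent to the patches array it was built from.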
+
+/**
+ * Class representing one patch operation.
+ * @constructor
+ */
+diff_match_patch.patch_obj = function () {
+ /** @type {!Array.<!diff_match_patch.Diff>} */
+ this.diffs = []
+ /** @type {?number} */
+ this.start1 = null
+ /** @type {?number} */
+ this.start2 = null
+ /** @type {number} */
+ this.length1 = 0
+ /** @type {number} */
+ this.length2 = 0
+}
+
+/**
+ * Emulate GNU diff's format.
+ * Header: @@ -382,8 +481,9 @@
+ * Indices are printed as 1-based, not 0-based.
+ * @return {string} The GNU diff string.
+ */
+diff_match_patch.patch_obj.prototype.toString = function () {
+ var coords1, coords2
+ if (this.length1 === 0) {
+ coords1 = this.start1 + ',0'
+ } else if (this.length1 == 1) {
+ coords1 = this.start1 + 1
+ } else {
+ coords1 = this.start1 + 1 + ',' + this.length1
+ }
+ if (this.length2 === 0) {
+ coords2 = this.start2 + ',0'
+ } else if (this.length2 == 1) {
+ coords2 = this.start2 + 1
+ } else {
+ coords2 = this.start2 + 1 + ',' + this.length2
+ }
+ var text = ['@@ -' + coords1 + ' +' + coords2 + ' @@\n']
+ var op
+ // Escape the body of the patch with %xx notation.
+ for (var x = 0; x < this.diffs.length; x++) {
+ switch (this.diffs[x][0]) {
+ case DIFF_INSERT:
+ op = '+'
+ break
+ case DIFF_DELETE:
+ op = '-'
+ break
+ case DIFF_EQUAL:
+ op = ' '
+ break
+ }
+ text[x + 1] = op + encodeURI(this.diffs[x][1]) + '\n'
+ }
+ return text.join('').replace(/%20/g, ' ')
+}
+
+// Export these global variables so that they survive Google's JS compiler.
+// In a browser, 'this' will be 'window'.
+// Users of node.js should 'require' the uncompressed version since Google's
+// JS compiler may break the following exports for non-browser environments.
+this.diff_match_patch = diff_match_patch
+this.DIFF_DELETE = DIFF_DELETE
+this.DIFF_INSERT = DIFF_INSERT
+this.DIFF_EQUAL = DIFF_EQUAL
diff --git a/services/track-changes/buildscript.txt b/services/track-changes/buildscript.txt
new file mode 100644
index 0000000000..36ed3a783a
--- /dev/null
+++ b/services/track-changes/buildscript.txt
@@ -0,0 +1,8 @@
+track-changes
+--dependencies=mongo,redis,s3
+--docker-repos=gcr.io/overleaf-ops
+--env-add=AWS_BUCKET=bucket
+--env-pass-through=
+--node-version=12.22.3
+--public-repo=True
+--script-version=3.11.0
diff --git a/services/track-changes/config/settings.defaults.js b/services/track-changes/config/settings.defaults.js
new file mode 100755
index 0000000000..faa8660b70
--- /dev/null
+++ b/services/track-changes/config/settings.defaults.js
@@ -0,0 +1,90 @@
+const Path = require('path')
+const TMP_DIR =
+ process.env.TMP_PATH || Path.resolve(Path.join(__dirname, '../../', 'tmp'))
+
+module.exports = {
+ mongo: {
+ options: {
+ useUnifiedTopology:
+ (process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true',
+ },
+ url:
+ process.env.MONGO_CONNECTION_STRING ||
+ `mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`,
+ },
+
+ internal: {
+ trackchanges: {
+ port: 3015,
+ host: process.env.LISTEN_ADDRESS || 'localhost',
+ },
+ },
+ apis: {
+ documentupdater: {
+ url: `http://${
+ process.env.DOCUMENT_UPDATER_HOST ||
+ process.env.DOCUPDATER_HOST ||
+ 'localhost'
+ }:3003`,
+ },
+ docstore: {
+ url: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`,
+ },
+ web: {
+ url: `http://${
+ process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
+ }:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
+ user: process.env.WEB_API_USER || 'sharelatex',
+ pass: process.env.WEB_API_PASSWORD || 'password',
+ },
+ },
+ redis: {
+ lock: {
+ host: process.env.REDIS_HOST || 'localhost',
+ port: process.env.REDIS_PORT || 6379,
+ password: process.env.REDIS_PASSWORD || '',
+ key_schema: {
+ historyLock({ doc_id: docId }) {
+ return `HistoryLock:{${docId}}`
+ },
+ historyIndexLock({ project_id: projectId }) {
+ return `HistoryIndexLock:{${projectId}}`
+ },
+ },
+ },
+ history: {
+ host: process.env.REDIS_HOST || 'localhost',
+ port: process.env.REDIS_PORT || 6379,
+ password: process.env.REDIS_PASSWORD || '',
+ key_schema: {
+ uncompressedHistoryOps({ doc_id: docId }) {
+ return `UncompressedHistoryOps:{${docId}}`
+ },
+ docsWithHistoryOps({ project_id: projectId }) {
+ return `DocsWithHistoryOps:{${projectId}}`
+ },
+ },
+ },
+ },
+
+ trackchanges: {
+ s3: {
+ key: process.env.AWS_ACCESS_KEY_ID,
+ secret: process.env.AWS_SECRET_ACCESS_KEY,
+ endpoint: process.env.AWS_S3_ENDPOINT,
+ pathStyle: process.env.AWS_S3_PATH_STYLE === 'true',
+ },
+ stores: {
+ doc_history: process.env.AWS_BUCKET,
+ },
+ continueOnError: process.env.TRACK_CHANGES_CONTINUE_ON_ERROR || false,
+ },
+
+ path: {
+ dumpFolder: Path.join(TMP_DIR, 'dumpFolder'),
+ },
+
+ sentry: {
+ dsn: process.env.SENTRY_DSN,
+ },
+}
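+
+// Editor's note (illustrative, not part of the original config): every value above
+// falls back to a default, so a local override only needs the relevant variables,
+// e.g. exporting MONGO_HOST=mongo, REDIS_HOST=redis and AWS_BUCKET=my-history-bucket
+// before starting the service.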
diff --git a/services/track-changes/docker-compose.ci.yml b/services/track-changes/docker-compose.ci.yml
new file mode 100644
index 0000000000..3797853153
--- /dev/null
+++ b/services/track-changes/docker-compose.ci.yml
@@ -0,0 +1,72 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/sharelatex/sharelatex-dev-environment
+
+version: "2.3"
+
+services:
+ test_unit:
+ image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+ user: node
+ command: npm run test:unit:_run
+ environment:
+ NODE_ENV: test
+ NODE_OPTIONS: "--unhandled-rejections=strict"
+
+
+ test_acceptance:
+ build: .
+ image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+ environment:
+ ELASTIC_SEARCH_DSN: es:9200
+ REDIS_HOST: redis
+ QUEUES_REDIS_HOST: redis
+ MONGO_HOST: mongo
+ POSTGRES_HOST: postgres
+ AWS_S3_ENDPOINT: http://s3:9090
+ AWS_S3_PATH_STYLE: 'true'
+ AWS_ACCESS_KEY_ID: fake
+ AWS_SECRET_ACCESS_KEY: fake
+ MOCHA_GREP: ${MOCHA_GREP}
+ NODE_ENV: test
+ NODE_OPTIONS: "--unhandled-rejections=strict"
+ AWS_BUCKET: bucket
+ depends_on:
+ mongo:
+ condition: service_healthy
+ redis:
+ condition: service_healthy
+ s3:
+ condition: service_healthy
+ user: node
+ command: npm run test:acceptance:_run
+
+
+ tar:
+ build: .
+ image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+ volumes:
+ - ./:/tmp/build/
+ command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
+ user: root
+ redis:
+ image: redis
+ healthcheck:
+ test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ]
+ interval: 1s
+ retries: 20
+
+ mongo:
+ image: mongo:4.0
+ healthcheck:
+ test: "mongo --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'"
+ interval: 1s
+ retries: 20
+ s3:
+ image: adobe/s3mock
+ environment:
+ - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket
+ healthcheck:
+ test: wget --quiet --output-document=/dev/null http://localhost:9090
+ interval: 1s
+ retries: 20
diff --git a/services/track-changes/docker-compose.yml b/services/track-changes/docker-compose.yml
new file mode 100644
index 0000000000..c4bac32698
--- /dev/null
+++ b/services/track-changes/docker-compose.yml
@@ -0,0 +1,71 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/sharelatex/sharelatex-dev-environment
+
+version: "2.3"
+
+services:
+ test_unit:
+ image: node:12.22.3
+ volumes:
+ - .:/app
+ working_dir: /app
+ environment:
+ MOCHA_GREP: ${MOCHA_GREP}
+ NODE_ENV: test
+ NODE_OPTIONS: "--unhandled-rejections=strict"
+ command: npm run --silent test:unit
+ user: node
+
+ test_acceptance:
+ image: node:12.22.3
+ volumes:
+ - .:/app
+ working_dir: /app
+ environment:
+ ELASTIC_SEARCH_DSN: es:9200
+ REDIS_HOST: redis
+ QUEUES_REDIS_HOST: redis
+ MONGO_HOST: mongo
+ POSTGRES_HOST: postgres
+ AWS_S3_ENDPOINT: http://s3:9090
+ AWS_S3_PATH_STYLE: 'true'
+ AWS_ACCESS_KEY_ID: fake
+ AWS_SECRET_ACCESS_KEY: fake
+ MOCHA_GREP: ${MOCHA_GREP}
+ LOG_LEVEL: ERROR
+ NODE_ENV: test
+ NODE_OPTIONS: "--unhandled-rejections=strict"
+ AWS_BUCKET: bucket
+ user: node
+ depends_on:
+ mongo:
+ condition: service_healthy
+ redis:
+ condition: service_healthy
+ s3:
+ condition: service_healthy
+ command: npm run --silent test:acceptance
+
+ redis:
+ image: redis
+ healthcheck:
+ test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ]
+ interval: 1s
+ retries: 20
+
+ mongo:
+ image: mongo:4.0
+ healthcheck:
+ test: "mongo --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'"
+ interval: 1s
+ retries: 20
+
+ s3:
+ image: adobe/s3mock
+ environment:
+ - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket
+ healthcheck:
+ test: wget --quiet --output-document=/dev/null http://localhost:9090
+ interval: 1s
+ retries: 20
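+
+# Editor's note (illustrative, not part of the generated file): the suites above are
+# typically run through docker-compose, e.g.
+#   docker-compose run --rm test_unit
+#   docker-compose run --rm test_acceptance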
diff --git a/services/track-changes/nodemon.json b/services/track-changes/nodemon.json
new file mode 100644
index 0000000000..e3e8817d90
--- /dev/null
+++ b/services/track-changes/nodemon.json
@@ -0,0 +1,17 @@
+{
+ "ignore": [
+ ".git",
+ "node_modules/"
+ ],
+ "verbose": true,
+ "legacyWatch": true,
+ "execMap": {
+ "js": "npm run start"
+ },
+ "watch": [
+ "app/js/",
+ "app.js",
+ "config/"
+ ],
+ "ext": "js"
+}
diff --git a/services/track-changes/pack.sh b/services/track-changes/pack.sh
new file mode 100755
index 0000000000..c2aad785c7
--- /dev/null
+++ b/services/track-changes/pack.sh
@@ -0,0 +1,24 @@
+#!/bin/bash -x
+
+# find all the docHistories with unpacked ops and pack them
+
+# need to keep track of docs already done
+
+HOST=${1:-"localhost:3015"}
+T=${2:-10}
+
+echo packing all docHistory on $HOST with delay of $T
+for n in $(seq 5 -1 1) ; do
+ echo starting in $n seconds
+ sleep 1
+done
+
+while docs=$(curl "$HOST/doc/list?limit=1000&doc_id=$last_doc"); do
+ if [ -z "$docs" ] ; then break ; fi
+ for d in $docs ; do
+ echo "packing $d"
+ curl -X POST "$HOST/doc/$d/pack"
+ sleep $T
+ last_doc=$d
+ done
+done
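+
+# Editor's note (illustrative, not part of the original script): example invocation
+#   ./pack.sh localhost:3015 10
+# packs every docHistory via the track-changes service on localhost:3015, pausing
+# 10 seconds between docs.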
diff --git a/services/track-changes/package-lock.json b/services/track-changes/package-lock.json
new file mode 100644
index 0000000000..0cb04e7652
--- /dev/null
+++ b/services/track-changes/package-lock.json
@@ -0,0 +1,5794 @@
+{
+ "name": "history-sharelatex",
+ "version": "0.1.4",
+ "lockfileVersion": 1,
+ "requires": true,
+ "dependencies": {
+ "@babel/code-frame": {
+ "version": "7.12.11",
+ "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz",
+ "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==",
+ "dev": true,
+ "requires": {
+ "@babel/highlight": "^7.10.4"
+ }
+ },
+ "@babel/helper-validator-identifier": {
+ "version": "7.14.5",
+ "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.5.tgz",
+ "integrity": "sha512-5lsetuxCLilmVGyiLEfoHBRX8UCFD+1m2x3Rj97WrW3V7H3u4RWRXA4evMjImCsin2J2YT0QaVDGf+z8ondbAg==",
+ "dev": true
+ },
+ "@babel/highlight": {
+ "version": "7.14.5",
+ "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz",
+ "integrity": "sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==",
+ "dev": true,
+ "requires": {
+ "@babel/helper-validator-identifier": "^7.14.5",
+ "chalk": "^2.0.0",
+ "js-tokens": "^4.0.0"
+ }
+ },
+ "@eslint/eslintrc": {
+ "version": "0.4.2",
+ "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.2.tgz",
+ "integrity": "sha512-8nmGq/4ycLpIwzvhI4tNDmQztZ8sp+hI7cyG8i1nQDhkAbRzHpXPidRAHlNvCZQpJTKw5ItIpMw9RSToGF00mg==",
+ "dev": true,
+ "requires": {
+ "ajv": "^6.12.4",
+ "debug": "^4.1.1",
+ "espree": "^7.3.0",
+ "globals": "^13.9.0",
+ "ignore": "^4.0.6",
+ "import-fresh": "^3.2.1",
+ "js-yaml": "^3.13.1",
+ "minimatch": "^3.0.4",
+ "strip-json-comments": "^3.1.1"
+ },
+ "dependencies": {
+ "ajv": {
+ "version": "6.12.6",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
+ "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
+ "dev": true,
+ "requires": {
+ "fast-deep-equal": "^3.1.1",
+ "fast-json-stable-stringify": "^2.0.0",
+ "json-schema-traverse": "^0.4.1",
+ "uri-js": "^4.2.2"
+ }
+ },
+ "fast-deep-equal": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
+ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
+ "dev": true
+ }
+ }
+ },
+ "@google-cloud/common": {
+ "version": "2.4.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.4.0.tgz",
+ "integrity": "sha512-zWFjBS35eI9leAHhjfeOYlK5Plcuj/77EzstnrJIZbKgF/nkqjcQuGiMCpzCwOfPyUbz8ZaEOYgbHa759AKbjg==",
+ "requires": {
+ "@google-cloud/projectify": "^1.0.0",
+ "@google-cloud/promisify": "^1.0.0",
+ "arrify": "^2.0.0",
+ "duplexify": "^3.6.0",
+ "ent": "^2.2.0",
+ "extend": "^3.0.2",
+ "google-auth-library": "^5.5.0",
+ "retry-request": "^4.0.0",
+ "teeny-request": "^6.0.0"
+ },
+ "dependencies": {
+ "google-auth-library": {
+ "version": "5.10.1",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz",
+ "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==",
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^2.1.0",
+ "gcp-metadata": "^3.4.0",
+ "gtoken": "^4.1.0",
+ "jws": "^4.0.0",
+ "lru-cache": "^5.0.0"
+ }
+ }
+ }
+ },
+ "@google-cloud/debug-agent": {
+ "version": "5.1.3",
+ "resolved": "https://registry.npmjs.org/@google-cloud/debug-agent/-/debug-agent-5.1.3.tgz",
+ "integrity": "sha512-WbzeEz4MvPlM7DX2QBsPcWgF62u7LSQv/oMYPl0L+TddTebqjDKiVXwxpzWk61NIfcKiet3dyCbPIt3N5o8XPQ==",
+ "requires": {
+ "@google-cloud/common": "^3.0.0",
+ "acorn": "^8.0.0",
+ "coffeescript": "^2.0.0",
+ "console-log-level": "^1.4.0",
+ "extend": "^3.0.2",
+ "findit2": "^2.2.3",
+ "gcp-metadata": "^4.0.0",
+ "p-limit": "^3.0.1",
+ "semver": "^7.0.0",
+ "source-map": "^0.6.1",
+ "split": "^1.0.0"
+ },
+ "dependencies": {
+ "@google-cloud/common": {
+ "version": "3.5.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.5.0.tgz",
+ "integrity": "sha512-10d7ZAvKhq47L271AqvHEd8KzJqGU45TY+rwM2Z3JHuB070FeTi7oJJd7elfrnKaEvaktw3hH2wKnRWxk/3oWQ==",
+ "requires": {
+ "@google-cloud/projectify": "^2.0.0",
+ "@google-cloud/promisify": "^2.0.0",
+ "arrify": "^2.0.1",
+ "duplexify": "^4.1.1",
+ "ent": "^2.2.0",
+ "extend": "^3.0.2",
+ "google-auth-library": "^6.1.1",
+ "retry-request": "^4.1.1",
+ "teeny-request": "^7.0.0"
+ }
+ },
+ "@google-cloud/projectify": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz",
+ "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ=="
+ },
+ "@google-cloud/promisify": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz",
+ "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw=="
+ },
+ "bignumber.js": {
+ "version": "9.0.1",
+ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz",
+ "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA=="
+ },
+ "duplexify": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz",
+ "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==",
+ "requires": {
+ "end-of-stream": "^1.4.1",
+ "inherits": "^2.0.3",
+ "readable-stream": "^3.1.1",
+ "stream-shift": "^1.0.0"
+ }
+ },
+ "gaxios": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.1.0.tgz",
+ "integrity": "sha512-vb0to8xzGnA2qcgywAjtshOKKVDf2eQhJoiL6fHhgW5tVN7wNk7egnYIO9zotfn3lQ3De1VPdf7V5/BWfCtCmg==",
+ "requires": {
+ "abort-controller": "^3.0.0",
+ "extend": "^3.0.2",
+ "https-proxy-agent": "^5.0.0",
+ "is-stream": "^2.0.0",
+ "node-fetch": "^2.3.0"
+ }
+ },
+ "gcp-metadata": {
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz",
+ "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==",
+ "requires": {
+ "gaxios": "^4.0.0",
+ "json-bigint": "^1.0.0"
+ }
+ },
+ "google-auth-library": {
+ "version": "6.1.6",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.6.tgz",
+ "integrity": "sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ==",
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^4.0.0",
+ "gcp-metadata": "^4.2.0",
+ "gtoken": "^5.0.4",
+ "jws": "^4.0.0",
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "google-p12-pem": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz",
+ "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==",
+ "requires": {
+ "node-forge": "^0.10.0"
+ }
+ },
+ "gtoken": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz",
+ "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==",
+ "requires": {
+ "gaxios": "^4.0.0",
+ "google-p12-pem": "^3.0.3",
+ "jws": "^4.0.0"
+ }
+ },
+ "json-bigint": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz",
+ "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==",
+ "requires": {
+ "bignumber.js": "^9.0.0"
+ }
+ },
+ "lru-cache": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+ "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "requires": {
+ "yallist": "^4.0.0"
+ }
+ },
+ "node-forge": {
+ "version": "0.10.0",
+ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz",
+ "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA=="
+ },
+ "readable-stream": {
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
+ "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
+ "requires": {
+ "inherits": "^2.0.3",
+ "string_decoder": "^1.1.1",
+ "util-deprecate": "^1.0.1"
+ }
+ },
+ "semver": {
+ "version": "7.3.4",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz",
+ "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==",
+ "requires": {
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "teeny-request": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz",
+ "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==",
+ "requires": {
+ "http-proxy-agent": "^4.0.0",
+ "https-proxy-agent": "^5.0.0",
+ "node-fetch": "^2.6.1",
+ "stream-events": "^1.0.5",
+ "uuid": "^8.0.0"
+ }
+ },
+ "uuid": {
+ "version": "8.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+ "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
+ }
+ }
+ },
+ "@google-cloud/logging": {
+ "version": "7.3.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-7.3.0.tgz",
+ "integrity": "sha512-xTW1V4MKpYC0mjSugyuiyUoZ9g6A42IhrrO3z7Tt3SmAb2IRj2Gf4RLoguKKncs340ooZFXrrVN/++t2Aj5zgg==",
+ "requires": {
+ "@google-cloud/common": "^2.2.2",
+ "@google-cloud/paginator": "^2.0.0",
+ "@google-cloud/projectify": "^1.0.0",
+ "@google-cloud/promisify": "^1.0.0",
+ "@opencensus/propagation-stackdriver": "0.0.20",
+ "arrify": "^2.0.0",
+ "dot-prop": "^5.1.0",
+ "eventid": "^1.0.0",
+ "extend": "^3.0.2",
+ "gcp-metadata": "^3.1.0",
+ "google-auth-library": "^5.2.2",
+ "google-gax": "^1.11.0",
+ "is": "^3.3.0",
+ "on-finished": "^2.3.0",
+ "pumpify": "^2.0.0",
+ "snakecase-keys": "^3.0.0",
+ "stream-events": "^1.0.4",
+ "through2": "^3.0.0",
+ "type-fest": "^0.12.0"
+ },
+ "dependencies": {
+ "google-auth-library": {
+ "version": "5.10.1",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz",
+ "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==",
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^2.1.0",
+ "gcp-metadata": "^3.4.0",
+ "gtoken": "^4.1.0",
+ "jws": "^4.0.0",
+ "lru-cache": "^5.0.0"
+ }
+ },
+ "type-fest": {
+ "version": "0.12.0",
+ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.12.0.tgz",
+ "integrity": "sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg=="
+ }
+ }
+ },
+ "@google-cloud/logging-bunyan": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-3.0.0.tgz",
+ "integrity": "sha512-ZLVXEejNQ27ktGcA3S/sd7GPefp7kywbn+/KoBajdb1Syqcmtc98jhXpYQBXVtNP2065iyu77s4SBaiYFbTC5A==",
+ "requires": {
+ "@google-cloud/logging": "^7.0.0",
+ "google-auth-library": "^6.0.0"
+ }
+ },
+ "@google-cloud/paginator": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-2.0.3.tgz",
+ "integrity": "sha512-kp/pkb2p/p0d8/SKUu4mOq8+HGwF8NPzHWkj+VKrIPQPyMRw8deZtrO/OcSiy9C/7bpfU5Txah5ltUNfPkgEXg==",
+ "requires": {
+ "arrify": "^2.0.0",
+ "extend": "^3.0.2"
+ }
+ },
+ "@google-cloud/profiler": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-4.1.0.tgz",
+ "integrity": "sha512-9e1zXRctLSUHAoAsFGwE4rS28fr0siiG+jXl5OpwTK8ZAUlxb70aosHaZGdsv8YXrYKjuiufjRZ/OXCs0XLI9g==",
+ "requires": {
+ "@google-cloud/common": "^3.0.0",
+ "@types/console-log-level": "^1.4.0",
+ "@types/semver": "^7.0.0",
+ "console-log-level": "^1.4.0",
+ "delay": "^4.0.1",
+ "extend": "^3.0.2",
+ "gcp-metadata": "^4.0.0",
+ "parse-duration": "^0.4.4",
+ "pprof": "3.0.0",
+ "pretty-ms": "^7.0.0",
+ "protobufjs": "~6.10.0",
+ "semver": "^7.0.0",
+ "teeny-request": "^7.0.0"
+ },
+ "dependencies": {
+ "@google-cloud/common": {
+ "version": "3.5.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.5.0.tgz",
+ "integrity": "sha512-10d7ZAvKhq47L271AqvHEd8KzJqGU45TY+rwM2Z3JHuB070FeTi7oJJd7elfrnKaEvaktw3hH2wKnRWxk/3oWQ==",
+ "requires": {
+ "@google-cloud/projectify": "^2.0.0",
+ "@google-cloud/promisify": "^2.0.0",
+ "arrify": "^2.0.1",
+ "duplexify": "^4.1.1",
+ "ent": "^2.2.0",
+ "extend": "^3.0.2",
+ "google-auth-library": "^6.1.1",
+ "retry-request": "^4.1.1",
+ "teeny-request": "^7.0.0"
+ }
+ },
+ "@google-cloud/projectify": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz",
+ "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ=="
+ },
+ "@google-cloud/promisify": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz",
+ "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw=="
+ },
+ "@types/node": {
+ "version": "13.13.42",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.42.tgz",
+ "integrity": "sha512-g+w2QgbW7k2CWLOXzQXbO37a7v5P9ObPvYahKphdBLV5aqpbVZRhTpWCT0SMRqX1i30Aig791ZmIM2fJGL2S8A=="
+ },
+ "bignumber.js": {
+ "version": "9.0.1",
+ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz",
+ "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA=="
+ },
+ "duplexify": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz",
+ "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==",
+ "requires": {
+ "end-of-stream": "^1.4.1",
+ "inherits": "^2.0.3",
+ "readable-stream": "^3.1.1",
+ "stream-shift": "^1.0.0"
+ }
+ },
+ "gaxios": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.1.0.tgz",
+ "integrity": "sha512-vb0to8xzGnA2qcgywAjtshOKKVDf2eQhJoiL6fHhgW5tVN7wNk7egnYIO9zotfn3lQ3De1VPdf7V5/BWfCtCmg==",
+ "requires": {
+ "abort-controller": "^3.0.0",
+ "extend": "^3.0.2",
+ "https-proxy-agent": "^5.0.0",
+ "is-stream": "^2.0.0",
+ "node-fetch": "^2.3.0"
+ }
+ },
+ "gcp-metadata": {
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz",
+ "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==",
+ "requires": {
+ "gaxios": "^4.0.0",
+ "json-bigint": "^1.0.0"
+ }
+ },
+ "google-auth-library": {
+ "version": "6.1.6",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.6.tgz",
+ "integrity": "sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ==",
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^4.0.0",
+ "gcp-metadata": "^4.2.0",
+ "gtoken": "^5.0.4",
+ "jws": "^4.0.0",
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "google-p12-pem": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz",
+ "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==",
+ "requires": {
+ "node-forge": "^0.10.0"
+ }
+ },
+ "gtoken": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz",
+ "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==",
+ "requires": {
+ "gaxios": "^4.0.0",
+ "google-p12-pem": "^3.0.3",
+ "jws": "^4.0.0"
+ }
+ },
+ "json-bigint": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz",
+ "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==",
+ "requires": {
+ "bignumber.js": "^9.0.0"
+ }
+ },
+ "lru-cache": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+ "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "requires": {
+ "yallist": "^4.0.0"
+ }
+ },
+ "node-forge": {
+ "version": "0.10.0",
+ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz",
+ "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA=="
+ },
+ "protobufjs": {
+ "version": "6.10.2",
+ "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz",
+ "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==",
+ "requires": {
+ "@protobufjs/aspromise": "^1.1.2",
+ "@protobufjs/base64": "^1.1.2",
+ "@protobufjs/codegen": "^2.0.4",
+ "@protobufjs/eventemitter": "^1.1.0",
+ "@protobufjs/fetch": "^1.1.0",
+ "@protobufjs/float": "^1.0.2",
+ "@protobufjs/inquire": "^1.1.0",
+ "@protobufjs/path": "^1.1.2",
+ "@protobufjs/pool": "^1.1.0",
+ "@protobufjs/utf8": "^1.1.0",
+ "@types/long": "^4.0.1",
+ "@types/node": "^13.7.0",
+ "long": "^4.0.0"
+ }
+ },
+ "readable-stream": {
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
+ "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
+ "requires": {
+ "inherits": "^2.0.3",
+ "string_decoder": "^1.1.1",
+ "util-deprecate": "^1.0.1"
+ }
+ },
+ "semver": {
+ "version": "7.3.4",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz",
+ "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==",
+ "requires": {
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "teeny-request": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz",
+ "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==",
+ "requires": {
+ "http-proxy-agent": "^4.0.0",
+ "https-proxy-agent": "^5.0.0",
+ "node-fetch": "^2.6.1",
+ "stream-events": "^1.0.5",
+ "uuid": "^8.0.0"
+ }
+ },
+ "uuid": {
+ "version": "8.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+ "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
+ }
+ }
+ },
+ "@google-cloud/projectify": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz",
+ "integrity": "sha512-ZdzQUN02eRsmTKfBj9FDL0KNDIFNjBn/d6tHQmA/+FImH5DO6ZV8E7FzxMgAUiVAUq41RFAkb25p1oHOZ8psfg=="
+ },
+ "@google-cloud/promisify": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz",
+ "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ=="
+ },
+ "@google-cloud/trace-agent": {
+ "version": "5.1.3",
+ "resolved": "https://registry.npmjs.org/@google-cloud/trace-agent/-/trace-agent-5.1.3.tgz",
+ "integrity": "sha512-f+5DX7n6QpDlHA+4kr81z69SLAdrlvd9T8skqCMgnYvtXx14AwzXZyzEDf3jppOYzYoqPPJv8XYiyYHHmYD0BA==",
+ "requires": {
+ "@google-cloud/common": "^3.0.0",
+ "@opencensus/propagation-stackdriver": "0.0.22",
+ "builtin-modules": "^3.0.0",
+ "console-log-level": "^1.4.0",
+ "continuation-local-storage": "^3.2.1",
+ "extend": "^3.0.2",
+ "gcp-metadata": "^4.0.0",
+ "google-auth-library": "^7.0.0",
+ "hex2dec": "^1.0.1",
+ "is": "^3.2.0",
+ "methods": "^1.1.1",
+ "require-in-the-middle": "^5.0.0",
+ "semver": "^7.0.0",
+ "shimmer": "^1.2.0",
+ "source-map-support": "^0.5.16",
+ "uuid": "^8.0.0"
+ },
+ "dependencies": {
+ "@google-cloud/common": {
+ "version": "3.5.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.5.0.tgz",
+ "integrity": "sha512-10d7ZAvKhq47L271AqvHEd8KzJqGU45TY+rwM2Z3JHuB070FeTi7oJJd7elfrnKaEvaktw3hH2wKnRWxk/3oWQ==",
+ "requires": {
+ "@google-cloud/projectify": "^2.0.0",
+ "@google-cloud/promisify": "^2.0.0",
+ "arrify": "^2.0.1",
+ "duplexify": "^4.1.1",
+ "ent": "^2.2.0",
+ "extend": "^3.0.2",
+ "google-auth-library": "^6.1.1",
+ "retry-request": "^4.1.1",
+ "teeny-request": "^7.0.0"
+ },
+ "dependencies": {
+ "google-auth-library": {
+ "version": "6.1.6",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.1.6.tgz",
+ "integrity": "sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ==",
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^4.0.0",
+ "gcp-metadata": "^4.2.0",
+ "gtoken": "^5.0.4",
+ "jws": "^4.0.0",
+ "lru-cache": "^6.0.0"
+ }
+ }
+ }
+ },
+ "@google-cloud/projectify": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz",
+ "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ=="
+ },
+ "@google-cloud/promisify": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.3.tgz",
+ "integrity": "sha512-d4VSA86eL/AFTe5xtyZX+ePUjE8dIFu2T8zmdeNBSa5/kNgXPCx/o/wbFNHAGLJdGnk1vddRuMESD9HbOC8irw=="
+ },
+ "@opencensus/core": {
+ "version": "0.0.22",
+ "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.22.tgz",
+ "integrity": "sha512-ErazJtivjceNoOZI1bG9giQ6cWS45J4i6iPUtlp7dLNu58OLs/v+CD0FsaPCh47XgPxAI12vbBE8Ec09ViwHNA==",
+ "requires": {
+ "continuation-local-storage": "^3.2.1",
+ "log-driver": "^1.2.7",
+ "semver": "^7.0.0",
+ "shimmer": "^1.2.0",
+ "uuid": "^8.0.0"
+ }
+ },
+ "@opencensus/propagation-stackdriver": {
+ "version": "0.0.22",
+ "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.22.tgz",
+ "integrity": "sha512-eBvf/ihb1mN8Yz/ASkz8nHzuMKqygu77+VNnUeR0yEh3Nj+ykB8VVR6lK+NAFXo1Rd1cOsTmgvuXAZgDAGleQQ==",
+ "requires": {
+ "@opencensus/core": "^0.0.22",
+ "hex2dec": "^1.0.1",
+ "uuid": "^8.0.0"
+ }
+ },
+ "bignumber.js": {
+ "version": "9.0.1",
+ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.1.tgz",
+ "integrity": "sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA=="
+ },
+ "duplexify": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz",
+ "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==",
+ "requires": {
+ "end-of-stream": "^1.4.1",
+ "inherits": "^2.0.3",
+ "readable-stream": "^3.1.1",
+ "stream-shift": "^1.0.0"
+ }
+ },
+ "gaxios": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.1.0.tgz",
+ "integrity": "sha512-vb0to8xzGnA2qcgywAjtshOKKVDf2eQhJoiL6fHhgW5tVN7wNk7egnYIO9zotfn3lQ3De1VPdf7V5/BWfCtCmg==",
+ "requires": {
+ "abort-controller": "^3.0.0",
+ "extend": "^3.0.2",
+ "https-proxy-agent": "^5.0.0",
+ "is-stream": "^2.0.0",
+ "node-fetch": "^2.3.0"
+ }
+ },
+ "gcp-metadata": {
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.2.1.tgz",
+ "integrity": "sha512-tSk+REe5iq/N+K+SK1XjZJUrFPuDqGZVzCy2vocIHIGmPlTGsa8owXMJwGkrXr73NO0AzhPW4MF2DEHz7P2AVw==",
+ "requires": {
+ "gaxios": "^4.0.0",
+ "json-bigint": "^1.0.0"
+ }
+ },
+ "google-auth-library": {
+ "version": "7.0.2",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.0.2.tgz",
+ "integrity": "sha512-vjyNZR3pDLC0u7GHLfj+Hw9tGprrJwoMwkYGqURCXYITjCrP9HprOyxVV+KekdLgATtWGuDkQG2MTh0qpUPUgg==",
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^4.0.0",
+ "gcp-metadata": "^4.2.0",
+ "gtoken": "^5.0.4",
+ "jws": "^4.0.0",
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "google-p12-pem": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.3.tgz",
+ "integrity": "sha512-wS0ek4ZtFx/ACKYF3JhyGe5kzH7pgiQ7J5otlumqR9psmWMYc+U9cErKlCYVYHoUaidXHdZ2xbo34kB+S+24hA==",
+ "requires": {
+ "node-forge": "^0.10.0"
+ }
+ },
+ "gtoken": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.2.1.tgz",
+ "integrity": "sha512-OY0BfPKe3QnMsY9MzTHTSKn+Vl2l1CcLe6BwDEQj00mbbkl5nyQ/7EUREstg4fQNZ8iYE7br4JJ7TdKeDOPWmw==",
+ "requires": {
+ "gaxios": "^4.0.0",
+ "google-p12-pem": "^3.0.3",
+ "jws": "^4.0.0"
+ }
+ },
+ "json-bigint": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz",
+ "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==",
+ "requires": {
+ "bignumber.js": "^9.0.0"
+ }
+ },
+ "lru-cache": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+ "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "requires": {
+ "yallist": "^4.0.0"
+ }
+ },
+ "node-forge": {
+ "version": "0.10.0",
+ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz",
+ "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA=="
+ },
+ "readable-stream": {
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
+ "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
+ "requires": {
+ "inherits": "^2.0.3",
+ "string_decoder": "^1.1.1",
+ "util-deprecate": "^1.0.1"
+ }
+ },
+ "semver": {
+ "version": "7.3.4",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz",
+ "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==",
+ "requires": {
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "teeny-request": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.1.tgz",
+ "integrity": "sha512-sasJmQ37klOlplL4Ia/786M5YlOcoLGQyq2TE4WHSRupbAuDaQW0PfVxV4MtdBtRJ4ngzS+1qim8zP6Zp35qCw==",
+ "requires": {
+ "http-proxy-agent": "^4.0.0",
+ "https-proxy-agent": "^5.0.0",
+ "node-fetch": "^2.6.1",
+ "stream-events": "^1.0.5",
+ "uuid": "^8.0.0"
+ }
+ },
+ "uuid": {
+ "version": "8.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+ "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
+ }
+ }
+ },
+ "@grpc/grpc-js": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.5.tgz",
+ "integrity": "sha512-Hm+xOiqAhcpT9RYM8lc15dbQD7aQurM7ZU8ulmulepiPlN7iwBXXwP3vSBUimoFoApRqz7pSIisXU8pZaCB4og==",
+ "requires": {
+ "semver": "^6.2.0"
+ },
+ "dependencies": {
+ "semver": {
+ "version": "6.3.0",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+ "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw=="
+ }
+ }
+ },
+ "@grpc/proto-loader": {
+ "version": "0.5.5",
+ "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.5.tgz",
+ "integrity": "sha512-WwN9jVNdHRQoOBo9FDH7qU+mgfjPc8GygPYms3M+y3fbQLfnCe/Kv/E01t7JRgnrsOHH8euvSbed3mIalXhwqQ==",
+ "requires": {
+ "lodash.camelcase": "^4.3.0",
+ "protobufjs": "^6.8.6"
+ }
+ },
+ "@humanwhocodes/config-array": {
+ "version": "0.5.0",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz",
+ "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==",
+ "dev": true,
+ "requires": {
+ "@humanwhocodes/object-schema": "^1.2.0",
+ "debug": "^4.1.1",
+ "minimatch": "^3.0.4"
+ }
+ },
+ "@humanwhocodes/object-schema": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz",
+ "integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==",
+ "dev": true
+ },
+ "@opencensus/core": {
+ "version": "0.0.20",
+ "resolved": "https://registry.npmjs.org/@opencensus/core/-/core-0.0.20.tgz",
+ "integrity": "sha512-vqOuTd2yuMpKohp8TNNGUAPjWEGjlnGfB9Rh5e3DKqeyR94YgierNs4LbMqxKtsnwB8Dm2yoEtRuUgoe5vD9DA==",
+ "requires": {
+ "continuation-local-storage": "^3.2.1",
+ "log-driver": "^1.2.7",
+ "semver": "^6.0.0",
+ "shimmer": "^1.2.0",
+ "uuid": "^3.2.1"
+ },
+ "dependencies": {
+ "semver": {
+ "version": "6.3.0",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+ "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw=="
+ },
+ "uuid": {
+ "version": "3.4.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
+ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
+ }
+ }
+ },
+ "@opencensus/propagation-stackdriver": {
+ "version": "0.0.20",
+ "resolved": "https://registry.npmjs.org/@opencensus/propagation-stackdriver/-/propagation-stackdriver-0.0.20.tgz",
+ "integrity": "sha512-P8yuHSLtce+yb+2EZjtTVqG7DQ48laC+IuOWi3X9q78s1Gni5F9+hmbmyP6Nb61jb5BEvXQX1s2rtRI6bayUWA==",
+ "requires": {
+ "@opencensus/core": "^0.0.20",
+ "hex2dec": "^1.0.1",
+ "uuid": "^3.2.1"
+ },
+ "dependencies": {
+ "uuid": {
+ "version": "3.4.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
+ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
+ }
+ }
+ },
+ "@overleaf/metrics": {
+ "version": "3.5.1",
+ "resolved": "https://registry.npmjs.org/@overleaf/metrics/-/metrics-3.5.1.tgz",
+ "integrity": "sha512-RLHxkMF7Y3725L3QwXo9cIn2gGobsMYUGuxKxg7PVMrPTMsomHEMeG7StOxCO7ML1Z/BwB/9nsVYNrsRdAJtKg==",
+ "requires": {
+ "@google-cloud/debug-agent": "^5.1.2",
+ "@google-cloud/profiler": "^4.0.3",
+ "@google-cloud/trace-agent": "^5.1.1",
+ "compression": "^1.7.4",
+ "prom-client": "^11.1.3",
+ "underscore": "~1.6.0",
+ "yn": "^3.1.1"
+ },
+ "dependencies": {
+ "underscore": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz",
+ "integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag="
+ }
+ }
+ },
+ "@overleaf/o-error": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/@overleaf/o-error/-/o-error-3.1.0.tgz",
+ "integrity": "sha512-TWJ80ozJ1LeugGTJyGQSPEuTkZ9LqZD7/ndLE6azKa03SU/mKV/FINcfk8atpVil8iv1hHQwzYZc35klplpMpQ=="
+ },
+ "@overleaf/redis-wrapper": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@overleaf/redis-wrapper/-/redis-wrapper-2.0.0.tgz",
+ "integrity": "sha512-lREuhDPNgmKyOmL1g6onfRzDLWOG/POsE4Vd7ZzLnKDYt9SbOIujtx3CxI2qtQAKBYHf/hfyrbtyX3Ib2yTvYA==",
+ "requires": {
+ "ioredis": "~4.17.3"
+ }
+ },
+ "@overleaf/settings": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/@overleaf/settings/-/settings-2.1.1.tgz",
+ "integrity": "sha512-vcJwqCGFKmQxTP/syUqCeMaSRjHmBcQgKOACR9He2uJcErg2GZPa1go+nGvszMbkElM4HfRKm/MfxvqHhoN4TQ=="
+ },
+ "@protobufjs/aspromise": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
+ "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="
+ },
+ "@protobufjs/base64": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz",
+ "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="
+ },
+ "@protobufjs/codegen": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz",
+ "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="
+ },
+ "@protobufjs/eventemitter": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz",
+ "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="
+ },
+ "@protobufjs/fetch": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz",
+ "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==",
+ "requires": {
+ "@protobufjs/aspromise": "^1.1.1",
+ "@protobufjs/inquire": "^1.1.0"
+ }
+ },
+ "@protobufjs/float": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz",
+ "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="
+ },
+ "@protobufjs/inquire": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz",
+ "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="
+ },
+ "@protobufjs/path": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz",
+ "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="
+ },
+ "@protobufjs/pool": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz",
+ "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="
+ },
+ "@protobufjs/utf8": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
+ "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
+ },
+ "@sinonjs/commons": {
+ "version": "1.7.1",
+ "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.7.1.tgz",
+ "integrity": "sha512-Debi3Baff1Qu1Unc3mjJ96MgpbwTn43S1+9yJ0llWygPwDNu2aaWBD6yc9y/Z8XDRNhx7U+u2UDg2OGQXkclUQ==",
+ "dev": true,
+ "requires": {
+ "type-detect": "4.0.8"
+ }
+ },
+ "@sinonjs/fake-timers": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-6.0.0.tgz",
+ "integrity": "sha512-atR1J/jRXvQAb47gfzSK8zavXy7BcpnYq21ALon0U99etu99vsir0trzIO3wpeLtW+LLVY6X7EkfVTbjGSH8Ww==",
+ "dev": true,
+ "requires": {
+ "@sinonjs/commons": "^1.7.0"
+ }
+ },
+ "@sinonjs/formatio": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-5.0.1.tgz",
+ "integrity": "sha512-KaiQ5pBf1MpS09MuA0kp6KBQt2JUOQycqVG1NZXvzeaXe5LGFqAKueIS0bw4w0P9r7KuBSVdUk5QjXsUdu2CxQ==",
+ "dev": true,
+ "requires": {
+ "@sinonjs/commons": "^1",
+ "@sinonjs/samsam": "^5.0.2"
+ }
+ },
+ "@sinonjs/samsam": {
+ "version": "5.0.3",
+ "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-5.0.3.tgz",
+ "integrity": "sha512-QucHkc2uMJ0pFGjJUDP3F9dq5dx8QIaqISl9QgwLOh6P9yv877uONPGXh/OH/0zmM3tW1JjuJltAZV2l7zU+uQ==",
+ "dev": true,
+ "requires": {
+ "@sinonjs/commons": "^1.6.0",
+ "lodash.get": "^4.4.2",
+ "type-detect": "^4.0.8"
+ }
+ },
+ "@sinonjs/text-encoding": {
+ "version": "0.7.1",
+ "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz",
+ "integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==",
+ "dev": true
+ },
+ "@tootallnate/once": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz",
+ "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw=="
+ },
+ "@types/console-log-level": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.0.tgz",
+ "integrity": "sha512-x+OscEQwcx5Biair4enH7ov9W+clcqUWaZRaxn5IkT4yNWWjRr2oiYDkY/x1uXSTVZOQ2xlbFQySaQGB+VdXGQ=="
+ },
+ "@types/fs-extra": {
+ "version": "8.1.1",
+ "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.1.tgz",
+ "integrity": "sha512-TcUlBem321DFQzBNuz8p0CLLKp0VvF/XH9E4KHNmgwyp4E3AfgI5cjiIVZWlbfThBop2qxFIh4+LeY6hVWWZ2w==",
+ "requires": {
+ "@types/node": "*"
+ }
+ },
+ "@types/long": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz",
+ "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w=="
+ },
+ "@types/node": {
+ "version": "10.17.17",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.17.tgz",
+ "integrity": "sha512-gpNnRnZP3VWzzj5k3qrpRC6Rk3H/uclhAVo1aIvwzK5p5cOrs9yEyQ8H/HBsBY0u5rrWxXEiVPQ0dEB6pkjE8Q=="
+ },
+ "@types/semver": {
+ "version": "7.3.4",
+ "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.4.tgz",
+ "integrity": "sha512-+nVsLKlcUCeMzD2ufHEYuJ9a2ovstb6Dp52A5VsoKxDXgvE051XgHI/33I1EymwkRGQkwnA0LkhnUzituGs4EQ=="
+ },
+ "@ungap/promise-all-settled": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz",
+ "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==",
+ "dev": true
+ },
+ "JSONStream": {
+ "version": "1.3.5",
+ "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz",
+ "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==",
+ "requires": {
+ "jsonparse": "^1.2.0",
+ "through": ">=2.2.7 <3"
+ }
+ },
+ "abbrev": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
+ "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q=="
+ },
+ "abort-controller": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
+ "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
+ "requires": {
+ "event-target-shim": "^5.0.0"
+ }
+ },
+ "accepts": {
+ "version": "1.3.7",
+ "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz",
+ "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==",
+ "requires": {
+ "mime-types": "~2.1.24",
+ "negotiator": "0.6.2"
+ },
+ "dependencies": {
+ "mime-db": {
+ "version": "1.43.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz",
+ "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ=="
+ },
+ "mime-types": {
+ "version": "2.1.26",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz",
+ "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==",
+ "requires": {
+ "mime-db": "1.43.0"
+ }
+ }
+ }
+ },
+ "acorn": {
+ "version": "8.0.5",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.0.5.tgz",
+ "integrity": "sha512-v+DieK/HJkJOpFBETDJioequtc3PfxsWMaxIdIwujtF7FEV/MAyDQLlm6/zPvr7Mix07mLh6ccVwIsloceodlg=="
+ },
+ "acorn-jsx": {
+ "version": "5.3.2",
+ "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
+ "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
+ "dev": true
+ },
+ "agent-base": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.1.tgz",
+ "integrity": "sha512-01q25QQDwLSsyfhrKbn8yuur+JNw0H+0Y4JiGIKd3z9aYk/w/2kxD/Upc+t2ZBBSUNff50VjPsSW2YxM8QYKVg==",
+ "requires": {
+ "debug": "4"
+ }
+ },
+ "ajv": {
+ "version": "6.6.2",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.6.2.tgz",
+ "integrity": "sha512-FBHEW6Jf5TB9MGBgUUA9XHkTbjXYfAUjY43ACMfmdMRHniyoMHjHjzD50OK8LGDWQwp4rWEsIq5kEqq7rvIM1g==",
+ "requires": {
+ "fast-deep-equal": "^2.0.1",
+ "fast-json-stable-stringify": "^2.0.0",
+ "json-schema-traverse": "^0.4.1",
+ "uri-js": "^4.2.2"
+ }
+ },
+ "ansi-colors": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz",
+ "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==",
+ "dev": true
+ },
+ "ansi-regex": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz",
+ "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==",
+ "dev": true
+ },
+ "ansi-styles": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
+ "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
+ "dev": true,
+ "requires": {
+ "color-convert": "^1.9.0"
+ }
+ },
+ "anymatch": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz",
+ "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==",
+ "dev": true,
+ "requires": {
+ "normalize-path": "^3.0.0",
+ "picomatch": "^2.0.4"
+ }
+ },
+ "aproba": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz",
+ "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw=="
+ },
+ "are-we-there-yet": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz",
+ "integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==",
+ "requires": {
+ "delegates": "^1.0.0",
+ "readable-stream": "^2.0.6"
+ }
+ },
+ "argparse": {
+ "version": "1.0.10",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
+ "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
+ "dev": true,
+ "requires": {
+ "sprintf-js": "~1.0.2"
+ }
+ },
+ "array-flatten": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
+ "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="
+ },
+ "array-includes": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.3.tgz",
+ "integrity": "sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2",
+ "define-properties": "^1.1.3",
+ "es-abstract": "^1.18.0-next.2",
+ "get-intrinsic": "^1.1.1",
+ "is-string": "^1.0.5"
+ }
+ },
+ "array.prototype.flat": {
+ "version": "1.2.4",
+ "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz",
+ "integrity": "sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.0",
+ "define-properties": "^1.1.3",
+ "es-abstract": "^1.18.0-next.1"
+ },
+ "dependencies": {
+ "has-symbols": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz",
+ "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw=="
+ },
+ "is-callable": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz",
+ "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ=="
+ },
+ "is-string": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.6.tgz",
+ "integrity": "sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w=="
+ }
+ }
+ },
+ "arrify": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
+ "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug=="
+ },
+ "asn1": {
+ "version": "0.2.4",
+ "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz",
+ "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==",
+ "requires": {
+ "safer-buffer": "~2.1.0"
+ }
+ },
+ "assert-plus": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
+ "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw=="
+ },
+ "assertion-error": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz",
+ "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==",
+ "dev": true
+ },
+ "astral-regex": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz",
+ "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==",
+ "dev": true
+ },
+ "async": {
+ "version": "2.6.3",
+ "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz",
+ "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==",
+ "requires": {
+ "lodash": "^4.17.14"
+ }
+ },
+ "async-listener": {
+ "version": "0.6.10",
+ "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.10.tgz",
+ "integrity": "sha512-gpuo6xOyF4D5DE5WvyqZdPA3NGhiT6Qf07l7DCB0wwDEsLvDIbCr6j9S5aj5Ch96dLace5tXVzWBZkxU/c5ohw==",
+ "requires": {
+ "semver": "^5.3.0",
+ "shimmer": "^1.1.0"
+ }
+ },
+ "asynckit": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
+ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
+ },
+ "aws-sdk": {
+ "version": "2.643.0",
+ "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.643.0.tgz",
+ "integrity": "sha512-4r7VGQFqshrhXnOCVQdlatAWiK/8kmmtAtY9gbITPNpY5Is+SfIy6k/1BgrnL5H/2sYd27H+Xp8itXZoCnQeTw==",
+ "requires": {
+ "buffer": "4.9.1",
+ "events": "1.1.1",
+ "ieee754": "1.1.13",
+ "jmespath": "0.15.0",
+ "querystring": "0.2.0",
+ "sax": "1.2.1",
+ "url": "0.10.3",
+ "uuid": "3.3.2",
+ "xml2js": "0.4.19"
+ },
+ "dependencies": {
+ "uuid": {
+ "version": "3.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
+ "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA=="
+ }
+ }
+ },
+ "aws-sign2": {
+ "version": "0.7.0",
+ "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
+ "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA=="
+ },
+ "aws4": {
+ "version": "1.8.0",
+ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz",
+ "integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ=="
+ },
+ "balanced-match": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
+ "integrity": "sha512-9Y0g0Q8rmSt+H33DfKv7FOc3v+iRI+o1lbzt8jGcIosYW37IIW/2XVYq5NPdmaD5NQ59Nk26Kl/vZbwW9Fr8vg=="
+ },
+ "base64-js": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz",
+ "integrity": "sha512-ccav/yGvoa80BQDljCxsmmQ3Xvx60/UpBIij5QN21W3wBi/hhIC9OoO+KLpu9IJTS9j4DRVJ3aDDF9cMSoa2lw=="
+ },
+ "bcrypt-pbkdf": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
+ "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==",
+ "requires": {
+ "tweetnacl": "^0.14.3"
+ }
+ },
+ "bignumber.js": {
+ "version": "7.2.1",
+ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz",
+ "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ=="
+ },
+ "binary-extensions": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
+ "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
+ "dev": true
+ },
+ "bindings": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz",
+ "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==",
+ "requires": {
+ "file-uri-to-path": "1.0.0"
+ }
+ },
+ "bintrees": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz",
+ "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ="
+ },
+ "bl": {
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz",
+ "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==",
+ "requires": {
+ "readable-stream": "^2.3.5",
+ "safe-buffer": "^5.1.1"
+ }
+ },
+ "bluebird": {
+ "version": "3.7.2",
+ "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz",
+ "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg=="
+ },
+ "body-parser": {
+ "version": "1.19.0",
+ "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz",
+ "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==",
+ "requires": {
+ "bytes": "3.1.0",
+ "content-type": "~1.0.4",
+ "debug": "2.6.9",
+ "depd": "~1.1.2",
+ "http-errors": "1.7.2",
+ "iconv-lite": "0.4.24",
+ "on-finished": "~2.3.0",
+ "qs": "6.7.0",
+ "raw-body": "2.4.0",
+ "type-is": "~1.6.17"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "2.6.9",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+ "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+ "requires": {
+ "ms": "2.0.0"
+ }
+ },
+ "ms": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
+ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
+ }
+ }
+ },
+ "brace-expansion": {
+ "version": "1.1.11",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+ "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "requires": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "braces": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
+ "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+ "dev": true,
+ "requires": {
+ "fill-range": "^7.0.1"
+ }
+ },
+ "browser-stdout": {
+ "version": "1.3.1",
+ "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz",
+ "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==",
+ "dev": true
+ },
+ "bson": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.5.tgz",
+ "integrity": "sha512-kDuEzldR21lHciPQAIulLs1LZlCXdLziXI6Mb/TDkwXhb//UORJNPXgcRs2CuO4H0DcMkpfT3/ySsP3unoZjBg=="
+ },
+ "buffer": {
+ "version": "4.9.1",
+ "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz",
+ "integrity": "sha512-DNK4ruAqtyHaN8Zne7PkBTO+dD1Lr0YfTduMqlIyjvQIoztBkUxrvL+hKeLW8NXFKHOq/2upkxuoS9znQ9bW9A==",
+ "requires": {
+ "base64-js": "^1.0.2",
+ "ieee754": "^1.1.4",
+ "isarray": "^1.0.0"
+ }
+ },
+ "buffer-equal-constant-time": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
+ "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="
+ },
+ "buffer-from": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz",
+ "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A=="
+ },
+ "builtin-modules": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.2.0.tgz",
+ "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA=="
+ },
+ "bunyan": {
+ "version": "1.8.15",
+ "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.15.tgz",
+ "integrity": "sha512-0tECWShh6wUysgucJcBAoYegf3JJoZWibxdqhTm7OHPeT42qdjkZ29QCMcKwbgU1kiH+auSIasNRXMLWXafXig==",
+ "requires": {
+ "dtrace-provider": "~0.8",
+ "moment": "^2.19.3",
+ "mv": "~2",
+ "safe-json-stringify": "~1"
+ }
+ },
+ "byline": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/byline/-/byline-5.0.0.tgz",
+ "integrity": "sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q=="
+ },
+ "bytes": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz",
+ "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg=="
+ },
+ "call-bind": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz",
+ "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==",
+ "dev": true,
+ "requires": {
+ "function-bind": "^1.1.1",
+ "get-intrinsic": "^1.0.2"
+ }
+ },
+ "callsites": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
+ "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
+ "dev": true
+ },
+ "camelcase": {
+ "version": "6.2.0",
+ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz",
+ "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==",
+ "dev": true
+ },
+ "caseless": {
+ "version": "0.12.0",
+ "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
+ "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw=="
+ },
+ "chai": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz",
+ "integrity": "sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw==",
+ "dev": true,
+ "requires": {
+ "assertion-error": "^1.1.0",
+ "check-error": "^1.0.2",
+ "deep-eql": "^3.0.1",
+ "get-func-name": "^2.0.0",
+ "pathval": "^1.1.0",
+ "type-detect": "^4.0.5"
+ }
+ },
+ "chai-as-promised": {
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/chai-as-promised/-/chai-as-promised-7.1.1.tgz",
+ "integrity": "sha512-azL6xMoi+uxu6z4rhWQ1jbdUhOMhis2PvscD/xjLqNMkv3BPPp2JyyuTHOrf9BOosGpNQ11v6BKv/g57RXbiaA==",
+ "dev": true,
+ "requires": {
+ "check-error": "^1.0.2"
+ }
+ },
+ "chalk": {
+ "version": "2.4.2",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
+ "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^3.2.1",
+ "escape-string-regexp": "^1.0.5",
+ "supports-color": "^5.3.0"
+ }
+ },
+ "charenc": {
+ "version": "0.0.2",
+ "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz",
+ "integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc="
+ },
+ "check-error": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz",
+ "integrity": "sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==",
+ "dev": true
+ },
+ "chokidar": {
+ "version": "3.5.1",
+ "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz",
+ "integrity": "sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==",
+ "dev": true,
+ "requires": {
+ "anymatch": "~3.1.1",
+ "braces": "~3.0.2",
+ "glob-parent": "~5.1.0",
+ "is-binary-path": "~2.1.0",
+ "is-glob": "~4.0.1",
+ "normalize-path": "~3.0.0",
+ "readdirp": "~3.5.0"
+ }
+ },
+ "chownr": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
+ "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="
+ },
+ "cli": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/cli/-/cli-1.0.1.tgz",
+ "integrity": "sha512-41U72MB56TfUMGndAKK8vJ78eooOD4Z5NOL4xEfjc0c23s+6EYKXlXsmACBVclLP1yOfWCgEganVzddVrSNoTg==",
+ "dev": true,
+ "requires": {
+ "exit": "0.1.2",
+ "glob": "^7.1.1"
+ }
+ },
+ "cliui": {
+ "version": "7.0.4",
+ "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
+ "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
+ "dev": true,
+ "requires": {
+ "string-width": "^4.2.0",
+ "strip-ansi": "^6.0.0",
+ "wrap-ansi": "^7.0.0"
+ }
+ },
+ "cluster-key-slot": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz",
+ "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw=="
+ },
+ "code-point-at": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz",
+ "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c="
+ },
+ "coffeescript": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.5.1.tgz",
+ "integrity": "sha512-J2jRPX0eeFh5VKyVnoLrfVFgLZtnnmp96WQSLAS8OrLm2wtQLcnikYKe1gViJKDH7vucjuhHvBKKBP3rKcD1tQ=="
+ },
+ "color-convert": {
+ "version": "1.9.3",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
+ "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
+ "dev": true,
+ "requires": {
+ "color-name": "1.1.3"
+ }
+ },
+ "color-name": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
+ "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
+ "dev": true
+ },
+ "combined-stream": {
+ "version": "1.0.8",
+ "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
+ "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
+ "requires": {
+ "delayed-stream": "~1.0.0"
+ }
+ },
+ "compressible": {
+ "version": "2.0.18",
+ "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz",
+ "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==",
+ "requires": {
+ "mime-db": ">= 1.43.0 < 2"
+ },
+ "dependencies": {
+ "mime-db": {
+ "version": "1.46.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.46.0.tgz",
+ "integrity": "sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ=="
+ }
+ }
+ },
+ "compression": {
+ "version": "1.7.4",
+ "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz",
+ "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==",
+ "requires": {
+ "accepts": "~1.3.5",
+ "bytes": "3.0.0",
+ "compressible": "~2.0.16",
+ "debug": "2.6.9",
+ "on-headers": "~1.0.2",
+ "safe-buffer": "5.1.2",
+ "vary": "~1.1.2"
+ },
+ "dependencies": {
+ "bytes": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz",
+ "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg="
+ },
+ "debug": {
+ "version": "2.6.9",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+ "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+ "requires": {
+ "ms": "2.0.0"
+ }
+ },
+ "ms": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
+ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
+ }
+ }
+ },
+ "concat-map": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
+ },
+ "console-control-strings": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
+ "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4="
+ },
+ "console-log-level": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz",
+ "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ=="
+ },
+ "content-disposition": {
+ "version": "0.5.3",
+ "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz",
+ "integrity": "sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==",
+ "requires": {
+ "safe-buffer": "5.1.2"
+ }
+ },
+ "content-type": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz",
+ "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA=="
+ },
+ "continuation-local-storage": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.2.1.tgz",
+ "integrity": "sha512-jx44cconVqkCEEyLSKWwkvUXwO561jXMa3LPjTPsm5QR22PA0/mhe33FT4Xb5y74JDvt/Cq+5lm8S8rskLv9ZA==",
+ "requires": {
+ "async-listener": "^0.6.0",
+ "emitter-listener": "^1.1.1"
+ }
+ },
+ "cookie": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz",
+ "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg=="
+ },
+ "cookie-signature": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
+ "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ=="
+ },
+ "core-util-is": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
+ "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ=="
+ },
+ "cross-spawn": {
+ "version": "7.0.3",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
+ "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+ "dev": true,
+ "requires": {
+ "path-key": "^3.1.0",
+ "shebang-command": "^2.0.0",
+ "which": "^2.0.1"
+ }
+ },
+ "crypt": {
+ "version": "0.0.2",
+ "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz",
+ "integrity": "sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs="
+ },
+ "d64": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz",
+ "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA="
+ },
+ "dashdash": {
+ "version": "1.14.1",
+ "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
+ "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==",
+ "requires": {
+ "assert-plus": "^1.0.0"
+ }
+ },
+ "debug": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
+ "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==",
+ "requires": {
+ "ms": "^2.1.1"
+ }
+ },
+ "decamelize": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz",
+ "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==",
+ "dev": true
+ },
+ "deep-eql": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz",
+ "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==",
+ "dev": true,
+ "requires": {
+ "type-detect": "^4.0.0"
+ }
+ },
+ "deep-extend": {
+ "version": "0.6.0",
+ "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz",
+ "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA=="
+ },
+ "deep-is": {
+ "version": "0.1.3",
+ "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz",
+ "integrity": "sha512-GtxAN4HvBachZzm4OnWqc45ESpUCMwkYcsjnsPs23FwJbsO+k4t0k9bQCgOmzIlpHO28+WPK/KRbRk0DDHuuDw==",
+ "dev": true
+ },
+ "define-properties": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz",
+ "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==",
+ "dev": true,
+ "requires": {
+ "object-keys": "^1.0.12"
+ }
+ },
+ "delay": {
+ "version": "4.4.1",
+ "resolved": "https://registry.npmjs.org/delay/-/delay-4.4.1.tgz",
+ "integrity": "sha512-aL3AhqtfhOlT/3ai6sWXeqwnw63ATNpnUiN4HL7x9q+My5QtHlO3OIkasmug9LKzpheLdmUKGRKnYXYAS7FQkQ=="
+ },
+ "delayed-stream": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
+ "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="
+ },
+ "delegates": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
+ "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o="
+ },
+ "denque": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz",
+ "integrity": "sha512-OfzPuSZKGcgr96rf1oODnfjqBFmr1DVoc/TrItj3Ohe0Ah1C5WX5Baquw/9U9KovnQ88EqmJbD66rKYUQYN1tQ=="
+ },
+ "depd": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
+ "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ=="
+ },
+ "destroy": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz",
+ "integrity": "sha512-3NdhDuEXnfun/z7x9GOElY49LoqVHoGScmOKwmxhsS8N5Y+Z8KyPPDnaSzqWgYt/ji4mqwfTS34Htrk0zPIXVg=="
+ },
+ "detect-libc": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
+ "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups="
+ },
+ "diff": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz",
+ "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==",
+ "dev": true
+ },
+ "doctrine": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
+ "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
+ "dev": true,
+ "requires": {
+ "esutils": "^2.0.2"
+ }
+ },
+ "dot-prop": {
+ "version": "5.2.0",
+ "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.2.0.tgz",
+ "integrity": "sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==",
+ "requires": {
+ "is-obj": "^2.0.0"
+ }
+ },
+ "dtrace-provider": {
+ "version": "0.8.7",
+ "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.7.tgz",
+ "integrity": "sha512-V+HIGbAdxCIxddHNDwzXi6cx8Cz5RRlQOVcsryHfsyVVebpBEnDwHSgqxpgKzqeU/6/0DWqRLAGUwkbg2ecN1Q==",
+ "optional": true,
+ "requires": {
+ "nan": "^2.10.0"
+ }
+ },
+ "duplexify": {
+ "version": "3.7.1",
+ "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz",
+ "integrity": "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==",
+ "requires": {
+ "end-of-stream": "^1.0.0",
+ "inherits": "^2.0.1",
+ "readable-stream": "^2.0.0",
+ "stream-shift": "^1.0.0"
+ }
+ },
+ "ecc-jsbn": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
+ "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==",
+ "requires": {
+ "jsbn": "~0.1.0",
+ "safer-buffer": "^2.1.0"
+ }
+ },
+ "ecdsa-sig-formatter": {
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz",
+ "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==",
+ "requires": {
+ "safe-buffer": "^5.0.1"
+ }
+ },
+ "ee-first": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
+ "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="
+ },
+ "emitter-listener": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz",
+ "integrity": "sha512-Bt1sBAGFHY9DKY+4/2cV6izcKJUf5T7/gkdmkxzX/qv9CcGH8xSwVRW5mtX03SWJtRTWSOpzCuWN9rBFYZepZQ==",
+ "requires": {
+ "shimmer": "^1.2.0"
+ }
+ },
+ "emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true
+ },
+ "encodeurl": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
+ "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="
+ },
+ "end-of-stream": {
+ "version": "1.4.4",
+ "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
+ "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
+ "requires": {
+ "once": "^1.4.0"
+ }
+ },
+ "enquirer": {
+ "version": "2.3.6",
+ "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz",
+ "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==",
+ "dev": true,
+ "requires": {
+ "ansi-colors": "^4.1.1"
+ }
+ },
+ "ent": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz",
+ "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA=="
+ },
+ "error-ex": {
+ "version": "1.3.2",
+ "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz",
+ "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==",
+ "dev": true,
+ "requires": {
+ "is-arrayish": "^0.2.1"
+ }
+ },
+ "es-abstract": {
+ "version": "1.18.3",
+ "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.3.tgz",
+ "integrity": "sha512-nQIr12dxV7SSxE6r6f1l3DtAeEYdsGpps13dR0TwJg1S8gyp4ZPgy3FZcHBgbiQqnoqSTb+oC+kO4UQ0C/J8vw==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2",
+ "es-to-primitive": "^1.2.1",
+ "function-bind": "^1.1.1",
+ "get-intrinsic": "^1.1.1",
+ "has": "^1.0.3",
+ "is-negative-zero": "^2.0.1",
+ "is-regex": "^1.1.3",
+ "object-inspect": "^1.10.3",
+ "object-keys": "^1.1.1",
+ "object.assign": "^4.1.2",
+ "string.prototype.trimend": "^1.0.4",
+ "string.prototype.trimstart": "^1.0.4",
+ "unbox-primitive": "^1.0.1"
+ }
+ },
+ "es-to-primitive": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz",
+ "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==",
+ "dev": true,
+ "requires": {
+ "is-callable": "^1.1.4",
+ "is-date-object": "^1.0.1",
+ "is-symbol": "^1.0.2"
+ }
+ },
+ "escalade": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
+ "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
+ "dev": true
+ },
+ "escape-html": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
+ "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="
+ },
+ "escape-string-regexp": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
+ "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=",
+ "dev": true
+ },
+ "eslint": {
+ "version": "7.30.0",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.30.0.tgz",
+ "integrity": "sha512-VLqz80i3as3NdloY44BQSJpFw534L9Oh+6zJOUaViV4JPd+DaHwutqP7tcpkW3YiXbK6s05RZl7yl7cQn+lijg==",
+ "dev": true,
+ "requires": {
+ "@babel/code-frame": "7.12.11",
+ "@eslint/eslintrc": "^0.4.2",
+ "@humanwhocodes/config-array": "^0.5.0",
+ "ajv": "^6.10.0",
+ "chalk": "^4.0.0",
+ "cross-spawn": "^7.0.2",
+ "debug": "^4.0.1",
+ "doctrine": "^3.0.0",
+ "enquirer": "^2.3.5",
+ "escape-string-regexp": "^4.0.0",
+ "eslint-scope": "^5.1.1",
+ "eslint-utils": "^2.1.0",
+ "eslint-visitor-keys": "^2.0.0",
+ "espree": "^7.3.1",
+ "esquery": "^1.4.0",
+ "esutils": "^2.0.2",
+ "fast-deep-equal": "^3.1.3",
+ "file-entry-cache": "^6.0.1",
+ "functional-red-black-tree": "^1.0.1",
+ "glob-parent": "^5.1.2",
+ "globals": "^13.6.0",
+ "ignore": "^4.0.6",
+ "import-fresh": "^3.0.0",
+ "imurmurhash": "^0.1.4",
+ "is-glob": "^4.0.0",
+ "js-yaml": "^3.13.1",
+ "json-stable-stringify-without-jsonify": "^1.0.1",
+ "levn": "^0.4.1",
+ "lodash.merge": "^4.6.2",
+ "minimatch": "^3.0.4",
+ "natural-compare": "^1.4.0",
+ "optionator": "^0.9.1",
+ "progress": "^2.0.0",
+ "regexpp": "^3.1.0",
+ "semver": "^7.2.1",
+ "strip-ansi": "^6.0.0",
+ "strip-json-comments": "^3.1.0",
+ "table": "^6.0.9",
+ "text-table": "^0.2.0",
+ "v8-compile-cache": "^2.0.3"
+ },
+ "dependencies": {
+ "ajv": {
+ "version": "6.12.6",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
+ "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
+ "dev": true,
+ "requires": {
+ "fast-deep-equal": "^3.1.1",
+ "fast-json-stable-stringify": "^2.0.0",
+ "json-schema-traverse": "^0.4.1",
+ "uri-js": "^4.2.2"
+ }
+ },
+ "chalk": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz",
+ "integrity": "sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^4.1.0",
+ "supports-color": "^7.1.0"
+ },
+ "dependencies": {
+ "ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "requires": {
+ "color-convert": "^2.0.1"
+ }
+ },
+ "supports-color": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+ "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^4.0.0"
+ }
+ }
+ }
+ },
+ "color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dev": true,
+ "requires": {
+ "color-name": "~1.1.4"
+ }
+ },
+ "color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "dev": true
+ },
+ "escape-string-regexp": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
+ "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
+ "dev": true
+ },
+ "eslint-visitor-keys": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz",
+ "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==",
+ "dev": true
+ },
+ "estraverse": {
+ "version": "5.2.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz",
+ "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ=="
+ },
+ "fast-deep-equal": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
+ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
+ "dev": true
+ },
+ "has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true
+ },
+ "lru-cache": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+ "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "dev": true,
+ "requires": {
+ "yallist": "^4.0.0"
+ }
+ },
+ "rimraf": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+ "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
+ "requires": {
+ "glob": "^7.1.3"
+ }
+ },
+ "semver": {
+ "version": "7.3.5",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
+ "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
+ "dev": true,
+ "requires": {
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
+ "dev": true
+ }
+ }
+ },
+ "eslint-config-prettier": {
+ "version": "8.3.0",
+ "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.3.0.tgz",
+ "integrity": "sha512-BgZuLUSeKzvlL/VUjx/Yb787VQ26RU3gGjA3iiFvdsp/2bMfVIWUVP7tjxtjS0e+HP409cPlPvNkQloz8C91ew==",
+ "dev": true
+ },
+ "eslint-config-standard": {
+ "version": "16.0.3",
+ "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-16.0.3.tgz",
+ "integrity": "sha512-x4fmJL5hGqNJKGHSjnLdgA6U6h1YW/G2dW9fA+cyVur4SK6lyue8+UgNKWlZtUDTXvgKDD/Oa3GQjmB5kjtVvg==",
+ "dev": true
+ },
+ "eslint-import-resolver-node": {
+ "version": "0.3.4",
+ "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.4.tgz",
+ "integrity": "sha512-ogtf+5AB/O+nM6DIeBUNr2fuT7ot9Qg/1harBfBtaP13ekEWFQEEMP94BCB7zaNW3gyY+8SHYF00rnqYwXKWOA==",
+ "dev": true,
+ "requires": {
+ "debug": "^2.6.9",
+ "resolve": "^1.13.1"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "2.6.9",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+ "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+ "dev": true,
+ "requires": {
+ "ms": "2.0.0"
+ }
+ },
+ "ms": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
+ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=",
+ "dev": true
+ }
+ }
+ },
+ "eslint-module-utils": {
+ "version": "2.6.1",
+ "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.1.tgz",
+ "integrity": "sha512-ZXI9B8cxAJIH4nfkhTwcRTEAnrVfobYqwjWy/QMCZ8rHkZHFjf9yO4BzpiF9kCSfNlMG54eKigISHpX0+AaT4A==",
+ "dev": true,
+ "requires": {
+ "debug": "^3.2.7",
+ "pkg-dir": "^2.0.0"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "3.2.7",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
+ "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
+ "dev": true,
+ "requires": {
+ "ms": "^2.1.1"
+ }
+ }
+ }
+ },
+ "eslint-plugin-chai-expect": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-chai-expect/-/eslint-plugin-chai-expect-2.2.0.tgz",
+ "integrity": "sha512-ExTJKhgeYMfY8wDj3UiZmgpMKJOUHGNHmWMlxT49JUDB1vTnw0sSNfXJSxnX+LcebyBD/gudXzjzD136WqPJrQ==",
+ "dev": true
+ },
+ "eslint-plugin-chai-friendly": {
+ "version": "0.6.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.6.0.tgz",
+ "integrity": "sha512-Uvvv1gkbRGp/qfN15B0kQyQWg+oFA8buDSqrwmW3egNSk/FpqH2MjQqKOuKwmEL6w4QIQrIjDp+gg6kGGmD3oQ==",
+ "dev": true
+ },
+ "eslint-plugin-es": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz",
+ "integrity": "sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ==",
+ "dev": true,
+ "requires": {
+ "eslint-utils": "^2.0.0",
+ "regexpp": "^3.0.0"
+ }
+ },
+ "eslint-plugin-import": {
+ "version": "2.23.4",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.23.4.tgz",
+ "integrity": "sha512-6/wP8zZRsnQFiR3iaPFgh5ImVRM1WN5NUWfTIRqwOdeiGJlBcSk82o1FEVq8yXmy4lkIzTo7YhHCIxlU/2HyEQ==",
+ "dev": true,
+ "requires": {
+ "array-includes": "^3.1.3",
+ "array.prototype.flat": "^1.2.4",
+ "debug": "^2.6.9",
+ "doctrine": "^2.1.0",
+ "eslint-import-resolver-node": "^0.3.4",
+ "eslint-module-utils": "^2.6.1",
+ "find-up": "^2.0.0",
+ "has": "^1.0.3",
+ "is-core-module": "^2.4.0",
+ "minimatch": "^3.0.4",
+ "object.values": "^1.1.3",
+ "pkg-up": "^2.0.0",
+ "read-pkg-up": "^3.0.0",
+ "resolve": "^1.20.0",
+ "tsconfig-paths": "^3.9.0"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "2.6.9",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+ "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+ "dev": true,
+ "requires": {
+ "ms": "2.0.0"
+ }
+ },
+ "doctrine": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz",
+ "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==",
+ "dev": true,
+ "requires": {
+ "esutils": "^2.0.2"
+ }
+ },
+ "has-symbols": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz",
+ "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw=="
+ },
+ "is-callable": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz",
+ "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ=="
+ },
+ "is-core-module": {
+ "version": "2.5.0",
+ "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.5.0.tgz",
+ "integrity": "sha512-TXCMSDsEHMEEZ6eCA8rwRDbLu55MRGmrctljsBX/2v1d9/GzqHOxW5c5oPSgrUt2vBFXebu9rGqckXGPWOlYpg==",
+ "dev": true,
+ "requires": {
+ "has": "^1.0.3"
+ }
+ },
+ "ms": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
+ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=",
+ "dev": true
+ }
+ }
+ },
+ "eslint-plugin-mocha": {
+ "version": "8.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-8.2.0.tgz",
+ "integrity": "sha512-8oOR47Ejt+YJPNQzedbiklDqS1zurEaNrxXpRs+Uk4DMDPVmKNagShFeUaYsfvWP55AhI+P1non5QZAHV6K78A==",
+ "dev": true,
+ "requires": {
+ "eslint-utils": "^2.1.0",
+ "ramda": "^0.27.1"
+ }
+ },
+ "eslint-plugin-node": {
+ "version": "11.1.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz",
+ "integrity": "sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==",
+ "dev": true,
+ "requires": {
+ "eslint-plugin-es": "^3.0.0",
+ "eslint-utils": "^2.0.0",
+ "ignore": "^5.1.1",
+ "minimatch": "^3.0.4",
+ "resolve": "^1.10.1",
+ "semver": "^6.1.0"
+ },
+ "dependencies": {
+ "ignore": {
+ "version": "5.1.8",
+ "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz",
+ "integrity": "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==",
+ "dev": true
+ },
+ "semver": {
+ "version": "6.3.0",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+ "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+ "dev": true
+ }
+ }
+ },
+ "eslint-plugin-prettier": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-3.1.2.tgz",
+ "integrity": "sha512-GlolCC9y3XZfv3RQfwGew7NnuFDKsfI4lbvRK+PIIo23SFH+LemGs4cKwzAaRa+Mdb+lQO/STaIayno8T5sJJA==",
+ "dev": true,
+ "requires": {
+ "prettier-linter-helpers": "^1.0.0"
+ }
+ },
+ "eslint-plugin-promise": {
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-4.2.1.tgz",
+ "integrity": "sha512-VoM09vT7bfA7D+upt+FjeBO5eHIJQBUWki1aPvB+vbNiHS3+oGIJGIeyBtKQTME6UPXXy3vV07OL1tHd3ANuDw==",
+ "dev": true
+ },
+ "eslint-scope": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz",
+ "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==",
+ "dev": true,
+ "requires": {
+ "esrecurse": "^4.3.0",
+ "estraverse": "^4.1.1"
+ }
+ },
+ "eslint-utils": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz",
+ "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==",
+ "dev": true,
+ "requires": {
+ "eslint-visitor-keys": "^1.1.0"
+ }
+ },
+ "eslint-visitor-keys": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz",
+ "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==",
+ "dev": true
+ },
+ "espree": {
+ "version": "7.3.1",
+ "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz",
+ "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==",
+ "dev": true,
+ "requires": {
+ "acorn": "^7.4.0",
+ "acorn-jsx": "^5.3.1",
+ "eslint-visitor-keys": "^1.3.0"
+ },
+ "dependencies": {
+ "acorn": {
+ "version": "7.4.1",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz",
+ "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==",
+ "dev": true
+ },
+ "eslint-visitor-keys": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz",
+ "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==",
+ "dev": true
+ }
+ }
+ },
+ "esprima": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
+ "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
+ "dev": true
+ },
+ "esquery": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz",
+ "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==",
+ "dev": true
+ },
+ "esrecurse": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
+ "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
+ "dev": true,
+ "requires": {
+ "estraverse": "^5.2.0"
+ },
+ "dependencies": {
+ "estraverse": {
+ "version": "5.2.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz",
+ "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==",
+ "dev": true
+ }
+ }
+ },
+ "estraverse": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz",
+ "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
+ "dev": true
+ },
+ "esutils": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
+ "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
+ "dev": true
+ },
+ "etag": {
+ "version": "1.8.1",
+ "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
+ "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="
+ },
+ "event-target-shim": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
+ "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ=="
+ },
+ "eventid": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/eventid/-/eventid-1.0.0.tgz",
+ "integrity": "sha512-4upSDsvpxhWPsmw4fsJCp0zj8S7I0qh1lCDTmZXP8V3TtryQKDI8CgQPN+e5JakbWwzaAX3lrdp2b3KSoMSUpw==",
+ "requires": {
+ "d64": "^1.0.0",
+ "uuid": "^3.0.1"
+ }
+ },
+ "events": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz",
+ "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw=="
+ },
+ "exit": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz",
+ "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==",
+ "dev": true
+ },
+ "express": {
+ "version": "4.17.1",
+ "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz",
+ "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==",
+ "requires": {
+ "accepts": "~1.3.7",
+ "array-flatten": "1.1.1",
+ "body-parser": "1.19.0",
+ "content-disposition": "0.5.3",
+ "content-type": "~1.0.4",
+ "cookie": "0.4.0",
+ "cookie-signature": "1.0.6",
+ "debug": "2.6.9",
+ "depd": "~1.1.2",
+ "encodeurl": "~1.0.2",
+ "escape-html": "~1.0.3",
+ "etag": "~1.8.1",
+ "finalhandler": "~1.1.2",
+ "fresh": "0.5.2",
+ "merge-descriptors": "1.0.1",
+ "methods": "~1.1.2",
+ "on-finished": "~2.3.0",
+ "parseurl": "~1.3.3",
+ "path-to-regexp": "0.1.7",
+ "proxy-addr": "~2.0.5",
+ "qs": "6.7.0",
+ "range-parser": "~1.2.1",
+ "safe-buffer": "5.1.2",
+ "send": "0.17.1",
+ "serve-static": "1.14.1",
+ "setprototypeof": "1.1.1",
+ "statuses": "~1.5.0",
+ "type-is": "~1.6.18",
+ "utils-merge": "1.0.1",
+ "vary": "~1.1.2"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "2.6.9",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+ "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+ "requires": {
+ "ms": "2.0.0"
+ }
+ },
+ "ms": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
+ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
+ },
+ "path-to-regexp": {
+ "version": "0.1.7",
+ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
+ "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ=="
+ }
+ }
+ },
+ "extend": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
+ "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
+ },
+ "extsprintf": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
+ "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g=="
+ },
+ "fast-deep-equal": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz",
+ "integrity": "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w=="
+ },
+ "fast-diff": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz",
+ "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==",
+ "dev": true
+ },
+ "fast-json-stable-stringify": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz",
+ "integrity": "sha512-eIgZvM9C3P05kg0qxfqaVU6Tma4QedCPIByQOcemV0vju8ot3cS2DpHi4m2G2JvbSMI152rjfLX0p1pkSdyPlQ=="
+ },
+ "fast-levenshtein": {
+ "version": "2.0.6",
+ "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
+ "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
+ "dev": true
+ },
+ "fast-text-encoding": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.1.tgz",
+ "integrity": "sha512-x4FEgaz3zNRtJfLFqJmHWxkMDDvXVtaznj2V9jiP8ACUJrUgist4bP9FmDL2Vew2Y9mEQI/tG4GqabaitYp9CQ=="
+ },
+ "file-entry-cache": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
+ "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==",
+ "dev": true,
+ "requires": {
+ "flat-cache": "^3.0.4"
+ }
+ },
+ "file-uri-to-path": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz",
+ "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw=="
+ },
+ "fill-range": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
+ "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+ "dev": true,
+ "requires": {
+ "to-regex-range": "^5.0.1"
+ }
+ },
+ "finalhandler": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz",
+ "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==",
+ "requires": {
+ "debug": "2.6.9",
+ "encodeurl": "~1.0.2",
+ "escape-html": "~1.0.3",
+ "on-finished": "~2.3.0",
+ "parseurl": "~1.3.3",
+ "statuses": "~1.5.0",
+ "unpipe": "~1.0.0"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "2.6.9",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+ "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+ "requires": {
+ "ms": "2.0.0"
+ }
+ },
+ "ms": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
+ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
+ }
+ }
+ },
+ "find-up": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz",
+ "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=",
+ "dev": true,
+ "requires": {
+ "locate-path": "^2.0.0"
+ }
+ },
+ "findit2": {
+ "version": "2.2.3",
+ "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz",
+ "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY="
+ },
+ "flat": {
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz",
+ "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==",
+ "dev": true
+ },
+ "flat-cache": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz",
+ "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==",
+ "dev": true,
+ "requires": {
+ "flatted": "^3.1.0"
+ }
+ },
+ "flatted": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.1.tgz",
+ "integrity": "sha512-OMQjaErSFHmHqZe+PSidH5n8j3O0F2DdnVh8JB4j4eUQ2k6KvB0qGfrKIhapvez5JerBbmWkaLYUYWISaESoXg==",
+ "dev": true
+ },
+ "forever-agent": {
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
+ "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw=="
+ },
+ "form-data": {
+ "version": "2.3.3",
+ "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
+ "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
+ "requires": {
+ "asynckit": "^0.4.0",
+ "combined-stream": "^1.0.6",
+ "mime-types": "^2.1.12"
+ }
+ },
+ "forwarded": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz",
+ "integrity": "sha512-Ua9xNhH0b8pwE3yRbFfXJvfdWF0UHNCdeyb2sbi9Ul/M+r3PTdrz7Cv4SCfZRMjmzEM9PhraqfZFbGTIg3OMyA=="
+ },
+ "fresh": {
+ "version": "0.5.2",
+ "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
+ "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q=="
+ },
+ "fs-minipass": {
+ "version": "1.2.7",
+ "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz",
+ "integrity": "sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==",
+ "requires": {
+ "minipass": "^2.6.0"
+ }
+ },
+ "fs.realpath": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="
+ },
+ "function-bind": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
+ "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
+ },
+ "functional-red-black-tree": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz",
+ "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==",
+ "dev": true
+ },
+ "gauge": {
+ "version": "2.7.4",
+ "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz",
+ "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=",
+ "requires": {
+ "aproba": "^1.0.3",
+ "console-control-strings": "^1.0.0",
+ "has-unicode": "^2.0.0",
+ "object-assign": "^4.1.0",
+ "signal-exit": "^3.0.0",
+ "string-width": "^1.0.1",
+ "strip-ansi": "^3.0.1",
+ "wide-align": "^1.1.0"
+ },
+ "dependencies": {
+ "ansi-regex": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
+ "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8="
+ },
+ "is-fullwidth-code-point": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz",
+ "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=",
+ "requires": {
+ "number-is-nan": "^1.0.0"
+ }
+ },
+ "string-width": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz",
+ "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=",
+ "requires": {
+ "code-point-at": "^1.0.0",
+ "is-fullwidth-code-point": "^1.0.0",
+ "strip-ansi": "^3.0.0"
+ }
+ },
+ "strip-ansi": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
+ "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=",
+ "requires": {
+ "ansi-regex": "^2.0.0"
+ }
+ }
+ }
+ },
+ "gaxios": {
+ "version": "2.3.4",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.3.4.tgz",
+ "integrity": "sha512-US8UMj8C5pRnao3Zykc4AAVr+cffoNKRTg9Rsf2GiuZCW69vgJj38VK2PzlPuQU73FZ/nTk9/Av6/JGcE1N9vA==",
+ "requires": {
+ "abort-controller": "^3.0.0",
+ "extend": "^3.0.2",
+ "https-proxy-agent": "^5.0.0",
+ "is-stream": "^2.0.0",
+ "node-fetch": "^2.3.0"
+ }
+ },
+ "gcp-metadata": {
+ "version": "3.5.0",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-3.5.0.tgz",
+ "integrity": "sha512-ZQf+DLZ5aKcRpLzYUyBS3yo3N0JSa82lNDO8rj3nMSlovLcz2riKFBsYgDzeXcv75oo5eqB2lx+B14UvPoCRnA==",
+ "requires": {
+ "gaxios": "^2.1.0",
+ "json-bigint": "^0.3.0"
+ }
+ },
+ "get-caller-file": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
+ "dev": true
+ },
+ "get-func-name": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz",
+ "integrity": "sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==",
+ "dev": true
+ },
+ "get-intrinsic": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz",
+ "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==",
+ "dev": true,
+ "requires": {
+ "function-bind": "^1.1.1",
+ "has": "^1.0.3",
+ "has-symbols": "^1.0.1"
+ }
+ },
+ "getpass": {
+ "version": "0.1.7",
+ "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
+ "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==",
+ "requires": {
+ "assert-plus": "^1.0.0"
+ }
+ },
+ "glob": {
+ "version": "7.1.6",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
+ "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
+ "requires": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.0.4",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ }
+ },
+ "glob-parent": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
+ "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
+ "dev": true,
+ "requires": {
+ "is-glob": "^4.0.1"
+ }
+ },
+ "globals": {
+ "version": "13.10.0",
+ "resolved": "https://registry.npmjs.org/globals/-/globals-13.10.0.tgz",
+ "integrity": "sha512-piHC3blgLGFjvOuMmWZX60f+na1lXFDhQXBf1UYp2fXPXqvEUbOhNwi6BsQ0bQishwedgnjkwv1d9zKf+MWw3g==",
+ "dev": true,
+ "requires": {
+ "type-fest": "^0.20.2"
+ }
+ },
+ "google-auth-library": {
+ "version": "6.0.6",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz",
+ "integrity": "sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==",
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^3.0.0",
+ "gcp-metadata": "^4.1.0",
+ "gtoken": "^5.0.0",
+ "jws": "^4.0.0",
+ "lru-cache": "^6.0.0"
+ },
+ "dependencies": {
+ "bignumber.js": {
+ "version": "9.0.0",
+ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz",
+ "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A=="
+ },
+ "gaxios": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz",
+ "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==",
+ "requires": {
+ "abort-controller": "^3.0.0",
+ "extend": "^3.0.2",
+ "https-proxy-agent": "^5.0.0",
+ "is-stream": "^2.0.0",
+ "node-fetch": "^2.3.0"
+ }
+ },
+ "gcp-metadata": {
+ "version": "4.1.4",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz",
+ "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==",
+ "requires": {
+ "gaxios": "^3.0.0",
+ "json-bigint": "^1.0.0"
+ }
+ },
+ "google-p12-pem": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz",
+ "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==",
+ "requires": {
+ "node-forge": "^0.9.0"
+ }
+ },
+ "gtoken": {
+ "version": "5.0.3",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz",
+ "integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==",
+ "requires": {
+ "gaxios": "^3.0.0",
+ "google-p12-pem": "^3.0.0",
+ "jws": "^4.0.0",
+ "mime": "^2.2.0"
+ }
+ },
+ "json-bigint": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz",
+ "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==",
+ "requires": {
+ "bignumber.js": "^9.0.0"
+ }
+ },
+ "lru-cache": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+ "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "requires": {
+ "yallist": "^4.0.0"
+ }
+ },
+ "mime": {
+ "version": "2.4.6",
+ "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz",
+ "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA=="
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
+ }
+ }
+ },
+ "google-gax": {
+ "version": "1.15.3",
+ "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-1.15.3.tgz",
+ "integrity": "sha512-3JKJCRumNm3x2EksUTw4P1Rad43FTpqrtW9jzpf3xSMYXx+ogaqTM1vGo7VixHB4xkAyATXVIa3OcNSh8H9zsQ==",
+ "requires": {
+ "@grpc/grpc-js": "~1.0.3",
+ "@grpc/proto-loader": "^0.5.1",
+ "@types/fs-extra": "^8.0.1",
+ "@types/long": "^4.0.0",
+ "abort-controller": "^3.0.0",
+ "duplexify": "^3.6.0",
+ "google-auth-library": "^5.0.0",
+ "is-stream-ended": "^0.1.4",
+ "lodash.at": "^4.6.0",
+ "lodash.has": "^4.5.2",
+ "node-fetch": "^2.6.0",
+ "protobufjs": "^6.8.9",
+ "retry-request": "^4.0.0",
+ "semver": "^6.0.0",
+ "walkdir": "^0.4.0"
+ },
+ "dependencies": {
+ "google-auth-library": {
+ "version": "5.10.1",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz",
+ "integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==",
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^2.1.0",
+ "gcp-metadata": "^3.4.0",
+ "gtoken": "^4.1.0",
+ "jws": "^4.0.0",
+ "lru-cache": "^5.0.0"
+ }
+ },
+ "semver": {
+ "version": "6.3.0",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+ "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw=="
+ }
+ }
+ },
+ "google-p12-pem": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-2.0.4.tgz",
+ "integrity": "sha512-S4blHBQWZRnEW44OcR7TL9WR+QCqByRvhNDZ/uuQfpxywfupikf/miba8js1jZi6ZOGv5slgSuoshCWh6EMDzg==",
+ "requires": {
+ "node-forge": "^0.9.0"
+ }
+ },
+ "graceful-fs": {
+ "version": "4.2.6",
+ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz",
+ "integrity": "sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==",
+ "dev": true
+ },
+ "growl": {
+ "version": "1.10.5",
+ "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz",
+ "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==",
+ "dev": true
+ },
+ "gtoken": {
+ "version": "4.1.4",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-4.1.4.tgz",
+ "integrity": "sha512-VxirzD0SWoFUo5p8RDP8Jt2AGyOmyYcT/pOUgDKJCK+iSw0TMqwrVfY37RXTNmoKwrzmDHSk0GMT9FsgVmnVSA==",
+ "requires": {
+ "gaxios": "^2.1.0",
+ "google-p12-pem": "^2.0.0",
+ "jws": "^4.0.0",
+ "mime": "^2.2.0"
+ },
+ "dependencies": {
+ "mime": {
+ "version": "2.4.6",
+ "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz",
+ "integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA=="
+ }
+ }
+ },
+ "har-schema": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
+ "integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q=="
+ },
+ "har-validator": {
+ "version": "5.1.3",
+ "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz",
+ "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==",
+ "requires": {
+ "ajv": "^6.5.5",
+ "har-schema": "^2.0.0"
+ }
+ },
+ "has": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
+ "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
+ "requires": {
+ "function-bind": "^1.1.1"
+ }
+ },
+ "has-bigints": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz",
+ "integrity": "sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==",
+ "dev": true
+ },
+ "has-flag": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
+ "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
+ "dev": true
+ },
+ "has-symbols": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz",
+ "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==",
+ "dev": true
+ },
+ "has-unicode": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
+ "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk="
+ },
+ "he": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
+ "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
+ "dev": true
+ },
+ "heap": {
+ "version": "0.2.6",
+ "resolved": "https://registry.npmjs.org/heap/-/heap-0.2.6.tgz",
+ "integrity": "sha512-MzzWcnfB1e4EG2vHi3dXHoBupmuXNZzx6pY6HldVS55JKKBoq3xOyzfSaZRkJp37HIhEYC78knabHff3zc4dQQ=="
+ },
+ "hex2dec": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz",
+ "integrity": "sha512-Yu+q/XWr2fFQ11tHxPq4p4EiNkb2y+lAacJNhAdRXVfRIcDH6gi7htWFnnlIzvqHMHoWeIsfXlNAjZInpAOJDA=="
+ },
+ "hosted-git-info": {
+ "version": "2.8.9",
+ "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz",
+ "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==",
+ "dev": true
+ },
+ "http-errors": {
+ "version": "1.7.2",
+ "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz",
+ "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==",
+ "requires": {
+ "depd": "~1.1.2",
+ "inherits": "2.0.3",
+ "setprototypeof": "1.1.1",
+ "statuses": ">= 1.5.0 < 2",
+ "toidentifier": "1.0.0"
+ }
+ },
+ "http-proxy-agent": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz",
+ "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==",
+ "requires": {
+ "@tootallnate/once": "1",
+ "agent-base": "6",
+ "debug": "4"
+ }
+ },
+ "http-signature": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
+ "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==",
+ "requires": {
+ "assert-plus": "^1.0.0",
+ "jsprim": "^1.2.2",
+ "sshpk": "^1.7.0"
+ }
+ },
+ "https-proxy-agent": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz",
+ "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==",
+ "requires": {
+ "agent-base": "6",
+ "debug": "4"
+ }
+ },
+ "iconv-lite": {
+ "version": "0.4.24",
+ "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
+ "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
+ "requires": {
+ "safer-buffer": ">= 2.1.2 < 3"
+ }
+ },
+ "ieee754": {
+ "version": "1.1.13",
+ "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz",
+ "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg=="
+ },
+ "ignore": {
+ "version": "4.0.6",
+ "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz",
+ "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==",
+ "dev": true
+ },
+ "ignore-walk": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.3.tgz",
+ "integrity": "sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==",
+ "requires": {
+ "minimatch": "^3.0.4"
+ }
+ },
+ "import-fresh": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz",
+ "integrity": "sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==",
+ "dev": true,
+ "requires": {
+ "parent-module": "^1.0.0",
+ "resolve-from": "^4.0.0"
+ },
+ "dependencies": {
+ "resolve-from": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
+ "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
+ "dev": true
+ }
+ }
+ },
+ "imurmurhash": {
+ "version": "0.1.4",
+ "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
+ "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
+ "dev": true
+ },
+ "inflight": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+ "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
+ "requires": {
+ "once": "^1.3.0",
+ "wrappy": "1"
+ }
+ },
+ "inherits": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
+ "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw=="
+ },
+ "ini": {
+ "version": "1.3.8",
+ "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
+ "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="
+ },
+ "ioredis": {
+ "version": "4.17.3",
+ "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.17.3.tgz",
+ "integrity": "sha512-iRvq4BOYzNFkDnSyhx7cmJNOi1x/HWYe+A4VXHBu4qpwJaGT1Mp+D2bVGJntH9K/Z/GeOM/Nprb8gB3bmitz1Q==",
+ "requires": {
+ "cluster-key-slot": "^1.1.0",
+ "debug": "^4.1.1",
+ "denque": "^1.1.0",
+ "lodash.defaults": "^4.2.0",
+ "lodash.flatten": "^4.4.0",
+ "redis-commands": "1.5.0",
+ "redis-errors": "^1.2.0",
+ "redis-parser": "^3.0.0",
+ "standard-as-callback": "^2.0.1"
+ }
+ },
+ "ipaddr.js": {
+ "version": "1.9.1",
+ "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
+ "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="
+ },
+ "is": {
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz",
+ "integrity": "sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg=="
+ },
+ "is-arrayish": {
+ "version": "0.2.1",
+ "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
+ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=",
+ "dev": true
+ },
+ "is-bigint": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.2.tgz",
+ "integrity": "sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA==",
+ "dev": true
+ },
+ "is-binary-path": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
+ "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
+ "dev": true,
+ "requires": {
+ "binary-extensions": "^2.0.0"
+ }
+ },
+ "is-boolean-object": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.1.tgz",
+ "integrity": "sha512-bXdQWkECBUIAcCkeH1unwJLIpZYaa5VvuygSyS/c2lf719mTKZDU5UdDRlpd01UjADgmW8RfqaP+mRaVPdr/Ng==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2"
+ }
+ },
+ "is-buffer": {
+ "version": "1.1.6",
+ "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz",
+ "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w=="
+ },
+ "is-callable": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz",
+ "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==",
+ "dev": true
+ },
+ "is-core-module": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz",
+ "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==",
+ "requires": {
+ "has": "^1.0.3"
+ }
+ },
+ "is-date-object": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz",
+ "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==",
+ "dev": true
+ },
+ "is-extglob": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
+ "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=",
+ "dev": true
+ },
+ "is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "dev": true
+ },
+ "is-glob": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz",
+ "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==",
+ "dev": true,
+ "requires": {
+ "is-extglob": "^2.1.1"
+ }
+ },
+ "is-negative-zero": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz",
+ "integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==",
+ "dev": true
+ },
+ "is-number": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
+ "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
+ "dev": true
+ },
+ "is-number-object": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.5.tgz",
+ "integrity": "sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw==",
+ "dev": true
+ },
+ "is-obj": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz",
+ "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w=="
+ },
+ "is-plain-obj": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz",
+ "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==",
+ "dev": true
+ },
+ "is-regex": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz",
+ "integrity": "sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2"
+ }
+ },
+ "is-stream": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz",
+ "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw=="
+ },
+ "is-stream-ended": {
+ "version": "0.1.4",
+ "resolved": "https://registry.npmjs.org/is-stream-ended/-/is-stream-ended-0.1.4.tgz",
+ "integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw=="
+ },
+ "is-string": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz",
+ "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==",
+ "dev": true
+ },
+ "is-symbol": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz",
+ "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==",
+ "dev": true,
+ "requires": {
+ "has-symbols": "^1.0.1"
+ }
+ },
+ "is-typedarray": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
+ "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA=="
+ },
+ "isarray": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
+ "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="
+ },
+ "isexe": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
+ "dev": true
+ },
+ "isstream": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
+ "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g=="
+ },
+ "jmespath": {
+ "version": "0.15.0",
+ "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz",
+ "integrity": "sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w=="
+ },
+ "js-tokens": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
+ "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
+ "dev": true
+ },
+ "js-yaml": {
+ "version": "3.13.1",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz",
+ "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==",
+ "dev": true,
+ "requires": {
+ "argparse": "^1.0.7",
+ "esprima": "^4.0.0"
+ }
+ },
+ "jsbn": {
+ "version": "0.1.1",
+ "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
+ "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg=="
+ },
+ "json-bigint": {
+ "version": "0.3.0",
+ "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz",
+ "integrity": "sha512-u+c/u/F+JNPUekHCFyGVycRPyh9UHD5iUhSyIAn10kxbDTJxijwAbT6XHaONEOXuGGfmWUSroheXgHcml4gLgg==",
+ "requires": {
+ "bignumber.js": "^7.0.0"
+ }
+ },
+ "json-parse-better-errors": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz",
+ "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==",
+ "dev": true
+ },
+ "json-schema": {
+ "version": "0.2.3",
+ "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz",
+ "integrity": "sha512-a3xHnILGMtk+hDOqNwHzF6e2fNbiMrXZvxKQiEv2MlgQP+pjIOzqAmKYD2mDpXYE/44M7g+n9p2bKkYWDUcXCQ=="
+ },
+ "json-schema-traverse": {
+ "version": "0.4.1",
+ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
+ "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
+ },
+ "json-stable-stringify-without-jsonify": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
+ "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
+ "dev": true
+ },
+ "json-stringify-safe": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
+ "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA=="
+ },
+ "json5": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz",
+ "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==",
+ "dev": true,
+ "requires": {
+ "minimist": "^1.2.5"
+ }
+ },
+ "jsonparse": {
+ "version": "1.3.1",
+ "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz",
+ "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg=="
+ },
+ "jsprim": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz",
+ "integrity": "sha512-4Dj8Rf+fQ+/Pn7C5qeEX02op1WfOss3PKTE9Nsop3Dx+6UPxlm1dr/og7o2cRa5hNN07CACr4NFzRLtj/rjWog==",
+ "requires": {
+ "assert-plus": "1.0.0",
+ "extsprintf": "1.3.0",
+ "json-schema": "0.2.3",
+ "verror": "1.10.0"
+ }
+ },
+ "just-extend": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.1.0.tgz",
+ "integrity": "sha512-ApcjaOdVTJ7y4r08xI5wIqpvwS48Q0PBG4DJROcEkH1f8MdAiNFyFxz3xoL0LWAVwjrwPYZdVHHxhRHcx/uGLA==",
+ "dev": true
+ },
+ "jwa": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz",
+ "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==",
+ "requires": {
+ "buffer-equal-constant-time": "1.0.1",
+ "ecdsa-sig-formatter": "1.0.11",
+ "safe-buffer": "^5.0.1"
+ }
+ },
+ "jws": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
+ "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
+ "requires": {
+ "jwa": "^2.0.0",
+ "safe-buffer": "^5.0.1"
+ }
+ },
+ "levn": {
+ "version": "0.4.1",
+ "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
+ "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
+ "dev": true,
+ "requires": {
+ "prelude-ls": "^1.2.1",
+ "type-check": "~0.4.0"
+ }
+ },
+ "line-reader": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/line-reader/-/line-reader-0.4.0.tgz",
+ "integrity": "sha512-AYJ8g+eE7v+Ba4s/cuYqzuNulH/WbjdKQ55fvx8fNVn8WQzTpioY6vI1MoxTuMgcHYX3VlmZWbVvnkIqkyJbCA=="
+ },
+ "load-json-file": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz",
+ "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=",
+ "dev": true,
+ "requires": {
+ "graceful-fs": "^4.1.2",
+ "parse-json": "^4.0.0",
+ "pify": "^3.0.0",
+ "strip-bom": "^3.0.0"
+ },
+ "dependencies": {
+ "pify": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz",
+ "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=",
+ "dev": true
+ }
+ }
+ },
+ "locate-path": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz",
+ "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=",
+ "dev": true,
+ "requires": {
+ "p-locate": "^2.0.0",
+ "path-exists": "^3.0.0"
+ }
+ },
+ "lodash": {
+ "version": "4.17.21",
+ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
+ "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
+ },
+ "lodash.at": {
+ "version": "4.6.0",
+ "resolved": "https://registry.npmjs.org/lodash.at/-/lodash.at-4.6.0.tgz",
+ "integrity": "sha1-k83OZk8KGZTqM9181A4jr9EbD/g="
+ },
+ "lodash.camelcase": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
+ "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY="
+ },
+ "lodash.clonedeep": {
+ "version": "4.5.0",
+ "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz",
+ "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=",
+ "dev": true
+ },
+ "lodash.defaults": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
+ "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw="
+ },
+ "lodash.flatten": {
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz",
+ "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8="
+ },
+ "lodash.get": {
+ "version": "4.4.2",
+ "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
+ "integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==",
+ "dev": true
+ },
+ "lodash.has": {
+ "version": "4.5.2",
+ "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz",
+ "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI="
+ },
+ "lodash.merge": {
+ "version": "4.6.2",
+ "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
+ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
+ "dev": true
+ },
+ "lodash.truncate": {
+ "version": "4.4.2",
+ "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz",
+ "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=",
+ "dev": true
+ },
+ "log-driver": {
+ "version": "1.2.7",
+ "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz",
+ "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg=="
+ },
+ "log-symbols": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz",
+ "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==",
+ "dev": true,
+ "requires": {
+ "chalk": "^4.0.0"
+ },
+ "dependencies": {
+ "ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "requires": {
+ "color-convert": "^2.0.1"
+ }
+ },
+ "chalk": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz",
+ "integrity": "sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^4.1.0",
+ "supports-color": "^7.1.0"
+ }
+ },
+ "color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dev": true,
+ "requires": {
+ "color-name": "~1.1.4"
+ }
+ },
+ "color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "dev": true
+ },
+ "has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true
+ },
+ "supports-color": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+ "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^4.0.0"
+ }
+ }
+ }
+ },
+ "logger-sharelatex": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-2.2.0.tgz",
+ "integrity": "sha512-ko+OmE25XHJJCiz1R9EgwlfM7J/5olpunUfR3WcfuqOQrcUqsdBrDA2sOytngT0ViwjCR0Fh4qZVPwEWfmrvwA==",
+ "requires": {
+ "@google-cloud/logging-bunyan": "^3.0.0",
+ "@overleaf/o-error": "^3.0.0",
+ "bunyan": "^1.8.14",
+ "node-fetch": "^2.6.0",
+ "raven": "^2.6.4",
+ "yn": "^4.0.0"
+ },
+ "dependencies": {
+ "yn": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz",
+ "integrity": "sha512-huWiiCS4TxKc4SfgmTwW1K7JmXPPAmuXWYy4j9qjQo4+27Kni8mGhAAi1cloRWmBe2EqcLgt3IGqQoRL/MtPgg=="
+ }
+ }
+ },
+ "long": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
+ "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA=="
+ },
+ "lru-cache": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
+ "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
+ "requires": {
+ "yallist": "^3.0.2"
+ }
+ },
+ "map-obj": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz",
+ "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g=="
+ },
+ "md5": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz",
+ "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==",
+ "requires": {
+ "charenc": "0.0.2",
+ "crypt": "0.0.2",
+ "is-buffer": "~1.1.6"
+ }
+ },
+ "media-typer": {
+ "version": "0.3.0",
+ "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
+ "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ=="
+ },
+ "memory-pager": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz",
+ "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==",
+ "optional": true
+ },
+ "memorystream": {
+ "version": "0.3.1",
+ "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz",
+ "integrity": "sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw==",
+ "dev": true
+ },
+ "merge-descriptors": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
+ "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w=="
+ },
+ "methods": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
+ "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="
+ },
+ "mime": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
+ "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="
+ },
+ "mime-db": {
+ "version": "1.37.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.37.0.tgz",
+ "integrity": "sha512-R3C4db6bgQhlIhPU48fUtdVmKnflq+hRdad7IyKhtFj06VPNVdk2RhiYL3UjQIlso8L+YxAtFkobT0VK+S/ybg=="
+ },
+ "mime-types": {
+ "version": "2.1.21",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.21.tgz",
+ "integrity": "sha512-3iL6DbwpyLzjR3xHSFNFeb9Nz/M8WDkX33t1GFQnFOllWk8pOrh/LSrB5OXlnlW5P9LH73X6loW/eogc+F5lJg==",
+ "requires": {
+ "mime-db": "~1.37.0"
+ }
+ },
+ "minimatch": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
+ "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+ "requires": {
+ "brace-expansion": "^1.1.7"
+ }
+ },
+ "minimist": {
+ "version": "1.2.5",
+ "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
+ "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw=="
+ },
+ "minipass": {
+ "version": "2.9.0",
+ "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz",
+ "integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==",
+ "requires": {
+ "safe-buffer": "^5.1.2",
+ "yallist": "^3.0.0"
+ }
+ },
+ "minizlib": {
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.3.3.tgz",
+ "integrity": "sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==",
+ "requires": {
+ "minipass": "^2.9.0"
+ }
+ },
+ "mkdirp": {
+ "version": "0.5.5",
+ "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz",
+ "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==",
+ "requires": {
+ "minimist": "^1.2.5"
+ }
+ },
+ "mocha": {
+ "version": "8.4.0",
+ "resolved": "https://registry.npmjs.org/mocha/-/mocha-8.4.0.tgz",
+ "integrity": "sha512-hJaO0mwDXmZS4ghXsvPVriOhsxQ7ofcpQdm8dE+jISUOKopitvnXFQmpRR7jd2K6VBG6E26gU3IAbXXGIbu4sQ==",
+ "dev": true,
+ "requires": {
+ "@ungap/promise-all-settled": "1.1.2",
+ "ansi-colors": "4.1.1",
+ "browser-stdout": "1.3.1",
+ "chokidar": "3.5.1",
+ "debug": "4.3.1",
+ "diff": "5.0.0",
+ "escape-string-regexp": "4.0.0",
+ "find-up": "5.0.0",
+ "glob": "7.1.6",
+ "growl": "1.10.5",
+ "he": "1.2.0",
+ "js-yaml": "4.0.0",
+ "log-symbols": "4.0.0",
+ "minimatch": "3.0.4",
+ "ms": "2.1.3",
+ "nanoid": "3.1.20",
+ "serialize-javascript": "5.0.1",
+ "strip-json-comments": "3.1.1",
+ "supports-color": "8.1.1",
+ "which": "2.0.2",
+ "wide-align": "1.1.3",
+ "workerpool": "6.1.0",
+ "yargs": "16.2.0",
+ "yargs-parser": "20.2.4",
+ "yargs-unparser": "2.0.0"
+ },
+ "dependencies": {
+ "ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "requires": {
+ "color-convert": "^2.0.1"
+ }
+ },
+ "argparse": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
+ "dev": true
+ },
+ "color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "requires": {
+ "color-name": "~1.1.4"
+ }
+ },
+ "color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
+ },
+ "debug": {
+ "version": "4.3.1",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
+ "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
+ "dev": true,
+ "requires": {
+ "ms": "2.1.2"
+ },
+ "dependencies": {
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
+ "dev": true
+ }
+ }
+ },
+ "escape-string-regexp": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
+ "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
+ "dev": true
+ },
+ "find-up": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
+ "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
+ "dev": true,
+ "requires": {
+ "locate-path": "^6.0.0",
+ "path-exists": "^4.0.0"
+ }
+ },
+ "has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true
+ },
+ "js-yaml": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.0.0.tgz",
+ "integrity": "sha512-pqon0s+4ScYUvX30wxQi3PogGFAlUyH0awepWvwkj4jD4v+ova3RiYw8bmA6x2rDrEaj8i/oWKoRxpVNW+Re8Q==",
+ "dev": true,
+ "requires": {
+ "argparse": "^2.0.1"
+ }
+ },
+ "locate-path": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
+ "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
+ "dev": true,
+ "requires": {
+ "p-locate": "^5.0.0"
+ }
+ },
+ "ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
+ "dev": true
+ },
+ "p-locate": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
+ "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
+ "dev": true,
+ "requires": {
+ "p-limit": "^3.0.2"
+ }
+ },
+ "path-exists": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+ "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+ "dev": true
+ },
+ "supports-color": {
+ "version": "8.1.1",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
+ "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^4.0.0"
+ }
+ }
+ }
+ },
+ "module-details-from-path": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz",
+ "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is="
+ },
+ "moment": {
+ "version": "2.24.0",
+ "resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz",
+ "integrity": "sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg==",
+ "optional": true
+ },
+ "mongo-uri": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/mongo-uri/-/mongo-uri-0.1.2.tgz",
+ "integrity": "sha512-FehPVi2Dv7VPvAkLnN9haM1aarj1E9w08rkn2MAbbQJF5EbcOckdOHRAD9T35yUkfLVcs0YzYluNX4/+G8HaIw=="
+ },
+ "mongodb": {
+ "version": "3.6.1",
+ "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.6.1.tgz",
+ "integrity": "sha512-uH76Zzr5wPptnjEKJRQnwTsomtFOU/kQEU8a9hKHr2M7y9qVk7Q4Pkv0EQVp88742z9+RwvsdTw6dRjDZCNu1g==",
+ "requires": {
+ "bl": "^2.2.0",
+ "bson": "^1.1.4",
+ "denque": "^1.4.1",
+ "require_optional": "^1.0.1",
+ "safe-buffer": "^5.1.2",
+ "saslprep": "^1.0.0"
+ }
+ },
+ "ms": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz",
+ "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg=="
+ },
+ "mv": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz",
+ "integrity": "sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg==",
+ "optional": true,
+ "requires": {
+ "mkdirp": "~0.5.1",
+ "ncp": "~2.0.0",
+ "rimraf": "~2.4.0"
+ }
+ },
+ "nan": {
+ "version": "2.12.1",
+ "resolved": "https://registry.npmjs.org/nan/-/nan-2.12.1.tgz",
+ "integrity": "sha512-JY7V6lRkStKcKTvHO5NVSQRv+RV+FIL5pvDoLiAtSL9pKlC5x9PKQcZDsq7m4FO4d57mkhC6Z+QhAh3Jdk5JFw==",
+ "optional": true
+ },
+ "nanoid": {
+ "version": "3.1.20",
+ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz",
+ "integrity": "sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==",
+ "dev": true
+ },
+ "natural-compare": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
+ "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
+ "dev": true
+ },
+ "ncp": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz",
+ "integrity": "sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA==",
+ "optional": true
+ },
+ "needle": {
+ "version": "2.6.0",
+ "resolved": "https://registry.npmjs.org/needle/-/needle-2.6.0.tgz",
+ "integrity": "sha512-KKYdza4heMsEfSWD7VPUIz3zX2XDwOyX2d+geb4vrERZMT5RMU6ujjaD+I5Yr54uZxQ2w6XRTAhHBbSCyovZBg==",
+ "requires": {
+ "debug": "^3.2.6",
+ "iconv-lite": "^0.4.4",
+ "sax": "^1.2.4"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "3.2.7",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
+ "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
+ "requires": {
+ "ms": "^2.1.1"
+ }
+ },
+ "sax": {
+ "version": "1.2.4",
+ "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
+ "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
+ }
+ }
+ },
+ "negotiator": {
+ "version": "0.6.2",
+ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz",
+ "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw=="
+ },
+ "nise": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/nise/-/nise-4.0.3.tgz",
+ "integrity": "sha512-EGlhjm7/4KvmmE6B/UFsKh7eHykRl9VH+au8dduHLCyWUO/hr7+N+WtTvDUwc9zHuM1IaIJs/0lQ6Ag1jDkQSg==",
+ "dev": true,
+ "requires": {
+ "@sinonjs/commons": "^1.7.0",
+ "@sinonjs/fake-timers": "^6.0.0",
+ "@sinonjs/text-encoding": "^0.7.1",
+ "just-extend": "^4.0.2",
+ "path-to-regexp": "^1.7.0"
+ }
+ },
+ "node-fetch": {
+ "version": "2.6.1",
+ "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz",
+ "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw=="
+ },
+ "node-forge": {
+ "version": "0.9.1",
+ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.1.tgz",
+ "integrity": "sha512-G6RlQt5Sb4GMBzXvhfkeFmbqR6MzhtnT7VTHuLadjkii3rdYHNdw0m8zA4BTxVIh68FicCQ2NSUANpsqkr9jvQ=="
+ },
+ "node-pre-gyp": {
+ "version": "0.16.0",
+ "resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.16.0.tgz",
+ "integrity": "sha512-4efGA+X/YXAHLi1hN8KaPrILULaUn2nWecFrn1k2I+99HpoyvcOGEbtcOxpDiUwPF2ZANMJDh32qwOUPenuR1g==",
+ "requires": {
+ "detect-libc": "^1.0.2",
+ "mkdirp": "^0.5.3",
+ "needle": "^2.5.0",
+ "nopt": "^4.0.1",
+ "npm-packlist": "^1.1.6",
+ "npmlog": "^4.0.2",
+ "rc": "^1.2.7",
+ "rimraf": "^2.6.1",
+ "semver": "^5.3.0",
+ "tar": "^4.4.2"
+ },
+ "dependencies": {
+ "rimraf": {
+ "version": "2.7.1",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
+ "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
+ "requires": {
+ "glob": "^7.1.3"
+ }
+ }
+ }
+ },
+ "nopt": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.3.tgz",
+ "integrity": "sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==",
+ "requires": {
+ "abbrev": "1",
+ "osenv": "^0.1.4"
+ }
+ },
+ "normalize-package-data": {
+ "version": "2.5.0",
+ "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz",
+ "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==",
+ "dev": true,
+ "requires": {
+ "hosted-git-info": "^2.1.4",
+ "resolve": "^1.10.0",
+ "semver": "2 || 3 || 4 || 5",
+ "validate-npm-package-license": "^3.0.1"
+ }
+ },
+ "normalize-path": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
+ "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
+ "dev": true
+ },
+ "npm-bundled": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz",
+ "integrity": "sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==",
+ "requires": {
+ "npm-normalize-package-bin": "^1.0.1"
+ }
+ },
+ "npm-normalize-package-bin": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz",
+ "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA=="
+ },
+ "npm-packlist": {
+ "version": "1.4.8",
+ "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.8.tgz",
+ "integrity": "sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==",
+ "requires": {
+ "ignore-walk": "^3.0.1",
+ "npm-bundled": "^1.0.1",
+ "npm-normalize-package-bin": "^1.0.1"
+ }
+ },
+ "npmlog": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz",
+ "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==",
+ "requires": {
+ "are-we-there-yet": "~1.1.2",
+ "console-control-strings": "~1.1.0",
+ "gauge": "~2.7.3",
+ "set-blocking": "~2.0.0"
+ }
+ },
+ "number-is-nan": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz",
+ "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0="
+ },
+ "oauth-sign": {
+ "version": "0.9.0",
+ "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
+ "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ=="
+ },
+ "object-assign": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
+ "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="
+ },
+ "object-inspect": {
+ "version": "1.11.0",
+ "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz",
+ "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==",
+ "dev": true
+ },
+ "object-keys": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
+ "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
+ "dev": true
+ },
+ "object.assign": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz",
+ "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.0",
+ "define-properties": "^1.1.3",
+ "has-symbols": "^1.0.1",
+ "object-keys": "^1.1.1"
+ }
+ },
+ "object.values": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.4.tgz",
+ "integrity": "sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2",
+ "define-properties": "^1.1.3",
+ "es-abstract": "^1.18.2"
+ }
+ },
+ "on-finished": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz",
+ "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==",
+ "requires": {
+ "ee-first": "1.1.1"
+ }
+ },
+ "on-headers": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz",
+ "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA=="
+ },
+ "once": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+ "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
+ "requires": {
+ "wrappy": "1"
+ }
+ },
+ "optionator": {
+ "version": "0.9.1",
+ "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz",
+ "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==",
+ "dev": true,
+ "requires": {
+ "deep-is": "^0.1.3",
+ "fast-levenshtein": "^2.0.6",
+ "levn": "^0.4.1",
+ "prelude-ls": "^1.2.1",
+ "type-check": "^0.4.0",
+ "word-wrap": "^1.2.3"
+ }
+ },
+ "os-homedir": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz",
+ "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M="
+ },
+ "os-tmpdir": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
+ "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g=="
+ },
+ "osenv": {
+ "version": "0.1.5",
+ "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz",
+ "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==",
+ "requires": {
+ "os-homedir": "^1.0.0",
+ "os-tmpdir": "^1.0.0"
+ }
+ },
+ "p-limit": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
+ "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
+ "requires": {
+ "yocto-queue": "^0.1.0"
+ }
+ },
+ "p-locate": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz",
+ "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=",
+ "dev": true,
+ "requires": {
+ "p-limit": "^1.1.0"
+ },
+ "dependencies": {
+ "p-limit": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz",
+ "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==",
+ "dev": true,
+ "requires": {
+ "p-try": "^1.0.0"
+ }
+ }
+ }
+ },
+ "p-try": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz",
+ "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=",
+ "dev": true
+ },
+ "parent-module": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
+ "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
+ "dev": true,
+ "requires": {
+ "callsites": "^3.0.0"
+ }
+ },
+ "parse-duration": {
+ "version": "0.4.4",
+ "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.4.4.tgz",
+ "integrity": "sha512-KbAJuYGUhZkB9gotDiKLnZ7Z3VTacK3fgwmDdB6ZVDtJbMBT6MfLga0WJaYpPDu0mzqT0NgHtHDt5PY4l0nidg=="
+ },
+ "parse-json": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz",
+ "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=",
+ "dev": true,
+ "requires": {
+ "error-ex": "^1.3.1",
+ "json-parse-better-errors": "^1.0.1"
+ }
+ },
+ "parse-ms": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz",
+ "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA=="
+ },
+ "parseurl": {
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
+ "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="
+ },
+ "path-exists": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz",
+ "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==",
+ "dev": true
+ },
+ "path-is-absolute": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+ "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="
+ },
+ "path-key": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
+ "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+ "dev": true
+ },
+ "path-parse": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz",
+ "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw=="
+ },
+ "path-to-regexp": {
+ "version": "1.8.0",
+ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz",
+ "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==",
+ "dev": true,
+ "requires": {
+ "isarray": "0.0.1"
+ },
+ "dependencies": {
+ "isarray": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
+ "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==",
+ "dev": true
+ }
+ }
+ },
+ "path-type": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz",
+ "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==",
+ "dev": true,
+ "requires": {
+ "pify": "^3.0.0"
+ },
+ "dependencies": {
+ "pify": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz",
+ "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=",
+ "dev": true
+ }
+ }
+ },
+ "pathval": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz",
+ "integrity": "sha512-qZ181q3ICkag/+lv1X6frDUF84pqCm30qild3LGbD84n0AC75CYwnWsQRDlpz7zDkU5NVcmhHh4LjXK0goLYZA==",
+ "dev": true
+ },
+ "performance-now": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
+ "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow=="
+ },
+ "picomatch": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz",
+ "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==",
+ "dev": true
+ },
+ "pify": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz",
+ "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA=="
+ },
+ "pkg-dir": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz",
+ "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=",
+ "dev": true,
+ "requires": {
+ "find-up": "^2.1.0"
+ }
+ },
+ "pkg-up": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-2.0.0.tgz",
+ "integrity": "sha1-yBmscoBZpGHKscOImivjxJoATX8=",
+ "dev": true,
+ "requires": {
+ "find-up": "^2.1.0"
+ }
+ },
+ "pprof": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/pprof/-/pprof-3.0.0.tgz",
+ "integrity": "sha512-uPWbAhoH/zvq1kM3/Fd/wshb4D7sLlGap8t6uCTER4aZRWqqyPYgXzpjWbT0Unn5U25pEy2VREUu27nQ9o9VPA==",
+ "requires": {
+ "bindings": "^1.2.1",
+ "delay": "^4.0.1",
+ "findit2": "^2.2.3",
+ "nan": "^2.14.0",
+ "node-pre-gyp": "^0.16.0",
+ "p-limit": "^3.0.0",
+ "pify": "^5.0.0",
+ "protobufjs": "~6.10.0",
+ "source-map": "^0.7.3",
+ "split": "^1.0.1"
+ },
+ "dependencies": {
+ "@types/node": {
+ "version": "13.13.42",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.42.tgz",
+ "integrity": "sha512-g+w2QgbW7k2CWLOXzQXbO37a7v5P9ObPvYahKphdBLV5aqpbVZRhTpWCT0SMRqX1i30Aig791ZmIM2fJGL2S8A=="
+ },
+ "nan": {
+ "version": "2.14.2",
+ "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz",
+ "integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ=="
+ },
+ "protobufjs": {
+ "version": "6.10.2",
+ "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.2.tgz",
+ "integrity": "sha512-27yj+04uF6ya9l+qfpH187aqEzfCF4+Uit0I9ZBQVqK09hk/SQzKa2MUqUpXaVa7LOFRg1TSSr3lVxGOk6c0SQ==",
+ "requires": {
+ "@protobufjs/aspromise": "^1.1.2",
+ "@protobufjs/base64": "^1.1.2",
+ "@protobufjs/codegen": "^2.0.4",
+ "@protobufjs/eventemitter": "^1.1.0",
+ "@protobufjs/fetch": "^1.1.0",
+ "@protobufjs/float": "^1.0.2",
+ "@protobufjs/inquire": "^1.1.0",
+ "@protobufjs/path": "^1.1.2",
+ "@protobufjs/pool": "^1.1.0",
+ "@protobufjs/utf8": "^1.1.0",
+ "@types/long": "^4.0.1",
+ "@types/node": "^13.7.0",
+ "long": "^4.0.0"
+ }
+ },
+ "source-map": {
+ "version": "0.7.3",
+ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
+ "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ=="
+ }
+ }
+ },
+ "prelude-ls": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
+ "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
+ "dev": true
+ },
+ "prettier": {
+ "version": "2.3.2",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.3.2.tgz",
+ "integrity": "sha512-lnJzDfJ66zkMy58OL5/NY5zp70S7Nz6KqcKkXYzn2tMVrNxvbqaBpg7H3qHaLxCJ5lNMsGuM8+ohS7cZrthdLQ==",
+ "dev": true
+ },
+ "prettier-linter-helpers": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz",
+ "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==",
+ "dev": true,
+ "requires": {
+ "fast-diff": "^1.1.2"
+ }
+ },
+ "pretty-ms": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz",
+ "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==",
+ "requires": {
+ "parse-ms": "^2.1.0"
+ }
+ },
+ "process-nextick-args": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz",
+ "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw=="
+ },
+ "progress": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz",
+ "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==",
+ "dev": true
+ },
+ "prom-client": {
+ "version": "11.5.3",
+ "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-11.5.3.tgz",
+ "integrity": "sha512-iz22FmTbtkyL2vt0MdDFY+kWof+S9UB/NACxSn2aJcewtw+EERsen0urSkZ2WrHseNdydsvcxCTAnPcSMZZv4Q==",
+ "requires": {
+ "tdigest": "^0.1.1"
+ }
+ },
+ "protobufjs": {
+ "version": "6.8.9",
+ "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.9.tgz",
+ "integrity": "sha512-j2JlRdUeL/f4Z6x4aU4gj9I2LECglC+5qR2TrWb193Tla1qfdaNQTZ8I27Pt7K0Ajmvjjpft7O3KWTGciz4gpw==",
+ "requires": {
+ "@protobufjs/aspromise": "^1.1.2",
+ "@protobufjs/base64": "^1.1.2",
+ "@protobufjs/codegen": "^2.0.4",
+ "@protobufjs/eventemitter": "^1.1.0",
+ "@protobufjs/fetch": "^1.1.0",
+ "@protobufjs/float": "^1.0.2",
+ "@protobufjs/inquire": "^1.1.0",
+ "@protobufjs/path": "^1.1.2",
+ "@protobufjs/pool": "^1.1.0",
+ "@protobufjs/utf8": "^1.1.0",
+ "@types/long": "^4.0.0",
+ "@types/node": "^10.1.0",
+ "long": "^4.0.0"
+ }
+ },
+ "proxy-addr": {
+ "version": "2.0.6",
+ "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz",
+ "integrity": "sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==",
+ "requires": {
+ "forwarded": "~0.1.2",
+ "ipaddr.js": "1.9.1"
+ }
+ },
+ "psl": {
+ "version": "1.1.31",
+ "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz",
+ "integrity": "sha512-/6pt4+C+T+wZUieKR620OpzN/LlnNKuWjy1iFLQ/UG35JqHlR/89MP1d96dUfkf6Dne3TuLQzOYEYshJ+Hx8mw=="
+ },
+ "pump": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
+ "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==",
+ "requires": {
+ "end-of-stream": "^1.1.0",
+ "once": "^1.3.1"
+ }
+ },
+ "pumpify": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-2.0.1.tgz",
+ "integrity": "sha512-m7KOje7jZxrmutanlkS1daj1dS6z6BgslzOXmcSEpIlCxM3VJH7lG5QLeck/6hgF6F4crFf01UtQmNsJfweTAw==",
+ "requires": {
+ "duplexify": "^4.1.1",
+ "inherits": "^2.0.3",
+ "pump": "^3.0.0"
+ },
+ "dependencies": {
+ "duplexify": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz",
+ "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==",
+ "requires": {
+ "end-of-stream": "^1.4.1",
+ "inherits": "^2.0.3",
+ "readable-stream": "^3.1.1",
+ "stream-shift": "^1.0.0"
+ }
+ },
+ "readable-stream": {
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
+ "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
+ "requires": {
+ "inherits": "^2.0.3",
+ "string_decoder": "^1.1.1",
+ "util-deprecate": "^1.0.1"
+ }
+ }
+ }
+ },
+ "punycode": {
+ "version": "1.3.2",
+ "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
+ "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw=="
+ },
+ "qs": {
+ "version": "6.7.0",
+ "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz",
+ "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ=="
+ },
+ "querystring": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz",
+ "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g=="
+ },
+ "ramda": {
+ "version": "0.27.1",
+ "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.27.1.tgz",
+ "integrity": "sha512-PgIdVpn5y5Yns8vqb8FzBUEYn98V3xcPgawAkkgj0YJ0qDsnHCiNmZYfOGMgOvoB0eWFLpYbhxUR3mxfDIMvpw==",
+ "dev": true
+ },
+ "randombytes": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
+ "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==",
+ "dev": true,
+ "requires": {
+ "safe-buffer": "^5.1.0"
+ }
+ },
+ "range-parser": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz",
+ "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="
+ },
+ "raven": {
+ "version": "2.6.4",
+ "resolved": "https://registry.npmjs.org/raven/-/raven-2.6.4.tgz",
+ "integrity": "sha512-6PQdfC4+DQSFncowthLf+B6Hr0JpPsFBgTVYTAOq7tCmx/kR4SXbeawtPch20+3QfUcQDoJBLjWW1ybvZ4kXTw==",
+ "requires": {
+ "cookie": "0.3.1",
+ "md5": "^2.2.1",
+ "stack-trace": "0.0.10",
+ "timed-out": "4.0.1",
+ "uuid": "3.3.2"
+ },
+ "dependencies": {
+ "cookie": {
+ "version": "0.3.1",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz",
+ "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s="
+ },
+ "stack-trace": {
+ "version": "0.0.10",
+ "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz",
+ "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA="
+ },
+ "uuid": {
+ "version": "3.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
+ "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA=="
+ }
+ }
+ },
+ "raw-body": {
+ "version": "2.4.0",
+ "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz",
+ "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==",
+ "requires": {
+ "bytes": "3.1.0",
+ "http-errors": "1.7.2",
+ "iconv-lite": "0.4.24",
+ "unpipe": "1.0.0"
+ }
+ },
+ "rc": {
+ "version": "1.2.8",
+ "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
+ "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==",
+ "requires": {
+ "deep-extend": "^0.6.0",
+ "ini": "~1.3.0",
+ "minimist": "^1.2.0",
+ "strip-json-comments": "~2.0.1"
+ },
+ "dependencies": {
+ "strip-json-comments": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
+ "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo="
+ }
+ }
+ },
+ "read-pkg": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz",
+ "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=",
+ "dev": true,
+ "requires": {
+ "load-json-file": "^4.0.0",
+ "normalize-package-data": "^2.3.2",
+ "path-type": "^3.0.0"
+ }
+ },
+ "read-pkg-up": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz",
+ "integrity": "sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=",
+ "dev": true,
+ "requires": {
+ "find-up": "^2.0.0",
+ "read-pkg": "^3.0.0"
+ }
+ },
+ "readable-stream": {
+ "version": "2.3.6",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz",
+ "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==",
+ "requires": {
+ "core-util-is": "~1.0.0",
+ "inherits": "~2.0.3",
+ "isarray": "~1.0.0",
+ "process-nextick-args": "~2.0.0",
+ "safe-buffer": "~5.1.1",
+ "string_decoder": "~1.1.1",
+ "util-deprecate": "~1.0.1"
+ }
+ },
+ "readdirp": {
+ "version": "3.5.0",
+ "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz",
+ "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==",
+ "dev": true,
+ "requires": {
+ "picomatch": "^2.2.1"
+ }
+ },
+ "redis": {
+ "version": "0.10.3",
+ "resolved": "https://registry.npmjs.org/redis/-/redis-0.10.3.tgz",
+ "integrity": "sha512-oVblZDvWa0J03abo+nB4X7XlIlrfp7UF57fQmHe9X7aO5y/aUuxxXOyq8ZdjhMTnto3n9fQXX2flhXU7DP6RYQ=="
+ },
+ "redis-commands": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz",
+ "integrity": "sha512-6KxamqpZ468MeQC3bkWmCB1fp56XL64D4Kf0zJSwDZbVLLm7KFkoIcHrgRvQ+sk8dnhySs7+yBg94yIkAK7aJg=="
+ },
+ "redis-errors": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz",
+ "integrity": "sha1-62LSrbFeTq9GEMBK/hUpOEJQq60="
+ },
+ "redis-parser": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz",
+ "integrity": "sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ=",
+ "requires": {
+ "redis-errors": "^1.0.0"
+ }
+ },
+ "regexpp": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz",
+ "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==",
+ "dev": true
+ },
+ "request": {
+ "version": "2.88.2",
+ "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
+ "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
+ "requires": {
+ "aws-sign2": "~0.7.0",
+ "aws4": "^1.8.0",
+ "caseless": "~0.12.0",
+ "combined-stream": "~1.0.6",
+ "extend": "~3.0.2",
+ "forever-agent": "~0.6.1",
+ "form-data": "~2.3.2",
+ "har-validator": "~5.1.3",
+ "http-signature": "~1.2.0",
+ "is-typedarray": "~1.0.0",
+ "isstream": "~0.1.2",
+ "json-stringify-safe": "~5.0.1",
+ "mime-types": "~2.1.19",
+ "oauth-sign": "~0.9.0",
+ "performance-now": "^2.1.0",
+ "qs": "~6.5.2",
+ "safe-buffer": "^5.1.2",
+ "tough-cookie": "~2.5.0",
+ "tunnel-agent": "^0.6.0",
+ "uuid": "^3.3.2"
+ },
+ "dependencies": {
+ "qs": {
+ "version": "6.5.2",
+ "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz",
+ "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA=="
+ },
+ "uuid": {
+ "version": "3.4.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
+ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
+ }
+ }
+ },
+ "requestretry": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-4.1.0.tgz",
+ "integrity": "sha512-q3IT2vz5vkcMT6xgwB/BWzsmnu7N/27l9fW86U48gt9Mwrce5rSEyFvpAW7Il1/B78/NBUlYBvcCY1RzWUWy7w==",
+ "requires": {
+ "extend": "^3.0.2",
+ "lodash": "^4.17.10",
+ "when": "^3.7.7"
+ }
+ },
+ "require-directory": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
+ "dev": true
+ },
+ "require-from-string": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
+ "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
+ "dev": true
+ },
+ "require-in-the-middle": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-5.1.0.tgz",
+ "integrity": "sha512-M2rLKVupQfJ5lf9OvqFGIT+9iVLnTmjgbOmpil12hiSQNn5zJTKGPoIisETNjfK+09vP3rpm1zJajmErpr2sEQ==",
+ "requires": {
+ "debug": "^4.1.1",
+ "module-details-from-path": "^1.0.3",
+ "resolve": "^1.12.0"
+ }
+ },
+ "require-like": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz",
+ "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==",
+ "dev": true
+ },
+ "require_optional": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz",
+ "integrity": "sha512-qhM/y57enGWHAe3v/NcwML6a3/vfESLe/sGM2dII+gEO0BpKRUkWZow/tyloNqJyN6kXSl3RyyM8Ll5D/sJP8g==",
+ "requires": {
+ "resolve-from": "^2.0.0",
+ "semver": "^5.1.0"
+ }
+ },
+ "resolve": {
+ "version": "1.20.0",
+ "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz",
+ "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==",
+ "requires": {
+ "is-core-module": "^2.2.0",
+ "path-parse": "^1.0.6"
+ }
+ },
+ "resolve-from": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz",
+ "integrity": "sha512-qpFcKaXsq8+oRoLilkwyc7zHGF5i9Q2/25NIgLQQ/+VVv9rU4qvr6nXVAw1DsnXJyQkZsR4Ytfbtg5ehfcUssQ=="
+ },
+ "retry-request": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.1.1.tgz",
+ "integrity": "sha512-BINDzVtLI2BDukjWmjAIRZ0oglnCAkpP2vQjM3jdLhmT62h0xnQgciPwBRDAvHqpkPT2Wo1XuUyLyn6nbGrZQQ==",
+ "requires": {
+ "debug": "^4.1.1",
+ "through2": "^3.0.1"
+ }
+ },
+ "rimraf": {
+ "version": "2.4.5",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz",
+ "integrity": "sha512-J5xnxTyqaiw06JjMftq7L9ouA448dw/E7dKghkP9WpKNuwmARNNg+Gk8/u5ryb9N/Yo2+z3MCwuqFK/+qPOPfQ==",
+ "optional": true,
+ "requires": {
+ "glob": "^6.0.1"
+ },
+ "dependencies": {
+ "glob": {
+ "version": "6.0.4",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz",
+ "integrity": "sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==",
+ "optional": true,
+ "requires": {
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "2 || 3",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ }
+ }
+ }
+ },
+ "s3-streams": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/s3-streams/-/s3-streams-0.4.0.tgz",
+ "integrity": "sha512-DtZ7w3A0EorzHdhh00U3p7O2c2hv2w/i+A1JATAJZubp+fnwlU8MiejJibAzMLhhCRv+UsfimSGoivWt2Y4JsQ==",
+ "requires": {
+ "bluebird": "^3.5.3",
+ "lodash": "^4.17.11",
+ "readable-stream": "^3.1.1"
+ },
+ "dependencies": {
+ "readable-stream": {
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
+ "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
+ "requires": {
+ "inherits": "^2.0.3",
+ "string_decoder": "^1.1.1",
+ "util-deprecate": "^1.0.1"
+ }
+ }
+ }
+ },
+ "safe-buffer": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
+ "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
+ },
+ "safe-json-stringify": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz",
+ "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==",
+ "optional": true
+ },
+ "safer-buffer": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
+ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
+ },
+ "sandboxed-module": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.3.tgz",
+ "integrity": "sha512-wXiA6ULoGjCDwjn6evQF/Qi+oe77P+aCxizUktLBBKdqNbTxwec4GySJcS+O7iZFhme2ex04m+14KgknKKqFsw==",
+ "dev": true,
+ "requires": {
+ "require-like": "0.1.2",
+ "stack-trace": "0.0.9"
+ }
+ },
+ "saslprep": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz",
+ "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==",
+ "optional": true,
+ "requires": {
+ "sparse-bitfield": "^3.0.3"
+ }
+ },
+ "sax": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz",
+ "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA=="
+ },
+ "semver": {
+ "version": "5.6.0",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz",
+ "integrity": "sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg=="
+ },
+ "send": {
+ "version": "0.17.1",
+ "resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz",
+ "integrity": "sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==",
+ "requires": {
+ "debug": "2.6.9",
+ "depd": "~1.1.2",
+ "destroy": "~1.0.4",
+ "encodeurl": "~1.0.2",
+ "escape-html": "~1.0.3",
+ "etag": "~1.8.1",
+ "fresh": "0.5.2",
+ "http-errors": "~1.7.2",
+ "mime": "1.6.0",
+ "ms": "2.1.1",
+ "on-finished": "~2.3.0",
+ "range-parser": "~1.2.1",
+ "statuses": "~1.5.0"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "2.6.9",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+ "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+ "requires": {
+ "ms": "2.0.0"
+ },
+ "dependencies": {
+ "ms": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
+ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
+ }
+ }
+ }
+ }
+ },
+ "serialize-javascript": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz",
+ "integrity": "sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==",
+ "dev": true,
+ "requires": {
+ "randombytes": "^2.1.0"
+ }
+ },
+ "serve-static": {
+ "version": "1.14.1",
+ "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz",
+ "integrity": "sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==",
+ "requires": {
+ "encodeurl": "~1.0.2",
+ "escape-html": "~1.0.3",
+ "parseurl": "~1.3.3",
+ "send": "0.17.1"
+ }
+ },
+ "set-blocking": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
+ "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw=="
+ },
+ "setprototypeof": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz",
+ "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw=="
+ },
+ "shebang-command": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+ "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+ "dev": true,
+ "requires": {
+ "shebang-regex": "^3.0.0"
+ }
+ },
+ "shebang-regex": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
+ "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+ "dev": true
+ },
+ "shimmer": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz",
+ "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw=="
+ },
+ "signal-exit": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz",
+ "integrity": "sha512-meQNNykwecVxdu1RlYMKpQx4+wefIYpmxi6gexo/KAbwquJrBUrBmKYJrE8KFkVQAAVWEnwNdu21PgrD77J3xA=="
+ },
+ "sinon": {
+ "version": "9.0.1",
+ "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.1.tgz",
+ "integrity": "sha512-iTTyiQo5T94jrOx7X7QLBZyucUJ2WvL9J13+96HMfm2CGoJYbIPqRfl6wgNcqmzk0DI28jeGx5bUTXizkrqBmg==",
+ "dev": true,
+ "requires": {
+ "@sinonjs/commons": "^1.7.0",
+ "@sinonjs/fake-timers": "^6.0.0",
+ "@sinonjs/formatio": "^5.0.1",
+ "@sinonjs/samsam": "^5.0.3",
+ "diff": "^4.0.2",
+ "nise": "^4.0.1",
+ "supports-color": "^7.1.0"
+ },
+ "dependencies": {
+ "diff": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
+ "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
+ "dev": true
+ },
+ "has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true
+ },
+ "supports-color": {
+ "version": "7.1.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz",
+ "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^4.0.0"
+ }
+ }
+ }
+ },
+ "slice-ansi": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz",
+ "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^4.0.0",
+ "astral-regex": "^2.0.0",
+ "is-fullwidth-code-point": "^3.0.0"
+ },
+ "dependencies": {
+ "ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true
+ }
+ }
+ },
+ "snakecase-keys": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.2.0.tgz",
+ "integrity": "sha512-WTJ0NhCH/37J+PU3fuz0x5b6TvtWQChTcKPOndWoUy0pteKOe0hrHMzSRsJOWSIP48EQkzUEsgQPmrG3W8pFNQ==",
+ "requires": {
+ "map-obj": "^4.0.0",
+ "to-snake-case": "^1.0.0"
+ }
+ },
+ "source-map": {
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
+ "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
+ },
+ "source-map-support": {
+ "version": "0.5.19",
+ "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz",
+ "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==",
+ "requires": {
+ "buffer-from": "^1.0.0",
+ "source-map": "^0.6.0"
+ }
+ },
+ "sparse-bitfield": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz",
+ "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==",
+ "optional": true,
+ "requires": {
+ "memory-pager": "^1.0.2"
+ }
+ },
+ "spdx-correct": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz",
+ "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==",
+ "dev": true,
+ "requires": {
+ "spdx-expression-parse": "^3.0.0",
+ "spdx-license-ids": "^3.0.0"
+ }
+ },
+ "spdx-exceptions": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz",
+ "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==",
+ "dev": true
+ },
+ "spdx-expression-parse": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz",
+ "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==",
+ "dev": true,
+ "requires": {
+ "spdx-exceptions": "^2.1.0",
+ "spdx-license-ids": "^3.0.0"
+ }
+ },
+ "spdx-license-ids": {
+ "version": "3.0.9",
+ "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.9.tgz",
+ "integrity": "sha512-Ki212dKK4ogX+xDo4CtOZBVIwhsKBEfsEEcwmJfLQzirgc2jIWdzg40Unxz/HzEUqM1WFzVlQSMF9kZZ2HboLQ==",
+ "dev": true
+ },
+ "split": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz",
+ "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==",
+ "requires": {
+ "through": "2"
+ }
+ },
+ "sprintf-js": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
+ "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
+ "dev": true
+ },
+ "sshpk": {
+ "version": "1.16.0",
+ "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.0.tgz",
+ "integrity": "sha512-Zhev35/y7hRMcID/upReIvRse+I9SVhyVre/KTJSJQWMz3C3+G+HpO7m1wK/yckEtujKZ7dS4hkVxAnmHaIGVQ==",
+ "requires": {
+ "asn1": "~0.2.3",
+ "assert-plus": "^1.0.0",
+ "bcrypt-pbkdf": "^1.0.0",
+ "dashdash": "^1.12.0",
+ "ecc-jsbn": "~0.1.1",
+ "getpass": "^0.1.1",
+ "jsbn": "~0.1.0",
+ "safer-buffer": "^2.0.2",
+ "tweetnacl": "~0.14.0"
+ }
+ },
+ "stack-trace": {
+ "version": "0.0.9",
+ "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz",
+ "integrity": "sha512-vjUc6sfgtgY0dxCdnc40mK6Oftjo9+2K8H/NG81TMhgL392FtiPA9tn9RLyTxXmTLPJPjF3VyzFp6bsWFLisMQ==",
+ "dev": true
+ },
+ "standard-as-callback": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz",
+ "integrity": "sha512-NQOxSeB8gOI5WjSaxjBgog2QFw55FV8TkS6Y07BiB3VJ8xNTvUYm0wl0s8ObgQ5NhdpnNfigMIKjgPESzgr4tg=="
+ },
+ "statuses": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz",
+ "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA=="
+ },
+ "stream-events": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz",
+ "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==",
+ "requires": {
+ "stubs": "^3.0.0"
+ }
+ },
+ "stream-shift": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz",
+ "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ=="
+ },
+ "string-width": {
+ "version": "4.2.2",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
+ "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==",
+ "dev": true,
+ "requires": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.0"
+ }
+ },
+ "string.prototype.trimend": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz",
+ "integrity": "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2",
+ "define-properties": "^1.1.3"
+ }
+ },
+ "string.prototype.trimstart": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz",
+ "integrity": "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==",
+ "dev": true,
+ "requires": {
+ "call-bind": "^1.0.2",
+ "define-properties": "^1.1.3"
+ }
+ },
+ "string_decoder": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
+ "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
+ "requires": {
+ "safe-buffer": "~5.1.0"
+ }
+ },
+ "strip-ansi": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz",
+ "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^5.0.0"
+ }
+ },
+ "strip-bom": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz",
+ "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=",
+ "dev": true
+ },
+ "strip-json-comments": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
+ "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
+ "dev": true
+ },
+ "stubs": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
+ "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls="
+ },
+ "supports-color": {
+ "version": "5.5.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
+ "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^3.0.0"
+ }
+ },
+ "table": {
+ "version": "6.7.1",
+ "resolved": "https://registry.npmjs.org/table/-/table-6.7.1.tgz",
+ "integrity": "sha512-ZGum47Yi6KOOFDE8m223td53ath2enHcYLgOCjGr5ngu8bdIARQk6mN/wRMv4yMRcHnCSnHbCEha4sobQx5yWg==",
+ "dev": true,
+ "requires": {
+ "ajv": "^8.0.1",
+ "lodash.clonedeep": "^4.5.0",
+ "lodash.truncate": "^4.4.2",
+ "slice-ansi": "^4.0.0",
+ "string-width": "^4.2.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "dependencies": {
+ "ajv": {
+ "version": "8.6.1",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.6.1.tgz",
+ "integrity": "sha512-42VLtQUOLefAvKFAQIxIZDaThq6om/PrfP0CYk3/vn+y4BMNkKnbli8ON2QCiHov4KkzOSJ/xSoBJdayiiYvVQ==",
+ "dev": true,
+ "requires": {
+ "json-schema-traverse": "^1.0.0",
+ "require-from-string": "^2.0.2",
+ "uri-js": "^4.2.2"
+ }
+ },
+ "json-schema-traverse": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
+ "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
+ "dev": true
+ }
+ }
+ },
+ "tar": {
+ "version": "4.4.13",
+ "resolved": "https://registry.npmjs.org/tar/-/tar-4.4.13.tgz",
+ "integrity": "sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==",
+ "requires": {
+ "chownr": "^1.1.1",
+ "fs-minipass": "^1.2.5",
+ "minipass": "^2.8.6",
+ "minizlib": "^1.2.1",
+ "mkdirp": "^0.5.0",
+ "safe-buffer": "^5.1.2",
+ "yallist": "^3.0.3"
+ }
+ },
+ "tdigest": {
+ "version": "0.1.1",
+ "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz",
+ "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=",
+ "requires": {
+ "bintrees": "1.0.1"
+ }
+ },
+ "teeny-request": {
+ "version": "6.0.3",
+ "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-6.0.3.tgz",
+ "integrity": "sha512-TZG/dfd2r6yeji19es1cUIwAlVD8y+/svB1kAC2Y0bjEyysrfbO8EZvJBRwIE6WkwmUoB7uvWLwTIhJbMXZ1Dw==",
+ "requires": {
+ "http-proxy-agent": "^4.0.0",
+ "https-proxy-agent": "^5.0.0",
+ "node-fetch": "^2.2.0",
+ "stream-events": "^1.0.5",
+ "uuid": "^7.0.0"
+ },
+ "dependencies": {
+ "uuid": {
+ "version": "7.0.3",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.3.tgz",
+ "integrity": "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg=="
+ }
+ }
+ },
+ "text-table": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
+ "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
+ "dev": true
+ },
+ "through": {
+ "version": "2.3.8",
+ "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
+ "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg=="
+ },
+ "through2": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz",
+ "integrity": "sha512-M96dvTalPT3YbYLaKaCuwu+j06D/8Jfib0o/PxbVt6Amhv3dUAtW6rTV1jPgJSBG83I/e04Y6xkVdVhSRhi0ww==",
+ "requires": {
+ "readable-stream": "2 || 3"
+ }
+ },
+ "timed-out": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz",
+ "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8="
+ },
+ "timekeeper": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz",
+ "integrity": "sha512-W3AmPTJWZkRwu+iSNxPIsLZ2ByADsOLbbLxe46UJyWj3mlYLlwucKiq+/dPm0l9wTzqoF3/2PH0AGFCebjq23A==",
+ "dev": true
+ },
+ "to-no-case": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz",
+ "integrity": "sha1-xyKQcWTvaxeBMsjmmTAhLRtKoWo="
+ },
+ "to-regex-range": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
+ "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
+ "dev": true,
+ "requires": {
+ "is-number": "^7.0.0"
+ }
+ },
+ "to-snake-case": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz",
+ "integrity": "sha1-znRpE4l5RgGah+Yu366upMYIq4w=",
+ "requires": {
+ "to-space-case": "^1.0.0"
+ }
+ },
+ "to-space-case": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz",
+ "integrity": "sha1-sFLar7Gysp3HcM6gFj5ewOvJ/Bc=",
+ "requires": {
+ "to-no-case": "^1.0.0"
+ }
+ },
+ "toidentifier": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz",
+ "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw=="
+ },
+ "tough-cookie": {
+ "version": "2.5.0",
+ "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
+ "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
+ "requires": {
+ "psl": "^1.1.28",
+ "punycode": "^2.1.1"
+ },
+ "dependencies": {
+ "punycode": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
+ "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
+ }
+ }
+ },
+ "tsconfig-paths": {
+ "version": "3.10.1",
+ "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.10.1.tgz",
+ "integrity": "sha512-rETidPDgCpltxF7MjBZlAFPUHv5aHH2MymyPvh+vEyWAED4Eb/WeMbsnD/JDr4OKPOA1TssDHgIcpTN5Kh0p6Q==",
+ "dev": true,
+ "requires": {
+ "json5": "^2.2.0",
+ "minimist": "^1.2.0",
+ "strip-bom": "^3.0.0"
+ }
+ },
+ "tunnel-agent": {
+ "version": "0.6.0",
+ "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
+ "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==",
+ "requires": {
+ "safe-buffer": "^5.0.1"
+ }
+ },
+ "tweetnacl": {
+ "version": "0.14.5",
+ "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
+ "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
+ },
+ "type-check": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
+ "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
+ "dev": true,
+ "requires": {
+ "prelude-ls": "^1.2.1"
+ }
+ },
+ "type-detect": {
+ "version": "4.0.8",
+ "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz",
+ "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==",
+ "dev": true
+ },
+ "type-fest": {
+ "version": "0.20.2",
+ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
+ "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
+ "dev": true
+ },
+ "type-is": {
+ "version": "1.6.18",
+ "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
+ "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
+ "requires": {
+ "media-typer": "0.3.0",
+ "mime-types": "~2.1.24"
+ },
+ "dependencies": {
+ "mime-db": {
+ "version": "1.43.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz",
+ "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ=="
+ },
+ "mime-types": {
+ "version": "2.1.26",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz",
+ "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==",
+ "requires": {
+ "mime-db": "1.43.0"
+ }
+ }
+ }
+ },
+ "unbox-primitive": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz",
+ "integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==",
+ "dev": true,
+ "requires": {
+ "function-bind": "^1.1.1",
+ "has-bigints": "^1.0.1",
+ "has-symbols": "^1.0.2",
+ "which-boxed-primitive": "^1.0.2"
+ },
+ "dependencies": {
+ "has-symbols": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz",
+ "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==",
+ "dev": true
+ }
+ }
+ },
+ "underscore": {
+ "version": "1.13.1",
+ "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.1.tgz",
+ "integrity": "sha512-hzSoAVtJF+3ZtiFX0VgfFPHEDRm7Y/QPjGyNo4TVdnDTdft3tr8hEkD25a1jC+TjTuE7tkHGKkhwCgs9dgBB2g=="
+ },
+ "unpipe": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
+ "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="
+ },
+ "uri-js": {
+ "version": "4.2.2",
+ "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz",
+ "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==",
+ "requires": {
+ "punycode": "^2.1.0"
+ },
+ "dependencies": {
+ "punycode": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
+ "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
+ }
+ }
+ },
+ "url": {
+ "version": "0.10.3",
+ "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz",
+ "integrity": "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==",
+ "requires": {
+ "punycode": "1.3.2",
+ "querystring": "0.2.0"
+ }
+ },
+ "util-deprecate": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
+ "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
+ },
+ "utils-merge": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
+ "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="
+ },
+ "uuid": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz",
+ "integrity": "sha512-DIWtzUkw04M4k3bf1IcpS2tngXEL26YUD2M0tMDUpnUrz2hgzUBlD55a4FjdLGPvfHxS6uluGWvaVEqgBcVa+g=="
+ },
+ "v8-compile-cache": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz",
+ "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==",
+ "dev": true
+ },
+ "validate-npm-package-license": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz",
+ "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==",
+ "dev": true,
+ "requires": {
+ "spdx-correct": "^3.0.0",
+ "spdx-expression-parse": "^3.0.0"
+ }
+ },
+ "vary": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
+ "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="
+ },
+ "verror": {
+ "version": "1.10.0",
+ "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
+ "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==",
+ "requires": {
+ "assert-plus": "^1.0.0",
+ "core-util-is": "1.0.2",
+ "extsprintf": "^1.2.0"
+ }
+ },
+ "walkdir": {
+ "version": "0.4.1",
+ "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz",
+ "integrity": "sha512-3eBwRyEln6E1MSzcxcVpQIhRG8Q1jLvEqRmCZqS3dsfXEDR/AhOF4d+jHg1qvDCpYaVRZjENPQyrVxAkQqxPgQ=="
+ },
+ "when": {
+ "version": "3.7.8",
+ "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz",
+ "integrity": "sha512-5cZ7mecD3eYcMiCH4wtRPA5iFJZ50BJYDfckI5RRpQiktMiYTcn0ccLTZOvcbBume+1304fQztxeNzNS9Gvrnw=="
+ },
+ "which": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+ "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "dev": true,
+ "requires": {
+ "isexe": "^2.0.0"
+ }
+ },
+ "which-boxed-primitive": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz",
+ "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==",
+ "dev": true,
+ "requires": {
+ "is-bigint": "^1.0.1",
+ "is-boolean-object": "^1.1.0",
+ "is-number-object": "^1.0.4",
+ "is-string": "^1.0.5",
+ "is-symbol": "^1.0.3"
+ }
+ },
+ "wide-align": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz",
+ "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==",
+ "requires": {
+ "string-width": "^1.0.2 || 2"
+ },
+ "dependencies": {
+ "ansi-regex": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz",
+ "integrity": "sha512-wFUFA5bg5dviipbQQ32yOQhl6gcJaJXiHE7dvR8VYPG97+J/GNC5FKGepKdEDUFeXRzDxPF1X/Btc8L+v7oqIQ=="
+ },
+ "is-fullwidth-code-point": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz",
+ "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w=="
+ },
+ "string-width": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz",
+ "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==",
+ "requires": {
+ "is-fullwidth-code-point": "^2.0.0",
+ "strip-ansi": "^4.0.0"
+ }
+ },
+ "strip-ansi": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
+ "integrity": "sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==",
+ "requires": {
+ "ansi-regex": "^3.0.0"
+ }
+ }
+ }
+ },
+ "word-wrap": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
+ "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
+ "dev": true
+ },
+ "workerpool": {
+ "version": "6.1.0",
+ "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.1.0.tgz",
+ "integrity": "sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg==",
+ "dev": true
+ },
+ "wrap-ansi": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "dev": true,
+ "requires": {
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ }
+ },
+ "wrappy": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
+ },
+ "xml2js": {
+ "version": "0.4.19",
+ "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz",
+ "integrity": "sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==",
+ "requires": {
+ "sax": ">=0.6.0",
+ "xmlbuilder": "~9.0.1"
+ }
+ },
+ "xmlbuilder": {
+ "version": "9.0.7",
+ "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz",
+ "integrity": "sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ=="
+ },
+ "y18n": {
+ "version": "5.0.8",
+ "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
+ "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
+ "dev": true
+ },
+ "yallist": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
+ "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="
+ },
+ "yargs": {
+ "version": "16.2.0",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
+ "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
+ "dev": true,
+ "requires": {
+ "cliui": "^7.0.2",
+ "escalade": "^3.1.1",
+ "get-caller-file": "^2.0.5",
+ "require-directory": "^2.1.1",
+ "string-width": "^4.2.0",
+ "y18n": "^5.0.5",
+ "yargs-parser": "^20.2.2"
+ }
+ },
+ "yargs-parser": {
+ "version": "20.2.4",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz",
+ "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==",
+ "dev": true
+ },
+ "yargs-unparser": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz",
+ "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==",
+ "dev": true,
+ "requires": {
+ "camelcase": "^6.0.0",
+ "decamelize": "^4.0.0",
+ "flat": "^5.0.2",
+ "is-plain-obj": "^2.1.0"
+ }
+ },
+ "yn": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
+ "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q=="
+ },
+ "yocto-queue": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
+ "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="
+ }
+ }
+}
diff --git a/services/track-changes/package.json b/services/track-changes/package.json
new file mode 100644
index 0000000000..d3a2d990db
--- /dev/null
+++ b/services/track-changes/package.json
@@ -0,0 +1,66 @@
+{
+ "name": "history-sharelatex",
+ "version": "0.1.4",
+ "description": "An API for saving and compressing individual document updates into a browsable history",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/sharelatex/track-changes-sharelatex.git"
+ },
+ "scripts": {
+ "start": "node $NODE_APP_OPTIONS app.js",
+ "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
+ "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
+ "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
+ "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
+ "nodemon": "nodemon --config nodemon.json",
+ "lint": "eslint --max-warnings 0 --format unix .",
+ "format": "prettier --list-different $PWD/'**/*.js'",
+ "format:fix": "prettier --write $PWD/'**/*.js'",
+ "lint:fix": "eslint --fix ."
+ },
+ "dependencies": {
+ "@overleaf/metrics": "^3.5.1",
+ "@overleaf/o-error": "^3.1.0",
+ "@overleaf/redis-wrapper": "^2.0.0",
+ "@overleaf/settings": "^2.1.1",
+ "JSONStream": "^1.3.5",
+ "async": "^2.6.3",
+ "aws-sdk": "^2.643.0",
+ "body-parser": "^1.19.0",
+ "bson": "^1.1.5",
+ "bunyan": "^1.8.15",
+ "byline": "^5.0.0",
+ "express": "4.17.1",
+ "heap": "^0.2.6",
+ "line-reader": "^0.4.0",
+ "logger-sharelatex": "^2.2.0",
+ "mongo-uri": "^0.1.2",
+ "mongodb": "^3.6.0",
+ "redis": "~0.10.1",
+ "request": "~2.88.2",
+ "requestretry": "^4.1.0",
+ "s3-streams": "^0.4.0",
+ "underscore": "~1.13.1"
+ },
+ "devDependencies": {
+ "chai": "^4.2.0",
+ "chai-as-promised": "^7.1.1",
+ "cli": "^1.0.1",
+ "eslint": "^7.21.0",
+ "eslint-config-prettier": "^8.1.0",
+ "eslint-config-standard": "^16.0.2",
+ "eslint-plugin-chai-expect": "^2.2.0",
+ "eslint-plugin-chai-friendly": "^0.6.0",
+ "eslint-plugin-import": "^2.22.1",
+ "eslint-plugin-mocha": "^8.0.0",
+ "eslint-plugin-node": "^11.1.0",
+ "eslint-plugin-prettier": "^3.1.2",
+ "eslint-plugin-promise": "^4.2.1",
+ "memorystream": "0.3.1",
+ "mocha": "^8.3.2",
+ "prettier": "^2.2.1",
+ "sandboxed-module": "~2.0.3",
+ "sinon": "~9.0.1",
+ "timekeeper": "2.2.0"
+ }
+}
diff --git a/services/track-changes/test/acceptance/deps/Dockerfile.s3mock b/services/track-changes/test/acceptance/deps/Dockerfile.s3mock
new file mode 100644
index 0000000000..15eda4dd4b
--- /dev/null
+++ b/services/track-changes/test/acceptance/deps/Dockerfile.s3mock
@@ -0,0 +1,4 @@
+FROM adobe/s3mock
+RUN apk add --update --no-cache curl
+COPY healthcheck.sh /healthcheck.sh
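+# S3Mock serves HTTP on port 9090 by default, hence the healthcheck target below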
+HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090
diff --git a/services/track-changes/test/acceptance/deps/healthcheck.sh b/services/track-changes/test/acceptance/deps/healthcheck.sh
new file mode 100644
index 0000000000..cd19cea637
--- /dev/null
+++ b/services/track-changes/test/acceptance/deps/healthcheck.sh
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+# health check that treats a 404 status code as valid
+STATUSCODE=$(curl --silent --output /dev/null --write-out "%{http_code}" $1)
+# will be 000 on non-http error (e.g. connection failure)
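+# i.e. treat any status in the 200-499 range as healthy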
+if test $STATUSCODE -ge 500 || test $STATUSCODE -lt 200; then
+ exit 1
+fi
+exit 0
diff --git a/services/track-changes/test/acceptance/js/AppendingUpdatesTests.js b/services/track-changes/test/acceptance/js/AppendingUpdatesTests.js
new file mode 100644
index 0000000000..9292005ba7
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/AppendingUpdatesTests.js
@@ -0,0 +1,587 @@
+/* eslint-disable
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const { ObjectId } = require('../../../app/js/mongodb')
+const Settings = require('@overleaf/settings')
+const request = require('request')
+const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
+
+const TrackChangesApp = require('./helpers/TrackChangesApp')
+const TrackChangesClient = require('./helpers/TrackChangesClient')
+const MockWebApi = require('./helpers/MockWebApi')
+
+describe('Appending doc ops to the history', function () {
+ before(function (done) {
+ return TrackChangesApp.ensureRunning(done)
+ })
+
+ describe('when the history does not exist yet', function () {
+ before(function (done) {
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.user_id = ObjectId().toString()
+ MockWebApi.projects[this.project_id] = { features: { versioning: false } }
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [{ i: 'f', p: 3 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 3,
+ },
+ {
+ op: [{ i: 'o', p: 4 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 4,
+ },
+ {
+ op: [{ i: 'o', p: 5 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 5,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushAndGetCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ (error, updates) => {
+ this.updates = updates
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ return null
+ })
+
+ it('should insert the compressed op into mongo', function () {
+ return expect(this.updates[0].pack[0].op).to.deep.equal([
+ {
+ p: 3,
+ i: 'foo',
+ },
+ ])
+ })
+
+ it('should insert the correct version number into mongo', function () {
+ return expect(this.updates[0].v).to.equal(5)
+ })
+
+ it('should store the doc id', function () {
+ return expect(this.updates[0].doc_id.toString()).to.equal(this.doc_id)
+ })
+
+ it('should store the project id', function () {
+ return expect(this.updates[0].project_id.toString()).to.equal(
+ this.project_id
+ )
+ })
+
+ return it('should clear the doc from the DocsWithHistoryOps set', function (done) {
+ rclient.sismember(
+ `DocsWithHistoryOps:${this.project_id}`,
+ this.doc_id,
+ (error, member) => {
+ member.should.equal(0)
+ return done()
+ }
+ )
+ return null
+ })
+ })
+
+ describe('when the history has already been started', function () {
+ beforeEach(function (done) {
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.user_id = ObjectId().toString()
+ MockWebApi.projects[this.project_id] = { features: { versioning: false } }
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [{ i: 'f', p: 3 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 3,
+ },
+ {
+ op: [{ i: 'o', p: 4 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 4,
+ },
+ {
+ op: [{ i: 'o', p: 5 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 5,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushAndGetCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ (error, updates) => {
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ return null
+ })
+
+ describe('when the updates are recent and from the same user', function () {
+ beforeEach(function (done) {
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [{ i: 'b', p: 6 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 6,
+ },
+ {
+ op: [{ i: 'a', p: 7 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 7,
+ },
+ {
+ op: [{ i: 'r', p: 8 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 8,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushAndGetCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ (error, updates) => {
+ this.updates = updates
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ return null
+ })
+
+ it('should combine all the updates into one pack', function () {
+ return expect(this.updates[0].pack[1].op).to.deep.equal([
+ {
+ p: 6,
+ i: 'bar',
+ },
+ ])
+ })
+
+ return it('should insert the correct version number into mongo', function () {
+ return expect(this.updates[0].v_end).to.equal(8)
+ })
+ })
+
+ return describe('when the updates are far apart', function () {
+ beforeEach(function (done) {
+ const oneDay = 24 * 60 * 60 * 1000
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [{ i: 'b', p: 6 }],
+ meta: { ts: Date.now() + oneDay, user_id: this.user_id },
+ v: 6,
+ },
+ {
+ op: [{ i: 'a', p: 7 }],
+ meta: { ts: Date.now() + oneDay, user_id: this.user_id },
+ v: 7,
+ },
+ {
+ op: [{ i: 'r', p: 8 }],
+ meta: { ts: Date.now() + oneDay, user_id: this.user_id },
+ v: 8,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushAndGetCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ (error, updates) => {
+ this.updates = updates
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ return null
+ })
+
+ return it('should combine the updates into one pack', function () {
+ expect(this.updates[0].pack[0].op).to.deep.equal([
+ {
+ p: 3,
+ i: 'foo',
+ },
+ ])
+ return expect(this.updates[0].pack[1].op).to.deep.equal([
+ {
+ p: 6,
+ i: 'bar',
+ },
+ ])
+ })
+ })
+ })
+
+ describe('when the updates need processing in batches', function () {
+ before(function (done) {
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.user_id = ObjectId().toString()
+ MockWebApi.projects[this.project_id] = { features: { versioning: false } }
+ const updates = []
+ this.expectedOp = [{ p: 0, i: '' }]
+ for (let i = 0; i <= 250; i++) {
+ updates.push({
+ op: [{ i: 'a', p: 0 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: i,
+ })
+ this.expectedOp[0].i = `a${this.expectedOp[0].i}`
+ }
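+ // 251 raw updates in total, so flushing compresses them in several batches (batch size 100, giving the 3 pack entries asserted below)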
+
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ updates,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushAndGetCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ (error, updates1) => {
+ this.updates = updates1
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ return null
+ })
+
+ it('should concat the compressed op into mongo', function () {
+ return expect(this.updates[0].pack.length).to.deep.equal(3)
+ }) // batch size is 100
+
+ return it('should insert the correct version number into mongo', function () {
+ return expect(this.updates[0].v_end).to.equal(250)
+ })
+ })
+
+ describe('when there are multiple ops in each update', function () {
+ before(function (done) {
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.user_id = ObjectId().toString()
+ MockWebApi.projects[this.project_id] = { features: { versioning: false } }
+ const oneDay = 24 * 60 * 60 * 1000
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [
+ { i: 'f', p: 3 },
+ { i: 'o', p: 4 },
+ { i: 'o', p: 5 },
+ ],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 3,
+ },
+ {
+ op: [
+ { i: 'b', p: 6 },
+ { i: 'a', p: 7 },
+ { i: 'r', p: 8 },
+ ],
+ meta: { ts: Date.now() + oneDay, user_id: this.user_id },
+ v: 4,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushAndGetCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ (error, updates) => {
+ this.updates = updates
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ return null
+ })
+
+ it('should insert the compressed ops into mongo', function () {
+ expect(this.updates[0].pack[0].op).to.deep.equal([
+ {
+ p: 3,
+ i: 'foo',
+ },
+ ])
+ return expect(this.updates[0].pack[1].op).to.deep.equal([
+ {
+ p: 6,
+ i: 'bar',
+ },
+ ])
+ })
+
+ return it('should insert the correct version numbers into mongo', function () {
+ expect(this.updates[0].pack[0].v).to.equal(3)
+ return expect(this.updates[0].pack[1].v).to.equal(4)
+ })
+ })
+
+ describe('when there is a no-op update', function () {
+ before(function (done) {
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.user_id = ObjectId().toString()
+ MockWebApi.projects[this.project_id] = { features: { versioning: false } }
+ const oneDay = 24 * 60 * 60 * 1000
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 3,
+ },
+ {
+ op: [{ i: 'foo', p: 3 }],
+ meta: { ts: Date.now() + oneDay, user_id: this.user_id },
+ v: 4,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushAndGetCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ (error, updates) => {
+ this.updates = updates
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ return null
+ })
+
+ it('should insert the compressed no-op into mongo', function () {
+ return expect(this.updates[0].pack[0].op).to.deep.equal([])
+ })
+
+ it('should insert the compressed next update into mongo', function () {
+ return expect(this.updates[0].pack[1].op).to.deep.equal([
+ {
+ p: 3,
+ i: 'foo',
+ },
+ ])
+ })
+
+ return it('should insert the correct version numbers into mongo', function () {
+ expect(this.updates[0].pack[0].v).to.equal(3)
+ return expect(this.updates[0].pack[1].v).to.equal(4)
+ })
+ })
+
+ describe('when there is a comment update', function () {
+ before(function (done) {
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.user_id = ObjectId().toString()
+ MockWebApi.projects[this.project_id] = { features: { versioning: false } }
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [
+ { c: 'foo', p: 3 },
+ { d: 'bar', p: 6 },
+ ],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 3,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushAndGetCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ (error, updates) => {
+ this.updates = updates
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ return null
+ })
+
+ it('should ignore the comment op', function () {
+ return expect(this.updates[0].pack[0].op).to.deep.equal([
+ { d: 'bar', p: 6 },
+ ])
+ })
+
+ return it('should insert the correct version numbers into mongo', function () {
+ return expect(this.updates[0].pack[0].v).to.equal(3)
+ })
+ })
+
+ describe('when the project has versioning enabled', function () {
+ before(function (done) {
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.user_id = ObjectId().toString()
+ MockWebApi.projects[this.project_id] = { features: { versioning: true } }
+
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [{ i: 'f', p: 3 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 3,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushAndGetCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ (error, updates) => {
+ this.updates = updates
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ return null
+ })
+
+ return it('should not add an expiresAt entry in the update in mongo', function () {
+ return expect(this.updates[0].expiresAt).to.be.undefined
+ })
+ })
+
+ return describe('when the project does not have versioning enabled', function () {
+ before(function (done) {
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.user_id = ObjectId().toString()
+ MockWebApi.projects[this.project_id] = { features: { versioning: false } }
+
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [{ i: 'f', p: 3 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 3,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushAndGetCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ (error, updates) => {
+ this.updates = updates
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ return null
+ })
+
+ return it('should add an expiresAt entry in the update in mongo', function () {
+ return expect(this.updates[0].expiresAt).to.exist
+ })
+ })
+})
diff --git a/services/track-changes/test/acceptance/js/ArchivingUpdatesTests.js b/services/track-changes/test/acceptance/js/ArchivingUpdatesTests.js
new file mode 100644
index 0000000000..09435bc80c
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/ArchivingUpdatesTests.js
@@ -0,0 +1,322 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-undef,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS202: Simplify dynamic range loops
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const { db, ObjectId } = require('../../../app/js/mongodb')
+const Settings = require('@overleaf/settings')
+const request = require('request')
+const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
+
+const TrackChangesApp = require('./helpers/TrackChangesApp')
+const TrackChangesClient = require('./helpers/TrackChangesClient')
+const MockDocStoreApi = require('./helpers/MockDocStoreApi')
+const MockWebApi = require('./helpers/MockWebApi')
+
+describe('Archiving updates', function () {
+ before(function (done) {
+ if (
+ __guard__(
+ __guard__(
+ Settings != null ? Settings.trackchanges : undefined,
+ x1 => x1.s3
+ ),
+ x => x.key.length
+ ) < 1
+ ) {
+ const message = new Error('s3 keys not setup, this test setup will fail')
+ return done(message)
+ }
+
+ return TrackChangesClient.waitForS3(done)
+ })
+
+ before(function (done) {
+ this.now = Date.now()
+ this.to = this.now
+ this.user_id = ObjectId().toString()
+ this.user_id_2 = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.project_id = ObjectId().toString()
+
+ this.minutes = 60 * 1000
+ this.hours = 60 * this.minutes
+
+ MockWebApi.projects[this.project_id] = {
+ features: {
+ versioning: true,
+ },
+ }
+ sinon.spy(MockWebApi, 'getProjectDetails')
+
+ MockWebApi.users[this.user_id] = this.user = {
+ email: 'user@sharelatex.com',
+ first_name: 'Leo',
+ last_name: 'Lion',
+ id: this.user_id,
+ }
+ sinon.spy(MockWebApi, 'getUserInfo')
+
+ MockDocStoreApi.docs[this.doc_id] = this.doc = {
+ _id: this.doc_id,
+ project_id: this.project_id,
+ }
+ sinon.spy(MockDocStoreApi, 'getAllDoc')
+
+ this.updates = []
+ for (
+ let i = 0, end = 512 + 10, asc = end >= 0;
+ asc ? i <= end : i >= end;
+ asc ? i++ : i--
+ ) {
+ this.updates.push({
+ op: [{ i: 'a', p: 0 }],
+ meta: { ts: this.now + (i - 2048) * this.hours, user_id: this.user_id },
+ v: 2 * i + 1,
+ })
+ this.updates.push({
+ op: [{ i: 'b', p: 0 }],
+ meta: {
+ ts: this.now + (i - 2048) * this.hours + 10 * this.minutes,
+ user_id: this.user_id_2,
+ },
+ v: 2 * i + 2,
+ })
+ }
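+ // 523 iterations x 2 = 1046 raw updates (1024 + 22): enough to fill one complete 1024-op pack plus a smaller second one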
+ TrackChangesApp.ensureRunning(() => {
+ return TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ this.updates,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushDoc(
+ this.project_id,
+ this.doc_id,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ })
+ return null
+ })
+
+ after(function (done) {
+ MockWebApi.getUserInfo.restore()
+ return db.docHistory.deleteMany(
+ { project_id: ObjectId(this.project_id) },
+ () => {
+ return db.docHistoryIndex.remove(
+ { project_id: ObjectId(this.project_id) },
+ () => {
+ return TrackChangesClient.removeS3Doc(
+ this.project_id,
+ this.doc_id,
+ done
+ )
+ }
+ )
+ }
+ )
+ })
+
+ function testExportFeature() {
+ describe('exporting the project', function () {
+ before('fetch export', function (done) {
+ TrackChangesClient.exportProject(
+ this.project_id,
+ (error, updates, userIds) => {
+ if (error) {
+ return done(error)
+ }
+ this.exportedUpdates = updates
+ this.exportedUserIds = userIds
+ done()
+ }
+ )
+ })
+
+ it('should include all the imported updates, with ids, sorted by timestamp', function () {
+ // Safeguard against an empty updates array trivially matching an empty export.
+ expect(this.updates).to.have.length(1024 + 22)
+
+ const expectedExportedUpdates = this.updates
+ .slice()
+ .reverse()
+ .map(update => {
+ // clone object, updates are created once in before handler
+ const exportedUpdate = Object.assign({}, update)
+ exportedUpdate.meta = Object.assign({}, update.meta)
+
+ exportedUpdate.doc_id = this.doc_id
+ exportedUpdate.project_id = this.project_id
+
+ // This is for merged updates, which does not apply here.
+ exportedUpdate.meta.start_ts = exportedUpdate.meta.end_ts =
+ exportedUpdate.meta.ts
+ delete exportedUpdate.meta.ts
+ return exportedUpdate
+ })
+ expect(this.exportedUpdates).to.deep.equal(expectedExportedUpdates)
+ expect(this.exportedUserIds).to.deep.equal([
+ this.user_id,
+ this.user_id_2,
+ ])
+ })
+ })
+ }
+
+ describe("before archiving a doc's updates", function () {
+ testExportFeature()
+ })
+
+ describe("archiving a doc's updates", function () {
+ before(function (done) {
+ TrackChangesClient.pushDocHistory(this.project_id, this.doc_id, error => {
+ if (error != null) {
+ throw error
+ }
+ return done()
+ })
+ return null
+ })
+
+ it('should have one cached pack', function (done) {
+ return db.docHistory.count(
+ { doc_id: ObjectId(this.doc_id), expiresAt: { $exists: true } },
+ (error, count) => {
+ if (error != null) {
+ throw error
+ }
+ count.should.equal(1)
+ return done()
+ }
+ )
+ })
+
+ it('should have one remaining pack after cache is expired', function (done) {
+ return db.docHistory.deleteMany(
+ {
+ doc_id: ObjectId(this.doc_id),
+ expiresAt: { $exists: true },
+ },
+ (err, result) => {
+ if (err != null) {
+ throw err
+ }
+ return db.docHistory.count(
+ { doc_id: ObjectId(this.doc_id) },
+ (error, count) => {
+ if (error != null) {
+ throw error
+ }
+ count.should.equal(1)
+ return done()
+ }
+ )
+ }
+ )
+ })
+
+ it('should have a docHistoryIndex entry marked as inS3', function (done) {
+ return db.docHistoryIndex.findOne(
+ { _id: ObjectId(this.doc_id) },
+ (error, index) => {
+ if (error != null) {
+ throw error
+ }
+ index.packs[0].inS3.should.equal(true)
+ return done()
+ }
+ )
+ })
+
+ it('should have a docHistoryIndex entry with the last version', function (done) {
+ return db.docHistoryIndex.findOne(
+ { _id: ObjectId(this.doc_id) },
+ (error, index) => {
+ if (error != null) {
+ throw error
+ }
+ index.packs[0].v_end.should.equal(1024)
+ return done()
+ }
+ )
+ })
+
+ it('should store 1024 doc changes in S3 in one pack', function (done) {
+ return db.docHistoryIndex.findOne(
+ { _id: ObjectId(this.doc_id) },
+ (error, index) => {
+ if (error != null) {
+ throw error
+ }
+ const pack_id = index.packs[0]._id
+ return TrackChangesClient.getS3Doc(
+ this.project_id,
+ this.doc_id,
+ pack_id,
+ (error, doc) => {
+ doc.n.should.equal(1024)
+ doc.pack.length.should.equal(1024)
+ return done()
+ }
+ )
+ }
+ )
+ })
+
+ testExportFeature()
+ })
+
+ return describe("unarchiving a doc's updates", function () {
+ before(function (done) {
+ TrackChangesClient.pullDocHistory(this.project_id, this.doc_id, error => {
+ if (error != null) {
+ throw error
+ }
+ return done()
+ })
+ return null
+ })
+
+ return it('should restore both packs', function (done) {
+ return db.docHistory.count(
+ { doc_id: ObjectId(this.doc_id) },
+ (error, count) => {
+ if (error != null) {
+ throw error
+ }
+ count.should.equal(2)
+ return done()
+ }
+ )
+ })
+ })
+})
+
+function __guard__(value, transform) {
+ return typeof value !== 'undefined' && value !== null
+ ? transform(value)
+ : undefined
+}
diff --git a/services/track-changes/test/acceptance/js/ExportProjectTests.js b/services/track-changes/test/acceptance/js/ExportProjectTests.js
new file mode 100644
index 0000000000..b6ca106a60
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/ExportProjectTests.js
@@ -0,0 +1,34 @@
+const { expect } = require('chai')
+const { ObjectId } = require('../../../app/js/mongodb')
+
+const TrackChangesApp = require('./helpers/TrackChangesApp')
+const TrackChangesClient = require('./helpers/TrackChangesClient')
+
+describe('ExportProject', function () {
+ before('start app', function (done) {
+ TrackChangesApp.ensureRunning(done)
+ })
+
+ describe('when there are no updates', function () {
+ before('fetch export', function (done) {
+ TrackChangesClient.exportProject(
+ ObjectId(),
+ (error, updates, userIds) => {
+ if (error) {
+ return done(error)
+ }
+ this.exportedUpdates = updates
+ this.exportedUserIds = userIds
+ done()
+ }
+ )
+ })
+
+ it('should export an empty array', function () {
+ expect(this.exportedUpdates).to.deep.equal([])
+ expect(this.exportedUserIds).to.deep.equal([])
+ })
+ })
+
+ // see ArchivingUpdatesTests for tests with data in mongo/s3
+})
diff --git a/services/track-changes/test/acceptance/js/FlushingUpdatesTests.js b/services/track-changes/test/acceptance/js/FlushingUpdatesTests.js
new file mode 100644
index 0000000000..65e5ecc468
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/FlushingUpdatesTests.js
@@ -0,0 +1,273 @@
+/* eslint-disable
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const { ObjectId } = require('../../../app/js/mongodb')
+const Settings = require('@overleaf/settings')
+const request = require('request')
+const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
+
+const TrackChangesApp = require('./helpers/TrackChangesApp')
+const TrackChangesClient = require('./helpers/TrackChangesClient')
+const MockWebApi = require('./helpers/MockWebApi')
+
+describe('Flushing updates', function () {
+ before(function (done) {
+ return TrackChangesApp.ensureRunning(done)
+ })
+
+ describe("flushing a doc's updates", function () {
+ before(function (done) {
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.user_id = ObjectId().toString()
+ MockWebApi.projects[this.project_id] = { features: { versioning: true } }
+
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [{ i: 'f', p: 3 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 3,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushDoc(
+ this.project_id,
+ this.doc_id,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ return null
+ })
+
+ return it('should flush the op into mongo', function (done) {
+ TrackChangesClient.getCompressedUpdates(this.doc_id, (error, updates) => {
+ expect(updates[0].pack[0].op).to.deep.equal([
+ {
+ p: 3,
+ i: 'f',
+ },
+ ])
+ return done()
+ })
+ return null
+ })
+ })
+
+ return describe("flushing a project's updates", function () {
+ describe('with versioning enabled', function () {
+ before(function (done) {
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.user_id = ObjectId().toString()
+
+ this.weeks = 7 * 24 * 60 * 60 * 1000
+
+ MockWebApi.projects[this.project_id] = {
+ features: {
+ versioning: true,
+ },
+ }
+
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [{ i: 'g', p: 2 }],
+ meta: { ts: Date.now() - 2 * this.weeks, user_id: this.user_id },
+ v: 2,
+ },
+ {
+ op: [{ i: 'f', p: 3 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 3,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushProject(this.project_id, error => {
+ if (error != null) {
+ throw error
+ }
+ return done()
+ })
+ }
+ )
+ return null
+ })
+
+ it('should not mark the updates for deletion', function (done) {
+ TrackChangesClient.getCompressedUpdates(
+ this.doc_id,
+ (error, updates) => {
+ expect(updates[0].expiresAt).to.not.exist
+ return done()
+ }
+ )
+ return null
+ })
+
+ return it('should preserve history forever', function (done) {
+ TrackChangesClient.getProjectMetaData(
+ this.project_id,
+ (error, project) => {
+ expect(project.preserveHistory).to.equal(true)
+ return done()
+ }
+ )
+ return null
+ })
+ })
+
+ describe('without versioning enabled', function () {
+ before(function (done) {
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.user_id = ObjectId().toString()
+
+ this.weeks = 7 * 24 * 60 * 60 * 1000
+
+ MockWebApi.projects[this.project_id] = {
+ features: {
+ versioning: false,
+ },
+ }
+
+ TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [{ i: 'g', p: 2 }],
+ meta: { ts: Date.now() - 2 * this.weeks, user_id: this.user_id },
+ v: 2,
+ },
+ {
+ op: [{ i: 'f', p: 3 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 3,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushProject(this.project_id, error => {
+ if (error != null) {
+ throw error
+ }
+ return done()
+ })
+ }
+ )
+ return null
+ })
+
+ return it('should mark the updates for deletion', function (done) {
+ TrackChangesClient.getCompressedUpdates(
+ this.doc_id,
+ (error, updates) => {
+ expect(updates[0].expiresAt).to.exist
+ return done()
+ }
+ )
+ return null
+ })
+ })
+
+ return describe('without versioning enabled but with preserveHistory set to true', function () {
+ before(function (done) {
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.user_id = ObjectId().toString()
+
+ this.weeks = 7 * 24 * 60 * 60 * 1000
+
+ MockWebApi.projects[this.project_id] = {
+ features: {
+ versioning: false,
+ },
+ }
+
+ TrackChangesClient.setPreserveHistoryForProject(
+ this.project_id,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [
+ {
+ op: [{ i: 'g', p: 2 }],
+ meta: {
+ ts: Date.now() - 2 * this.weeks,
+ user_id: this.user_id,
+ },
+ v: 2,
+ },
+ {
+ op: [{ i: 'f', p: 3 }],
+ meta: { ts: Date.now(), user_id: this.user_id },
+ v: 3,
+ },
+ ],
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.flushProject(
+ this.project_id,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ }
+ )
+ return null
+ })
+
+ return it('should not mark the updates for deletion', function (done) {
+ TrackChangesClient.getCompressedUpdates(
+ this.doc_id,
+ (error, updates) => {
+ expect(updates[0].expiresAt).to.not.exist
+ return done()
+ }
+ )
+ return null
+ })
+ })
+ })
+})
diff --git a/services/track-changes/test/acceptance/js/GettingADiffTests.js b/services/track-changes/test/acceptance/js/GettingADiffTests.js
new file mode 100644
index 0000000000..f2de3d7d7f
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/GettingADiffTests.js
@@ -0,0 +1,127 @@
+/* eslint-disable
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const { ObjectId } = require('../../../app/js/mongodb')
+const Settings = require('@overleaf/settings')
+
+const TrackChangesApp = require('./helpers/TrackChangesApp')
+const TrackChangesClient = require('./helpers/TrackChangesClient')
+const MockDocUpdaterApi = require('./helpers/MockDocUpdaterApi')
+const MockWebApi = require('./helpers/MockWebApi')
+
+describe('Getting a diff', function () {
+ beforeEach(function (done) {
+ sinon.spy(MockDocUpdaterApi, 'getDoc')
+
+ this.now = Date.now()
+ this.from = this.now - 100000000
+ this.to = this.now
+ this.user_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.project_id = ObjectId().toString()
+ MockWebApi.projects[this.project_id] = { features: { versioning: true } }
+
+ MockWebApi.users[this.user_id] = this.user = {
+ email: 'user@sharelatex.com',
+ first_name: 'Leo',
+ last_name: 'Lion',
+ id: this.user_id,
+ }
+ sinon.spy(MockWebApi, 'getUserInfo')
+
+ const twoMinutes = 2 * 60 * 1000
+
+ this.updates = [
+ {
+ op: [{ i: 'one ', p: 0 }],
+ meta: { ts: this.from - twoMinutes, user_id: this.user_id },
+ v: 3,
+ },
+ {
+ op: [{ i: 'two ', p: 4 }],
+ meta: { ts: this.from + twoMinutes, user_id: this.user_id },
+ v: (this.fromVersion = 4),
+ },
+ {
+ op: [{ i: 'three ', p: 8 }],
+ meta: { ts: this.to - twoMinutes, user_id: this.user_id },
+ v: (this.toVersion = 5),
+ },
+ {
+ op: [{ i: 'four', p: 14 }],
+ meta: { ts: this.to + twoMinutes, user_id: this.user_id },
+ v: 6,
+ },
+ ]
+ this.lines = ['one two three four']
+ this.expected_diff = [
+ { u: 'one ' },
+ {
+ i: 'two three ',
+ meta: {
+ start_ts: this.from + twoMinutes,
+ end_ts: this.to - twoMinutes,
+ user: this.user,
+ },
+ },
+ ]
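+ // Only the v4 and v5 inserts (the requested fromVersion..toVersion range) show up as an insertion; the v3 text is unchanged context and the v6 insert ('four') falls outside the range.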
+
+ MockDocUpdaterApi.docs[this.doc_id] = {
+ lines: this.lines,
+ version: 7,
+ }
+ TrackChangesApp.ensureRunning(() => {
+ return TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ this.updates,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.getDiff(
+ this.project_id,
+ this.doc_id,
+ this.fromVersion,
+ this.toVersion,
+ (error, diff) => {
+ if (error != null) {
+ throw error
+ }
+ this.diff = diff.diff
+ return done()
+ }
+ )
+ }
+ )
+ })
+ return null
+ })
+
+ afterEach(function () {
+ MockDocUpdaterApi.getDoc.restore()
+ MockWebApi.getUserInfo.restore()
+ return null
+ })
+
+ it('should return the diff', function () {
+ return expect(this.diff).to.deep.equal(this.expected_diff)
+ })
+
+ return it('should get the doc from the doc updater', function () {
+ MockDocUpdaterApi.getDoc
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ return null
+ })
+})
diff --git a/services/track-changes/test/acceptance/js/GettingUpdatesTests.js b/services/track-changes/test/acceptance/js/GettingUpdatesTests.js
new file mode 100644
index 0000000000..d3fce21171
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/GettingUpdatesTests.js
@@ -0,0 +1,185 @@
+/* eslint-disable
+ chai-friendly/no-unused-expressions,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const { ObjectId } = require('../../../app/js/mongodb')
+const Settings = require('@overleaf/settings')
+
+const TrackChangesApp = require('./helpers/TrackChangesApp')
+const TrackChangesClient = require('./helpers/TrackChangesClient')
+const MockWebApi = require('./helpers/MockWebApi')
+
+describe('Getting updates', function () {
+ before(function (done) {
+ this.now = Date.now()
+ this.to = this.now
+ this.user_id = ObjectId().toString()
+ this.deleted_user_id = 'deleted_user'
+ this.doc_id = ObjectId().toString()
+ this.project_id = ObjectId().toString()
+
+ this.minutes = 60 * 1000
+ this.hours = 60 * this.minutes
+
+ MockWebApi.projects[this.project_id] = {
+ features: {
+ versioning: true,
+ },
+ }
+
+ MockWebApi.users[this.user_id] = this.user = {
+ email: 'user@sharelatex.com',
+ first_name: 'Leo',
+ last_name: 'Lion',
+ id: this.user_id,
+ }
+ sinon.spy(MockWebApi, 'getUserInfo')
+
+ this.updates = []
+ for (let i = 0; i <= 9; i++) {
+ this.updates.push({
+ op: [{ i: 'a', p: 0 }],
+ meta: {
+ ts: this.now - (9 - i) * this.hours - 2 * this.minutes,
+ user_id: this.user_id,
+ },
+ v: 2 * i + 1,
+ })
+ this.updates.push({
+ op: [{ i: 'b', p: 0 }],
+ meta: { ts: this.now - (9 - i) * this.hours, user_id: this.user_id },
+ v: 2 * i + 2,
+ })
+ }
+ this.updates[0].meta.user_id = this.deleted_user_id
+
+ TrackChangesApp.ensureRunning(() => {
+ return TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ this.updates,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ })
+ return null
+ })
+ after(function () {
+ MockWebApi.getUserInfo.restore()
+ return null
+ })
+
+ describe('getting updates up to the limit', function () {
+ before(function (done) {
+ TrackChangesClient.getUpdates(
+ this.project_id,
+ { before: this.to + 1, min_count: 3 },
+ (error, body) => {
+ if (error != null) {
+ throw error
+ }
+ this.updates = body.updates
+ return done()
+ }
+ )
+ return null
+ })
+
+ it('should fetch the user details from the web api', function () {
+ return MockWebApi.getUserInfo.calledWith(this.user_id).should.equal(true)
+ })
+
+ return it('should return at least the min_count number of summarized updates', function () {
+ const docs1 = {}
+ docs1[this.doc_id] = { toV: 20, fromV: 19 }
+ const docs2 = {}
+ docs2[this.doc_id] = { toV: 18, fromV: 17 }
+ const docs3 = {}
+ docs3[this.doc_id] = { toV: 16, fromV: 15 }
+ return expect(this.updates.slice(0, 3)).to.deep.equal([
+ {
+ docs: docs1,
+ meta: {
+ start_ts: this.to - 2 * this.minutes,
+ end_ts: this.to,
+ users: [this.user],
+ },
+ },
+ {
+ docs: docs2,
+ meta: {
+ start_ts: this.to - 1 * this.hours - 2 * this.minutes,
+ end_ts: this.to - 1 * this.hours,
+ users: [this.user],
+ },
+ },
+ {
+ docs: docs3,
+ meta: {
+ start_ts: this.to - 2 * this.hours - 2 * this.minutes,
+ end_ts: this.to - 2 * this.hours,
+ users: [this.user],
+ },
+ },
+ ])
+ })
+ })
+
+ return describe('getting updates beyond the end of the database', function () {
+ before(function (done) {
+ TrackChangesClient.getUpdates(
+ this.project_id,
+ { before: this.to - 8 * this.hours + 1, min_count: 30 },
+ (error, body) => {
+ if (error != null) {
+ throw error
+ }
+ this.updates = body.updates
+ return done()
+ }
+ )
+ return null
+ })
+
+ return it('should return as many updates as it can', function () {
+ const docs1 = {}
+ docs1[this.doc_id] = { toV: 4, fromV: 3 }
+ const docs2 = {}
+ docs2[this.doc_id] = { toV: 2, fromV: 1 }
+ return expect(this.updates).to.deep.equal([
+ {
+ docs: docs1,
+ meta: {
+ start_ts: this.to - 8 * this.hours - 2 * this.minutes,
+ end_ts: this.to - 8 * this.hours,
+ users: [this.user],
+ },
+ },
+ {
+ docs: docs2,
+ meta: {
+ start_ts: this.to - 9 * this.hours - 2 * this.minutes,
+ end_ts: this.to - 9 * this.hours,
+ users: [this.user, null],
+ },
+ },
+ ])
+ })
+ })
+})
diff --git a/services/track-changes/test/acceptance/js/LockManagerTests.js b/services/track-changes/test/acceptance/js/LockManagerTests.js
new file mode 100644
index 0000000000..104e7c3220
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/LockManagerTests.js
@@ -0,0 +1,62 @@
+/* eslint-disable
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const Settings = require('@overleaf/settings')
+const LockManager = require('../../../app/js/LockManager')
+const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
+const TrackChangesApp = require('./helpers/TrackChangesApp')
+
+describe('Locking document', function () {
+ before(function (done) {
+ TrackChangesApp.ensureRunning(done)
+ return null
+ })
+
+ return describe('when the lock has expired in redis', function () {
+ before(function (done) {
+ LockManager.LOCK_TTL = 1 // second
+ LockManager.runWithLock(
+ 'doc123',
+ releaseA => {
+ // we create a lock A and allow it to expire in redis
+ return setTimeout(
+ () =>
+ // now we create a new lock B and try to release A
+ LockManager.runWithLock(
+ 'doc123',
+ releaseB => {
+ return releaseA()
+ }, // try to release lock A to see if it wipes out lock B
+ error => {}
+ ),
+
+ // we never release lock B so nothing should happen here
+ 1500
+ )
+ }, // enough time to wait until the lock has expired
+ error =>
+ // we get here after trying to release lock A
+ done()
+ )
+ return null
+ })
+
+ return it('the new lock should not be removed by the expired locker', function (done) {
+ LockManager.checkLock('doc123', (err, isFree) => {
+ expect(isFree).to.equal(false)
+ return done()
+ })
+ return null
+ })
+ })
+})
diff --git a/services/track-changes/test/acceptance/js/RestoringVersions.js b/services/track-changes/test/acceptance/js/RestoringVersions.js
new file mode 100644
index 0000000000..312d92bef9
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/RestoringVersions.js
@@ -0,0 +1,116 @@
+/* eslint-disable
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const { ObjectId } = require('../../../app/js/mongodb')
+const Settings = require('@overleaf/settings')
+
+const TrackChangesApp = require('./helpers/TrackChangesApp')
+const TrackChangesClient = require('./helpers/TrackChangesClient')
+const MockDocUpdaterApi = require('./helpers/MockDocUpdaterApi')
+const MockWebApi = require('./helpers/MockWebApi')
+
+describe('Restoring a version', function () {
+ before(function (done) {
+ sinon.spy(MockDocUpdaterApi, 'setDoc')
+
+ this.now = Date.now()
+ this.user_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.project_id = ObjectId().toString()
+ MockWebApi.projects[this.project_id] = { features: { versioning: true } }
+
+ const minutes = 60 * 1000
+
+ this.updates = [
+ {
+ op: [{ i: 'one ', p: 0 }],
+ meta: { ts: this.now - 6 * minutes, user_id: this.user_id },
+ v: 3,
+ },
+ {
+ op: [{ i: 'two ', p: 4 }],
+ meta: { ts: this.now - 4 * minutes, user_id: this.user_id },
+ v: 4,
+ },
+ {
+ op: [{ i: 'three ', p: 8 }],
+ meta: { ts: this.now - 2 * minutes, user_id: this.user_id },
+ v: 5,
+ },
+ {
+ op: [{ i: 'four', p: 14 }],
+ meta: { ts: this.now, user_id: this.user_id },
+ v: 6,
+ },
+ ]
+ this.lines = ['one two three four']
+ this.restored_lines = ['one two ']
+ this.beforeVersion = 5
+
+ MockWebApi.users[this.user_id] = this.user = {
+ email: 'user@sharelatex.com',
+ first_name: 'Leo',
+ last_name: 'Lion',
+ id: this.user_id,
+ }
+
+ MockDocUpdaterApi.docs[this.doc_id] = {
+ lines: this.lines,
+ version: 7,
+ }
+
+ TrackChangesApp.ensureRunning(() => {
+ return TrackChangesClient.pushRawUpdates(
+ this.project_id,
+ this.doc_id,
+ this.updates,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return TrackChangesClient.restoreDoc(
+ this.project_id,
+ this.doc_id,
+ this.beforeVersion,
+ this.user_id,
+ error => {
+ if (error != null) {
+ throw error
+ }
+ return done()
+ }
+ )
+ }
+ )
+ })
+ return null
+ })
+
+ after(function () {
+ MockDocUpdaterApi.setDoc.restore()
+ return null
+ })
+
+ return it('should set the doc in the doc updater', function () {
+ MockDocUpdaterApi.setDoc
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.restored_lines,
+ this.user_id,
+ true
+ )
+ .should.equal(true)
+ return null
+ })
+})
diff --git a/services/track-changes/test/acceptance/js/helpers/MockDocStoreApi.js b/services/track-changes/test/acceptance/js/helpers/MockDocStoreApi.js
new file mode 100644
index 0000000000..12787770a4
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/helpers/MockDocStoreApi.js
@@ -0,0 +1,54 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let MockDocStoreApi
+const express = require('express')
+const app = express()
+
+module.exports = MockDocStoreApi = {
+ docs: {},
+
+ getAllDoc(project_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return callback(null, this.docs)
+ },
+
+ run() {
+ app.get('/project/:project_id/doc', (req, res, next) => {
+ return this.getAllDoc(req.params.project_id, (error, docs) => {
+ if (error != null) {
+ res.sendStatus(500)
+ }
+ if (docs == null) {
+ return res.sendStatus(404)
+ } else {
+ return res.send(JSON.stringify(docs))
+ }
+ })
+ })
+
+ return app
+ .listen(3016, error => {
+ if (error != null) {
+ throw error
+ }
+ })
+ .on('error', error => {
+ console.error('error starting MockDocStoreApi:', error.message)
+ return process.exit(1)
+ })
+ },
+}
+
+MockDocStoreApi.run()
diff --git a/services/track-changes/test/acceptance/js/helpers/MockDocUpdaterApi.js b/services/track-changes/test/acceptance/js/helpers/MockDocUpdaterApi.js
new file mode 100644
index 0000000000..1c39ff5dbc
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/helpers/MockDocUpdaterApi.js
@@ -0,0 +1,89 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-undef,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let MockDocUpdaterApi
+const express = require('express')
+const bodyParser = require('body-parser')
+const app = express()
+app.use(bodyParser.json())
+
+module.exports = MockDocUpdaterApi = {
+ docs: {},
+
+ getDoc(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return callback(null, this.docs[doc_id])
+ },
+
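+ // Only the lines are recorded here; tests assert on the remaining arguments via a sinon spy on setDoc.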
+ setDoc(project_id, doc_id, lines, user_id, undoing, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ if (!this.docs[doc_id]) {
+ this.docs[doc_id] = {}
+ }
+ this.docs[doc_id].lines = lines
+ return callback()
+ },
+
+ run() {
+ app.get('/project/:project_id/doc/:doc_id', (req, res, next) => {
+ return this.getDoc(
+ req.params.project_id,
+ req.params.doc_id,
+ (error, doc) => {
+ if (error != null) {
+ res.sendStatus(500)
+ }
+ if (doc == null) {
+ return res.sendStatus(404)
+ } else {
+ return res.send(JSON.stringify(doc))
+ }
+ }
+ )
+ })
+
+ app.post('/project/:project_id/doc/:doc_id', (req, res, next) => {
+ return this.setDoc(
+ req.params.project_id,
+ req.params.doc_id,
+ req.body.lines,
+ req.body.user_id,
+ req.body.undoing,
+ (error, doc) => {
+ if (error != null) {
+ return res.sendStatus(500)
+ } else {
+ return res.sendStatus(204)
+ }
+ }
+ )
+ })
+
+ return app
+ .listen(3003, error => {
+ if (error != null) {
+ throw error
+ }
+ })
+ .on('error', error => {
+ console.error('error starting MockDocUpdaterApi:', error.message)
+ return process.exit(1)
+ })
+ },
+}
+
+MockDocUpdaterApi.run()
diff --git a/services/track-changes/test/acceptance/js/helpers/MockWebApi.js b/services/track-changes/test/acceptance/js/helpers/MockWebApi.js
new file mode 100644
index 0000000000..db6968dc54
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/helpers/MockWebApi.js
@@ -0,0 +1,76 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let MockWebApi
+const express = require('express')
+const app = express()
+
+module.exports = MockWebApi = {
+ users: {},
+
+ projects: {},
+
+ getUserInfo(user_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return callback(null, this.users[user_id] || null)
+ },
+
+ getProjectDetails(project_id, callback) {
+ if (callback == null) {
+ callback = function (error, project) {}
+ }
+ return callback(null, this.projects[project_id])
+ },
+
+ run() {
+ app.get('/user/:user_id/personal_info', (req, res, next) => {
+ return this.getUserInfo(req.params.user_id, (error, user) => {
+ if (error != null) {
+ res.sendStatus(500)
+ }
+ if (user == null) {
+ return res.sendStatus(404)
+ } else {
+ return res.send(JSON.stringify(user))
+ }
+ })
+ })
+
+ app.get('/project/:project_id/details', (req, res, next) => {
+ return this.getProjectDetails(req.params.project_id, (error, project) => {
+ if (error != null) {
+ res.sendStatus(500)
+ }
+ if (project == null) {
+ return res.sendStatus(404)
+ } else {
+ return res.send(JSON.stringify(project))
+ }
+ })
+ })
+
+ return app
+ .listen(3000, error => {
+ if (error != null) {
+ throw error
+ }
+ })
+ .on('error', error => {
+ console.error('error starting MockWebApiServer:', error.message)
+ return process.exit(1)
+ })
+ },
+}
+
+MockWebApi.run()
diff --git a/services/track-changes/test/acceptance/js/helpers/TrackChangesApp.js b/services/track-changes/test/acceptance/js/helpers/TrackChangesApp.js
new file mode 100644
index 0000000000..f6ff0cc024
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/helpers/TrackChangesApp.js
@@ -0,0 +1,67 @@
+/* eslint-disable
+ handle-callback-err,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const app = require('../../../../app')
+const { waitForDb } = require('../../../../app/js/mongodb')
+const logger = require('logger-sharelatex')
+const Settings = require('@overleaf/settings')
+
+module.exports = {
+ running: false,
+ initing: false,
+ callbacks: [],
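+  // Start the track-changes app once for the whole acceptance test run;
+  // concurrent callers are queued in `callbacks` until the server is listening.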
+ ensureRunning(callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ if (this.running) {
+ return callback()
+ } else if (this.initing) {
+ return this.callbacks.push(callback)
+ }
+ this.initing = true
+ this.callbacks.push(callback)
+ waitForDb().then(() => {
+ return app.listen(
+ __guard__(
+ Settings.internal != null
+ ? Settings.internal.trackchanges
+ : undefined,
+ x => x.port
+ ),
+ 'localhost',
+ error => {
+ if (error != null) {
+ throw error
+ }
+ this.running = true
+ logger.log('track changes running in dev mode')
+
+ return (() => {
+ const result = []
+ for (callback of Array.from(this.callbacks)) {
+ result.push(callback())
+ }
+ return result
+ })()
+ }
+ )
+ })
+ },
+}
+function __guard__(value, transform) {
+ return typeof value !== 'undefined' && value !== null
+ ? transform(value)
+ : undefined
+}
diff --git a/services/track-changes/test/acceptance/js/helpers/TrackChangesClient.js b/services/track-changes/test/acceptance/js/helpers/TrackChangesClient.js
new file mode 100644
index 0000000000..0216c2c928
--- /dev/null
+++ b/services/track-changes/test/acceptance/js/helpers/TrackChangesClient.js
@@ -0,0 +1,301 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let TrackChangesClient
+const async = require('async')
+const zlib = require('zlib')
+const request = require('request')
+const Settings = require('@overleaf/settings')
+const rclient = require('@overleaf/redis-wrapper').createClient(
+ Settings.redis.history
+) // Only works locally for now
+const Keys = Settings.redis.history.key_schema
+const { db, ObjectId } = require('../../../../app/js/mongodb')
+
+const aws = require('aws-sdk')
+const s3 = new aws.S3({
+ accessKeyId: Settings.trackchanges.s3.key,
+ secretAccessKey: Settings.trackchanges.s3.secret,
+ endpoint: Settings.trackchanges.s3.endpoint,
+ s3ForcePathStyle: Settings.trackchanges.s3.pathStyle,
+})
+const S3_BUCKET = Settings.trackchanges.stores.doc_history
+
+module.exports = TrackChangesClient = {
+ flushAndGetCompressedUpdates(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error, updates) {}
+ }
+ return TrackChangesClient.flushDoc(project_id, doc_id, error => {
+ if (error != null) {
+ return callback(error)
+ }
+ return TrackChangesClient.getCompressedUpdates(doc_id, callback)
+ })
+ },
+
+ flushDoc(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return request.post(
+ {
+ url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/flush`,
+ },
+ (error, response, body) => {
+ response.statusCode.should.equal(204)
+ return callback(error)
+ }
+ )
+ },
+
+ flushProject(project_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return request.post(
+ {
+ url: `http://localhost:3015/project/${project_id}/flush`,
+ },
+ (error, response, body) => {
+ response.statusCode.should.equal(204)
+ return callback(error)
+ }
+ )
+ },
+
+ getCompressedUpdates(doc_id, callback) {
+ if (callback == null) {
+ callback = function (error, updates) {}
+ }
+ return db.docHistory
+ .find({ doc_id: ObjectId(doc_id) })
+ .sort({ 'meta.end_ts': 1 })
+ .toArray(callback)
+ },
+
+ getProjectMetaData(project_id, callback) {
+ if (callback == null) {
+ callback = function (error, updates) {}
+ }
+ return db.projectHistoryMetaData.findOne(
+ {
+ project_id: ObjectId(project_id),
+ },
+ callback
+ )
+ },
+
+ setPreserveHistoryForProject(project_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return db.projectHistoryMetaData.updateOne(
+ {
+ project_id: ObjectId(project_id),
+ },
+ {
+ $set: { preserveHistory: true },
+ },
+ {
+ upsert: true,
+ },
+ callback
+ )
+ },
+
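+  // Seed redis with raw (uncompressed) history ops for a doc, so that a
+  // subsequent flush has updates to compress.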
+ pushRawUpdates(project_id, doc_id, updates, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return rclient.sadd(
+ Keys.docsWithHistoryOps({ project_id }),
+ doc_id,
+ error => {
+ if (error != null) {
+ return callback(error)
+ }
+ return rclient.rpush(
+ Keys.uncompressedHistoryOps({ doc_id }),
+          ...updates.map(u => JSON.stringify(u)),
+ callback
+ )
+ }
+ )
+ },
+
+ getDiff(project_id, doc_id, from, to, callback) {
+ if (callback == null) {
+ callback = function (error, diff) {}
+ }
+ return request.get(
+ {
+ url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/diff?from=${from}&to=${to}`,
+ },
+ (error, response, body) => {
+ response.statusCode.should.equal(200)
+ return callback(null, JSON.parse(body))
+ }
+ )
+ },
+
+ getUpdates(project_id, options, callback) {
+ if (callback == null) {
+ callback = function (error, body) {}
+ }
+ return request.get(
+ {
+ url: `http://localhost:3015/project/${project_id}/updates?before=${options.before}&min_count=${options.min_count}`,
+ },
+ (error, response, body) => {
+ response.statusCode.should.equal(200)
+ return callback(null, JSON.parse(body))
+ }
+ )
+ },
+
+ exportProject(project_id, callback) {
+ request.get(
+ { url: `http://localhost:3015/project/${project_id}/export`, json: true },
+ (error, response, updates) => {
+ if (error) return callback(error)
+ response.statusCode.should.equal(200)
+ callback(null, updates, JSON.parse(response.trailers['x-user-ids']))
+ }
+ )
+ },
+
+ restoreDoc(project_id, doc_id, version, user_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return request.post(
+ {
+ url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/version/${version}/restore`,
+ headers: {
+ 'X-User-Id': user_id,
+ },
+ },
+ (error, response, body) => {
+ response.statusCode.should.equal(204)
+ return callback(null)
+ }
+ )
+ },
+
+ pushDocHistory(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return request.post(
+ {
+ url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/push`,
+ },
+ (error, response, body) => {
+ response.statusCode.should.equal(204)
+ return callback(error)
+ }
+ )
+ },
+
+ pullDocHistory(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error) {}
+ }
+ return request.post(
+ {
+ url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/pull`,
+ },
+ (error, response, body) => {
+ response.statusCode.should.equal(204)
+ return callback(error)
+ }
+ )
+ },
+
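+  // Poll the configured S3 endpoint until it responds (or retries run out),
+  // so tests do not talk to the S3-compatible backend before it is ready.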
+ waitForS3(done, retries) {
+ if (retries == null) {
+ retries = 42
+ }
+ if (!Settings.trackchanges.s3.endpoint) {
+ return done()
+ }
+
+ return request.get(`${Settings.trackchanges.s3.endpoint}/`, (err, res) => {
+ if (res && res.statusCode < 500) {
+ return done()
+ }
+
+ if (retries === 0) {
+ return done(err || new Error(`s3 returned ${res.statusCode}`))
+ }
+
+ return setTimeout(
+ () => TrackChangesClient.waitForS3(done, --retries),
+ 1000
+ )
+ })
+ },
+
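+  // Fetch an archived pack for a doc from S3, gunzip it and parse the JSON.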
+ getS3Doc(project_id, doc_id, pack_id, callback) {
+ if (callback == null) {
+ callback = function (error, body) {}
+ }
+ const params = {
+ Bucket: S3_BUCKET,
+ Key: `${project_id}/changes-${doc_id}/pack-${pack_id}`,
+ }
+
+ return s3.getObject(params, (error, data) => {
+ if (error != null) {
+ return callback(error)
+ }
+ const body = data.Body
+ if (body == null) {
+ return callback(new Error('empty response from s3'))
+ }
+ return zlib.gunzip(body, (err, result) => {
+ if (err != null) {
+ return callback(err)
+ }
+ return callback(null, JSON.parse(result.toString()))
+ })
+ })
+ },
+
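+  // Delete every S3 object stored under the `${project_id}/changes-${doc_id}`
+  // prefix in the history bucket (test cleanup).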
+ removeS3Doc(project_id, doc_id, callback) {
+ if (callback == null) {
+ callback = function (error, res, body) {}
+ }
+ let params = {
+ Bucket: S3_BUCKET,
+ Prefix: `${project_id}/changes-${doc_id}`,
+ }
+
+ return s3.listObjects(params, (error, data) => {
+ if (error != null) {
+ return callback(error)
+ }
+
+ params = {
+ Bucket: S3_BUCKET,
+ Delete: {
+ Objects: data.Contents.map(s3object => ({ Key: s3object.Key })),
+ },
+ }
+
+ return s3.deleteObjects(params, callback)
+ })
+ },
+}
diff --git a/services/track-changes/test/setup.js b/services/track-changes/test/setup.js
new file mode 100644
index 0000000000..17e1782172
--- /dev/null
+++ b/services/track-changes/test/setup.js
@@ -0,0 +1,21 @@
+const chai = require('chai')
+const SandboxedModule = require('sandboxed-module')
+
+// Chai configuration
+chai.should()
+
+// SandboxedModule configuration
+SandboxedModule.configure({
+ requires: {
+ 'logger-sharelatex': {
+ debug() {},
+ log() {},
+ info() {},
+ warn() {},
+ err() {},
+ error() {},
+ fatal() {},
+ },
+ },
+ globals: { Buffer, JSON, console, process },
+})
diff --git a/services/track-changes/test/unit/js/DiffGenerator/DiffGeneratorTests.js b/services/track-changes/test/unit/js/DiffGenerator/DiffGeneratorTests.js
new file mode 100644
index 0000000000..2b67220de9
--- /dev/null
+++ b/services/track-changes/test/unit/js/DiffGenerator/DiffGeneratorTests.js
@@ -0,0 +1,456 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/DiffGenerator.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('DiffGenerator', function () {
+ beforeEach(function () {
+ this.DiffGenerator = SandboxedModule.require(modulePath, {})
+ this.ts = Date.now()
+ this.user_id = 'mock-user-id'
+ this.user_id_2 = 'mock-user-id-2'
+ return (this.meta = {
+ start_ts: this.ts,
+ end_ts: this.ts,
+ user_id: this.user_id,
+ })
+ })
+
+ describe('rewindOp', function () {
+ describe('rewinding an insert', function () {
+ return it('should undo the insert', function () {
+ const content = 'hello world'
+ const rewoundContent = this.DiffGenerator.rewindOp(content, {
+ p: 6,
+ i: 'wo',
+ })
+ return rewoundContent.should.equal('hello rld')
+ })
+ })
+
+ describe('rewinding a delete', function () {
+ return it('should undo the delete', function () {
+ const content = 'hello rld'
+ const rewoundContent = this.DiffGenerator.rewindOp(content, {
+ p: 6,
+ d: 'wo',
+ })
+ return rewoundContent.should.equal('hello world')
+ })
+ })
+
+ describe('with an inconsistent update', function () {
+ return it('should throw an error', function () {
+ const content = 'hello world'
+ return expect(() => {
+ return this.DiffGenerator.rewindOp(content, { p: 6, i: 'foo' })
+ }).to.throw(this.DiffGenerator.ConsistencyError)
+ })
+ })
+
+ return describe('with an update which is beyond the length of the content', function () {
+ return it('should undo the insert as if it were at the end of the content', function () {
+ const content = 'foobar'
+ const rewoundContent = this.DiffGenerator.rewindOp(content, {
+ p: 4,
+ i: 'bar',
+ })
+ return rewoundContent.should.equal('foo')
+ })
+ })
+ })
+
+ describe('rewindUpdate', function () {
+ return it('should rewind ops in reverse', function () {
+ const content = 'aaabbbccc'
+ const update = {
+ op: [
+ { p: 3, i: 'bbb' },
+ { p: 6, i: 'ccc' },
+ ],
+ }
+ const rewoundContent = this.DiffGenerator.rewindUpdate(content, update)
+ return rewoundContent.should.equal('aaa')
+ })
+ })
+
+ describe('rewindUpdates', function () {
+ return it('should rewind updates in reverse', function () {
+ const content = 'aaabbbccc'
+ const updates = [
+ { op: [{ p: 3, i: 'bbb' }] },
+ { op: [{ p: 6, i: 'ccc' }] },
+ ]
+ const rewoundContent = this.DiffGenerator.rewindUpdates(content, updates)
+ return rewoundContent.should.equal('aaa')
+ })
+ })
+
+ describe('buildDiff', function () {
+ beforeEach(function () {
+ this.diff = [{ u: 'mock-diff' }]
+ this.content = 'Hello world'
+ this.updates = [
+ { i: 'mock-update-1' },
+ { i: 'mock-update-2' },
+ { i: 'mock-update-3' },
+ ]
+ this.DiffGenerator.applyUpdateToDiff = sinon.stub().returns(this.diff)
+ this.DiffGenerator.compressDiff = sinon.stub().returns(this.diff)
+ return (this.result = this.DiffGenerator.buildDiff(
+ this.content,
+ this.updates
+ ))
+ })
+
+ it('should return the diff', function () {
+ return this.result.should.deep.equal(this.diff)
+ })
+
+ it('should build the content into an initial diff', function () {
+ return this.DiffGenerator.applyUpdateToDiff
+ .calledWith(
+ [
+ {
+ u: this.content,
+ },
+ ],
+ this.updates[0]
+ )
+ .should.equal(true)
+ })
+
+ it('should apply each update', function () {
+ return Array.from(this.updates).map(update =>
+ this.DiffGenerator.applyUpdateToDiff
+ .calledWith(sinon.match.any, update)
+ .should.equal(true)
+ )
+ })
+
+ return it('should compress the diff', function () {
+ return this.DiffGenerator.compressDiff
+ .calledWith(this.diff)
+ .should.equal(true)
+ })
+ })
+
+ describe('compressDiff', function () {
+ describe('with adjacent inserts with the same user_id', function () {
+ return it('should create one update with combined meta data and min/max timestamps', function () {
+ const diff = this.DiffGenerator.compressDiff([
+ {
+ i: 'foo',
+ meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } },
+ },
+ {
+ i: 'bar',
+ meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id } },
+ },
+ ])
+ return expect(diff).to.deep.equal([
+ {
+ i: 'foobar',
+ meta: { start_ts: 5, end_ts: 20, user: { id: this.user_id } },
+ },
+ ])
+ })
+ })
+
+ describe('with adjacent inserts with different user_ids', function () {
+ return it('should leave the inserts unchanged', function () {
+ const input = [
+ {
+ i: 'foo',
+ meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } },
+ },
+ {
+ i: 'bar',
+ meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id_2 } },
+ },
+ ]
+ const output = this.DiffGenerator.compressDiff(input)
+ return expect(output).to.deep.equal(input)
+ })
+ })
+
+ describe('with adjacent deletes with the same user_id', function () {
+ return it('should create one update with combined meta data and min/max timestamps', function () {
+ const diff = this.DiffGenerator.compressDiff([
+ {
+ d: 'foo',
+ meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } },
+ },
+ {
+ d: 'bar',
+ meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id } },
+ },
+ ])
+ return expect(diff).to.deep.equal([
+ {
+ d: 'foobar',
+ meta: { start_ts: 5, end_ts: 20, user: { id: this.user_id } },
+ },
+ ])
+ })
+ })
+
+ return describe('with adjacent deletes with different user_ids', function () {
+ return it('should leave the deletes unchanged', function () {
+ const input = [
+ {
+ d: 'foo',
+ meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } },
+ },
+ {
+ d: 'bar',
+ meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id_2 } },
+ },
+ ]
+ const output = this.DiffGenerator.compressDiff(input)
+ return expect(output).to.deep.equal(input)
+ })
+ })
+ })
+
+ return describe('applyUpdateToDiff', function () {
+ describe('an insert', function () {
+ it('should insert into the middle of (u)nchanged text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
+ op: [{ p: 3, i: 'baz' }],
+ meta: this.meta,
+ })
+ return expect(diff).to.deep.equal([
+ { u: 'foo' },
+ { i: 'baz', meta: this.meta },
+ { u: 'bar' },
+ ])
+ })
+
+      it('should insert into the start of (u)nchanged text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
+ op: [{ p: 0, i: 'baz' }],
+ meta: this.meta,
+ })
+ return expect(diff).to.deep.equal([
+ { i: 'baz', meta: this.meta },
+ { u: 'foobar' },
+ ])
+ })
+
+      it('should insert into the end of (u)nchanged text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
+ op: [{ p: 6, i: 'baz' }],
+ meta: this.meta,
+ })
+ return expect(diff).to.deep.equal([
+ { u: 'foobar' },
+ { i: 'baz', meta: this.meta },
+ ])
+ })
+
+      it('should insert into the middle of (i)nserted text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ i: 'foobar', meta: this.meta }],
+ { op: [{ p: 3, i: 'baz' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([
+ { i: 'foo', meta: this.meta },
+ { i: 'baz', meta: this.meta },
+ { i: 'bar', meta: this.meta },
+ ])
+ })
+
+ return it('should not count deletes in the running length total', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ d: 'deleted', meta: this.meta }, { u: 'foobar' }],
+ { op: [{ p: 3, i: 'baz' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([
+ { d: 'deleted', meta: this.meta },
+ { u: 'foo' },
+ { i: 'baz', meta: this.meta },
+ { u: 'bar' },
+ ])
+ })
+ })
+
+ return describe('a delete', function () {
+ describe('deleting unchanged text', function () {
+ it('should delete from the middle of (u)nchanged text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foobazbar' }],
+ { op: [{ p: 3, d: 'baz' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([
+ { u: 'foo' },
+ { d: 'baz', meta: this.meta },
+ { u: 'bar' },
+ ])
+ })
+
+ it('should delete from the start of (u)nchanged text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foobazbar' }],
+ { op: [{ p: 0, d: 'foo' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([
+ { d: 'foo', meta: this.meta },
+ { u: 'bazbar' },
+ ])
+ })
+
+ it('should delete from the end of (u)nchanged text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foobazbar' }],
+ { op: [{ p: 6, d: 'bar' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([
+ { u: 'foobaz' },
+ { d: 'bar', meta: this.meta },
+ ])
+ })
+
+        return it('should delete across multiple (u)nchanged text parts', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foo' }, { u: 'baz' }, { u: 'bar' }],
+ { op: [{ p: 2, d: 'obazb' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([
+ { u: 'fo' },
+ { d: 'o', meta: this.meta },
+ { d: 'baz', meta: this.meta },
+ { d: 'b', meta: this.meta },
+ { u: 'ar' },
+ ])
+ })
+ })
+
+ describe('deleting inserts', function () {
+ it('should delete from the middle of (i)nserted text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ i: 'foobazbar', meta: this.meta }],
+ { op: [{ p: 3, d: 'baz' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([
+ { i: 'foo', meta: this.meta },
+ { i: 'bar', meta: this.meta },
+ ])
+ })
+
+        it('should delete from the start of (i)nserted text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ i: 'foobazbar', meta: this.meta }],
+ { op: [{ p: 0, d: 'foo' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([{ i: 'bazbar', meta: this.meta }])
+ })
+
+        it('should delete from the end of (i)nserted text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ i: 'foobazbar', meta: this.meta }],
+ { op: [{ p: 6, d: 'bar' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([{ i: 'foobaz', meta: this.meta }])
+ })
+
+        return it('should delete across multiple (u)nchanged and (i)nserted text parts', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foo' }, { i: 'baz', meta: this.meta }, { u: 'bar' }],
+ { op: [{ p: 2, d: 'obazb' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([
+ { u: 'fo' },
+ { d: 'o', meta: this.meta },
+ { d: 'b', meta: this.meta },
+ { u: 'ar' },
+ ])
+ })
+ })
+
+ describe('deleting over existing deletes', function () {
+        return it('should delete across multiple (u)nchanged and (d)eleted text parts', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foo' }, { d: 'baz', meta: this.meta }, { u: 'bar' }],
+ { op: [{ p: 2, d: 'ob' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([
+ { u: 'fo' },
+ { d: 'o', meta: this.meta },
+ { d: 'baz', meta: this.meta },
+ { d: 'b', meta: this.meta },
+ { u: 'ar' },
+ ])
+ })
+ })
+
+ describe("deleting when the text doesn't match", function () {
+ it('should throw an error when deleting from the middle of (u)nchanged text', function () {
+ return expect(() =>
+ this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
+ op: [{ p: 3, d: 'xxx' }],
+ meta: this.meta,
+ })
+ ).to.throw(this.DiffGenerator.ConsistencyError)
+ })
+
+ it('should throw an error when deleting from the start of (u)nchanged text', function () {
+ return expect(() =>
+ this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
+ op: [{ p: 0, d: 'xxx' }],
+ meta: this.meta,
+ })
+ ).to.throw(this.DiffGenerator.ConsistencyError)
+ })
+
+ return it('should throw an error when deleting from the end of (u)nchanged text', function () {
+ return expect(() =>
+ this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
+ op: [{ p: 6, d: 'xxx' }],
+ meta: this.meta,
+ })
+ ).to.throw(this.DiffGenerator.ConsistencyError)
+ })
+ })
+
+ describe('when the last update in the existing diff is a delete', function () {
+ return it('should insert the new update before the delete', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foo' }, { d: 'bar', meta: this.meta }],
+ { op: [{ p: 3, i: 'baz' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([
+ { u: 'foo' },
+ { i: 'baz', meta: this.meta },
+ { d: 'bar', meta: this.meta },
+ ])
+ })
+ })
+
+ return describe('when the only update in the existing diff is a delete', function () {
+ return it('should insert the new update after the delete', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ d: 'bar', meta: this.meta }],
+ { op: [{ p: 0, i: 'baz' }], meta: this.meta }
+ )
+ return expect(diff).to.deep.equal([
+ { d: 'bar', meta: this.meta },
+ { i: 'baz', meta: this.meta },
+ ])
+ })
+ })
+ })
+ })
+})
diff --git a/services/track-changes/test/unit/js/DiffManager/DiffManagerTests.js b/services/track-changes/test/unit/js/DiffManager/DiffManagerTests.js
new file mode 100644
index 0000000000..846ad706c2
--- /dev/null
+++ b/services/track-changes/test/unit/js/DiffManager/DiffManagerTests.js
@@ -0,0 +1,444 @@
+/* eslint-disable
+ camelcase,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/DiffManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('DiffManager', function () {
+ beforeEach(function () {
+ this.DiffManager = SandboxedModule.require(modulePath, {
+ requires: {
+ './UpdatesManager': (this.UpdatesManager = {}),
+ './DocumentUpdaterManager': (this.DocumentUpdaterManager = {}),
+ './DiffGenerator': (this.DiffGenerator = {}),
+ },
+ })
+ this.callback = sinon.stub()
+ this.from = new Date()
+ this.to = new Date(Date.now() + 10000)
+ this.project_id = 'mock-project-id'
+ return (this.doc_id = 'mock-doc-id')
+ })
+
+ describe('getLatestDocAndUpdates', function () {
+ beforeEach(function () {
+ this.content = 'hello world'
+ this.version = 42
+ this.updates = ['mock-update-1', 'mock-update-2']
+
+ this.DocumentUpdaterManager.getDocument = sinon
+ .stub()
+ .callsArgWith(2, null, this.content, this.version)
+ return (this.UpdatesManager.getDocUpdatesWithUserInfo = sinon
+ .stub()
+ .callsArgWith(3, null, this.updates))
+ })
+
+ describe('with a fromVersion', function () {
+ beforeEach(function () {
+ return this.DiffManager.getLatestDocAndUpdates(
+ this.project_id,
+ this.doc_id,
+ this.from,
+ this.callback
+ )
+ })
+
+ it('should get the latest version of the doc', function () {
+ return this.DocumentUpdaterManager.getDocument
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should get the latest updates', function () {
+ return this.UpdatesManager.getDocUpdatesWithUserInfo
+ .calledWith(this.project_id, this.doc_id, { from: this.from })
+ .should.equal(true)
+ })
+
+ return it('should call the callback with the content, version and updates', function () {
+ return this.callback
+ .calledWith(null, this.content, this.version, this.updates)
+ .should.equal(true)
+ })
+ })
+
+ return describe('with no fromVersion', function () {
+ beforeEach(function () {
+ return this.DiffManager.getLatestDocAndUpdates(
+ this.project_id,
+ this.doc_id,
+ null,
+ this.callback
+ )
+ })
+
+ it('should get the latest version of the doc', function () {
+ return this.DocumentUpdaterManager.getDocument
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should not get the latest updates', function () {
+ return this.UpdatesManager.getDocUpdatesWithUserInfo.called.should.equal(
+ false
+ )
+ })
+
+ return it('should call the callback with the content, version and blank updates', function () {
+ return this.callback
+ .calledWith(null, this.content, this.version, [])
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('getDiff', function () {
+ beforeEach(function () {
+ this.content = 'hello world'
+ // Op versions are the version they were applied to, so doc is always one version
+      // ahead.
+ this.version = 43
+ this.updates = [
+ {
+ op: 'mock-4',
+ v: 42,
+ meta: { start_ts: new Date(this.to.getTime() + 20) },
+ },
+ {
+ op: 'mock-3',
+ v: 41,
+ meta: { start_ts: new Date(this.to.getTime() + 10) },
+ },
+ {
+ op: 'mock-2',
+ v: 40,
+ meta: { start_ts: new Date(this.to.getTime() - 10) },
+ },
+ {
+ op: 'mock-1',
+ v: 39,
+ meta: { start_ts: new Date(this.to.getTime() - 20) },
+ },
+ ]
+ this.fromVersion = 39
+ this.toVersion = 40
+ this.diffed_updates = this.updates.slice(2)
+ this.rewound_content = 'rewound-content'
+ return (this.diff = [{ u: 'mock-diff' }])
+ })
+
+ describe('with matching versions', function () {
+ beforeEach(function () {
+ this.DiffManager.getDocumentBeforeVersion = sinon
+ .stub()
+ .callsArgWith(3, null, this.rewound_content, this.updates)
+ this.DiffGenerator.buildDiff = sinon.stub().returns(this.diff)
+ return this.DiffManager.getDiff(
+ this.project_id,
+ this.doc_id,
+ this.fromVersion,
+ this.toVersion,
+ this.callback
+ )
+ })
+
+ it('should get the latest doc and version with all recent updates', function () {
+ return this.DiffManager.getDocumentBeforeVersion
+ .calledWith(this.project_id, this.doc_id, this.fromVersion)
+ .should.equal(true)
+ })
+
+ it('should generate the diff', function () {
+ return this.DiffGenerator.buildDiff
+ .calledWith(
+ this.rewound_content,
+ this.diffed_updates.slice().reverse()
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback with the diff', function () {
+ return this.callback.calledWith(null, this.diff).should.equal(true)
+ })
+ })
+
+ describe('when the updates are inconsistent', function () {
+ beforeEach(function () {
+ this.DiffManager.getLatestDocAndUpdates = sinon
+ .stub()
+ .callsArgWith(3, null, this.content, this.version, this.updates)
+ this.DiffGenerator.buildDiff = sinon
+ .stub()
+ .throws((this.error = new Error('inconsistent!')))
+ this.DiffGenerator.rewindUpdates = sinon.stub()
+ this.DiffManager.getDiff(
+ this.project_id,
+ this.doc_id,
+ this.fromVersion,
+ this.toVersion,
+ this.callback
+ )
+ })
+
+ it('should call the callback with an error', function () {
+        this.callback.calledWith(sinon.match.instanceOf(Error)).should.equal(true)
+ const errorObj = this.callback.args[0][0]
+ expect(errorObj.message).to.include('inconsistent!')
+ })
+ })
+ })
+
+ describe('getDocumentBeforeVersion', function () {
+ beforeEach(function () {
+ this.DiffManager._tryGetDocumentBeforeVersion = sinon.stub()
+ this.document = 'mock-documents'
+ return (this.rewound_updates = 'mock-rewound-updates')
+ })
+
+    describe('successfully', function () {
+ beforeEach(function () {
+ this.DiffManager._tryGetDocumentBeforeVersion.yields(
+ null,
+ this.document,
+ this.rewound_updates
+ )
+ return this.DiffManager.getDocumentBeforeVersion(
+ this.project_id,
+ this.doc_id,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should call _tryGetDocumentBeforeVersion', function () {
+ return this.DiffManager._tryGetDocumentBeforeVersion
+ .calledWith(this.project_id, this.doc_id, this.version)
+ .should.equal(true)
+ })
+
+ return it('should call the callback with the response', function () {
+ return this.callback
+ .calledWith(null, this.document, this.rewound_updates)
+ .should.equal(true)
+ })
+ })
+
+ describe('with a retry needed', function () {
+ beforeEach(function () {
+ let retried = false
+ this.DiffManager._tryGetDocumentBeforeVersion = (
+ project_id,
+ doc_id,
+ version,
+ callback
+ ) => {
+ if (!retried) {
+ retried = true
+ const error = new Error()
+ error.retry = true
+ return callback(error)
+ } else {
+ return callback(null, this.document, this.rewound_updates)
+ }
+ }
+ sinon.spy(this.DiffManager, '_tryGetDocumentBeforeVersion')
+ return this.DiffManager.getDocumentBeforeVersion(
+ this.project_id,
+ this.doc_id,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should call _tryGetDocumentBeforeVersion twice', function () {
+ return this.DiffManager._tryGetDocumentBeforeVersion.calledTwice.should.equal(
+ true
+ )
+ })
+
+ return it('should call the callback with the response', function () {
+ return this.callback
+ .calledWith(null, this.document, this.rewound_updates)
+ .should.equal(true)
+ })
+ })
+
+ describe('with a non-retriable error', function () {
+ beforeEach(function () {
+ this.error = new Error('oops')
+ this.DiffManager._tryGetDocumentBeforeVersion.yields(this.error)
+ return this.DiffManager.getDocumentBeforeVersion(
+ this.project_id,
+ this.doc_id,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should call _tryGetDocumentBeforeVersion once', function () {
+ return this.DiffManager._tryGetDocumentBeforeVersion.calledOnce.should.equal(
+ true
+ )
+ })
+
+ return it('should call the callback with the error', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ return describe('when retry limit is matched', function () {
+ beforeEach(function () {
+ this.error = new Error('oops')
+ this.error.retry = true
+ this.DiffManager._tryGetDocumentBeforeVersion.yields(this.error)
+ return this.DiffManager.getDocumentBeforeVersion(
+ this.project_id,
+ this.doc_id,
+ this.version,
+ this.callback
+ )
+ })
+
+ it('should call _tryGetDocumentBeforeVersion three times (max retries)', function () {
+ return this.DiffManager._tryGetDocumentBeforeVersion.calledThrice.should.equal(
+ true
+ )
+ })
+
+ return it('should call the callback with the error', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+ })
+
+ return describe('_tryGetDocumentBeforeVersion', function () {
+ beforeEach(function () {
+ this.content = 'hello world'
+ // Op versions are the version they were applied to, so doc is always one version
+      // ahead.
+ this.version = 43
+ this.updates = [
+ {
+ op: 'mock-4',
+ v: 42,
+ meta: { start_ts: new Date(this.to.getTime() + 20) },
+ },
+ {
+ op: 'mock-3',
+ v: 41,
+ meta: { start_ts: new Date(this.to.getTime() + 10) },
+ },
+ {
+ op: 'mock-2',
+ v: 40,
+ meta: { start_ts: new Date(this.to.getTime() - 10) },
+ },
+ {
+ op: 'mock-1',
+ v: 39,
+ meta: { start_ts: new Date(this.to.getTime() - 20) },
+ },
+ ]
+ this.fromVersion = 39
+ this.rewound_content = 'rewound-content'
+ return (this.diff = [{ u: 'mock-diff' }])
+ })
+
+ describe('with matching versions', function () {
+ beforeEach(function () {
+ this.DiffManager.getLatestDocAndUpdates = sinon
+ .stub()
+ .callsArgWith(3, null, this.content, this.version, this.updates)
+ this.DiffGenerator.rewindUpdates = sinon.spy((content, updates) => {
+ // the rewindUpdates method reverses the 'updates' array
+ updates.reverse()
+ return this.rewound_content
+ })
+ this.rewindUpdatesWithArgs = this.DiffGenerator.rewindUpdates.withArgs(
+ this.content,
+ this.updates.slice().reverse()
+ )
+ return this.DiffManager._tryGetDocumentBeforeVersion(
+ this.project_id,
+ this.doc_id,
+ this.fromVersion,
+ this.callback
+ )
+ })
+
+ it('should get the latest doc and version with all recent updates', function () {
+ return this.DiffManager.getLatestDocAndUpdates
+ .calledWith(this.project_id, this.doc_id, this.fromVersion)
+ .should.equal(true)
+ })
+
+ it('should rewind the diff', function () {
+ return sinon.assert.calledOnce(this.rewindUpdatesWithArgs)
+ })
+
+ return it('should call the callback with the rewound document and updates', function () {
+ return this.callback
+ .calledWith(null, this.rewound_content, this.updates)
+ .should.equal(true)
+ })
+ })
+
+ describe('with mismatching versions', function () {
+ beforeEach(function () {
+ this.version = 50
+ this.updates = [
+ { op: 'mock-1', v: 40 },
+ { op: 'mock-1', v: 39 },
+ ]
+ this.DiffManager.getLatestDocAndUpdates = sinon
+ .stub()
+ .callsArgWith(3, null, this.content, this.version, this.updates)
+ return this.DiffManager._tryGetDocumentBeforeVersion(
+ this.project_id,
+ this.doc_id,
+ this.fromVersion,
+ this.callback
+ )
+ })
+
+ return it('should call the callback with an error with retry = true set', function () {
+ this.callback.calledOnce.should.equal(true)
+ const error = this.callback.args[0][0]
+ return expect(error.retry).to.equal(true)
+ })
+ })
+
+ return describe('when the updates are inconsistent', function () {
+ beforeEach(function () {
+ this.DiffManager.getLatestDocAndUpdates = sinon
+ .stub()
+ .callsArgWith(3, null, this.content, this.version, this.updates)
+ this.DiffGenerator.rewindUpdates = sinon
+ .stub()
+ .throws((this.error = new Error('inconsistent!')))
+ return this.DiffManager.getDocumentBeforeVersion(
+ this.project_id,
+ this.doc_id,
+ this.fromVersion,
+ this.callback
+ )
+ })
+
+ return it('should call the callback with an error', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+ })
+})
diff --git a/services/track-changes/test/unit/js/DocArchive/MongoAWS.js b/services/track-changes/test/unit/js/DocArchive/MongoAWS.js
new file mode 100644
index 0000000000..72bcaccddf
--- /dev/null
+++ b/services/track-changes/test/unit/js/DocArchive/MongoAWS.js
@@ -0,0 +1,108 @@
+/* eslint-disable
+ handle-callback-err,
+ no-return-assign,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../../app/js/MongoAWS.js'
+const SandboxedModule = require('sandboxed-module')
+const { ObjectId } = require('mongodb')
+const MemoryStream = require('memorystream')
+const zlib = require('zlib')
+
+describe('MongoAWS', function () {
+ beforeEach(function () {
+ this.MongoAWS = SandboxedModule.require(modulePath, {
+ singleOnly: true,
+ requires: {
+ '@overleaf/settings': (this.settings = {
+ trackchanges: {
+ s3: {
+ secret: 's3-secret',
+ key: 's3-key',
+ },
+ stores: {
+ doc_history: 's3-bucket',
+ },
+ },
+ }),
+ child_process: (this.child_process = {}),
+ 'mongo-uri': (this.mongouri = {}),
+ 'aws-sdk': (this.awssdk = {}),
+ fs: (this.fs = {}),
+ 's3-streams': (this.S3S = {}),
+ './mongodb': { db: (this.db = {}), ObjectId },
+ JSONStream: (this.JSONStream = {}),
+ 'readline-stream': (this.readline = sinon.stub()),
+ '@overleaf/metrics': { inc() {} },
+ },
+ })
+
+ this.project_id = ObjectId().toString()
+ this.doc_id = ObjectId().toString()
+ this.pack_id = ObjectId()
+ this.update = { v: 123 }
+ return (this.callback = sinon.stub())
+ })
+
+ describe('archivePack', function () {
+ beforeEach(function (done) {
+ this.awssdk.config = { update: sinon.stub() }
+ this.awssdk.S3 = sinon.stub()
+ this.S3S.WriteStream = () => MemoryStream.createWriteStream()
+ this.db.docHistory = {}
+ this.db.docHistory.findOne = sinon
+ .stub()
+ .callsArgWith(1, null, { pack: 'hello' })
+
+ return this.MongoAWS.archivePack(
+ this.project_id,
+ this.doc_id,
+ this.pack_id,
+ (err, result) => {
+ this.callback()
+ return done()
+ }
+ )
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ return describe('unArchivePack', function () {
+ beforeEach(function (done) {
+ return zlib.gzip('{"pack":"123"}', (err, zbuf) => {
+ this.awssdk.config = { update: sinon.stub() }
+ this.awssdk.S3 = sinon.stub()
+ this.S3S.ReadStream = () =>
+ MemoryStream.createReadStream(zbuf, { readable: true })
+ this.db.docHistory = {}
+ this.db.docHistory.insertOne = sinon
+ .stub()
+ .yields(null, { insertedId: ObjectId() })
+
+ return this.MongoAWS.unArchivePack(
+ this.project_id,
+ this.doc_id,
+ this.pack_id,
+ (err, result) => {
+ this.callback()
+ return done()
+ }
+ )
+ })
+ })
+
+  return it('should call db.docHistory.insertOne', function () {
+ return this.db.docHistory.insertOne.called.should.equal(true)
+ })
+ })
+})
diff --git a/services/track-changes/test/unit/js/DocumentUpdaterManager/DocumentUpdaterManagerTests.js b/services/track-changes/test/unit/js/DocumentUpdaterManager/DocumentUpdaterManagerTests.js
new file mode 100644
index 0000000000..8221f98d06
--- /dev/null
+++ b/services/track-changes/test/unit/js/DocumentUpdaterManager/DocumentUpdaterManagerTests.js
@@ -0,0 +1,199 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/DocumentUpdaterManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('DocumentUpdaterManager', function () {
+ beforeEach(function () {
+ this.DocumentUpdaterManager = SandboxedModule.require(modulePath, {
+ requires: {
+ request: (this.request = {}),
+ '@overleaf/settings': (this.settings = {
+ apis: { documentupdater: { url: 'http://example.com' } },
+ }),
+ },
+ })
+ this.callback = sinon.stub()
+ this.lines = ['one', 'two', 'three']
+ return (this.version = 42)
+ })
+
+ describe('getDocument', function () {
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.body = JSON.stringify({
+ lines: this.lines,
+ version: this.version,
+ ops: [],
+ })
+ this.request.get = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 200 }, this.body)
+ return this.DocumentUpdaterManager.getDocument(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should get the document from the document updater', function () {
+ const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}/doc/${this.doc_id}`
+ return this.request.get.calledWith(url).should.equal(true)
+ })
+
+ return it('should call the callback with the content and version', function () {
+ return this.callback
+ .calledWith(null, this.lines.join('\n'), this.version)
+ .should.equal(true)
+ })
+ })
+
+ describe('when the document updater API returns an error', function () {
+ beforeEach(function () {
+ this.request.get = sinon
+ .stub()
+ .callsArgWith(
+ 1,
+ (this.error = new Error('something went wrong')),
+ null,
+ null
+ )
+ return this.DocumentUpdaterManager.getDocument(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ return it('should return an error to the callback', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ return describe('when the document updater returns a failure error code', function () {
+ beforeEach(function () {
+ this.request.get = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 500 }, '')
+ return this.DocumentUpdaterManager.getDocument(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ return it('should return the callback with an error', function () {
+ return this.callback
+ .calledWith(
+ sinon.match.has(
+ 'message',
+ 'doc updater returned a non-success status code: 500'
+ )
+ )
+ .should.equal(true)
+ })
+ })
+ })
+
+ return describe('setDocument', function () {
+ beforeEach(function () {
+ this.content = 'mock content'
+ return (this.user_id = 'user-id-123')
+ })
+
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.request.post = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 200 })
+ return this.DocumentUpdaterManager.setDocument(
+ this.project_id,
+ this.doc_id,
+ this.content,
+ this.user_id,
+ this.callback
+ )
+ })
+
+ it('should set the document in the document updater', function () {
+ const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}/doc/${this.doc_id}`
+ return this.request.post
+ .calledWith({
+ url,
+ json: {
+ lines: this.content.split('\n'),
+ source: 'restore',
+ user_id: this.user_id,
+ undoing: true,
+ },
+ })
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.calledWith(null).should.equal(true)
+ })
+ })
+
+ describe('when the document updater API returns an error', function () {
+ beforeEach(function () {
+ this.request.post = sinon
+ .stub()
+ .callsArgWith(
+ 1,
+ (this.error = new Error('something went wrong')),
+ null,
+ null
+ )
+ return this.DocumentUpdaterManager.setDocument(
+ this.project_id,
+ this.doc_id,
+ this.content,
+ this.user_id,
+ this.callback
+ )
+ })
+
+ return it('should return an error to the callback', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ return describe('when the document updater returns a failure error code', function () {
+ beforeEach(function () {
+ this.request.post = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 500 }, '')
+ return this.DocumentUpdaterManager.setDocument(
+ this.project_id,
+ this.doc_id,
+ this.content,
+ this.user_id,
+ this.callback
+ )
+ })
+
+ return it('should return the callback with an error', function () {
+ return this.callback
+ .calledWith(
+ sinon.match.has(
+ 'message',
+ 'doc updater returned a non-success status code: 500'
+ )
+ )
+ .should.equal(true)
+ })
+ })
+ })
+})
diff --git a/services/track-changes/test/unit/js/HttpController/HttpControllerTests.js b/services/track-changes/test/unit/js/HttpController/HttpControllerTests.js
new file mode 100644
index 0000000000..6b82dd2b80
--- /dev/null
+++ b/services/track-changes/test/unit/js/HttpController/HttpControllerTests.js
@@ -0,0 +1,199 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/HttpController.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('HttpController', function () {
+ beforeEach(function () {
+ this.HttpController = SandboxedModule.require(modulePath, {
+ singleOnly: true,
+ requires: {
+ './UpdatesManager': (this.UpdatesManager = {}),
+ './DiffManager': (this.DiffManager = {}),
+ './RestoreManager': (this.RestoreManager = {}),
+ './PackManager': (this.PackManager = {}),
+ './DocArchiveManager': (this.DocArchiveManager = {}),
+ './HealthChecker': (this.HealthChecker = {}),
+ },
+ })
+ this.doc_id = 'doc-id-123'
+ this.project_id = 'project-id-123'
+ this.next = sinon.stub()
+ this.user_id = 'mock-user-123'
+ return (this.now = Date.now())
+ })
+
+ describe('flushDoc', function () {
+ beforeEach(function () {
+ this.req = {
+ params: {
+ doc_id: this.doc_id,
+ project_id: this.project_id,
+ },
+ }
+ this.res = { sendStatus: sinon.stub() }
+ this.UpdatesManager.processUncompressedUpdatesWithLock = sinon
+ .stub()
+ .callsArg(2)
+ return this.HttpController.flushDoc(this.req, this.res, this.next)
+ })
+
+ it('should process the updates', function () {
+ return this.UpdatesManager.processUncompressedUpdatesWithLock
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should return a success code', function () {
+ return this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+ })
+
+ describe('flushProject', function () {
+ beforeEach(function () {
+ this.req = {
+ params: {
+ project_id: this.project_id,
+ },
+ }
+ this.res = { sendStatus: sinon.stub() }
+ this.UpdatesManager.processUncompressedUpdatesForProject = sinon
+ .stub()
+ .callsArg(1)
+ return this.HttpController.flushProject(this.req, this.res, this.next)
+ })
+
+ it('should process the updates', function () {
+ return this.UpdatesManager.processUncompressedUpdatesForProject
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ return it('should return a success code', function () {
+ return this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+ })
+
+ describe('getDiff', function () {
+ beforeEach(function () {
+ this.from = 42
+ this.to = 45
+ this.req = {
+ params: {
+ doc_id: this.doc_id,
+ project_id: this.project_id,
+ },
+ query: {
+ from: this.from.toString(),
+ to: this.to.toString(),
+ },
+ }
+ this.res = { json: sinon.stub() }
+ this.diff = [{ u: 'mock-diff' }]
+ this.DiffManager.getDiff = sinon.stub().callsArgWith(4, null, this.diff)
+ return this.HttpController.getDiff(this.req, this.res, this.next)
+ })
+
+ it('should get the diff', function () {
+ return this.DiffManager.getDiff
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ parseInt(this.from, 10),
+ parseInt(this.to, 10)
+ )
+ .should.equal(true)
+ })
+
+ return it('should return the diff', function () {
+ return this.res.json.calledWith({ diff: this.diff }).should.equal(true)
+ })
+ })
+
+ describe('getUpdates', function () {
+ beforeEach(function () {
+ this.before = Date.now()
+ this.nextBeforeTimestamp = this.before - 100
+ this.min_count = 10
+ this.req = {
+ params: {
+ project_id: this.project_id,
+ },
+ query: {
+ before: this.before.toString(),
+ min_count: this.min_count.toString(),
+ },
+ }
+ this.res = { json: sinon.stub() }
+ this.updates = ['mock-summarized-updates']
+ this.UpdatesManager.getSummarizedProjectUpdates = sinon
+ .stub()
+ .callsArgWith(2, null, this.updates, this.nextBeforeTimestamp)
+ return this.HttpController.getUpdates(this.req, this.res, this.next)
+ })
+
+ it('should get the updates', function () {
+ return this.UpdatesManager.getSummarizedProjectUpdates
+ .calledWith(this.project_id, {
+ before: this.before,
+ min_count: this.min_count,
+ })
+ .should.equal(true)
+ })
+
+ return it('should return the formatted updates', function () {
+ return this.res.json
+ .calledWith({
+ updates: this.updates,
+ nextBeforeTimestamp: this.nextBeforeTimestamp,
+ })
+ .should.equal(true)
+ })
+ })
+
+ return describe('RestoreManager', function () {
+ beforeEach(function () {
+ this.version = '42'
+ this.req = {
+ params: {
+ doc_id: this.doc_id,
+ project_id: this.project_id,
+ version: this.version,
+ },
+ headers: {
+ 'x-user-id': this.user_id,
+ },
+ }
+ this.res = { sendStatus: sinon.stub() }
+
+ this.RestoreManager.restoreToBeforeVersion = sinon.stub().callsArg(4)
+ return this.HttpController.restore(this.req, this.res, this.next)
+ })
+
+ it('should restore the document', function () {
+ return this.RestoreManager.restoreToBeforeVersion
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ parseInt(this.version, 10),
+ this.user_id
+ )
+ .should.equal(true)
+ })
+
+ return it('should return a success code', function () {
+ return this.res.sendStatus.calledWith(204).should.equal(true)
+ })
+ })
+})
diff --git a/services/track-changes/test/unit/js/LockManager/LockManagerTests.js b/services/track-changes/test/unit/js/LockManager/LockManagerTests.js
new file mode 100644
index 0000000000..fd76bc0d2d
--- /dev/null
+++ b/services/track-changes/test/unit/js/LockManager/LockManagerTests.js
@@ -0,0 +1,317 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ mocha/no-nested-tests,
+ no-return-assign,
+ no-undef,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/LockManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('LockManager', function () {
+ beforeEach(function () {
+ this.Settings = {
+ redis: {
+ lock: {},
+ },
+ }
+ this.LockManager = SandboxedModule.require(modulePath, {
+ requires: {
+ '@overleaf/redis-wrapper': {
+ createClient: () => {
+ return (this.rclient = { auth: sinon.stub() })
+ },
+ },
+ '@overleaf/settings': this.Settings,
+ },
+ })
+
+ this.key = 'lock-key'
+ return (this.callback = sinon.stub())
+ })
+
+ describe('checkLock', function () {
+ describe('when the lock is taken', function () {
+ beforeEach(function () {
+ this.rclient.exists = sinon.stub().callsArgWith(1, null, '1')
+ return this.LockManager.checkLock(this.key, this.callback)
+ })
+
+ it('should check the lock in redis', function () {
+ return this.rclient.exists.calledWith(this.key).should.equal(true)
+ })
+
+ return it('should return the callback with false', function () {
+ return this.callback.calledWith(null, false).should.equal(true)
+ })
+ })
+
+ return describe('when the lock is free', function () {
+ beforeEach(function () {
+ this.rclient.exists = sinon.stub().callsArgWith(1, null, '0')
+ return this.LockManager.checkLock(this.key, this.callback)
+ })
+
+ return it('should return the callback with true', function () {
+ return this.callback.calledWith(null, true).should.equal(true)
+ })
+ })
+ })
+
+ describe('tryLock', function () {
+ describe('when the lock is taken', function () {
+ beforeEach(function () {
+ this.rclient.set = sinon.stub().callsArgWith(5, null, null)
+ this.LockManager.randomLock = sinon
+ .stub()
+ .returns('locked-random-value')
+ return this.LockManager.tryLock(this.key, this.callback)
+ })
+
+ it('should check the lock in redis', function () {
+ return this.rclient.set
+ .calledWith(
+ this.key,
+ 'locked-random-value',
+ 'EX',
+ this.LockManager.LOCK_TTL,
+ 'NX'
+ )
+ .should.equal(true)
+ })
+
+ return it('should return the callback with false', function () {
+ return this.callback.calledWith(null, false).should.equal(true)
+ })
+ })
+
+ return describe('when the lock is free', function () {
+ beforeEach(function () {
+ this.rclient.set = sinon.stub().callsArgWith(5, null, 'OK')
+ return this.LockManager.tryLock(this.key, this.callback)
+ })
+
+ return it('should return the callback with true', function () {
+ return this.callback.calledWith(null, true).should.equal(true)
+ })
+ })
+ })
+
+ describe('deleteLock', function () {
+    beforeEach(function () {
+      this.rclient.del = sinon.stub().callsArg(1)
+      return this.LockManager.deleteLock(this.key, this.callback)
+    })
+
+    it('should delete the lock in redis', function () {
+      return this.rclient.del.calledWith(this.key).should.equal(true)
+    })
+
+    return it('should call the callback', function () {
+      return this.callback.called.should.equal(true)
+    })
+  })
+
+ describe('getLock', function () {
+ describe('when the lock is not taken', function () {
+ beforeEach(function (done) {
+ this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, true)
+ return this.LockManager.getLock(this.key, (...args) => {
+ this.callback(...Array.from(args || []))
+ return done()
+ })
+ })
+
+ it('should try to get the lock', function () {
+ return this.LockManager.tryLock.calledWith(this.key).should.equal(true)
+ })
+
+ it('should only need to try once', function () {
+ return this.LockManager.tryLock.callCount.should.equal(1)
+ })
+
+ return it('should return the callback', function () {
+ return this.callback.calledWith(null).should.equal(true)
+ })
+ })
+
+ describe('when the lock is initially set', function () {
+ beforeEach(function (done) {
+ const startTime = Date.now()
+ this.LockManager.LOCK_TEST_INTERVAL = 5
+ this.LockManager.tryLock = function (doc_id, callback) {
+ if (callback == null) {
+ callback = function (error, isFree) {}
+ }
+ if (Date.now() - startTime < 100) {
+ return callback(null, false)
+ } else {
+ return callback(null, true)
+ }
+ }
+ sinon.spy(this.LockManager, 'tryLock')
+
+ return this.LockManager.getLock(this.key, (...args) => {
+ this.callback(...Array.from(args || []))
+ return done()
+ })
+ })
+
+ it('should call tryLock multiple times until free', function () {
+ return (this.LockManager.tryLock.callCount > 1).should.equal(true)
+ })
+
+ return it('should return the callback', function () {
+ return this.callback.calledWith(null).should.equal(true)
+ })
+ })
+
+ return describe('when the lock times out', function () {
+ beforeEach(function (done) {
+ const time = Date.now()
+ this.LockManager.MAX_LOCK_WAIT_TIME = 5
+ this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, false)
+ return this.LockManager.getLock(this.key, (...args) => {
+ this.callback(...Array.from(args || []))
+ return done()
+ })
+ })
+
+ return it('should return the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+ })
+
+ return describe('runWithLock', function () {
+ describe('with successful run', function () {
+ beforeEach(function () {
+ this.runner = function (releaseLock) {
+ if (releaseLock == null) {
+ releaseLock = function (error) {}
+ }
+ return releaseLock()
+ }
+ sinon.spy(this, 'runner')
+ this.LockManager.getLock = sinon.stub().callsArg(1)
+ this.LockManager.releaseLock = sinon.stub().callsArg(2)
+ return this.LockManager.runWithLock(
+ this.key,
+ this.runner,
+ this.callback
+ )
+ })
+
+ it('should get the lock', function () {
+ return this.LockManager.getLock.calledWith(this.key).should.equal(true)
+ })
+
+ it('should run the passed function', function () {
+ return this.runner.called.should.equal(true)
+ })
+
+ it('should release the lock', function () {
+ return this.LockManager.releaseLock
+ .calledWith(this.key)
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('when the runner function returns an error', function () {
+ beforeEach(function () {
+ this.error = new Error('oops')
+ this.runner = releaseLock => {
+ if (releaseLock == null) {
+ releaseLock = function (error) {}
+ }
+ return releaseLock(this.error)
+ }
+ sinon.spy(this, 'runner')
+ this.LockManager.getLock = sinon.stub().callsArg(1)
+ this.LockManager.releaseLock = sinon.stub().callsArg(2)
+ return this.LockManager.runWithLock(
+ this.key,
+ this.runner,
+ this.callback
+ )
+ })
+
+ it('should release the lock', function () {
+ return this.LockManager.releaseLock
+ .calledWith(this.key)
+ .should.equal(true)
+ })
+
+ return it('should call the callback with the error', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ return describe('releaseLock', function () {
+ describe('when the lock is current', function () {
+ beforeEach(function () {
+ this.rclient.eval = sinon.stub().yields(null, 1)
+ return this.LockManager.releaseLock(
+ this.key,
+ this.lockValue,
+ this.callback
+ )
+ })
+
+ it('should clear the data from redis', function () {
+ return this.rclient.eval
+ .calledWith(
+ this.LockManager.unlockScript,
+ 1,
+ this.key,
+ this.lockValue
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ return describe('when the lock has expired', function () {
+ beforeEach(function () {
+ this.rclient.eval = sinon.stub().yields(null, 0)
+ return this.LockManager.releaseLock(
+ this.key,
+ this.lockValue,
+ this.callback
+ )
+ })
+
+ return it('should return an error if the lock has expired', function () {
+ return this.callback
+ .calledWith(
+ sinon.match.has('message', 'tried to release timed out lock')
+ )
+ .should.equal(true)
+ })
+ })
+ })
+ })
+})
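
The LockManager tests above pin down a poll-until-free contract: `tryLock` is retried every `LOCK_TEST_INTERVAL` milliseconds, and the caller receives an error once `MAX_LOCK_WAIT_TIME` is exceeded. The sketch below restates that contract in isolation; it is illustrative only and does not reflect the Redis-backed implementation in app/js/LockManager.js.

// Illustrative sketch of the polling behaviour asserted above. `tryLock` is
// any function with the shape tryLock(key, callback(error, gotLock)), as in
// the test doubles; the constants mirror the properties the tests override.
const LOCK_TEST_INTERVAL = 50 // ms between retries
const MAX_LOCK_WAIT_TIME = 10000 // give up after this long

function getLock(key, tryLock, callback) {
  const startTime = Date.now()
  function attempt() {
    if (Date.now() - startTime > MAX_LOCK_WAIT_TIME) {
      return callback(new Error('Timeout: could not acquire lock'))
    }
    tryLock(key, (error, gotLock) => {
      if (error) return callback(error)
      if (gotLock) return callback(null) // lock acquired, hand control back
      setTimeout(attempt, LOCK_TEST_INTERVAL) // lock busy, try again shortly
    })
  }
  attempt()
}
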
diff --git a/services/track-changes/test/unit/js/MongoManager/MongoManagerTests.js b/services/track-changes/test/unit/js/MongoManager/MongoManagerTests.js
new file mode 100644
index 0000000000..6e560e7ab8
--- /dev/null
+++ b/services/track-changes/test/unit/js/MongoManager/MongoManagerTests.js
@@ -0,0 +1,239 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/MongoManager.js'
+const packModulePath = '../../../../app/js/PackManager.js'
+const SandboxedModule = require('sandboxed-module')
+const { ObjectId } = require('mongodb')
+const tk = require('timekeeper')
+
+describe('MongoManager', function () {
+ beforeEach(function () {
+ tk.freeze(new Date())
+ this.MongoManager = SandboxedModule.require(modulePath, {
+ requires: {
+ './mongodb': { db: (this.db = {}), ObjectId },
+ './PackManager': (this.PackManager = {}),
+ '@overleaf/metrics': { timeAsyncMethod() {} },
+ },
+ })
+ this.callback = sinon.stub()
+ this.doc_id = ObjectId().toString()
+ return (this.project_id = ObjectId().toString())
+ })
+
+ afterEach(function () {
+ return tk.reset()
+ })
+
+ describe('getLastCompressedUpdate', function () {
+ beforeEach(function () {
+ this.update = 'mock-update'
+ this.db.docHistory = {}
+ this.db.docHistory.find = sinon.stub().returns(this.db.docHistory)
+ this.db.docHistory.findOne = sinon.stub().returns(this.db.docHistory)
+ this.db.docHistory.sort = sinon.stub().returns(this.db.docHistory)
+ this.db.docHistory.limit = sinon.stub().returns(this.db.docHistory)
+ this.db.docHistory.toArray = sinon
+ .stub()
+ .callsArgWith(0, null, [this.update])
+
+ return this.MongoManager.getLastCompressedUpdate(
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should find the updates for the doc', function () {
+ return this.db.docHistory.find
+ .calledWith({ doc_id: ObjectId(this.doc_id) })
+ .should.equal(true)
+ })
+
+ it('should limit to one result', function () {
+ return this.db.docHistory.limit.calledWith(1).should.equal(true)
+ })
+
+ it('should sort in descending version order', function () {
+ return this.db.docHistory.sort.calledWith({ v: -1 }).should.equal(true)
+ })
+
+ return it('should call the callback with the update', function () {
+ return this.callback.calledWith(null, this.update).should.equal(true)
+ })
+ })
+
+ describe('peekLastCompressedUpdate', function () {
+ describe('when there is no last update', function () {
+ beforeEach(function () {
+ this.PackManager.getLastPackFromIndex = sinon
+ .stub()
+ .callsArgWith(1, null, null)
+ this.MongoManager.getLastCompressedUpdate = sinon
+ .stub()
+ .callsArgWith(1, null, null)
+ return this.MongoManager.peekLastCompressedUpdate(
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should get the last update', function () {
+ return this.MongoManager.getLastCompressedUpdate
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should call the callback with no update', function () {
+ return this.callback.calledWith(null, null).should.equal(true)
+ })
+ })
+
+ describe('when there is an update', function () {
+ beforeEach(function () {
+ this.update = { _id: Object() }
+ this.MongoManager.getLastCompressedUpdate = sinon
+ .stub()
+ .callsArgWith(1, null, this.update)
+ return this.MongoManager.peekLastCompressedUpdate(
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should get the last update', function () {
+ return this.MongoManager.getLastCompressedUpdate
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should call the callback with the update', function () {
+ return this.callback.calledWith(null, this.update).should.equal(true)
+ })
+ })
+
+ return describe('when there is a last update in S3', function () {
+ beforeEach(function () {
+ this.update = { _id: Object(), v: 12345, v_end: 12345, inS3: true }
+ this.PackManager.getLastPackFromIndex = sinon
+ .stub()
+ .callsArgWith(1, null, this.update)
+ this.MongoManager.getLastCompressedUpdate = sinon
+ .stub()
+ .callsArgWith(1, null)
+ return this.MongoManager.peekLastCompressedUpdate(
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should get the last update', function () {
+ return this.MongoManager.getLastCompressedUpdate
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should call the callback with a null update and the correct version', function () {
+ return this.callback
+ .calledWith(null, null, this.update.v_end)
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('backportProjectId', function () {
+ beforeEach(function () {
+ this.db.docHistory = { updateMany: sinon.stub().yields() }
+ return this.MongoManager.backportProjectId(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it("should insert the project_id into all entries for the doc_id which don't have it set", function () {
+ return this.db.docHistory.updateMany
+ .calledWith(
+ {
+ doc_id: ObjectId(this.doc_id),
+ project_id: { $exists: false },
+ },
+ {
+ $set: { project_id: ObjectId(this.project_id) },
+ }
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('getProjectMetaData', function () {
+ beforeEach(function () {
+ this.metadata = { mock: 'metadata' }
+ this.db.projectHistoryMetaData = {
+ findOne: sinon.stub().callsArgWith(1, null, this.metadata),
+ }
+ return this.MongoManager.getProjectMetaData(
+ this.project_id,
+ this.callback
+ )
+ })
+
+ it('should look up the meta data in the db', function () {
+ return this.db.projectHistoryMetaData.findOne
+ .calledWith({ project_id: ObjectId(this.project_id) })
+ .should.equal(true)
+ })
+
+ return it('should return the metadata', function () {
+ return this.callback.calledWith(null, this.metadata).should.equal(true)
+ })
+ })
+
+ return describe('setProjectMetaData', function () {
+ beforeEach(function () {
+ this.metadata = { mock: 'metadata' }
+ this.db.projectHistoryMetaData = {
+ updateOne: sinon.stub().yields(),
+ }
+ return this.MongoManager.setProjectMetaData(
+ this.project_id,
+ this.metadata,
+ this.callback
+ )
+ })
+
+ it('should upsert the metadata into the DB', function () {
+ return this.db.projectHistoryMetaData.updateOne
+ .calledWith(
+ {
+ project_id: ObjectId(this.project_id),
+ },
+ {
+ $set: this.metadata,
+ },
+ {
+ upsert: true,
+ }
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+})
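
The getLastCompressedUpdate tests stub a chained cursor (find → sort → limit → toArray) and assert on the query shape: the newest docHistory entry for a doc, by descending version. A minimal sketch of that query, assuming a connected collection and a callback-style driver as used in the stubs above:

// Sketch only; the service's real query lives in app/js/MongoManager.js.
const { ObjectId } = require('mongodb')

function getLastCompressedUpdate(db, docId, callback) {
  db.docHistory
    .find({ doc_id: ObjectId(docId) }) // all history entries for this doc
    .sort({ v: -1 }) // newest version first
    .limit(1)
    .toArray((error, updates) => {
      if (error) return callback(error)
      callback(null, updates[0] || null) // null when the doc has no history
    })
}
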
diff --git a/services/track-changes/test/unit/js/PackManager/PackManagerTests.js b/services/track-changes/test/unit/js/PackManager/PackManagerTests.js
new file mode 100644
index 0000000000..4f538f2109
--- /dev/null
+++ b/services/track-changes/test/unit/js/PackManager/PackManagerTests.js
@@ -0,0 +1,704 @@
+/* eslint-disable
+ mocha/no-identical-title,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { assert, expect } = require('chai')
+const modulePath = '../../../../app/js/PackManager.js'
+const SandboxedModule = require('sandboxed-module')
+const { ObjectId } = require('mongodb')
+const _ = require('underscore')
+
+const tk = require('timekeeper')
+
+describe('PackManager', function () {
+ beforeEach(function () {
+ tk.freeze(new Date())
+ this.PackManager = SandboxedModule.require(modulePath, {
+ requires: {
+ bson: require('bson'),
+ './mongodb': { db: (this.db = {}), ObjectId },
+ './LockManager': {},
+ './MongoAWS': {},
+ '@overleaf/metrics': { inc() {} },
+ './ProjectIterator': require('../../../../app/js/ProjectIterator.js'), // Cache for speed
+ '@overleaf/settings': {
+ redis: { lock: { key_schema: {} } },
+ },
+ },
+ })
+ this.callback = sinon.stub()
+ this.doc_id = ObjectId().toString()
+ this.project_id = ObjectId().toString()
+ return (this.PackManager.MAX_COUNT = 512)
+ })
+
+ afterEach(function () {
+ return tk.reset()
+ })
+
+ describe('insertCompressedUpdates', function () {
+ beforeEach(function () {
+ this.lastUpdate = {
+ _id: '12345',
+ pack: [
+ { op: 'op-1', meta: 'meta-1', v: 1 },
+ { op: 'op-2', meta: 'meta-2', v: 2 },
+ ],
+ n: 2,
+ sz: 100,
+ }
+ this.newUpdates = [
+ { op: 'op-3', meta: 'meta-3', v: 3 },
+ { op: 'op-4', meta: 'meta-4', v: 4 },
+ ]
+ return (this.db.docHistory = {
+ insertOne: sinon.stub().yields(),
+ insert: sinon.stub().callsArg(1),
+ updateOne: sinon.stub().yields(),
+ findAndModify: sinon.stub().callsArg(1),
+ })
+ })
+
+ describe('with no last update', function () {
+ beforeEach(function () {
+ this.PackManager.insertUpdatesIntoNewPack = sinon.stub().callsArg(4)
+ return this.PackManager.insertCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ null,
+ this.newUpdates,
+ true,
+ this.callback
+ )
+ })
+
+ describe('for a small update', function () {
+ it('should insert the update into a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(this.project_id, this.doc_id, this.newUpdates, true)
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ return describe('for many small updates', function () {
+ beforeEach(function () {
+ this.newUpdates = __range__(0, 2048, true).map(i => ({
+ op: `op-${i}`,
+ meta: `meta-${i}`,
+ v: i,
+ }))
+ return this.PackManager.insertCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ null,
+ this.newUpdates,
+ false,
+ this.callback
+ )
+ })
+
+ it('should insert the initial updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(0, 512),
+ false
+ )
+ .should.equal(true)
+ })
+
+ it('should insert the first set of remaining updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(512, 1024),
+ false
+ )
+ .should.equal(true)
+ })
+
+ it('should insert the second set of remaining updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(1024, 1536),
+ false
+ )
+ .should.equal(true)
+ })
+
+ it('should insert the third set of remaining updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(1536, 2048),
+ false
+ )
+ .should.equal(true)
+ })
+
+ it('should insert the final set of remaining updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(2048, 2049),
+ false
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+ })
+
+ describe('with an existing pack as the last update', function () {
+ beforeEach(function () {
+ this.PackManager.appendUpdatesToExistingPack = sinon.stub().callsArg(5)
+ this.PackManager.insertUpdatesIntoNewPack = sinon.stub().callsArg(4)
+ return this.PackManager.insertCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ this.lastUpdate,
+ this.newUpdates,
+ false,
+ this.callback
+ )
+ })
+
+ describe('for a small update', function () {
+ it('should append the update to the existing pack', function () {
+ return this.PackManager.appendUpdatesToExistingPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.lastUpdate,
+ this.newUpdates,
+ false
+ )
+ .should.equal(true)
+ })
+ it('should not insert any new packs', function () {
+ return this.PackManager.insertUpdatesIntoNewPack.called.should.equal(
+ false
+ )
+ })
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('for many small updates', function () {
+ beforeEach(function () {
+ this.newUpdates = __range__(0, 2048, true).map(i => ({
+ op: `op-${i}`,
+ meta: `meta-${i}`,
+ v: i,
+ }))
+ return this.PackManager.insertCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ this.lastUpdate,
+ this.newUpdates,
+ false,
+ this.callback
+ )
+ })
+
+ it('should append the initial updates to the existing pack', function () {
+ return this.PackManager.appendUpdatesToExistingPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.lastUpdate,
+ this.newUpdates.slice(0, 510),
+ false
+ )
+ .should.equal(true)
+ })
+
+ it('should insert the first set of remaining updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(510, 1022),
+ false
+ )
+ .should.equal(true)
+ })
+
+ it('should insert the second set of remaining updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(1022, 1534),
+ false
+ )
+ .should.equal(true)
+ })
+
+ it('should insert the third set of remaining updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(1534, 2046),
+ false
+ )
+ .should.equal(true)
+ })
+
+ it('should insert the final set of remaining updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(2046, 2049),
+ false
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ return describe('for many big updates', function () {
+ beforeEach(function () {
+ const longString = __range__(
+ 0,
+ 0.75 * this.PackManager.MAX_SIZE,
+ true
+ )
+ .map(j => 'a')
+ .join('')
+ this.newUpdates = [0, 1, 2, 3, 4].map(i => ({
+ op: `op-${i}-${longString}`,
+ meta: `meta-${i}`,
+ v: i,
+ }))
+ return this.PackManager.insertCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ this.lastUpdate,
+ this.newUpdates,
+ false,
+ this.callback
+ )
+ })
+
+ it('should append the initial updates to the existing pack', function () {
+ return this.PackManager.appendUpdatesToExistingPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.lastUpdate,
+ this.newUpdates.slice(0, 1),
+ false
+ )
+ .should.equal(true)
+ })
+
+ it('should insert the first set of remaining updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(1, 2),
+ false
+ )
+ .should.equal(true)
+ })
+
+ it('should insert the second set of remaining updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(2, 3),
+ false
+ )
+ .should.equal(true)
+ })
+
+ it('should insert the third set of remaining updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(3, 4),
+ false
+ )
+ .should.equal(true)
+ })
+
+ it('should insert the final set of remaining updates as a new pack', function () {
+ return this.PackManager.insertUpdatesIntoNewPack
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.newUpdates.slice(4, 5),
+ false
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+ })
+
+ describe('flushCompressedUpdates', function () {
+ return describe('when there is no previous update', function () {
+ beforeEach(function () {
+ return this.PackManager.flushCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ null,
+ this.newUpdates,
+ true,
+ this.callback
+ )
+ })
+
+ return describe('for a small update that will expire', function () {
+ it('should insert the update into mongo', function () {
+ return this.db.docHistory.insertOne
+ .calledWithMatch({
+ pack: this.newUpdates,
+ project_id: ObjectId(this.project_id),
+ doc_id: ObjectId(this.doc_id),
+ n: this.newUpdates.length,
+ v: this.newUpdates[0].v,
+ v_end: this.newUpdates[this.newUpdates.length - 1].v,
+ })
+ .should.equal(true)
+ })
+
+ it('should set an expiry time in the future', function () {
+ return this.db.docHistory.insertOne
+ .calledWithMatch({
+ expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000),
+ })
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+ })
+ })
+
+ describe('when there is a recent previous update in mongo that expires', function () {
+ beforeEach(function () {
+ this.lastUpdate = {
+ _id: '12345',
+ pack: [
+ { op: 'op-1', meta: 'meta-1', v: 1 },
+ { op: 'op-2', meta: 'meta-2', v: 2 },
+ ],
+ n: 2,
+ sz: 100,
+ meta: { start_ts: Date.now() - 6 * 3600 * 1000 },
+ expiresAt: new Date(Date.now()),
+ }
+
+ return this.PackManager.flushCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ this.lastUpdate,
+ this.newUpdates,
+ true,
+ this.callback
+ )
+ })
+
+ return describe('for a small update that will expire', function () {
+ it('should append the update in mongo', function () {
+ return this.db.docHistory.updateOne
+ .calledWithMatch(
+ { _id: this.lastUpdate._id },
+ {
+ $push: { pack: { $each: this.newUpdates } },
+ $set: { v_end: this.newUpdates[this.newUpdates.length - 1].v },
+ }
+ )
+ .should.equal(true)
+ })
+
+ it('should set an expiry time in the future', function () {
+ return this.db.docHistory.updateOne
+ .calledWithMatch(sinon.match.any, {
+ $set: { expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000) },
+ })
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+ })
+
+ describe('when there is a recent previous update in mongo that expires but the new update will not expire', function () {
+ beforeEach(function () {
+ this.PackManager.updateIndex = sinon.stub().callsArg(2)
+
+ this.lastUpdate = {
+ _id: '12345',
+ pack: [
+ { op: 'op-1', meta: 'meta-1', v: 1 },
+ { op: 'op-2', meta: 'meta-2', v: 2 },
+ ],
+ n: 2,
+ sz: 100,
+ meta: { start_ts: Date.now() - 6 * 3600 * 1000 },
+ expiresAt: new Date(Date.now()),
+ }
+
+ return this.PackManager.flushCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ this.lastUpdate,
+ this.newUpdates,
+ false,
+ this.callback
+ )
+ })
+
+ return describe('for a small update that will not expire', function () {
+ it('should insert the update into mongo', function () {
+ return this.db.docHistory.insertOne
+ .calledWithMatch({
+ pack: this.newUpdates,
+ project_id: ObjectId(this.project_id),
+ doc_id: ObjectId(this.doc_id),
+ n: this.newUpdates.length,
+ v: this.newUpdates[0].v,
+ v_end: this.newUpdates[this.newUpdates.length - 1].v,
+ })
+ .should.equal(true)
+ })
+
+ it('should not set any expiry time', function () {
+ return this.db.docHistory.insertOne
+ .neverCalledWithMatch(sinon.match.has('expiresAt'))
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+ })
+
+ return describe('when there is an old previous update in mongo', function () {
+ beforeEach(function () {
+ this.lastUpdate = {
+ _id: '12345',
+ pack: [
+ { op: 'op-1', meta: 'meta-1', v: 1 },
+ { op: 'op-2', meta: 'meta-2', v: 2 },
+ ],
+ n: 2,
+ sz: 100,
+ meta: { start_ts: Date.now() - 30 * 24 * 3600 * 1000 },
+ expiresAt: new Date(Date.now() - 30 * 24 * 3600 * 1000),
+ }
+
+ return this.PackManager.flushCompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ this.lastUpdate,
+ this.newUpdates,
+ true,
+ this.callback
+ )
+ })
+
+ return describe('for a small update that will expire', function () {
+ it('should insert the update into mongo', function () {
+ return this.db.docHistory.insertOne
+ .calledWithMatch({
+ pack: this.newUpdates,
+ project_id: ObjectId(this.project_id),
+ doc_id: ObjectId(this.doc_id),
+ n: this.newUpdates.length,
+ v: this.newUpdates[0].v,
+ v_end: this.newUpdates[this.newUpdates.length - 1].v,
+ })
+ .should.equal(true)
+ })
+
+ it('should set an expiry time in the future', function () {
+ return this.db.docHistory.insertOne
+ .calledWithMatch({
+ expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000),
+ })
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+ })
+ })
+
+ describe('getOpsByVersionRange', function () {})
+
+ describe('loadPacksByVersionRange', function () {})
+
+ describe('fetchPacksIfNeeded', function () {})
+
+ describe('makeProjectIterator', function () {})
+
+ describe('getPackById', function () {})
+
+ describe('increaseTTL', function () {})
+
+ describe('getIndex', function () {})
+
+ describe('getPackFromIndex', function () {})
+ // getLastPackFromIndex:
+ // getIndexWithKeys
+ // initialiseIndex
+ // updateIndex
+ // findCompletedPacks
+ // findUnindexedPacks
+ // insertPacksIntoIndexWithLock
+ // _insertPacksIntoIndex
+ // archivePack
+ // checkArchivedPack
+ // processOldPack
+ // updateIndexIfNeeded
+ // findUnarchivedPacks
+
+ return describe('checkArchiveNotInProgress', function () {
+ describe('when an archive is in progress', function () {
+ beforeEach(function () {
+ this.db.docHistoryIndex = {
+ findOne: sinon.stub().callsArgWith(2, null, { inS3: false }),
+ }
+ return this.PackManager.checkArchiveNotInProgress(
+ this.project_id,
+ this.doc_id,
+ this.pack_id,
+ this.callback
+ )
+ })
+ it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ return it('should return an error', function () {
+ return this.callback
+ .calledWith(sinon.match.has('message'))
+ .should.equal(true)
+ })
+ })
+
+ describe('when an archive is completed', function () {
+ beforeEach(function () {
+ this.db.docHistoryIndex = {
+ findOne: sinon.stub().callsArgWith(2, null, { inS3: true }),
+ }
+ return this.PackManager.checkArchiveNotInProgress(
+ this.project_id,
+ this.doc_id,
+ this.pack_id,
+ this.callback
+ )
+ })
+ it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ return it('should return an error', function () {
+ return this.callback
+ .calledWith(sinon.match.has('message'))
+ .should.equal(true)
+ })
+ })
+
+ return describe('when the archive has not started or completed', function () {
+ beforeEach(function () {
+ this.db.docHistoryIndex = {
+ findOne: sinon.stub().callsArgWith(2, null, {}),
+ }
+ return this.PackManager.checkArchiveNotInProgress(
+ this.project_id,
+ this.doc_id,
+ this.pack_id,
+ this.callback
+ )
+ })
+ it('should call the callback with no error', function () {
+ return this.callback.called.should.equal(true)
+ })
+ return it('should return with no error', function () {
+ return (typeof this.callback.lastCall.args[0]).should.equal('undefined')
+ })
+ })
+ })
+})
+
+// describe "setTTLOnArchivedPack", ->
+// beforeEach ->
+// @pack_id = "somepackid"
+// @onedayinms = 86400000
+// @db.docHistory =
+// findAndModify : sinon.stub().callsArgWith(1)
+
+// it "should set expires to 1 day", (done)->
+// #@PackManager._getOneDayInFutureWithRandomDelay = sinon.stub().returns(@onedayinms)
+// @PackManager.setTTLOnArchivedPack @project_id, @doc_id, @pack_id, =>
+// args = @db.docHistory.findAndModify.args[0][0]
+// args.query._id.should.equal @pack_id
+// args.update['$set'].expiresAt.should.equal @onedayinms
+// done()
+
+// describe "_getOneDayInFutureWithRandomDelay", ->
+// beforeEach ->
+// @onedayinms = 86400000
+// @thirtyMins = 1000 * 60 * 30
+
+// it "should give 1 day + 30 mins random time", (done)->
+// loops = 10000
+// while --loops > 0
+// randomDelay = @PackManager._getOneDayInFutureWithRandomDelay() - new Date(Date.now() + @onedayinms)
+// randomDelay.should.be.above(0)
+// randomDelay.should.be.below(@thirtyMins + 1)
+// done()
+
+function __range__(left, right, inclusive) {
+ const range = []
+ const ascending = left < right
+ const end = !inclusive ? right : ascending ? right + 1 : right - 1
+ for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) {
+ range.push(i)
+ }
+ return range
+}
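
The "many small updates" cases above all follow from one piece of arithmetic: a pack holds at most MAX_COUNT (512) ops, so a batch is split into a first chunk that tops up the last pack and further chunks of 512 that each become a new pack. A standalone sketch of that chunking, which reproduces the slice boundaries asserted above (the real PackManager also enforces a byte-size limit, not modelled here):

// Illustrative chunking only; names and signature are assumptions.
function splitIntoChunks(updates, existingCount, maxCount = 512) {
  // room left in the pack the new updates would be appended to
  const headroom = Math.max(maxCount - existingCount, 0)
  const chunks = []
  if (headroom > 0) {
    chunks.push(updates.slice(0, headroom)) // first chunk tops up (or starts) a pack
  }
  for (let i = headroom; i < updates.length; i += maxCount) {
    chunks.push(updates.slice(i, i + maxCount)) // each further chunk is a new pack
  }
  return chunks
}

// 2049 updates, last pack already holding 2 ops -> chunks of 510, 512, 512, 512, 3
// 2049 updates, no last pack                    -> chunks of 512, 512, 512, 512, 1
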
diff --git a/services/track-changes/test/unit/js/RedisManager/RedisManagerTests.js b/services/track-changes/test/unit/js/RedisManager/RedisManagerTests.js
new file mode 100644
index 0000000000..f0c9b0f3d5
--- /dev/null
+++ b/services/track-changes/test/unit/js/RedisManager/RedisManagerTests.js
@@ -0,0 +1,164 @@
+/* eslint-disable
+ camelcase,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/RedisManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('RedisManager', function () {
+ beforeEach(function () {
+ this.RedisManager = SandboxedModule.require(modulePath, {
+ requires: {
+ '@overleaf/redis-wrapper': {
+ createClient: () => {
+ return (this.rclient = {
+ auth: sinon.stub(),
+ multi: () => this.rclient,
+ })
+ },
+ },
+ '@overleaf/settings': {
+ redis: {
+ history: {
+ key_schema: {
+ uncompressedHistoryOps({ doc_id }) {
+ return `UncompressedHistoryOps:${doc_id}`
+ },
+ docsWithHistoryOps({ project_id }) {
+ return `DocsWithHistoryOps:${project_id}`
+ },
+ },
+ },
+ },
+ },
+ },
+ })
+ this.doc_id = 'doc-id-123'
+ this.project_id = 'project-id-123'
+ this.batchSize = 100
+ return (this.callback = sinon.stub())
+ })
+
+ describe('getOldestDocUpdates', function () {
+ beforeEach(function () {
+ this.rawUpdates = [
+ { v: 42, op: 'mock-op-42' },
+ { v: 45, op: 'mock-op-45' },
+ ]
+ this.jsonUpdates = Array.from(this.rawUpdates).map(update =>
+ JSON.stringify(update)
+ )
+ this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonUpdates)
+ return this.RedisManager.getOldestDocUpdates(
+ this.doc_id,
+ this.batchSize,
+ this.callback
+ )
+ })
+
+ it('should read the updates from redis', function () {
+ return this.rclient.lrange
+ .calledWith(
+ `UncompressedHistoryOps:${this.doc_id}`,
+ 0,
+ this.batchSize - 1
+ )
+ .should.equal(true)
+ })
+
+ it('should call the callback with the unparsed ops', function () {
+ return this.callback.calledWith(null, this.jsonUpdates).should.equal(true)
+ })
+
+ describe('expandDocUpdates', function () {
+ beforeEach(function () {
+ return this.RedisManager.expandDocUpdates(
+ this.jsonUpdates,
+ this.callback
+ )
+ })
+
+ return it('should call the callback with the parsed ops', function () {
+ return this.callback
+ .calledWith(null, this.rawUpdates)
+ .should.equal(true)
+ })
+ })
+
+ return describe('deleteAppliedDocUpdates', function () {
+ beforeEach(function () {
+ this.rclient.lrem = sinon.stub()
+ this.rclient.srem = sinon.stub()
+ this.rclient.exec = sinon.stub().callsArgWith(0)
+ return this.RedisManager.deleteAppliedDocUpdates(
+ this.project_id,
+ this.doc_id,
+ this.jsonUpdates,
+ this.callback
+ )
+ })
+
+ it('should delete the first update from redis', function () {
+ return this.rclient.lrem
+ .calledWith(
+ `UncompressedHistoryOps:${this.doc_id}`,
+ 1,
+ this.jsonUpdates[0]
+ )
+ .should.equal(true)
+ })
+
+ it('should delete the second update from redis', function () {
+ return this.rclient.lrem
+ .calledWith(
+ `UncompressedHistoryOps:${this.doc_id}`,
+ 1,
+ this.jsonUpdates[1]
+ )
+ .should.equal(true)
+ })
+
+ it('should delete the doc from the set of docs with history ops', function () {
+ return this.rclient.srem
+ .calledWith(`DocsWithHistoryOps:${this.project_id}`, this.doc_id)
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+ })
+
+ return describe('getDocIdsWithHistoryOps', function () {
+ beforeEach(function () {
+ this.doc_ids = ['mock-id-1', 'mock-id-2']
+ this.rclient.smembers = sinon.stub().callsArgWith(1, null, this.doc_ids)
+ return this.RedisManager.getDocIdsWithHistoryOps(
+ this.project_id,
+ this.callback
+ )
+ })
+
+ it('should read the doc_ids from redis', function () {
+ return this.rclient.smembers
+ .calledWith(`DocsWithHistoryOps:${this.project_id}`)
+ .should.equal(true)
+ })
+
+ return it('should call the callback with the doc_ids', function () {
+ return this.callback.calledWith(null, this.doc_ids).should.equal(true)
+ })
+ })
+})
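
The RedisManager tests assume a simple key schema: a per-doc list of JSON-encoded updates and a per-project set of doc ids. A sketch of the read path they describe, assuming an ioredis-style client that accepts trailing callbacks, as the stubs above do; the actual module is app/js/RedisManager.js.

// Illustrative only; key names mirror the key_schema stubbed in the tests.
function getOldestDocUpdates(rclient, docId, batchSize, callback) {
  const key = `UncompressedHistoryOps:${docId}`
  // oldest entries sit at the head of the list
  rclient.lrange(key, 0, batchSize - 1, callback)
}

function expandDocUpdates(jsonUpdates, callback) {
  let updates
  try {
    updates = jsonUpdates.map(u => JSON.parse(u)) // raw strings -> op objects
  } catch (error) {
    return callback(error)
  }
  callback(null, updates)
}
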
diff --git a/services/track-changes/test/unit/js/RestoreManager/RestoreManagerTests.js b/services/track-changes/test/unit/js/RestoreManager/RestoreManagerTests.js
new file mode 100644
index 0000000000..9047946405
--- /dev/null
+++ b/services/track-changes/test/unit/js/RestoreManager/RestoreManagerTests.js
@@ -0,0 +1,64 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/RestoreManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('RestoreManager', function () {
+ beforeEach(function () {
+ this.RestoreManager = SandboxedModule.require(modulePath, {
+ requires: {
+ './DocumentUpdaterManager': (this.DocumentUpdaterManager = {}),
+ './DiffManager': (this.DiffManager = {}),
+ },
+ })
+ this.callback = sinon.stub()
+ this.project_id = 'mock-project-id'
+ this.doc_id = 'mock-doc-id'
+ this.user_id = 'mock-user-id'
+ return (this.version = 42)
+ })
+
+ return describe('restoreToBeforeVersion', function () {
+ beforeEach(function () {
+ this.content = 'mock content'
+ this.DocumentUpdaterManager.setDocument = sinon.stub().callsArg(4)
+ this.DiffManager.getDocumentBeforeVersion = sinon
+ .stub()
+ .callsArgWith(3, null, this.content)
+ return this.RestoreManager.restoreToBeforeVersion(
+ this.project_id,
+ this.doc_id,
+ this.version,
+ this.user_id,
+ this.callback
+ )
+ })
+
+ it('should get the content before the requested version', function () {
+ return this.DiffManager.getDocumentBeforeVersion
+ .calledWith(this.project_id, this.doc_id, this.version)
+ .should.equal(true)
+ })
+
+ it('should set the document in the document updater', function () {
+ return this.DocumentUpdaterManager.setDocument
+ .calledWith(this.project_id, this.doc_id, this.content, this.user_id)
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+})
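
restoreToBeforeVersion is asserted above to be a two-step pipeline: fetch the document content as it was before the requested version, then write that content back through the document updater. A hedged restatement of that wiring, with the collaborators passed in explicitly rather than required as modules:

// Sketch only; the real module is app/js/RestoreManager.js.
function restoreToBeforeVersion(deps, projectId, docId, version, userId, callback) {
  deps.DiffManager.getDocumentBeforeVersion(
    projectId,
    docId,
    version,
    (error, content) => {
      if (error) return callback(error)
      // push the recovered content back as an edit attributed to the user
      deps.DocumentUpdaterManager.setDocument(projectId, docId, content, userId, callback)
    }
  )
}
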
diff --git a/services/track-changes/test/unit/js/UpdateCompressor/UpdateCompressorTests.js b/services/track-changes/test/unit/js/UpdateCompressor/UpdateCompressorTests.js
new file mode 100644
index 0000000000..cd7c6c4b76
--- /dev/null
+++ b/services/track-changes/test/unit/js/UpdateCompressor/UpdateCompressorTests.js
@@ -0,0 +1,848 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/UpdateCompressor.js'
+const SandboxedModule = require('sandboxed-module')
+
+const bigstring = __range__(0, 2 * 1024 * 1024, true)
+ .map(i => 'a')
+ .join('')
+const mediumstring = __range__(0, 1024 * 1024, true)
+ .map(j => 'a')
+ .join('')
+
+describe('UpdateCompressor', function () {
+ beforeEach(function () {
+ this.UpdateCompressor = SandboxedModule.require(modulePath, {
+ requires: {
+ '../lib/diff_match_patch': require('../../../../app/lib/diff_match_patch'),
+ },
+ })
+ this.user_id = 'user-id-1'
+ this.other_user_id = 'user-id-2'
+ this.ts1 = Date.now()
+ return (this.ts2 = Date.now() + 1000)
+ })
+
+ describe('convertToSingleOpUpdates', function () {
+ it('should split grouped updates into individual updates', function () {
+ return expect(
+ this.UpdateCompressor.convertToSingleOpUpdates([
+ {
+ op: [
+ (this.op1 = { p: 0, i: 'Foo' }),
+ (this.op2 = { p: 6, i: 'bar' }),
+ ],
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: [(this.op3 = { p: 10, i: 'baz' })],
+ meta: { ts: this.ts2, user_id: this.other_user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: this.op1,
+ meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: this.op2,
+ meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: this.op3,
+ meta: {
+ start_ts: this.ts2,
+ end_ts: this.ts2,
+ user_id: this.other_user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ it('should return no-op updates when the op list is empty', function () {
+ return expect(
+ this.UpdateCompressor.convertToSingleOpUpdates([
+ {
+ op: [],
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: this.UpdateCompressor.NOOP,
+ meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ ])
+ })
+
+ return it('should ignore comment ops', function () {
+ return expect(
+ this.UpdateCompressor.convertToSingleOpUpdates([
+ {
+ op: [
+ (this.op1 = { p: 0, i: 'Foo' }),
+ (this.op2 = { p: 9, c: 'baz' }),
+ (this.op3 = { p: 6, i: 'bar' }),
+ ],
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: this.op1,
+ meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: this.op3,
+ meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ ])
+ })
+ })
+
+ describe('concatUpdatesWithSameVersion', function () {
+ it('should concat updates with the same version', function () {
+ return expect(
+ this.UpdateCompressor.concatUpdatesWithSameVersion([
+ {
+ op: (this.op1 = { p: 0, i: 'Foo' }),
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ {
+ op: (this.op2 = { p: 6, i: 'bar' }),
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ {
+ op: (this.op3 = { p: 10, i: 'baz' }),
+ meta: {
+ start_ts: this.ts2,
+ end_ts: this.ts2,
+ user_id: this.other_user_id,
+ },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: [this.op1, this.op2],
+ meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: [this.op3],
+ meta: {
+ start_ts: this.ts2,
+ end_ts: this.ts2,
+ user_id: this.other_user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ return it('should turn a noop into an empty op', function () {
+ return expect(
+ this.UpdateCompressor.concatUpdatesWithSameVersion([
+ {
+ op: this.UpdateCompressor.NOOP,
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: [],
+ meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ ])
+ })
+ })
+
+ describe('compress', function () {
+ describe('insert - insert', function () {
+ it('should append one insert to the other', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, i: 'foo' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 6, i: 'bar' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, i: 'foobar' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ it('should insert one insert inside the other', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, i: 'foo' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 5, i: 'bar' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, i: 'fobaro' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ it('should not append separated inserts', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, i: 'foo' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 9, i: 'bar' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, i: 'foo' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ {
+ op: { p: 9, i: 'bar' },
+ meta: {
+ start_ts: this.ts2,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ it('should not append inserts that are too big (second op)', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, i: 'foo' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 6, i: bigstring },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, i: 'foo' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ {
+ op: { p: 6, i: bigstring },
+ meta: {
+ start_ts: this.ts2,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ it('should not append inserts that are too big (first op)', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, i: bigstring },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 3 + bigstring.length, i: 'bar' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, i: bigstring },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ {
+ op: { p: 3 + bigstring.length, i: 'bar' },
+ meta: {
+ start_ts: this.ts2,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ return it('should not append inserts that are too big (first and second op)', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, i: mediumstring },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 3 + mediumstring.length, i: mediumstring },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, i: mediumstring },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ {
+ op: { p: 3 + mediumstring.length, i: mediumstring },
+ meta: {
+ start_ts: this.ts2,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+ })
+
+ describe('delete - delete', function () {
+ it('should append one delete to the other', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, d: 'foo' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 3, d: 'bar' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, d: 'foobar' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ it('should insert one delete inside the other', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, d: 'foo' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 1, d: 'bar' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 1, d: 'bafoor' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ return it('should not append separated deletes', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, d: 'foo' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 9, d: 'bar' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, d: 'foo' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ {
+ op: { p: 9, d: 'bar' },
+ meta: {
+ start_ts: this.ts2,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+ })
+
+ describe('insert - delete', function () {
+ it('should undo a previous insert', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, i: 'foo' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 5, d: 'o' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, i: 'fo' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ it('should remove part of an insert from the middle', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, i: 'fobaro' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 5, d: 'bar' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, i: 'foo' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ it('should cancel out two opposite updates', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, i: 'foo' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 3, d: 'foo' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, i: '' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ it('should not combine separated updates', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, i: 'foo' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 9, d: 'bar' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, i: 'foo' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ {
+ op: { p: 9, d: 'bar' },
+ meta: {
+ start_ts: this.ts2,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ return it('should not combine updates with overlap beyond the end', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, i: 'foobar' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 6, d: 'bardle' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, i: 'foobar' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ {
+ op: { p: 6, d: 'bardle' },
+ meta: {
+ start_ts: this.ts2,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+ })
+
+ describe('delete - insert', function () {
+ it('should do a diff of the content', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, d: 'one two three four five six seven eight' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 3, i: 'one 2 three four five six seven eight' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 7, d: 'two' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ {
+ op: { p: 7, i: '2' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+
+ return it('should return a no-op if the delete and insert are the same', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: { p: 3, d: 'one two three four five six seven eight' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 3, i: 'one two three four five six seven eight' },
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: { p: 3, i: '' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+ })
+
+ describe('noop - insert', function () {
+ return it('should leave them untouched', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: this.UpdateCompressor.NOOP,
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 6, i: 'bar' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: this.UpdateCompressor.NOOP,
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ {
+ op: { p: 6, i: 'bar' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+ })
+
+ return describe('noop - delete', function () {
+ return it('should leave them untouched', function () {
+ return expect(
+ this.UpdateCompressor.compressUpdates([
+ {
+ op: this.UpdateCompressor.NOOP,
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 42,
+ },
+ {
+ op: { p: 6, d: 'bar' },
+ meta: { ts: this.ts1, user_id: this.user_id },
+ v: 43,
+ },
+ ])
+ ).to.deep.equal([
+ {
+ op: this.UpdateCompressor.NOOP,
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ {
+ op: { p: 6, d: 'bar' },
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+ })
+ })
+
+ return describe('compressRawUpdates', function () {
+ return describe('merging in-place with an array op', function () {
+ return it('should not change the existing last updates', function () {
+ return expect(
+ this.UpdateCompressor.compressRawUpdates(
+ {
+ op: [
+ { p: 1000, d: 'hello' },
+ { p: 1000, i: 'HELLO()' },
+ ],
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ [
+ {
+ op: [{ p: 1006, i: 'WORLD' }],
+ meta: { ts: this.ts2, user_id: this.user_id },
+ v: 43,
+ },
+ ]
+ )
+ ).to.deep.equal([
+ {
+ op: [
+ { p: 1000, d: 'hello' },
+ { p: 1000, i: 'HELLO()' },
+ ],
+ meta: {
+ start_ts: this.ts1,
+ end_ts: this.ts1,
+ user_id: this.user_id,
+ },
+ v: 42,
+ },
+ {
+ op: [{ p: 1006, i: 'WORLD' }],
+ meta: {
+ start_ts: this.ts2,
+ end_ts: this.ts2,
+ user_id: this.user_id,
+ },
+ v: 43,
+ },
+ ])
+ })
+ })
+ })
+})
+
+function __range__(left, right, inclusive) {
+ const range = []
+ const ascending = left < right
+ const end = !inclusive ? right : ascending ? right + 1 : right - 1
+ for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) {
+ range.push(i)
+ }
+ return range
+}
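
Most of the compress cases above reduce to a handful of pairwise merge rules. The insert + insert rule, for example, splices the second insert into the first when it lands inside or immediately after it and both come from the same user. A sketch of just that rule, operating on updates whose meta has already been normalised to start_ts/end_ts; size limits and the other op pairings are left to app/js/UpdateCompressor.js.

// Illustrative only: returns the merged update, or null to keep them separate.
function mergeInserts(first, second) {
  const within =
    second.op.p >= first.op.p && second.op.p <= first.op.p + first.op.i.length
  if (!within || first.meta.user_id !== second.meta.user_id) {
    return null
  }
  const offset = second.op.p - first.op.p
  return {
    op: {
      p: first.op.p,
      // splice the second insert into the first at the overlap point
      i: first.op.i.slice(0, offset) + second.op.i + first.op.i.slice(offset),
    },
    meta: {
      start_ts: first.meta.start_ts,
      end_ts: second.meta.end_ts,
      user_id: first.meta.user_id,
    },
    v: second.v,
  }
}

// e.g. { p: 3, i: 'foo' } then { p: 5, i: 'bar' } -> { p: 3, i: 'fobaro' }
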
diff --git a/services/track-changes/test/unit/js/UpdateTrimmer/UpdateTrimmerTests.js b/services/track-changes/test/unit/js/UpdateTrimmer/UpdateTrimmerTests.js
new file mode 100644
index 0000000000..a49554d9bb
--- /dev/null
+++ b/services/track-changes/test/unit/js/UpdateTrimmer/UpdateTrimmerTests.js
@@ -0,0 +1,182 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/UpdateTrimmer.js'
+const SandboxedModule = require('sandboxed-module')
+const tk = require('timekeeper')
+
+describe('UpdateTrimmer', function () {
+ beforeEach(function () {
+ this.now = new Date()
+ tk.freeze(this.now)
+
+ this.UpdateTrimmer = SandboxedModule.require(modulePath, {
+ requires: {
+ './WebApiManager': (this.WebApiManager = {}),
+ './MongoManager': (this.MongoManager = {}),
+ },
+ })
+
+ this.callback = sinon.stub()
+ return (this.project_id = 'mock-project-id')
+ })
+
+ afterEach(function () {
+ return tk.reset()
+ })
+
+ return describe('shouldTrimUpdates', function () {
+ beforeEach(function () {
+ this.metadata = {}
+ this.details = { features: {} }
+ this.MongoManager.getProjectMetaData = sinon
+ .stub()
+ .callsArgWith(1, null, this.metadata)
+ this.MongoManager.setProjectMetaData = sinon.stub().callsArgWith(2)
+ this.MongoManager.upgradeHistory = sinon.stub().callsArgWith(1)
+ return (this.WebApiManager.getProjectDetails = sinon
+ .stub()
+ .callsArgWith(1, null, this.details))
+ })
+
+ describe('with preserveHistory set in the project meta data', function () {
+ beforeEach(function () {
+ this.metadata.preserveHistory = true
+ return this.UpdateTrimmer.shouldTrimUpdates(
+ this.project_id,
+ this.callback
+ )
+ })
+
+ it('should look up the meta data', function () {
+ return this.MongoManager.getProjectMetaData
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should not look up the project details', function () {
+ return this.WebApiManager.getProjectDetails.called.should.equal(false)
+ })
+
+ return it('should return false', function () {
+ return this.callback.calledWith(null, false).should.equal(true)
+ })
+ })
+
+ describe('without preserveHistory set in the project meta data', function () {
+ beforeEach(function () {
+ return (this.metadata.preserveHistory = false)
+ })
+
+ describe('when the project has the versioning feature', function () {
+ beforeEach(function () {
+ this.details.features.versioning = true
+ return this.UpdateTrimmer.shouldTrimUpdates(
+ this.project_id,
+ this.callback
+ )
+ })
+
+ it('should look up the meta data', function () {
+ return this.MongoManager.getProjectMetaData
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should look up the project details', function () {
+ return this.WebApiManager.getProjectDetails
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should insert preserveHistory into the metadata', function () {
+ return this.MongoManager.setProjectMetaData
+ .calledWith(this.project_id, { preserveHistory: true })
+ .should.equal(true)
+ })
+
+ it('should upgrade any existing history', function () {
+ return this.MongoManager.upgradeHistory
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ return it('should return false', function () {
+ return this.callback.calledWith(null, false).should.equal(true)
+ })
+ })
+
+ return describe('when the project does not have the versioning feature', function () {
+ beforeEach(function () {
+ this.details.features.versioning = false
+ return this.UpdateTrimmer.shouldTrimUpdates(
+ this.project_id,
+ this.callback
+ )
+ })
+
+ return it('should return true', function () {
+ return this.callback.calledWith(null, true).should.equal(true)
+ })
+ })
+ })
+
+ return describe('without any meta data', function () {
+ beforeEach(function () {
+ return (this.MongoManager.getProjectMetaData = sinon
+ .stub()
+ .callsArgWith(1, null, null))
+ })
+
+ describe('when the project has the versioning feature', function () {
+ beforeEach(function () {
+ this.details.features.versioning = true
+ return this.UpdateTrimmer.shouldTrimUpdates(
+ this.project_id,
+ this.callback
+ )
+ })
+
+ it('should insert preserveHistory into the metadata', function () {
+ return this.MongoManager.setProjectMetaData
+ .calledWith(this.project_id, { preserveHistory: true })
+ .should.equal(true)
+ })
+
+ it('should upgrade any existing history', function () {
+ return this.MongoManager.upgradeHistory
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ return it('should return false', function () {
+ return this.callback.calledWith(null, false).should.equal(true)
+ })
+ })
+
+ return describe('when the project does not have the versioning feature', function () {
+ beforeEach(function () {
+ this.details.features.versioning = false
+ return this.UpdateTrimmer.shouldTrimUpdates(
+ this.project_id,
+ this.callback
+ )
+ })
+
+ return it('should return true', function () {
+ return this.callback.calledWith(null, true).should.equal(true)
+ })
+ })
+ })
+ })
+})
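
shouldTrimUpdates boils down to a small decision: keep full history if the project is already flagged with preserveHistory or has the versioning feature (persisting the flag and upgrading existing history as a side effect), otherwise allow trimming. A synchronous, illustrative restatement of that decision, without the metadata writes performed by app/js/UpdateTrimmer.js:

// Sketch only; inputs mirror the stubbed metadata and project details above.
function shouldTrim(metadata, projectDetails) {
  if (metadata && metadata.preserveHistory) {
    return false // already flagged: keep full history, skip the web lookup
  }
  if (projectDetails.features.versioning) {
    return false // caller persists preserveHistory and upgrades old history
  }
  return true // temporary history only: updates may be trimmed
}
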
diff --git a/services/track-changes/test/unit/js/UpdatesManager/UpdatesManagerTests.js b/services/track-changes/test/unit/js/UpdatesManager/UpdatesManagerTests.js
new file mode 100644
index 0000000000..74233e6ffa
--- /dev/null
+++ b/services/track-changes/test/unit/js/UpdatesManager/UpdatesManagerTests.js
@@ -0,0 +1,1333 @@
+/* eslint-disable
+ camelcase,
+ handle-callback-err,
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const { ObjectId } = require('mongodb')
+const modulePath = '../../../../app/js/UpdatesManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('UpdatesManager', function () {
+ beforeEach(function () {
+ this.UpdatesManager = SandboxedModule.require(modulePath, {
+ singleOnly: true,
+ requires: {
+ './UpdateCompressor': (this.UpdateCompressor = {}),
+ './MongoManager': (this.MongoManager = {}),
+ './PackManager': (this.PackManager = {}),
+ './RedisManager': (this.RedisManager = {}),
+ './LockManager': (this.LockManager = {}),
+ './WebApiManager': (this.WebApiManager = {}),
+ './UpdateTrimmer': (this.UpdateTrimmer = {}),
+ './DocArchiveManager': (this.DocArchiveManager = {}),
+ '@overleaf/settings': {
+ redis: {
+ lock: {
+ key_schema: {
+ historyLock({ doc_id }) {
+ return `HistoryLock:${doc_id}`
+ },
+ },
+ },
+ },
+ },
+ },
+ })
+ this.doc_id = 'doc-id-123'
+ this.project_id = 'project-id-123'
+ this.callback = sinon.stub()
+ return (this.temporary = 'temp-mock')
+ })
+
+ describe('compressAndSaveRawUpdates', function () {
+ describe('when there are no raw ops', function () {
+ beforeEach(function () {
+ this.MongoManager.peekLastCompressedUpdate = sinon.stub()
+ return this.UpdatesManager.compressAndSaveRawUpdates(
+ this.project_id,
+ this.doc_id,
+ [],
+ this.temporary,
+ this.callback
+ )
+ })
+
+ it('should not need to access the database', function () {
+ return this.MongoManager.peekLastCompressedUpdate.called.should.equal(
+ false
+ )
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('when there is no compressed history to begin with', function () {
+ beforeEach(function () {
+ this.rawUpdates = [
+ { v: 12, op: 'mock-op-12' },
+ { v: 13, op: 'mock-op-13' },
+ ]
+ this.compressedUpdates = [{ v: 13, op: 'compressed-op-12' }]
+
+ this.MongoManager.peekLastCompressedUpdate = sinon
+ .stub()
+ .callsArgWith(1, null, null)
+ this.PackManager.insertCompressedUpdates = sinon.stub().callsArg(5)
+ this.UpdateCompressor.compressRawUpdates = sinon
+ .stub()
+ .returns(this.compressedUpdates)
+ return this.UpdatesManager.compressAndSaveRawUpdates(
+ this.project_id,
+ this.doc_id,
+ this.rawUpdates,
+ this.temporary,
+ this.callback
+ )
+ })
+
+ it('should look at the last compressed op', function () {
+ return this.MongoManager.peekLastCompressedUpdate
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should save the compressed ops as a pack', function () {
+ return this.PackManager.insertCompressedUpdates
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ null,
+ this.compressedUpdates,
+ this.temporary
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('when the raw ops need appending to existing history', function () {
+ beforeEach(function () {
+ this.lastCompressedUpdate = { v: 11, op: 'compressed-op-11' }
+ this.compressedUpdates = [
+ { v: 12, op: 'compressed-op-11+12' },
+ { v: 13, op: 'compressed-op-12' },
+ ]
+
+ this.MongoManager.peekLastCompressedUpdate = sinon
+ .stub()
+ .callsArgWith(
+ 1,
+ null,
+ this.lastCompressedUpdate,
+ this.lastCompressedUpdate.v
+ )
+ this.PackManager.insertCompressedUpdates = sinon.stub().callsArg(5)
+ return (this.UpdateCompressor.compressRawUpdates = sinon
+ .stub()
+ .returns(this.compressedUpdates))
+ })
+
+ describe('when the raw ops start where the existing history ends', function () {
+ beforeEach(function () {
+ this.rawUpdates = [
+ { v: 12, op: 'mock-op-12' },
+ { v: 13, op: 'mock-op-13' },
+ ]
+ return this.UpdatesManager.compressAndSaveRawUpdates(
+ this.project_id,
+ this.doc_id,
+ this.rawUpdates,
+ this.temporary,
+ this.callback
+ )
+ })
+
+ it('should look at the last compressed op', function () {
+ return this.MongoManager.peekLastCompressedUpdate
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should compress the raw ops', function () {
+ return this.UpdateCompressor.compressRawUpdates
+ .calledWith(null, this.rawUpdates)
+ .should.equal(true)
+ })
+
+ it('should save the new compressed ops into a pack', function () {
+ return this.PackManager.insertCompressedUpdates
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.lastCompressedUpdate,
+ this.compressedUpdates,
+ this.temporary
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('when the raw ops start where the existing history ends and the history is in a pack', function () {
+ beforeEach(function () {
+ this.lastCompressedUpdate = {
+ pack: [{ v: 11, op: 'compressed-op-11' }],
+ v: 11,
+ }
+ this.rawUpdates = [
+ { v: 12, op: 'mock-op-12' },
+ { v: 13, op: 'mock-op-13' },
+ ]
+ this.MongoManager.peekLastCompressedUpdate = sinon
+ .stub()
+ .callsArgWith(
+ 1,
+ null,
+ this.lastCompressedUpdate,
+ this.lastCompressedUpdate.v
+ )
+ return this.UpdatesManager.compressAndSaveRawUpdates(
+ this.project_id,
+ this.doc_id,
+ this.rawUpdates,
+ this.temporary,
+ this.callback
+ )
+ })
+
+ it('should look at the last compressed op', function () {
+ return this.MongoManager.peekLastCompressedUpdate
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should compress the raw ops', function () {
+ return this.UpdateCompressor.compressRawUpdates
+ .calledWith(null, this.rawUpdates)
+ .should.equal(true)
+ })
+
+ it('should save the new compressed ops into a pack', function () {
+ return this.PackManager.insertCompressedUpdates
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.lastCompressedUpdate,
+ this.compressedUpdates,
+ this.temporary
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('when some raw ops are passed that have already been compressed', function () {
+ beforeEach(function () {
+ this.rawUpdates = [
+ { v: 10, op: 'mock-op-10' },
+ { v: 11, op: 'mock-op-11' },
+ { v: 12, op: 'mock-op-12' },
+ { v: 13, op: 'mock-op-13' },
+ ]
+
+ return this.UpdatesManager.compressAndSaveRawUpdates(
+ this.project_id,
+ this.doc_id,
+ this.rawUpdates,
+ this.temporary,
+ this.callback
+ )
+ })
+
+ return it('should only compress the more recent raw ops', function () {
+ return this.UpdateCompressor.compressRawUpdates
+ .calledWith(null, this.rawUpdates.slice(-2))
+ .should.equal(true)
+ })
+ })
+
+ describe('when the raw ops do not follow from the last compressed op version', function () {
+ beforeEach(function () {
+ this.rawUpdates = [{ v: 13, op: 'mock-op-13' }]
+ return this.UpdatesManager.compressAndSaveRawUpdates(
+ this.project_id,
+ this.doc_id,
+ this.rawUpdates,
+ this.temporary,
+ this.callback
+ )
+ })
+
+ it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(
+ sinon.match.has(
+ 'message',
+ 'Tried to apply raw op at version 13 to last compressed update with version 11 from unknown time'
+ )
+ )
+ .should.equal(true)
+ })
+
+ return it('should not insert any update into mongo', function () {
+ return this.PackManager.insertCompressedUpdates.called.should.equal(
+ false
+ )
+ })
+ })
+
+ return describe('when the raw ops are out of order', function () {
+ beforeEach(function () {
+ this.rawUpdates = [
+ { v: 13, op: 'mock-op-13' },
+ { v: 12, op: 'mock-op-12' },
+ ]
+ return this.UpdatesManager.compressAndSaveRawUpdates(
+ this.project_id,
+ this.doc_id,
+ this.rawUpdates,
+ this.temporary,
+ this.callback
+ )
+ })
+
+ it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match.has('message'))
+ .should.equal(true)
+ })
+
+ return it('should not insert any update into mongo', function () {
+ return this.PackManager.insertCompressedUpdates.called.should.equal(
+ false
+ )
+ })
+ })
+ })
+
+ return describe('when the raw ops need appending to existing history which is in S3', function () {
+ beforeEach(function () {
+ this.lastCompressedUpdate = null
+ this.lastVersion = 11
+ this.compressedUpdates = [{ v: 13, op: 'compressed-op-12' }]
+
+ this.MongoManager.peekLastCompressedUpdate = sinon
+ .stub()
+ .callsArgWith(1, null, null, this.lastVersion)
+ this.PackManager.insertCompressedUpdates = sinon.stub().callsArg(5)
+ return (this.UpdateCompressor.compressRawUpdates = sinon
+ .stub()
+ .returns(this.compressedUpdates))
+ })
+
+ return describe('when the raw ops start where the existing history ends', function () {
+ beforeEach(function () {
+ this.rawUpdates = [
+ { v: 12, op: 'mock-op-12' },
+ { v: 13, op: 'mock-op-13' },
+ ]
+ return this.UpdatesManager.compressAndSaveRawUpdates(
+ this.project_id,
+ this.doc_id,
+ this.rawUpdates,
+ this.temporary,
+ this.callback
+ )
+ })
+
+ it('should try to look at the last compressed op', function () {
+ return this.MongoManager.peekLastCompressedUpdate
+ .calledWith(this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should compress the last compressed op and the raw ops', function () {
+ return this.UpdateCompressor.compressRawUpdates
+ .calledWith(this.lastCompressedUpdate, this.rawUpdates)
+ .should.equal(true)
+ })
+
+ it('should save the compressed ops', function () {
+ return this.PackManager.insertCompressedUpdates
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ null,
+ this.compressedUpdates,
+ this.temporary
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+ })
+ })
+
+ describe('processUncompressedUpdates', function () {
+ beforeEach(function () {
+ this.UpdatesManager.compressAndSaveRawUpdates = sinon
+ .stub()
+ .callsArgWith(4)
+ this.RedisManager.deleteAppliedDocUpdates = sinon.stub().callsArg(3)
+ this.MongoManager.backportProjectId = sinon.stub().callsArg(2)
+ return (this.UpdateTrimmer.shouldTrimUpdates = sinon
+ .stub()
+ .callsArgWith(1, null, (this.temporary = 'temp mock')))
+ })
+
+ describe('when there is less than one batch to send', function () {
+ beforeEach(function () {
+ this.updates = ['mock-update']
+ this.RedisManager.getOldestDocUpdates = sinon
+ .stub()
+ .callsArgWith(2, null, this.updates)
+ this.RedisManager.expandDocUpdates = sinon
+ .stub()
+ .callsArgWith(1, null, this.updates)
+ return this.UpdatesManager.processUncompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ this.temporary,
+ this.callback
+ )
+ })
+
+ it('should get the oldest updates', function () {
+ return this.RedisManager.getOldestDocUpdates
+ .calledWith(this.doc_id, this.UpdatesManager.REDIS_READ_BATCH_SIZE)
+ .should.equal(true)
+ })
+
+ it('should compress and save the updates', function () {
+ return this.UpdatesManager.compressAndSaveRawUpdates
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.updates,
+ this.temporary
+ )
+ .should.equal(true)
+ })
+
+ it('should delete the batch of uncompressed updates that was just processed', function () {
+ return this.RedisManager.deleteAppliedDocUpdates
+ .calledWith(this.project_id, this.doc_id, this.updates)
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ return describe('when there are multiple batches to send', function () {
+ beforeEach(function (done) {
+ this.UpdatesManager.REDIS_READ_BATCH_SIZE = 2
+ this.updates = [
+ 'mock-update-0',
+ 'mock-update-1',
+ 'mock-update-2',
+ 'mock-update-3',
+ 'mock-update-4',
+ ]
+ this.redisArray = this.updates.slice()
+ this.RedisManager.getOldestDocUpdates = (
+ doc_id,
+ batchSize,
+ callback
+ ) => {
+ if (callback == null) {
+ callback = function (error, updates) {}
+ }
+ const updates = this.redisArray.slice(0, batchSize)
+ this.redisArray = this.redisArray.slice(batchSize)
+ return callback(null, updates)
+ }
+ sinon.spy(this.RedisManager, 'getOldestDocUpdates')
+ this.RedisManager.expandDocUpdates = (jsonUpdates, callback) => {
+ return callback(null, jsonUpdates)
+ }
+ sinon.spy(this.RedisManager, 'expandDocUpdates')
+ return this.UpdatesManager.processUncompressedUpdates(
+ this.project_id,
+ this.doc_id,
+ this.temporary,
+ (...args) => {
+ this.callback(...Array.from(args || []))
+ return done()
+ }
+ )
+ })
+
+ it('should get the oldest updates in three batches', function () {
+ return this.RedisManager.getOldestDocUpdates.callCount.should.equal(3)
+ })
+
+ it('should compress and save the updates in batches', function () {
+ this.UpdatesManager.compressAndSaveRawUpdates
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.updates.slice(0, 2),
+ this.temporary
+ )
+ .should.equal(true)
+ this.UpdatesManager.compressAndSaveRawUpdates
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.updates.slice(2, 4),
+ this.temporary
+ )
+ .should.equal(true)
+ return this.UpdatesManager.compressAndSaveRawUpdates
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.updates.slice(4, 5),
+ this.temporary
+ )
+ .should.equal(true)
+ })
+
+ it('should delete the batches of uncompressed updates', function () {
+ return this.RedisManager.deleteAppliedDocUpdates.callCount.should.equal(
+ 3
+ )
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+ })
+
+ describe('processUncompressedUpdatesWithLock', function () {
+ beforeEach(function () {
+ this.UpdateTrimmer.shouldTrimUpdates = sinon
+ .stub()
+ .callsArgWith(1, null, (this.temporary = 'temp mock'))
+ this.MongoManager.backportProjectId = sinon.stub().callsArg(2)
+ this.UpdatesManager._processUncompressedUpdates = sinon.stub().callsArg(3)
+ this.LockManager.runWithLock = sinon.stub().callsArg(2)
+ return this.UpdatesManager.processUncompressedUpdatesWithLock(
+ this.project_id,
+ this.doc_id,
+ this.callback
+ )
+ })
+
+ it('should check if the updates are temporary', function () {
+ return this.UpdateTrimmer.shouldTrimUpdates
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should backport the project id', function () {
+ return this.MongoManager.backportProjectId
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should run processUncompressedUpdates with the lock', function () {
+ return this.LockManager.runWithLock
+ .calledWith(`HistoryLock:${this.doc_id}`)
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('getDocUpdates', function () {
+ beforeEach(function () {
+ this.updates = ['mock-updates']
+ this.options = { to: 'mock-to', limit: 'mock-limit' }
+ this.PackManager.getOpsByVersionRange = sinon
+ .stub()
+ .callsArgWith(4, null, this.updates)
+ this.UpdatesManager.processUncompressedUpdatesWithLock = sinon
+ .stub()
+ .callsArg(2)
+ return this.UpdatesManager.getDocUpdates(
+ this.project_id,
+ this.doc_id,
+ this.options,
+ this.callback
+ )
+ })
+
+ it('should process outstanding updates', function () {
+ return this.UpdatesManager.processUncompressedUpdatesWithLock
+ .calledWith(this.project_id, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should get the updates from the database', function () {
+ return this.PackManager.getOpsByVersionRange
+ .calledWith(
+ this.project_id,
+ this.doc_id,
+ this.options.from,
+ this.options.to
+ )
+ .should.equal(true)
+ })
+
+ return it('should return the updates', function () {
+ return this.callback.calledWith(null, this.updates).should.equal(true)
+ })
+ })
+
+ describe('getDocUpdatesWithUserInfo', function () {
+ beforeEach(function () {
+ this.updates = ['mock-updates']
+ this.options = { to: 'mock-to', limit: 'mock-limit' }
+ this.updatesWithUserInfo = ['updates-with-user-info']
+ this.UpdatesManager.getDocUpdates = sinon
+ .stub()
+ .callsArgWith(3, null, this.updates)
+ this.UpdatesManager.fillUserInfo = sinon
+ .stub()
+ .callsArgWith(1, null, this.updatesWithUserInfo)
+ return this.UpdatesManager.getDocUpdatesWithUserInfo(
+ this.project_id,
+ this.doc_id,
+ this.options,
+ this.callback
+ )
+ })
+
+ it('should get the updates', function () {
+ return this.UpdatesManager.getDocUpdates
+ .calledWith(this.project_id, this.doc_id, this.options)
+ .should.equal(true)
+ })
+
+ it('should fill the updates with the user info', function () {
+ return this.UpdatesManager.fillUserInfo
+ .calledWith(this.updates)
+ .should.equal(true)
+ })
+
+ return it('should return the updates with the filled details', function () {
+ return this.callback
+ .calledWith(null, this.updatesWithUserInfo)
+ .should.equal(true)
+ })
+ })
+
+ describe('processUncompressedUpdatesForProject', function () {
+ beforeEach(function (done) {
+ this.doc_ids = ['mock-id-1', 'mock-id-2']
+ this.UpdateTrimmer.shouldTrimUpdates = sinon
+ .stub()
+ .callsArgWith(1, null, (this.temporary = 'temp mock'))
+ this.MongoManager.backportProjectId = sinon.stub().callsArg(2)
+ this.UpdatesManager._processUncompressedUpdatesForDocWithLock = sinon
+ .stub()
+ .callsArg(3)
+ this.RedisManager.getDocIdsWithHistoryOps = sinon
+ .stub()
+ .callsArgWith(1, null, this.doc_ids)
+ return this.UpdatesManager.processUncompressedUpdatesForProject(
+ this.project_id,
+ () => {
+ this.callback()
+ return done()
+ }
+ )
+ })
+
+ it('should get all the docs with history ops', function () {
+ return this.RedisManager.getDocIdsWithHistoryOps
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should process the doc ops for each doc_id', function () {
+ return Array.from(this.doc_ids).map(doc_id =>
+ this.UpdatesManager._processUncompressedUpdatesForDocWithLock
+ .calledWith(this.project_id, doc_id, this.temporary)
+ .should.equal(true)
+ )
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('getSummarizedProjectUpdates', function () {
+ beforeEach(function () {
+ this.updates = [
+ {
+ doc_id: 123,
+ v: 456,
+ op: 'mock-updates',
+ meta: { user_id: 123, start_ts: 1233, end_ts: 1234 },
+ },
+ ]
+ this.options = { before: 'mock-before', limit: 'mock-limit' }
+ this.summarizedUpdates = [
+ {
+ meta: { user_ids: [123], start_ts: 1233, end_ts: 1234 },
+ docs: { 123: { fromV: 456, toV: 456 } },
+ },
+ ]
+ this.updatesWithUserInfo = ['updates-with-user-info']
+ this.done_state = false
+ this.iterator = {
+ next: cb => {
+ this.done_state = true
+ return cb(null, this.updates)
+ },
+ done: () => {
+ return this.done_state
+ },
+ }
+ this.PackManager.makeProjectIterator = sinon
+ .stub()
+ .callsArgWith(2, null, this.iterator)
+ this.UpdatesManager.processUncompressedUpdatesForProject = sinon
+ .stub()
+ .callsArg(1)
+ this.UpdatesManager.fillSummarizedUserInfo = sinon
+ .stub()
+ .callsArgWith(1, null, this.updatesWithUserInfo)
+ return this.UpdatesManager.getSummarizedProjectUpdates(
+ this.project_id,
+ this.options,
+ this.callback
+ )
+ })
+
+ it('should process any outstanding updates', function () {
+ return this.UpdatesManager.processUncompressedUpdatesForProject
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should get the updates', function () {
+ return this.PackManager.makeProjectIterator
+ .calledWith(this.project_id, this.options.before)
+ .should.equal(true)
+ })
+
+ it('should fill the updates with the user info', function () {
+ return this.UpdatesManager.fillSummarizedUserInfo
+ .calledWith(this.summarizedUpdates)
+ .should.equal(true)
+ })
+
+ return it('should return the updates with the filled details', function () {
+ return this.callback
+ .calledWith(null, this.updatesWithUserInfo)
+ .should.equal(true)
+ })
+ })
+
+ // describe "_extendBatchOfSummarizedUpdates", ->
+ // beforeEach ->
+ // @before = Date.now()
+ // @min_count = 2
+ // @existingSummarizedUpdates = ["summarized-updates-3"]
+ // @summarizedUpdates = ["summarized-updates-3", "summarized-update-2", "summarized-update-1"]
+
+ // describe "when there are updates to get", ->
+ // beforeEach ->
+ // @updates = [
+ // {op: "mock-op-1", meta: end_ts: @before - 10},
+ // {op: "mock-op-1", meta: end_ts: @nextBeforeTimestamp = @before - 20}
+ // ]
+ // @existingSummarizedUpdates = ["summarized-updates-3"]
+ // @summarizedUpdates = ["summarized-updates-3", "summarized-update-2", "summarized-update-1"]
+ // @UpdatesManager._summarizeUpdates = sinon.stub().returns(@summarizedUpdates)
+ // @UpdatesManager.getProjectUpdatesWithUserInfo = sinon.stub().callsArgWith(2, null, @updates)
+ // @UpdatesManager._extendBatchOfSummarizedUpdates @project_id, @existingSummarizedUpdates, @before, @min_count, @callback
+
+ // it "should get the updates", ->
+ // @UpdatesManager.getProjectUpdatesWithUserInfo
+ // .calledWith(@project_id, { before: @before, limit: 3 * @min_count })
+ // .should.equal true
+
+ // it "should summarize the updates", ->
+ // @UpdatesManager._summarizeUpdates
+ // .calledWith(@updates, @existingSummarizedUpdates)
+ // .should.equal true
+
+ // it "should call the callback with the summarized updates and the next before timestamp", ->
+ // @callback.calledWith(null, @summarizedUpdates, @nextBeforeTimestamp).should.equal true
+
+ // describe "when there are no more updates", ->
+ // beforeEach ->
+ // @updates = []
+ // @UpdatesManager._summarizeUpdates = sinon.stub().returns(@summarizedUpdates)
+ // @UpdatesManager.getProjectUpdatesWithUserInfo = sinon.stub().callsArgWith(2, null, @updates)
+ // @UpdatesManager._extendBatchOfSummarizedUpdates @project_id, @existingSummarizedUpdates, @before, @min_count, @callback
+
+ // it "should call the callback with the summarized updates and null for nextBeforeTimestamp", ->
+ // @callback.calledWith(null, @summarizedUpdates, null).should.equal true
+
+ // describe "getSummarizedProjectUpdates", ->
+ // describe "when one batch of updates is enough to meet the limit", ->
+ // beforeEach ->
+ // @before = Date.now()
+ // @min_count = 2
+ // @updates = ["summarized-updates-3", "summarized-updates-2"]
+ // @nextBeforeTimestamp = @before - 100
+ // @UpdatesManager._extendBatchOfSummarizedUpdates = sinon.stub().callsArgWith(4, null, @updates, @nextBeforeTimestamp)
+ // @UpdatesManager.getSummarizedProjectUpdates @project_id, { before: @before, min_count: @min_count }, @callback
+
+ // it "should get the batch of summarized updates", ->
+ // @UpdatesManager._extendBatchOfSummarizedUpdates
+ // .calledWith(@project_id, [], @before, @min_count)
+ // .should.equal true
+
+ // it "should call the callback with the updates", ->
+ // @callback.calledWith(null, @updates, @nextBeforeTimestamp).should.equal true
+
+ // describe "when multiple batches are needed to meet the limit", ->
+ // beforeEach ->
+ // @before = Date.now()
+ // @min_count = 4
+ // @firstBatch = [{ toV: 6, fromV: 6 }, { toV: 5, fromV: 5 }]
+ // @nextBeforeTimestamp = @before - 100
+ // @secondBatch = [{ toV: 4, fromV: 4 }, { toV: 3, fromV: 3 }]
+ // @nextNextBeforeTimestamp = @before - 200
+ // @UpdatesManager._extendBatchOfSummarizedUpdates = (project_id, existingUpdates, before, desiredLength, callback) =>
+ // if existingUpdates.length == 0
+ // callback null, @firstBatch, @nextBeforeTimestamp
+ // else
+ // callback null, @firstBatch.concat(@secondBatch), @nextNextBeforeTimestamp
+ // sinon.spy @UpdatesManager, "_extendBatchOfSummarizedUpdates"
+ // @UpdatesManager.getSummarizedProjectUpdates @project_id, { before: @before, min_count: @min_count }, @callback
+
+ // it "should get the first batch of summarized updates", ->
+ // @UpdatesManager._extendBatchOfSummarizedUpdates
+ // .calledWith(@project_id, [], @before, @min_count)
+ // .should.equal true
+
+ // it "should get the second batch of summarized updates", ->
+ // @UpdatesManager._extendBatchOfSummarizedUpdates
+ // .calledWith(@project_id, @firstBatch, @nextBeforeTimestamp, @min_count)
+ // .should.equal true
+
+ // it "should call the callback with all the updates", ->
+ // @callback.calledWith(null, @firstBatch.concat(@secondBatch), @nextNextBeforeTimestamp).should.equal true
+
+ // describe "when the end of the database is hit", ->
+ // beforeEach ->
+ // @before = Date.now()
+ // @min_count = 4
+ // @updates = [{ toV: 6, fromV: 6 }, { toV: 5, fromV: 5 }]
+ // @UpdatesManager._extendBatchOfSummarizedUpdates = sinon.stub().callsArgWith(4, null, @updates, null)
+ // @UpdatesManager.getSummarizedProjectUpdates @project_id, { before: @before, min_count: @min_count }, @callback
+
+ // it "should get the batch of summarized updates", ->
+ // @UpdatesManager._extendBatchOfSummarizedUpdates
+ // .calledWith(@project_id, [], @before, @min_count)
+ // .should.equal true
+
+ // it "should call the callback with the updates", ->
+ // @callback.calledWith(null, @updates, null).should.equal true
+
+ describe('fillUserInfo', function () {
+ describe('with valid users', function () {
+ beforeEach(function (done) {
+ this.user_id_1 = ObjectId().toString()
+ this.user_id_2 = ObjectId().toString()
+ this.updates = [
+ {
+ meta: {
+ user_id: this.user_id_1,
+ },
+ op: 'mock-op-1',
+ },
+ {
+ meta: {
+ user_id: this.user_id_1,
+ },
+ op: 'mock-op-2',
+ },
+ {
+ meta: {
+ user_id: this.user_id_2,
+ },
+ op: 'mock-op-3',
+ },
+ ]
+ this.user_info = {}
+ this.user_info[this.user_id_1] = { email: 'user1@sharelatex.com' }
+ this.user_info[this.user_id_2] = { email: 'user2@sharelatex.com' }
+
+ this.WebApiManager.getUserInfo = (user_id, callback) => {
+ if (callback == null) {
+ callback = function (error, userInfo) {}
+ }
+ return callback(null, this.user_info[user_id])
+ }
+ sinon.spy(this.WebApiManager, 'getUserInfo')
+
+ return this.UpdatesManager.fillUserInfo(
+ this.updates,
+ (error, results) => {
+ this.results = results
+ return done()
+ }
+ )
+ })
+
+ it('should only call getUserInfo once for each user_id', function () {
+ this.WebApiManager.getUserInfo.calledTwice.should.equal(true)
+ this.WebApiManager.getUserInfo
+ .calledWith(this.user_id_1)
+ .should.equal(true)
+ return this.WebApiManager.getUserInfo
+ .calledWith(this.user_id_2)
+ .should.equal(true)
+ })
+
+ return it('should return the updates with the user info filled', function () {
+ return expect(this.results).to.deep.equal([
+ {
+ meta: {
+ user: {
+ email: 'user1@sharelatex.com',
+ },
+ },
+ op: 'mock-op-1',
+ },
+ {
+ meta: {
+ user: {
+ email: 'user1@sharelatex.com',
+ },
+ },
+ op: 'mock-op-2',
+ },
+ {
+ meta: {
+ user: {
+ email: 'user2@sharelatex.com',
+ },
+ },
+ op: 'mock-op-3',
+ },
+ ])
+ })
+ })
+
+ return describe('with invalid user ids', function () {
+ beforeEach(function (done) {
+ this.updates = [
+ {
+ meta: {
+ user_id: null,
+ },
+ op: 'mock-op-1',
+ },
+ {
+ meta: {
+ user_id: 'anonymous-user',
+ },
+ op: 'mock-op-2',
+ },
+ ]
+ this.WebApiManager.getUserInfo = (user_id, callback) => {
+ if (callback == null) {
+ callback = function (error, userInfo) {}
+ }
+ return callback(null, this.user_info[user_id])
+ }
+ sinon.spy(this.WebApiManager, 'getUserInfo')
+
+ return this.UpdatesManager.fillUserInfo(
+ this.updates,
+ (error, results) => {
+ this.results = results
+ return done()
+ }
+ )
+ })
+
+ it('should not call getUserInfo', function () {
+ return this.WebApiManager.getUserInfo.called.should.equal(false)
+ })
+
+ return it('should return the updates without the user info filled', function () {
+ return expect(this.results).to.deep.equal([
+ {
+ meta: {},
+ op: 'mock-op-1',
+ },
+ {
+ meta: {},
+ op: 'mock-op-2',
+ },
+ ])
+ })
+ })
+ })
+
+ return describe('_summarizeUpdates', function () {
+ beforeEach(function () {
+ this.now = Date.now()
+ this.user_1 = { id: 'mock-user-1' }
+ return (this.user_2 = { id: 'mock-user-2' })
+ })
+
+ it('should concat updates that are close in time', function () {
+ const result = this.UpdatesManager._summarizeUpdates([
+ {
+ doc_id: 'doc-id-1',
+ meta: {
+ user_id: this.user_1.id,
+ start_ts: this.now + 20,
+ end_ts: this.now + 30,
+ },
+ v: 5,
+ },
+ {
+ doc_id: 'doc-id-1',
+ meta: {
+ user_id: this.user_2.id,
+ start_ts: this.now,
+ end_ts: this.now + 10,
+ },
+ v: 4,
+ },
+ ])
+
+ return expect(result).to.deep.equal([
+ {
+ docs: {
+ 'doc-id-1': {
+ fromV: 4,
+ toV: 5,
+ },
+ },
+ meta: {
+ user_ids: [this.user_1.id, this.user_2.id],
+ start_ts: this.now,
+ end_ts: this.now + 30,
+ },
+ },
+ ])
+ })
+
+ it('should leave updates that are far apart in time', function () {
+ const oneDay = 1000 * 60 * 60 * 24
+ const result = this.UpdatesManager._summarizeUpdates([
+ {
+ doc_id: 'doc-id-1',
+ meta: {
+ user_id: this.user_2.id,
+ start_ts: this.now + oneDay,
+ end_ts: this.now + oneDay + 10,
+ },
+ v: 5,
+ },
+ {
+ doc_id: 'doc-id-1',
+ meta: {
+ user_id: this.user_1.id,
+ start_ts: this.now,
+ end_ts: this.now + 10,
+ },
+ v: 4,
+ },
+ ])
+ return expect(result).to.deep.equal([
+ {
+ docs: {
+ 'doc-id-1': {
+ fromV: 5,
+ toV: 5,
+ },
+ },
+ meta: {
+ user_ids: [this.user_2.id],
+ start_ts: this.now + oneDay,
+ end_ts: this.now + oneDay + 10,
+ },
+ },
+ {
+ docs: {
+ 'doc-id-1': {
+ fromV: 4,
+ toV: 4,
+ },
+ },
+ meta: {
+ user_ids: [this.user_1.id],
+ start_ts: this.now,
+ end_ts: this.now + 10,
+ },
+ },
+ ])
+ })
+
+ it('should concat onto existing summarized updates', function () {
+ const result = this.UpdatesManager._summarizeUpdates(
+ [
+ {
+ doc_id: 'doc-id-2',
+ meta: {
+ user_id: this.user_1.id,
+ start_ts: this.now + 20,
+ end_ts: this.now + 30,
+ },
+ v: 5,
+ },
+ {
+ doc_id: 'doc-id-2',
+ meta: {
+ user_id: this.user_2.id,
+ start_ts: this.now,
+ end_ts: this.now + 10,
+ },
+ v: 4,
+ },
+ ],
+ [
+ {
+ docs: {
+ 'doc-id-1': {
+ fromV: 6,
+ toV: 8,
+ },
+ },
+ meta: {
+ user_ids: [this.user_1.id],
+ start_ts: this.now + 40,
+ end_ts: this.now + 50,
+ },
+ },
+ ]
+ )
+ return expect(result).to.deep.equal([
+ {
+ docs: {
+ 'doc-id-1': {
+ toV: 8,
+ fromV: 6,
+ },
+ 'doc-id-2': {
+ toV: 5,
+ fromV: 4,
+ },
+ },
+ meta: {
+ user_ids: [this.user_1.id, this.user_2.id],
+ start_ts: this.now,
+ end_ts: this.now + 50,
+ },
+ },
+ ])
+ })
+
+ it('should include null user values', function () {
+ const result = this.UpdatesManager._summarizeUpdates([
+ {
+ doc_id: 'doc-id-1',
+ meta: {
+ user_id: this.user_1.id,
+ start_ts: this.now + 20,
+ end_ts: this.now + 30,
+ },
+ v: 5,
+ },
+ {
+ doc_id: 'doc-id-1',
+ meta: {
+ user_id: null,
+ start_ts: this.now,
+ end_ts: this.now + 10,
+ },
+ v: 4,
+ },
+ ])
+ return expect(result).to.deep.equal([
+ {
+ docs: {
+ 'doc-id-1': {
+ fromV: 4,
+ toV: 5,
+ },
+ },
+ meta: {
+ user_ids: [this.user_1.id, null],
+ start_ts: this.now,
+ end_ts: this.now + 30,
+ },
+ },
+ ])
+ })
+
+ it('should include null user values, when the null is earlier in the updates list', function () {
+ const result = this.UpdatesManager._summarizeUpdates([
+ {
+ doc_id: 'doc-id-1',
+ meta: {
+ user_id: null,
+ start_ts: this.now,
+ end_ts: this.now + 10,
+ },
+ v: 4,
+ },
+ {
+ doc_id: 'doc-id-1',
+ meta: {
+ user_id: this.user_1.id,
+ start_ts: this.now + 20,
+ end_ts: this.now + 30,
+ },
+ v: 5,
+ },
+ ])
+ return expect(result).to.deep.equal([
+ {
+ docs: {
+ 'doc-id-1': {
+ fromV: 4,
+ toV: 5,
+ },
+ },
+ meta: {
+ user_ids: [null, this.user_1.id],
+ start_ts: this.now,
+ end_ts: this.now + 30,
+ },
+ },
+ ])
+ })
+
+ it('should roll several null user values into one', function () {
+ const result = this.UpdatesManager._summarizeUpdates([
+ {
+ doc_id: 'doc-id-1',
+ meta: {
+ user_id: this.user_1.id,
+ start_ts: this.now + 20,
+ end_ts: this.now + 30,
+ },
+ v: 5,
+ },
+ {
+ doc_id: 'doc-id-1',
+ meta: {
+ user_id: null,
+ start_ts: this.now,
+ end_ts: this.now + 10,
+ },
+ v: 4,
+ },
+ {
+ doc_id: 'doc-id-1',
+ meta: {
+ user_id: null,
+ start_ts: this.now + 2,
+ end_ts: this.now + 4,
+ },
+ v: 4,
+ },
+ ])
+ return expect(result).to.deep.equal([
+ {
+ docs: {
+ 'doc-id-1': {
+ fromV: 4,
+ toV: 5,
+ },
+ },
+ meta: {
+ user_ids: [this.user_1.id, null],
+ start_ts: this.now,
+ end_ts: this.now + 30,
+ },
+ },
+ ])
+ })
+
+ return it('should split updates before a big delete', function () {
+ const result = this.UpdatesManager._summarizeUpdates([
+ {
+ doc_id: 'doc-id-1',
+ op: [{ d: 'this is a long long long long long delete', p: 34 }],
+ meta: {
+ user_id: this.user_1.id,
+ start_ts: this.now + 20,
+ end_ts: this.now + 30,
+ },
+ v: 5,
+ },
+ {
+ doc_id: 'doc-id-1',
+ meta: {
+ user_id: this.user_2.id,
+ start_ts: this.now,
+ end_ts: this.now + 10,
+ },
+ v: 4,
+ },
+ ])
+
+ return expect(result).to.deep.equal([
+ {
+ docs: {
+ 'doc-id-1': {
+ fromV: 5,
+ toV: 5,
+ },
+ },
+ meta: {
+ user_ids: [this.user_1.id],
+ start_ts: this.now + 20,
+ end_ts: this.now + 30,
+ },
+ },
+ {
+ docs: {
+ 'doc-id-1': {
+ fromV: 4,
+ toV: 4,
+ },
+ },
+ meta: {
+ user_ids: [this.user_2.id],
+ start_ts: this.now,
+ end_ts: this.now + 10,
+ },
+ },
+ ])
+ })
+ })
+})
diff --git a/services/track-changes/test/unit/js/WebApiManager/WebApiManagerTests.js b/services/track-changes/test/unit/js/WebApiManager/WebApiManagerTests.js
new file mode 100644
index 0000000000..9caba5185e
--- /dev/null
+++ b/services/track-changes/test/unit/js/WebApiManager/WebApiManagerTests.js
@@ -0,0 +1,208 @@
+/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../../app/js/WebApiManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('WebApiManager', function () {
+ beforeEach(function () {
+ this.WebApiManager = SandboxedModule.require(modulePath, {
+ requires: {
+ requestretry: (this.request = {}),
+ '@overleaf/settings': (this.settings = {
+ apis: {
+ web: {
+ url: 'http://example.com',
+ user: 'sharelatex',
+ pass: 'password',
+ },
+ },
+ }),
+ },
+ })
+ this.callback = sinon.stub()
+ this.user_id = 'mock-user-id'
+ this.project_id = 'mock-project-id'
+ this.user_info = {
+ email: 'leo@sharelatex.com',
+ id: this.user_id,
+ first_name: 'Leo',
+ last_name: 'Lion',
+ extra_param: 'blah',
+ }
+ return (this.project = { features: 'mock-features' })
+ })
+
+ describe('getUserInfo', function () {
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.body = JSON.stringify(this.user_info)
+ this.request.get = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 200 }, this.body)
+ return this.WebApiManager.getUserInfo(this.user_id, this.callback)
+ })
+
+ it('should get the user from the web api', function () {
+ return this.request.get
+ .calledWithMatch({
+ url: `${this.settings.apis.web.url}/user/${this.user_id}/personal_info`,
+ auth: {
+ user: this.settings.apis.web.user,
+ pass: this.settings.apis.web.pass,
+ sendImmediately: true,
+ },
+ })
+ .should.equal(true)
+ })
+
+ return it('should call the callback with only the email, id and names', function () {
+ return this.callback
+ .calledWith(null, {
+ id: this.user_id,
+ email: this.user_info.email,
+ first_name: this.user_info.first_name,
+ last_name: this.user_info.last_name,
+ })
+ .should.equal(true)
+ })
+ })
+
+ describe('when the web API returns an error', function () {
+ beforeEach(function () {
+ this.request.get = sinon
+ .stub()
+ .callsArgWith(
+ 1,
+ (this.error = new Error('something went wrong')),
+ null,
+ null
+ )
+ return this.WebApiManager.getUserInfo(this.user_id, this.callback)
+ })
+
+ return it('should return an error to the callback', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ describe('when the web API returns a failure status code', function () {
+ beforeEach(function () {
+ this.request.get = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 500, attempts: 42 }, '')
+ return this.WebApiManager.getUserInfo(this.user_id, this.callback)
+ })
+
+ return it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(
+ sinon.match.has(
+ 'message',
+ 'web returned a non-success status code: 500 (attempts: 42)'
+ )
+ )
+ .should.equal(true)
+ })
+ })
+
+ return describe('when the user cannot be found', function () {
+ beforeEach(function () {
+ this.request.get = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 404 }, 'nothing')
+ return this.WebApiManager.getUserInfo(this.user_id, this.callback)
+ })
+
+ return it('should return a null value', function () {
+ return this.callback.calledWith(null, null).should.equal(true)
+ })
+ })
+ })
+
+ return describe('getProjectDetails', function () {
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.body = JSON.stringify(this.project)
+ this.request.get = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 200 }, this.body)
+ return this.WebApiManager.getProjectDetails(
+ this.project_id,
+ this.callback
+ )
+ })
+
+ it('should get the project from the web api', function () {
+ return this.request.get
+ .calledWithMatch({
+ url: `${this.settings.apis.web.url}/project/${this.project_id}/details`,
+ auth: {
+ user: this.settings.apis.web.user,
+ pass: this.settings.apis.web.pass,
+ sendImmediately: true,
+ },
+ })
+ .should.equal(true)
+ })
+
+ return it('should call the callback with the project', function () {
+ return this.callback.calledWith(null, this.project).should.equal(true)
+ })
+ })
+
+ describe('when the web API returns an error', function () {
+ beforeEach(function () {
+ this.request.get = sinon
+ .stub()
+ .callsArgWith(
+ 1,
+ (this.error = new Error('something went wrong')),
+ null,
+ null
+ )
+ return this.WebApiManager.getProjectDetails(
+ this.project_id,
+ this.callback
+ )
+ })
+
+ return it('should return an error to the callback', function () {
+ return this.callback.calledWith(this.error).should.equal(true)
+ })
+ })
+
+ return describe('when the web API returns a failure status code', function () {
+ beforeEach(function () {
+ this.request.get = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 500, attempts: 42 }, '')
+ return this.WebApiManager.getProjectDetails(
+ this.project_id,
+ this.callback
+ )
+ })
+
+ return it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(
+ sinon.match.has(
+ 'message',
+ 'web returned a non-success status code: 500 (attempts: 42)'
+ )
+ )
+ .should.equal(true)
+ })
+ })
+ })
+})