Mirror of https://github.com/overleaf/overleaf.git

Commit 5c8cb4b90a: merge multiple repositories into an existing monorepo

Merged using: 'monorepo_add.sh services-track-changes:services/track-changes' (see https://github.com/shopsys/monorepo-tools).

72 changed files with 22238 additions and 0 deletions
services/track-changes/.dockerignore (new file, 7 lines)

node_modules/*
gitrev
.git
.gitignore
.npm
.nvmrc
nodemon.json
services/track-changes/.eslintignore (new file, 1 line)

app/lib/diff_match_patch.js
services/track-changes/.eslintrc (new file, 86 lines)

// this file was auto-generated, do not edit it directly.
// instead run bin/update_build_scripts from
// https://github.com/sharelatex/sharelatex-dev-environment
{
  "extends": [
    "eslint:recommended",
    "standard",
    "prettier"
  ],
  "parserOptions": {
    "ecmaVersion": 2018
  },
  "plugins": [
    "mocha",
    "chai-expect",
    "chai-friendly"
  ],
  "env": {
    "node": true,
    "mocha": true
  },
  "rules": {
    // TODO(das7pad): remove overrides after fixing all the violations manually (https://github.com/overleaf/issues/issues/3882#issuecomment-878999671)
    // START of temporary overrides
    "array-callback-return": "off",
    "no-dupe-else-if": "off",
    "no-var": "off",
    "no-empty": "off",
    "node/handle-callback-err": "off",
    "no-loss-of-precision": "off",
    "node/no-callback-literal": "off",
    "node/no-path-concat": "off",
    "prefer-regex-literals": "off",
    // END of temporary overrides

    // Swap the no-unused-expressions rule with a more chai-friendly one
    "no-unused-expressions": 0,
    "chai-friendly/no-unused-expressions": "error",

    // Do not allow importing of implicit dependencies.
    "import/no-extraneous-dependencies": "error"
  },
  "overrides": [
    {
      // Test specific rules
      "files": ["test/**/*.js"],
      "globals": {
        "expect": true
      },
      "rules": {
        // mocha-specific rules
        "mocha/handle-done-callback": "error",
        "mocha/no-exclusive-tests": "error",
        "mocha/no-global-tests": "error",
        "mocha/no-identical-title": "error",
        "mocha/no-nested-tests": "error",
        "mocha/no-pending-tests": "error",
        "mocha/no-skipped-tests": "error",
        "mocha/no-mocha-arrows": "error",

        // chai-specific rules
        "chai-expect/missing-assertion": "error",
        "chai-expect/terminating-properties": "error",

        // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests.
        // we don't enforce this at the top-level - just in tests to manage `this` scope
        // based on mocha's context mechanism
        "mocha/prefer-arrow-callback": "error"
      }
    },
    {
      // Backend specific rules
      "files": ["app/**/*.js", "app.js", "index.js"],
      "rules": {
        // don't allow console.log in backend code
        "no-console": "error",

        // Do not allow importing of implicit dependencies.
        "import/no-extraneous-dependencies": ["error", {
          // Do not allow importing of devDependencies.
          "devDependencies": false
        }]
      }
    }
  ]
}
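The chai-friendly swap above matters because chai's expect-style assertions often end in a bare property access, which the stock no-unused-expressions rule flags as dead code. A minimal sketch of the pattern the override permits (hypothetical test file, not part of this commit):

```js
// Hypothetical test illustrating why the rule swap is needed.
const { expect } = require('chai')

describe('rule swap example', function () {
  it('allows chai property assertions', function () {
    const list = []
    // `expect(list).to.be.empty` ends in a property access, not a call;
    // core no-unused-expressions would report it as an unused expression,
    // while chai-friendly/no-unused-expressions understands chai chains.
    expect(list).to.be.empty
  })
})
```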
services/track-changes/.github/ISSUE_TEMPLATE.md (new file, vendored, 38 lines)

<!-- BUG REPORT TEMPLATE -->

## Steps to Reproduce
<!-- Describe the steps leading up to when / where you found the bug. -->
<!-- Screenshots may be helpful here. -->

1.
2.
3.

## Expected Behaviour
<!-- What should have happened when you completed the steps above? -->

## Observed Behaviour
<!-- What actually happened when you completed the steps above? -->
<!-- Screenshots may be helpful here. -->

## Context
<!-- How has this issue affected you? What were you trying to accomplish? -->

## Technical Info
<!-- Provide any technical details that may be applicable (or N/A if not applicable). -->

* URL:
* Browser Name and version:
* Operating System and version (desktop or mobile):
* Signed in as:
* Project and/or file:

## Analysis
<!--- Optionally, document investigation of / suggest a fix for the bug, e.g. 'comes from this line / commit' -->

## Who Needs to Know?
<!-- If you want to bring this to the attention of particular people, @-mention them below. -->
<!-- If a user reported this bug and should be notified when it is fixed, provide the Front conversation link. -->

-
-
services/track-changes/.github/PULL_REQUEST_TEMPLATE.md (new file, vendored, 48 lines)

<!-- ** This is an Overleaf public repository ** -->

<!-- Please review https://github.com/overleaf/overleaf/blob/master/CONTRIBUTING.md for guidance on what is expected of a contribution. -->

### Description

#### Screenshots

#### Related Issues / PRs

### Review

#### Potential Impact

#### Manual Testing Performed

- [ ]
- [ ]

#### Accessibility

### Deployment

#### Deployment Checklist

- [ ] Update documentation not included in the PR (if any)
- [ ]

#### Metrics and Monitoring

#### Who Needs to Know?
services/track-changes/.github/dependabot.yml (new file, vendored, 23 lines)

version: 2
updates:
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "daily"

    pull-request-branch-name:
      # Separate sections of the branch name with a hyphen
      # Docker images use the branch name and do not support slashes in tags
      # https://github.com/overleaf/google-ops/issues/822
      # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#pull-request-branch-nameseparator
      separator: "-"

    # Block informal upgrades -- security upgrades use a separate queue.
    # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit
    open-pull-requests-limit: 0

    # currently assign team-magma to all dependabot PRs - this may change in
    # future if we reorganise teams
    labels:
      - "dependencies"
      - "type:maintenance"
services/track-changes/.gitignore (new file, vendored, 7 lines)

**.swp
node_modules/
forever/
*.js.map

# managed by dev-environment$ bin/update_build_scripts
.npmrc
services/track-changes/.mocharc.json (new file, 3 lines)

{
  "require": "test/setup.js"
}
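The `require` hook makes mocha load `test/setup.js` before any test file runs. That file is not shown in this diff; a typical setup (hypothetical sketch only) registers chai and exposes the `expect` global that the `.eslintrc` test override above declares:

```js
// test/setup.js (hypothetical sketch; the real file is not part of this diff)
const chai = require('chai')

// Expose `expect` globally, matching "globals": { "expect": true }
// in the .eslintrc test override.
global.expect = chai.expect
```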
services/track-changes/.nvmrc (new file, 1 line)

12.22.3
services/track-changes/.prettierrc (new file, 11 lines)

# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
{
  "arrowParens": "avoid",
  "semi": false,
  "singleQuote": true,
  "trailingComma": "es5",
  "tabWidth": 2,
  "useTabs": false
}
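For reference, a fragment formatted under these options (illustrative only, not from the commit) has no semicolons, single quotes, ES5 trailing commas, two-space indentation, and unparenthesized single arrow parameters:

```js
// Sample of the configured prettier style (illustrative).
const greet = name => `Hello, ${name}`

const defaults = {
  retries: 3,
  timeout: 1000, // "trailingComma": "es5" keeps this comma
}

console.log(greet('Overleaf'), defaults)
```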
services/track-changes/Dockerfile (new file, 23 lines)

# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

FROM node:12.22.3 as base

WORKDIR /app

FROM base as app

# wildcard as some files may not be in all repos
COPY package*.json npm-shrink*.json /app/

RUN npm ci --quiet

COPY . /app

FROM base

COPY --from=app /app /app
USER node

CMD ["node", "--expose-gc", "app.js"]
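The `--expose-gc` flag in the CMD makes V8's garbage collector callable from JavaScript, which memory monitoring can take advantage of. A sketch of what the flag enables (an assumption about usage; the service's own monitoring lives in @overleaf/metrics):

```js
// global.gc is only defined when node runs with --expose-gc.
if (typeof global.gc === 'function') {
  const before = process.memoryUsage().heapUsed
  global.gc() // force a full collection
  const after = process.memoryUsage().heapUsed
  console.log(`gc freed ${((before - after) / 1024 / 1024).toFixed(1)} MiB`)
}
```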
services/track-changes/LICENSE (new file, 662 lines)

GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007

Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

[The remainder of the file is the standard, unmodified text of the GNU AGPL v3.]
services/track-changes/Makefile (new file, 90 lines)

# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = track-changes
BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]')

DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
	BRANCH_NAME=$(BRANCH_NAME) \
	PROJECT_NAME=$(PROJECT_NAME) \
	MOCHA_GREP=${MOCHA_GREP} \
	docker-compose ${DOCKER_COMPOSE_FLAGS}

DOCKER_COMPOSE_TEST_ACCEPTANCE = \
	COMPOSE_PROJECT_NAME=test_acceptance_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)

DOCKER_COMPOSE_TEST_UNIT = \
	COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)

clean:
	-docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	-docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	-$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local
	-$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local

format:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format

format_fix:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format:fix

lint:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent lint

test: format lint test_unit test_acceptance

test_unit:
ifneq (,$(wildcard test/unit))
	$(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit
	$(MAKE) test_unit_clean
endif

test_clean: test_unit_clean
test_unit_clean:
ifneq (,$(wildcard test/unit))
	$(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0
endif

test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run
	$(MAKE) test_acceptance_clean

test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug
	$(MAKE) test_acceptance_clean

test_acceptance_run:
ifneq (,$(wildcard test/acceptance))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance
endif

test_acceptance_run_debug:
ifneq (,$(wildcard test/acceptance))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk
endif

test_clean: test_acceptance_clean
test_acceptance_clean:
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0

test_acceptance_pre_run:
ifneq (,$(wildcard test/acceptance/js/scripts/pre-run))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
endif

build:
	docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		.

tar:
	$(DOCKER_COMPOSE) up tar

publish:
	docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)

.PHONY: clean test test_unit test_acceptance test_clean build publish
services/track-changes/README.md (new file, 20 lines)

overleaf/track-changes
========================

An API for converting raw editor updates into a compressed and browseable history.

Acceptance tests can be run with the command
```
AWS_BUCKET=<bucket-name> AWS_ACCESS_KEY_ID=<aws-access-key> AWS_SECRET_ACCESS_KEY=<aws-secret-access-key> make test
```
where `bucket-name`, `aws-access-key` and `aws-secret-access-key` are the credentials for an AWS S3 bucket.

License
-------

The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file.

Copyright (c) Overleaf, 2014-2019.
services/track-changes/app.js (new file, 161 lines)

/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Metrics = require('@overleaf/metrics')
Metrics.initialize('track-changes')
const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const TrackChangesLogger = logger.initialize('track-changes').logger

if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
  logger.initializeErrorReporting(Settings.sentry.dsn)
}

// log updates as truncated strings
const truncateFn = updates =>
  JSON.parse(
    JSON.stringify(updates, function (key, value) {
      let len
      if (typeof value === 'string' && (len = value.length) > 80) {
        return (
          value.substr(0, 32) +
          `...(message of length ${len} truncated)...` +
          value.substr(-32)
        )
      } else {
        return value
      }
    })
  )
TrackChangesLogger.addSerializers({
  rawUpdate: truncateFn,
  rawUpdates: truncateFn,
  newUpdates: truncateFn,
  lastUpdate: truncateFn,
})
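// Illustrative note (not part of the original file): the serializers above
// keep log lines bounded. Any string longer than 80 characters is reduced
// to its first and last 32 characters around a length marker, e.g.
//   truncateFn({ doc: 'a'.repeat(100) }).doc
//   // => first 32 chars + '...(message of length 100 truncated)...' + last 32 chars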
const Path = require('path')

Metrics.memory.monitor(logger)

const childProcess = require('child_process')

const mongodb = require('./app/js/mongodb')
const HttpController = require('./app/js/HttpController')
const express = require('express')
const bodyParser = require('body-parser')

const app = express()

app.use(bodyParser.json())

app.use(Metrics.http.monitor(logger))

Metrics.injectMetricsRoute(app)

app.post('/project/:project_id/doc/:doc_id/flush', HttpController.flushDoc)

app.get('/project/:project_id/doc/:doc_id/diff', HttpController.getDiff)

app.get('/project/:project_id/doc/:doc_id/check', HttpController.checkDoc)

app.get('/project/:project_id/updates', HttpController.getUpdates)
app.get('/project/:project_id/export', HttpController.exportProject)

app.post('/project/:project_id/flush', HttpController.flushProject)

app.post(
  '/project/:project_id/doc/:doc_id/version/:version/restore',
  HttpController.restore
)

app.post('/project/:project_id/doc/:doc_id/push', HttpController.pushDocHistory)
app.post('/project/:project_id/doc/:doc_id/pull', HttpController.pullDocHistory)

app.post('/flush/all', HttpController.flushAll)
app.post('/check/dangling', HttpController.checkDanglingUpdates)

let packWorker = null // use a single packing worker

app.post('/pack', function (req, res, next) {
  if (packWorker != null) {
    return res.send('pack already running')
  } else {
    logger.log('running pack')
    packWorker = childProcess.fork(
      Path.join(__dirname, '/app/js/PackWorker.js'),
      [
        req.query.limit || 1000,
        req.query.delay || 1000,
        req.query.timeout || 30 * 60 * 1000,
      ]
    )
    packWorker.on('exit', function (code, signal) {
      logger.log({ code, signal }, 'history auto pack exited')
      return (packWorker = null)
    })
    return res.send('pack started')
  }
})
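// Illustrative note (not part of the original file): at most one PackWorker
// runs at a time; the 'exit' handler clears `packWorker` so a later request
// can start a new run. A hypothetical local trigger, assuming the default
// port 3015:
//   curl -X POST 'http://localhost:3015/pack?limit=500&delay=2000'
// which responds 'pack started' or 'pack already running'.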
||||
app.get('/status', (req, res, next) => res.send('track-changes is alive'))
|
||||
|
||||
app.get('/oops', function (req, res, next) {
|
||||
throw new Error('dummy test error')
|
||||
})
|
||||
|
||||
app.get('/check_lock', HttpController.checkLock)
|
||||
|
||||
app.get('/health_check', HttpController.healthCheck)
|
||||
|
||||
app.use(function (error, req, res, next) {
|
||||
logger.error({ err: error, req }, 'an internal error occured')
|
||||
return res.sendStatus(500)
|
||||
})
|
||||
|
||||
const port =
|
||||
__guard__(
|
||||
Settings.internal != null ? Settings.internal.trackchanges : undefined,
|
||||
x => x.port
|
||||
) || 3015
|
||||
const host =
|
||||
__guard__(
|
||||
Settings.internal != null ? Settings.internal.trackchanges : undefined,
|
||||
x1 => x1.host
|
||||
) || 'localhost'
|
||||
|
||||
if (!module.parent) {
|
||||
// Called directly
|
||||
mongodb
|
||||
.waitForDb()
|
||||
.then(() => {
|
||||
app.listen(port, host, function (error) {
|
||||
if (error != null) {
|
||||
return logger.error(
|
||||
{ err: error },
|
||||
'could not start track-changes server'
|
||||
)
|
||||
} else {
|
||||
return logger.info(
|
||||
`trackchanges starting up, listening on ${host}:${port}`
|
||||
)
|
||||
}
|
||||
})
|
||||
})
|
||||
.catch(err => {
|
||||
logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
|
||||
process.exit(1)
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = app
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return typeof value !== 'undefined' && value !== null
|
||||
? transform(value)
|
||||
: undefined
|
||||
}
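For reference, a minimal sketch of kicking off the background packer through the /pack route above; the host and port are assumptions based on the defaults in this file, and the query values map onto the three fork arguments:

const http = require('http')

// POST /pack with explicit limit/delay/timeout query parameters.
const req = http.request(
  {
    host: 'localhost',
    port: 3015,
    path: '/pack?limit=500&delay=2000&timeout=1800000',
    method: 'POST',
  },
  res => {
    res.setEncoding('utf8')
    // Responds with 'pack started' or 'pack already running'.
    res.on('data', body => console.log(body))
  }
)
req.end()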
340
services/track-changes/app/js/DiffGenerator.js
Normal file
@@ -0,0 +1,340 @@
/* eslint-disable
    camelcase,
    no-proto,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let DiffGenerator
var ConsistencyError = function (message) {
  const error = new Error(message)
  error.name = 'ConsistencyError'
  error.__proto__ = ConsistencyError.prototype
  return error
}
ConsistencyError.prototype.__proto__ = Error.prototype

const logger = require('logger-sharelatex')

module.exports = DiffGenerator = {
  ConsistencyError,

  rewindUpdate(content, update) {
    for (let j = update.op.length - 1, i = j; j >= 0; j--, i = j) {
      const op = update.op[i]
      if (op.broken !== true) {
        try {
          content = DiffGenerator.rewindOp(content, op)
        } catch (e) {
          if (e instanceof ConsistencyError && i === update.op.length - 1) {
            // catch known case where the last op in an array has been
            // merged into a later op
            logger.error(
              { err: e, update, op: JSON.stringify(op) },
              'marking op as broken'
            )
            op.broken = true
          } else {
            throw e // rethrow the exception
          }
        }
      }
    }
    return content
  },

  rewindOp(content, op) {
    let p
    if (op.i != null) {
      // ShareJS will accept an op where p > content.length when applied,
      // and it applies as though p == content.length. However, the op is
      // passed to us with the original p > content.length. Detect if that
      // is the case with this op, and shift p back appropriately to match
      // ShareJS if so.
      ;({ p } = op)
      const max_p = content.length - op.i.length
      if (p > max_p) {
        logger.warn({ max_p, p }, 'truncating position to content length')
        p = max_p
      }

      const textToBeRemoved = content.slice(p, p + op.i.length)
      if (op.i !== textToBeRemoved) {
        throw new ConsistencyError(
          `Inserted content, '${op.i}', does not match text to be removed, '${textToBeRemoved}'`
        )
      }

      return content.slice(0, p) + content.slice(p + op.i.length)
    } else if (op.d != null) {
      return content.slice(0, op.p) + op.d + content.slice(op.p)
    } else {
      return content
    }
  },

  rewindUpdates(content, updates) {
    for (const update of Array.from(updates.reverse())) {
      try {
        content = DiffGenerator.rewindUpdate(content, update)
      } catch (e) {
        e.attempted_update = update // keep a record of the attempted update
        throw e // rethrow the exception
      }
    }
    return content
  },

  buildDiff(initialContent, updates) {
    let diff = [{ u: initialContent }]
    for (const update of Array.from(updates)) {
      diff = DiffGenerator.applyUpdateToDiff(diff, update)
    }
    diff = DiffGenerator.compressDiff(diff)
    return diff
  },

  compressDiff(diff) {
    const newDiff = []
    for (const part of Array.from(diff)) {
      const lastPart = newDiff[newDiff.length - 1]
      if (
        lastPart != null &&
        (lastPart.meta != null ? lastPart.meta.user : undefined) != null &&
        (part.meta != null ? part.meta.user : undefined) != null
      ) {
        if (
          lastPart.i != null &&
          part.i != null &&
          lastPart.meta.user.id === part.meta.user.id
        ) {
          lastPart.i += part.i
          lastPart.meta.start_ts = Math.min(
            lastPart.meta.start_ts,
            part.meta.start_ts
          )
          lastPart.meta.end_ts = Math.max(
            lastPart.meta.end_ts,
            part.meta.end_ts
          )
        } else if (
          lastPart.d != null &&
          part.d != null &&
          lastPart.meta.user.id === part.meta.user.id
        ) {
          lastPart.d += part.d
          lastPart.meta.start_ts = Math.min(
            lastPart.meta.start_ts,
            part.meta.start_ts
          )
          lastPart.meta.end_ts = Math.max(
            lastPart.meta.end_ts,
            part.meta.end_ts
          )
        } else {
          newDiff.push(part)
        }
      } else {
        newDiff.push(part)
      }
    }
    return newDiff
  },

  applyOpToDiff(diff, op, meta) {
    let consumedDiff
    const position = 0

    let remainingDiff = diff.slice()
    ;({ consumedDiff, remainingDiff } = DiffGenerator._consumeToOffset(
      remainingDiff,
      op.p
    ))
    const newDiff = consumedDiff

    if (op.i != null) {
      newDiff.push({
        i: op.i,
        meta,
      })
    } else if (op.d != null) {
      ;({ consumedDiff, remainingDiff } =
        DiffGenerator._consumeDiffAffectedByDeleteOp(remainingDiff, op, meta))
      newDiff.push(...Array.from(consumedDiff || []))
    }

    newDiff.push(...Array.from(remainingDiff || []))

    return newDiff
  },

  applyUpdateToDiff(diff, update) {
    for (const op of Array.from(update.op)) {
      if (op.broken !== true) {
        diff = DiffGenerator.applyOpToDiff(diff, op, update.meta)
      }
    }
    return diff
  },

  _consumeToOffset(remainingDiff, totalOffset) {
    let part
    const consumedDiff = []
    let position = 0
    while ((part = remainingDiff.shift())) {
      const length = DiffGenerator._getLengthOfDiffPart(part)
      if (part.d != null) {
        consumedDiff.push(part)
      } else if (position + length >= totalOffset) {
        const partOffset = totalOffset - position
        if (partOffset > 0) {
          consumedDiff.push(DiffGenerator._slicePart(part, 0, partOffset))
        }
        if (partOffset < length) {
          remainingDiff.unshift(DiffGenerator._slicePart(part, partOffset))
        }
        break
      } else {
        position += length
        consumedDiff.push(part)
      }
    }

    return {
      consumedDiff,
      remainingDiff,
    }
  },

  _consumeDiffAffectedByDeleteOp(remainingDiff, deleteOp, meta) {
    const consumedDiff = []
    let remainingOp = deleteOp
    while (remainingOp && remainingDiff.length > 0) {
      let newPart
      ;({ newPart, remainingDiff, remainingOp } =
        DiffGenerator._consumeDeletedPart(remainingDiff, remainingOp, meta))
      if (newPart != null) {
        consumedDiff.push(newPart)
      }
    }
    return {
      consumedDiff,
      remainingDiff,
    }
  },

  _consumeDeletedPart(remainingDiff, op, meta) {
    let deletedContent, newPart, remainingOp
    const part = remainingDiff.shift()
    const partLength = DiffGenerator._getLengthOfDiffPart(part)

    if (part.d != null) {
      // Skip existing deletes
      remainingOp = op
      newPart = part
    } else if (partLength > op.d.length) {
      // Only the first bit of the part has been deleted
      const remainingPart = DiffGenerator._slicePart(part, op.d.length)
      remainingDiff.unshift(remainingPart)

      deletedContent = DiffGenerator._getContentOfPart(part).slice(
        0,
        op.d.length
      )
      if (deletedContent !== op.d) {
        throw new ConsistencyError(
          `deleted content, '${deletedContent}', does not match delete op, '${op.d}'`
        )
      }

      if (part.u != null) {
        newPart = {
          d: op.d,
          meta,
        }
      } else if (part.i != null) {
        newPart = null
      }

      remainingOp = null
    } else if (partLength === op.d.length) {
      // The entire part has been deleted, but it is the last part

      deletedContent = DiffGenerator._getContentOfPart(part)
      if (deletedContent !== op.d) {
        throw new ConsistencyError(
          `deleted content, '${deletedContent}', does not match delete op, '${op.d}'`
        )
      }

      if (part.u != null) {
        newPart = {
          d: op.d,
          meta,
        }
      } else if (part.i != null) {
        newPart = null
      }

      remainingOp = null
    } else if (partLength < op.d.length) {
      // The entire part has been deleted and there is more

      deletedContent = DiffGenerator._getContentOfPart(part)
      const opContent = op.d.slice(0, deletedContent.length)
      if (deletedContent !== opContent) {
        throw new ConsistencyError(
          `deleted content, '${deletedContent}', does not match delete op, '${opContent}'`
        )
      }

      if (part.u) {
        newPart = {
          d: part.u,
          meta,
        }
      } else if (part.i != null) {
        newPart = null
      }

      remainingOp = {
        p: op.p,
        d: op.d.slice(DiffGenerator._getLengthOfDiffPart(part)),
      }
    }

    return {
      newPart,
      remainingDiff,
      remainingOp,
    }
  },

  _slicePart(basePart, from, to) {
    let part
    if (basePart.u != null) {
      part = { u: basePart.u.slice(from, to) }
    } else if (basePart.i != null) {
      part = { i: basePart.i.slice(from, to) }
    }
    if (basePart.meta != null) {
      part.meta = basePart.meta
    }
    return part
  },

  _getLengthOfDiffPart(part) {
    return (part.u || part.d || part.i || '').length
  },

  _getContentOfPart(part) {
    return part.u || part.d || part.i || ''
  },
}
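To illustrate the rewind semantics above, a small sketch with made-up inputs: rewinding an insert removes the inserted text, and rewinding a delete puts the deleted text back.

const DiffGenerator = require('./app/js/DiffGenerator')

// Undo an insert of ' world' at position 5.
DiffGenerator.rewindOp('hello world', { i: ' world', p: 5 }) // => 'hello'

// Undo a delete of ' world' at position 5.
DiffGenerator.rewindOp('hello', { d: ' world', p: 5 }) // => 'hello world'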
188
services/track-changes/app/js/DiffManager.js
Normal file
@@ -0,0 +1,188 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let DiffManager
const UpdatesManager = require('./UpdatesManager')
const DocumentUpdaterManager = require('./DocumentUpdaterManager')
const DiffGenerator = require('./DiffGenerator')
const logger = require('logger-sharelatex')

module.exports = DiffManager = {
  getLatestDocAndUpdates(project_id, doc_id, fromVersion, callback) {
    // Get updates last, since then they must be ahead and it
    // might be possible to rewind to the same version as the doc.
    if (callback == null) {
      callback = function (error, content, version, updates) {}
    }
    return DocumentUpdaterManager.getDocument(
      project_id,
      doc_id,
      function (error, content, version) {
        if (error != null) {
          return callback(error)
        }
        if (fromVersion == null) {
          // If we haven't been given a version, just return the latest doc and no updates
          return callback(null, content, version, [])
        }
        return UpdatesManager.getDocUpdatesWithUserInfo(
          project_id,
          doc_id,
          { from: fromVersion },
          function (error, updates) {
            if (error != null) {
              return callback(error)
            }
            return callback(null, content, version, updates)
          }
        )
      }
    )
  },

  getDiff(project_id, doc_id, fromVersion, toVersion, callback) {
    if (callback == null) {
      callback = function (error, diff) {}
    }
    return DiffManager.getDocumentBeforeVersion(
      project_id,
      doc_id,
      fromVersion,
      function (error, startingContent, updates) {
        let diff
        if (error != null) {
          if (error.message === 'broken-history') {
            return callback(null, 'history unavailable')
          } else {
            return callback(error)
          }
        }

        const updatesToApply = []
        for (const update of Array.from(updates.slice().reverse())) {
          if (update.v <= toVersion) {
            updatesToApply.push(update)
          }
        }

        try {
          diff = DiffGenerator.buildDiff(startingContent, updatesToApply)
        } catch (e) {
          return callback(e)
        }

        return callback(null, diff)
      }
    )
  },

  getDocumentBeforeVersion(project_id, doc_id, version, _callback) {
    // Whichever order we get the latest document and the latest updates,
    // there is potential for updates to be applied between them so that
    // they do not return the same 'latest' versions.
    // If this happens, we just retry and hopefully get them at the compatible
    // versions.
    let retry
    if (_callback == null) {
      _callback = function (error, document, rewoundUpdates) {}
    }
    let retries = 3
    const callback = function (error, ...args) {
      if (error != null) {
        if (error.retry && retries > 0) {
          logger.warn(
            { error, project_id, doc_id, version, retries },
            'retrying getDocumentBeforeVersion'
          )
          return retry()
        } else {
          return _callback(error)
        }
      } else {
        return _callback(null, ...Array.from(args))
      }
    }

    return (retry = function () {
      retries--
      return DiffManager._tryGetDocumentBeforeVersion(
        project_id,
        doc_id,
        version,
        callback
      )
    })()
  },

  _tryGetDocumentBeforeVersion(project_id, doc_id, version, callback) {
    if (callback == null) {
      callback = function (error, document, rewoundUpdates) {}
    }
    logger.log(
      { project_id, doc_id, version },
      'getting document before version'
    )
    return DiffManager.getLatestDocAndUpdates(
      project_id,
      doc_id,
      version,
      function (error, content, version, updates) {
        let startingContent
        if (error != null) {
          return callback(error)
        }

        // bail out if we hit a broken update
        for (const u of Array.from(updates)) {
          if (u.broken) {
            return callback(new Error('broken-history'))
          }
        }

        // discard any updates which are ahead of this document version
        while ((updates[0] != null ? updates[0].v : undefined) >= version) {
          updates.shift()
        }

        const lastUpdate = updates[0]
        if (lastUpdate != null && lastUpdate.v !== version - 1) {
          error = new Error(
            `latest update version, ${lastUpdate.v}, does not match doc version, ${version}`
          )
          error.retry = true
          return callback(error)
        }

        logger.log(
          {
            docVersion: version,
            lastUpdateVersion: lastUpdate != null ? lastUpdate.v : undefined,
            updateCount: updates.length,
          },
          'rewinding updates'
        )

        const tryUpdates = updates.slice().reverse()

        try {
          startingContent = DiffGenerator.rewindUpdates(content, tryUpdates)
          // tryUpdates is reversed, and any unapplied ops are marked as broken
        } catch (e) {
          return callback(e)
        }

        return callback(null, startingContent, tryUpdates)
      }
    )
  },
}
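A usage sketch for the diff API above; the ids and version numbers are placeholders:

const DiffManager = require('./app/js/DiffManager')

DiffManager.getDiff('project-id', 'doc-id', 40, 42, function (error, diff) {
  if (error) throw error
  // diff is either the string 'history unavailable' (broken history) or a
  // list of parts like { u: 'unchanged' }, { i: 'inserted', meta } and
  // { d: 'deleted', meta }.
  console.log(diff)
})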
90
services/track-changes/app/js/DocumentUpdaterManager.js
Normal file
@@ -0,0 +1,90 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let DocumentUpdaterManager
const request = require('request')
const logger = require('logger-sharelatex')
const Settings = require('@overleaf/settings')

module.exports = DocumentUpdaterManager = {
  getDocument(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (error, content, version) {}
    }
    const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}`
    logger.log({ project_id, doc_id }, 'getting doc from document updater')
    return request.get(url, function (error, res, body) {
      if (error != null) {
        return callback(error)
      }
      if (res.statusCode >= 200 && res.statusCode < 300) {
        try {
          body = JSON.parse(body)
        } catch (error1) {
          error = error1
          return callback(error)
        }
        logger.log(
          { project_id, doc_id, version: body.version },
          'got doc from document updater'
        )
        return callback(null, body.lines.join('\n'), body.version)
      } else {
        error = new Error(
          `doc updater returned a non-success status code: ${res.statusCode}`
        )
        logger.error(
          { err: error, project_id, doc_id, url },
          'error accessing doc updater'
        )
        return callback(error)
      }
    })
  },

  setDocument(project_id, doc_id, content, user_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const url = `${Settings.apis.documentupdater.url}/project/${project_id}/doc/${doc_id}`
    logger.log({ project_id, doc_id }, 'setting doc in document updater')
    return request.post(
      {
        url,
        json: {
          lines: content.split('\n'),
          source: 'restore',
          user_id,
          undoing: true,
        },
      },
      function (error, res, body) {
        if (error != null) {
          return callback(error)
        }
        if (res.statusCode >= 200 && res.statusCode < 300) {
          return callback(null)
        } else {
          error = new Error(
            `doc updater returned a non-success status code: ${res.statusCode}`
          )
          logger.error(
            { err: error, project_id, doc_id, url },
            'error accessing doc updater'
          )
          return callback(error)
        }
      }
    )
  },
}
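A sketch of fetching a document through the manager above; the ids are placeholders:

const DocumentUpdaterManager = require('./app/js/DocumentUpdaterManager')

DocumentUpdaterManager.getDocument(
  'project-id',
  'doc-id',
  function (error, content, version) {
    if (error) throw error
    // content is the document's lines joined with newlines
    console.log(`version ${version}: ${content.length} characters`)
  }
)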
81
services/track-changes/app/js/HealthChecker.js
Normal file
@@ -0,0 +1,81 @@
/* eslint-disable
    camelcase,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const { ObjectId } = require('./mongodb')
const request = require('request')
const async = require('async')
const settings = require('@overleaf/settings')
const { port } = settings.internal.trackchanges
const logger = require('logger-sharelatex')
const LockManager = require('./LockManager')

module.exports = {
  check(callback) {
    const project_id = ObjectId(settings.trackchanges.healthCheck.project_id)
    const url = `http://localhost:${port}/project/${project_id}`
    logger.log({ project_id }, 'running health check')
    const jobs = [
      cb =>
        request.get(
          { url: `http://localhost:${port}/check_lock`, timeout: 3000 },
          function (err, res, body) {
            if (err != null) {
              logger.err(
                { err, project_id },
                'error checking lock for health check'
              )
              return cb(err)
            } else if ((res != null ? res.statusCode : undefined) !== 200) {
              return cb(`status code not 200, it's ${res.statusCode}`)
            } else {
              return cb()
            }
          }
        ),
      cb =>
        request.post(
          { url: `${url}/flush`, timeout: 10000 },
          function (err, res, body) {
            if (err != null) {
              logger.err({ err, project_id }, 'error flushing for health check')
              return cb(err)
            } else if ((res != null ? res.statusCode : undefined) !== 204) {
              return cb(`status code not 204, it's ${res.statusCode}`)
            } else {
              return cb()
            }
          }
        ),
      cb =>
        request.get(
          { url: `${url}/updates`, timeout: 10000 },
          function (err, res, body) {
            if (err != null) {
              logger.err(
                { err, project_id },
                'error getting updates for health check'
              )
              return cb(err)
            } else if ((res != null ? res.statusCode : undefined) !== 200) {
              return cb(`status code not 200, it's ${res.statusCode}`)
            } else {
              return cb()
            }
          }
        ),
    ]
    return async.series(jobs, callback)
  },

  checkLock(callback) {
    return LockManager.healthCheck(callback)
  },
}
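The HTTP layer exposes this check via GET /health_check; a direct usage sketch:

const HealthChecker = require('./app/js/HealthChecker')

HealthChecker.check(function (err) {
  // err is set if the lock check, project flush or updates fetch failed
  console.log(err ? 'track-changes unhealthy' : 'track-changes healthy')
})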
340
services/track-changes/app/js/HttpController.js
Normal file
@@ -0,0 +1,340 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let HttpController
const UpdatesManager = require('./UpdatesManager')
const DiffManager = require('./DiffManager')
const PackManager = require('./PackManager')
const RestoreManager = require('./RestoreManager')
const logger = require('logger-sharelatex')
const HealthChecker = require('./HealthChecker')
const _ = require('underscore')

module.exports = HttpController = {
  flushDoc(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { doc_id } = req.params
    const { project_id } = req.params
    logger.log({ project_id, doc_id }, 'compressing doc history')
    return UpdatesManager.processUncompressedUpdatesWithLock(
      project_id,
      doc_id,
      function (error) {
        if (error != null) {
          return next(error)
        }
        return res.sendStatus(204)
      }
    )
  },

  flushProject(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    logger.log({ project_id }, 'compressing project history')
    return UpdatesManager.processUncompressedUpdatesForProject(
      project_id,
      function (error) {
        if (error != null) {
          return next(error)
        }
        return res.sendStatus(204)
      }
    )
  },

  flushAll(req, res, next) {
    // limit on projects to flush or -1 for all (default)
    if (next == null) {
      next = function (error) {}
    }
    const limit = req.query.limit != null ? parseInt(req.query.limit, 10) : -1
    logger.log({ limit }, 'flushing all projects')
    return UpdatesManager.flushAll(limit, function (error, result) {
      if (error != null) {
        return next(error)
      }
      const { failed, succeeded, all } = result
      const status = `${succeeded.length} succeeded, ${failed.length} failed`
      if (limit === 0) {
        return res
          .status(200)
          .send(`${status}\nwould flush:\n${all.join('\n')}\n`)
      } else if (failed.length > 0) {
        logger.log({ failed, succeeded }, 'error flushing projects')
        return res
          .status(500)
          .send(`${status}\nfailed to flush:\n${failed.join('\n')}\n`)
      } else {
        return res
          .status(200)
          .send(
            `${status}\nflushed ${succeeded.length} projects of ${all.length}\n`
          )
      }
    })
  },

  checkDanglingUpdates(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    logger.log('checking dangling updates')
    return UpdatesManager.getDanglingUpdates(function (error, result) {
      if (error != null) {
        return next(error)
      }
      if (result.length > 0) {
        logger.log({ dangling: result }, 'found dangling updates')
        return res.status(500).send(`dangling updates:\n${result.join('\n')}\n`)
      } else {
        return res.status(200).send('no dangling updates found\n')
      }
    })
  },

  checkDoc(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { doc_id } = req.params
    const { project_id } = req.params
    logger.log({ project_id, doc_id }, 'checking doc history')
    return DiffManager.getDocumentBeforeVersion(
      project_id,
      doc_id,
      1,
      function (error, document, rewoundUpdates) {
        if (error != null) {
          return next(error)
        }
        const broken = []
        for (const update of Array.from(rewoundUpdates)) {
          for (const op of Array.from(update.op)) {
            if (op.broken === true) {
              broken.push(op)
            }
          }
        }
        if (broken.length > 0) {
          return res.send(broken)
        } else {
          return res.sendStatus(204)
        }
      }
    )
  },

  getDiff(req, res, next) {
    let from, to
    if (next == null) {
      next = function (error) {}
    }
    const { doc_id } = req.params
    const { project_id } = req.params

    if (req.query.from != null) {
      from = parseInt(req.query.from, 10)
    } else {
      from = null
    }
    if (req.query.to != null) {
      to = parseInt(req.query.to, 10)
    } else {
      to = null
    }

    logger.log({ project_id, doc_id, from, to }, 'getting diff')
    return DiffManager.getDiff(
      project_id,
      doc_id,
      from,
      to,
      function (error, diff) {
        if (error != null) {
          return next(error)
        }
        return res.json({ diff })
      }
    )
  },

  getUpdates(req, res, next) {
    let before, min_count
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params

    if (req.query.before != null) {
      before = parseInt(req.query.before, 10)
    }
    if (req.query.min_count != null) {
      min_count = parseInt(req.query.min_count, 10)
    }

    return UpdatesManager.getSummarizedProjectUpdates(
      project_id,
      { before, min_count },
      function (error, updates, nextBeforeTimestamp) {
        if (error != null) {
          return next(error)
        }
        return res.json({
          updates,
          nextBeforeTimestamp,
        })
      }
    )
  },

  exportProject(req, res, next) {
    // The project history can be huge:
    // - updates can weigh MBs for insert/delete of full doc
    // - multiple updates form a pack
    // Flush updates per pack onto the wire.
    const { project_id } = req.params
    logger.log({ project_id }, 'exporting project history')
    UpdatesManager.exportProject(
      project_id,
      function (err, { updates, userIds }, confirmWrite) {
        const abortStreaming = req.aborted || res.finished || res.destroyed
        if (abortStreaming) {
          // Tell the producer to stop emitting data
          if (confirmWrite) confirmWrite(new Error('stop'))
          return
        }
        const hasStartedStreamingResponse = res.headersSent
        if (err) {
          logger.error({ project_id, err }, 'export failed')
          if (!hasStartedStreamingResponse) {
            // Generate a nice 500
            return next(err)
          } else {
            // Stop streaming
            return res.destroy()
          }
        }
        // Compose the response incrementally
        const isFirstWrite = !hasStartedStreamingResponse
        const isLastWrite = updates.length === 0
        if (isFirstWrite) {
          // The first write will emit the 200 status, headers and start of the
          // response payload (open array)
          res.setHeader('Content-Type', 'application/json')
          res.setHeader('Trailer', 'X-User-Ids')
          res.writeHead(200)
          res.write('[')
        }
        if (!isFirstWrite && !isLastWrite) {
          // Starting from the 2nd non-empty write, emit a continuing comma.
          // write 1: [updates1
          // write 2: ,updates2
          // write 3: ,updates3
          // write N: ]
          res.write(',')
        }

        // Every write will emit a blob onto the response stream:
        // '[update1,update2,...]'
        //   ^^^^^^^^^^^^^^^^^^^
        res.write(JSON.stringify(updates).slice(1, -1), confirmWrite)

        if (isLastWrite) {
          // The last write will have no updates and will finish the response
          // payload (close array) and emit the userIds as trailer.
          res.addTrailers({ 'X-User-Ids': JSON.stringify(userIds) })
          res.end(']')
        }
      }
    )
  },

  restore(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    let { doc_id, project_id, version } = req.params
    const user_id = req.headers['x-user-id']
    version = parseInt(version, 10)
    return RestoreManager.restoreToBeforeVersion(
      project_id,
      doc_id,
      version,
      user_id,
      function (error) {
        if (error != null) {
          return next(error)
        }
        return res.sendStatus(204)
      }
    )
  },

  pushDocHistory(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    const { doc_id } = req.params
    logger.log({ project_id, doc_id }, 'pushing all finalised changes to s3')
    return PackManager.pushOldPacks(project_id, doc_id, function (error) {
      if (error != null) {
        return next(error)
      }
      return res.sendStatus(204)
    })
  },

  pullDocHistory(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    const { doc_id } = req.params
    logger.log({ project_id, doc_id }, 'pulling all packs from s3')
    return PackManager.pullOldPacks(project_id, doc_id, function (error) {
      if (error != null) {
        return next(error)
      }
      return res.sendStatus(204)
    })
  },

  healthCheck(req, res) {
    return HealthChecker.check(function (err) {
      if (err != null) {
        logger.err({ err }, 'error performing health check')
        return res.sendStatus(500)
      } else {
        return res.sendStatus(200)
      }
    })
  },

  checkLock(req, res) {
    return HealthChecker.checkLock(function (err) {
      if (err != null) {
        logger.err({ err }, 'error performing lock check')
        return res.sendStatus(500)
      } else {
        return res.sendStatus(200)
      }
    })
  },
}
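A sketch of consuming the streaming export endpoint above, including the X-User-Ids trailer that arrives after the JSON body; the host, port and project id are assumptions based on the defaults in app.js:

const http = require('http')

http.get('http://localhost:3015/project/some-project-id/export', res => {
  const chunks = []
  res.on('data', chunk => chunks.push(chunk))
  res.on('end', () => {
    // The body is a single JSON array assembled across multiple writes.
    const updates = JSON.parse(Buffer.concat(chunks).toString())
    // Trailers are only populated once the response has ended.
    const userIds = JSON.parse(res.trailers['x-user-ids'])
    console.log(`${updates.length} updates from ${userIds.length} users`)
  })
})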
164
services/track-changes/app/js/LockManager.js
Normal file
@@ -0,0 +1,164 @@
/* eslint-disable
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LockManager
const Settings = require('@overleaf/settings')
const redis = require('@overleaf/redis-wrapper')
const rclient = redis.createClient(Settings.redis.lock)
const os = require('os')
const crypto = require('crypto')
const logger = require('logger-sharelatex')

const HOST = os.hostname()
const PID = process.pid
const RND = crypto.randomBytes(4).toString('hex')
let COUNT = 0

module.exports = LockManager = {
  LOCK_TEST_INTERVAL: 50, // 50ms between each test of the lock
  MAX_LOCK_WAIT_TIME: 10000, // 10s maximum time to spend trying to get the lock
  LOCK_TTL: 300, // seconds (allow 5 minutes for any operation to complete)

  // Use a signed lock value as described in
  // http://redis.io/topics/distlock#correct-implementation-with-a-single-instance
  // to prevent accidental unlocking by multiple processes
  randomLock() {
    const time = Date.now()
    return `locked:host=${HOST}:pid=${PID}:random=${RND}:time=${time}:count=${COUNT++}`
  },

  unlockScript:
    'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end',

  tryLock(key, callback) {
    if (callback == null) {
      callback = function (err, gotLock) {}
    }
    const lockValue = LockManager.randomLock()
    return rclient.set(
      key,
      lockValue,
      'EX',
      this.LOCK_TTL,
      'NX',
      function (err, gotLock) {
        if (err != null) {
          return callback(err)
        }
        if (gotLock === 'OK') {
          return callback(err, true, lockValue)
        } else {
          return callback(err, false)
        }
      }
    )
  },

  getLock(key, callback) {
    let attempt
    if (callback == null) {
      callback = function (error) {}
    }
    const startTime = Date.now()
    return (attempt = function () {
      if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) {
        const e = new Error('Timeout')
        e.key = key
        return callback(e)
      }

      return LockManager.tryLock(key, function (error, gotLock, lockValue) {
        if (error != null) {
          return callback(error)
        }
        if (gotLock) {
          return callback(null, lockValue)
        } else {
          return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL)
        }
      })
    })()
  },

  checkLock(key, callback) {
    if (callback == null) {
      callback = function (err, isFree) {}
    }
    return rclient.exists(key, function (err, exists) {
      if (err != null) {
        return callback(err)
      }
      exists = parseInt(exists)
      if (exists === 1) {
        return callback(err, false)
      } else {
        return callback(err, true)
      }
    })
  },

  releaseLock(key, lockValue, callback) {
    return rclient.eval(
      LockManager.unlockScript,
      1,
      key,
      lockValue,
      function (err, result) {
        if (err != null) {
          return callback(err)
        }
        if (result != null && result !== 1) {
          // successful unlock should release exactly one key
          logger.error(
            { key, lockValue, redis_err: err, redis_result: result },
            'unlocking error'
          )
          return callback(new Error('tried to release timed out lock'))
        }
        return callback(err, result)
      }
    )
  },

  runWithLock(key, runner, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return LockManager.getLock(key, function (error, lockValue) {
      if (error != null) {
        return callback(error)
      }
      return runner(error1 =>
        LockManager.releaseLock(key, lockValue, function (error2) {
          error = error1 || error2
          if (error != null) {
            return callback(error)
          }
          return callback()
        })
      )
    })
  },

  healthCheck(callback) {
    const action = releaseLock => releaseLock()
    return LockManager.runWithLock(
      `HistoryLock:HealthCheck:host=${HOST}:pid=${PID}:random=${RND}`,
      action,
      callback
    )
  },

  close(callback) {
    rclient.quit()
    return rclient.once('end', callback)
  },
}
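A usage sketch for runWithLock above; the key is an arbitrary example value:

const LockManager = require('./app/js/LockManager')

LockManager.runWithLock(
  'HistoryLock:example-doc-id',
  releaseLock => {
    // ... critical section, run while holding the Redis lock ...
    releaseLock() // or releaseLock(error) to propagate a failure
  },
  error => {
    if (error) console.error('lock acquisition or critical section failed', error)
  }
)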
199
services/track-changes/app/js/MongoAWS.js
Normal file
@@ -0,0 +1,199 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let MongoAWS
const settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const AWS = require('aws-sdk')
const S3S = require('s3-streams')
const { db, ObjectId } = require('./mongodb')
const JSONStream = require('JSONStream')
const ReadlineStream = require('byline')
const zlib = require('zlib')
const Metrics = require('@overleaf/metrics')

const DAYS = 24 * 3600 * 1000 // one day in milliseconds

const createStream = function (streamConstructor, project_id, doc_id, pack_id) {
  const AWS_CONFIG = {
    accessKeyId: settings.trackchanges.s3.key,
    secretAccessKey: settings.trackchanges.s3.secret,
    endpoint: settings.trackchanges.s3.endpoint,
    s3ForcePathStyle: settings.trackchanges.s3.pathStyle,
  }

  return streamConstructor(new AWS.S3(AWS_CONFIG), {
    Bucket: settings.trackchanges.stores.doc_history,
    Key: project_id + '/changes-' + doc_id + '/pack-' + pack_id,
  })
}

module.exports = MongoAWS = {
  archivePack(project_id, doc_id, pack_id, _callback) {
    if (_callback == null) {
      _callback = function (error) {}
    }
    const callback = function (...args) {
      _callback(...Array.from(args || []))
      return (_callback = function () {})
    }

    const query = {
      _id: ObjectId(pack_id),
      doc_id: ObjectId(doc_id),
    }

    if (project_id == null) {
      return callback(new Error('invalid project id'))
    }
    if (doc_id == null) {
      return callback(new Error('invalid doc id'))
    }
    if (pack_id == null) {
      return callback(new Error('invalid pack id'))
    }

    logger.log({ project_id, doc_id, pack_id }, 'uploading data to s3')

    const upload = createStream(S3S.WriteStream, project_id, doc_id, pack_id)

    return db.docHistory.findOne(query, function (err, result) {
      if (err != null) {
        return callback(err)
      }
      if (result == null) {
        return callback(new Error('cannot find pack to send to s3'))
      }
      if (result.expiresAt != null) {
        return callback(new Error('refusing to send pack with TTL to s3'))
      }
      const uncompressedData = JSON.stringify(result)
      if (uncompressedData.indexOf('\u0000') !== -1) {
        const error = new Error('null bytes found in upload')
        logger.error({ err: error, project_id, doc_id, pack_id }, error.message)
        return callback(error)
      }
      return zlib.gzip(uncompressedData, function (err, buf) {
        logger.log(
          {
            project_id,
            doc_id,
            pack_id,
            origSize: uncompressedData.length,
            newSize: buf.length,
          },
          'compressed pack'
        )
        if (err != null) {
          return callback(err)
        }
        upload.on('error', err => callback(err))
        upload.on('finish', function () {
          Metrics.inc('archive-pack')
          logger.log({ project_id, doc_id, pack_id }, 'upload to s3 completed')
          return callback(null)
        })
        upload.write(buf)
        return upload.end()
      })
    })
  },

  readArchivedPack(project_id, doc_id, pack_id, _callback) {
    if (_callback == null) {
      _callback = function (error, result) {}
    }
    const callback = function (...args) {
      _callback(...Array.from(args || []))
      return (_callback = function () {})
    }

    if (project_id == null) {
      return callback(new Error('invalid project id'))
    }
    if (doc_id == null) {
      return callback(new Error('invalid doc id'))
    }
    if (pack_id == null) {
      return callback(new Error('invalid pack id'))
    }

    logger.log({ project_id, doc_id, pack_id }, 'downloading data from s3')

    const download = createStream(S3S.ReadStream, project_id, doc_id, pack_id)

    const inputStream = download
      .on('open', obj => 1)
      .on('error', err => callback(err))

    const gunzip = zlib.createGunzip()
    gunzip.setEncoding('utf8')
    gunzip.on('error', function (err) {
      logger.log(
        { project_id, doc_id, pack_id, err },
        'error uncompressing gzip stream'
      )
      return callback(err)
    })

    const outputStream = inputStream.pipe(gunzip)
    const parts = []
    outputStream.on('error', err => callback(err))
    outputStream.on('end', function () {
      let object
      logger.log({ project_id, doc_id, pack_id }, 'download from s3 completed')
      try {
        object = JSON.parse(parts.join(''))
      } catch (e) {
        return callback(e)
      }
      object._id = ObjectId(object._id)
      object.doc_id = ObjectId(object.doc_id)
      object.project_id = ObjectId(object.project_id)
      for (const op of Array.from(object.pack)) {
        if (op._id != null) {
          op._id = ObjectId(op._id)
        }
      }
      return callback(null, object)
    })
    return outputStream.on('data', data => parts.push(data))
  },

  unArchivePack(project_id, doc_id, pack_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return MongoAWS.readArchivedPack(
      project_id,
      doc_id,
      pack_id,
      function (err, object) {
        if (err != null) {
          return callback(err)
        }
        Metrics.inc('unarchive-pack')
        // allow the object to expire, we can always retrieve it again
        object.expiresAt = new Date(Date.now() + 7 * DAYS)
        logger.log({ project_id, doc_id, pack_id }, 'inserting object from s3')
        return db.docHistory.insertOne(object, (err, confirmation) => {
          if (err) return callback(err)
          object._id = confirmation.insertedId
          callback(null, object)
        })
      }
    )
  },
}
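A round-trip sketch for the archive functions above; the ids are placeholders and must reference an existing, finalised pack:

const MongoAWS = require('./app/js/MongoAWS')

MongoAWS.archivePack('project-id', 'doc-id', 'pack-id', err => {
  if (err) throw err
  // later, pull the pack back into mongo (it is re-inserted with a TTL)
  MongoAWS.unArchivePack('project-id', 'doc-id', 'pack-id', (err, pack) => {
    if (err) throw err
    console.log('restored pack with', pack.pack.length, 'updates')
  })
})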
202
services/track-changes/app/js/MongoManager.js
Normal file
@@ -0,0 +1,202 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let MongoManager
const { db, ObjectId } = require('./mongodb')
const PackManager = require('./PackManager')
const async = require('async')
const _ = require('underscore')
const metrics = require('@overleaf/metrics')
const logger = require('logger-sharelatex')

module.exports = MongoManager = {
  getLastCompressedUpdate(doc_id, callback) {
    if (callback == null) {
      callback = function (error, update) {}
    }
    return db.docHistory
      .find(
        { doc_id: ObjectId(doc_id.toString()) },
        // only return the last entry in a pack
        { projection: { pack: { $slice: -1 } } }
      )
      .sort({ v: -1 })
      .limit(1)
      .toArray(function (error, compressedUpdates) {
        if (error != null) {
          return callback(error)
        }
        return callback(null, compressedUpdates[0] || null)
      })
  },

  peekLastCompressedUpdate(doc_id, callback) {
    // under normal use we pass back the last update as
    // callback(null,update,version).
    //
    // when we have an existing last update but want to force a new one
    // to start, we pass it back as callback(null,null,version), just
    // giving the version so we can check consistency.
    if (callback == null) {
      callback = function (error, update, version) {}
    }
    return MongoManager.getLastCompressedUpdate(
      doc_id,
      function (error, update) {
        if (error != null) {
          return callback(error)
        }
        if (update != null) {
          if (update.broken) {
            // marked as broken so we will force a new op
            return callback(null, null)
          } else if (update.pack != null) {
            if (update.finalised) {
              // no more ops can be appended
              return callback(
                null,
                null,
                update.pack[0] != null ? update.pack[0].v : undefined
              )
            } else {
              return callback(
                null,
                update,
                update.pack[0] != null ? update.pack[0].v : undefined
              )
            }
          } else {
            return callback(null, update, update.v)
          }
        } else {
          return PackManager.getLastPackFromIndex(
            doc_id,
            function (error, pack) {
              if (error != null) {
                return callback(error)
              }
              if (
                (pack != null ? pack.inS3 : undefined) != null &&
                (pack != null ? pack.v_end : undefined) != null
              ) {
                return callback(null, null, pack.v_end)
              }
              return callback(null, null)
            }
          )
        }
      }
    )
  },

  backportProjectId(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return db.docHistory.updateMany(
      {
        doc_id: ObjectId(doc_id.toString()),
        project_id: { $exists: false },
      },
      {
        $set: { project_id: ObjectId(project_id.toString()) },
      },
      callback
    )
  },

  getProjectMetaData(project_id, callback) {
    if (callback == null) {
      callback = function (error, metadata) {}
    }
    return db.projectHistoryMetaData.findOne(
      {
        project_id: ObjectId(project_id.toString()),
      },
      callback
    )
  },

  setProjectMetaData(project_id, metadata, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return db.projectHistoryMetaData.updateOne(
      {
        project_id: ObjectId(project_id),
      },
      {
        $set: metadata,
      },
      {
        upsert: true,
      },
      callback
    )
  },

  upgradeHistory(project_id, callback) {
    // preserve the project's existing history
    if (callback == null) {
      callback = function (error) {}
    }
    return db.docHistory.updateMany(
      {
        project_id: ObjectId(project_id),
        temporary: true,
        expiresAt: { $exists: true },
      },
      {
        $set: { temporary: false },
        $unset: { expiresAt: '' },
      },
      callback
    )
  },

  ensureIndices() {
    // For finding all updates that go into a diff for a doc
    db.docHistory.ensureIndex({ doc_id: 1, v: 1 }, { background: true })
    // For finding all updates that affect a project
    db.docHistory.ensureIndex(
      { project_id: 1, 'meta.end_ts': 1 },
      { background: true }
    )
    // For finding updates that don't yet have a project_id and need it inserting
    db.docHistory.ensureIndex(
      { doc_id: 1, project_id: 1 },
      { background: true }
    )
    // For finding project meta-data
    db.projectHistoryMetaData.ensureIndex(
      { project_id: 1 },
      { background: true }
    )
    // TTL index for auto deleting week old temporary ops
    db.docHistory.ensureIndex(
      { expiresAt: 1 },
      { expireAfterSeconds: 0, background: true }
    )
    // For finding packs to be checked for archiving
    db.docHistory.ensureIndex({ last_checked: 1 }, { background: true })
    // For finding archived packs
    return db.docHistoryIndex.ensureIndex(
      { project_id: 1 },
      { background: true }
    )
  },
}
;['getLastCompressedUpdate', 'getProjectMetaData', 'setProjectMetaData'].map(
  method =>
    metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger)
)
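A sketch of how a caller might use peekLastCompressedUpdate above; the doc id is a placeholder:

const MongoManager = require('./app/js/MongoManager')

MongoManager.peekLastCompressedUpdate('doc-id', (error, update, version) => {
  if (error) throw error
  // update is null when a new pack must be started; version, when present,
  // lets the caller check consistency against incoming raw updates.
  console.log({ haveOpenPack: update != null, version })
})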
1171
services/track-changes/app/js/PackManager.js
Normal file
File diff suppressed because it is too large
212
services/track-changes/app/js/PackWorker.js
Normal file
@@ -0,0 +1,212 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS103: Rewrite code to no longer use __guard__
|
||||
* DS205: Consider reworking code to avoid use of IIFEs
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let LIMIT, pending
|
||||
let project_id, doc_id
|
||||
const { callbackify } = require('util')
|
||||
const Settings = require('@overleaf/settings')
|
||||
const async = require('async')
|
||||
const _ = require('underscore')
|
||||
const { db, ObjectId, waitForDb, closeDb } = require('./mongodb')
|
||||
const fs = require('fs')
|
||||
const Metrics = require('@overleaf/metrics')
|
||||
Metrics.initialize('track-changes')
|
||||
const logger = require('logger-sharelatex')
|
||||
logger.initialize('track-changes-packworker')
|
||||
if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
|
||||
logger.initializeErrorReporting(Settings.sentry.dsn)
|
||||
}
|
||||
|
||||
const DAYS = 24 * 3600 * 1000
|
||||
|
||||
const LockManager = require('./LockManager')
|
||||
const PackManager = require('./PackManager')
|
||||
|
||||
// this worker script is forked by the main process to look for
|
||||
// document histories which can be archived
|
||||
|
||||
const source = process.argv[2]
|
||||
const DOCUMENT_PACK_DELAY = Number(process.argv[3]) || 1000
|
||||
const TIMEOUT = Number(process.argv[4]) || 30 * 60 * 1000
|
||||
let COUNT = 0 // number processed
|
||||
let TOTAL = 0 // total number to process
|
||||
|
||||
if (!source.match(/^[0-9]+$/)) {
|
||||
const file = fs.readFileSync(source)
|
||||
const result = (() => {
|
||||
const result1 = []
|
||||
for (const line of Array.from(file.toString().split('\n'))) {
|
||||
;[project_id, doc_id] = Array.from(line.split(' '))
|
||||
result1.push({ doc_id, project_id })
|
||||
}
|
||||
return result1
|
||||
})()
|
||||
pending = _.filter(result, row =>
|
||||
__guard__(row != null ? row.doc_id : undefined, x =>
|
||||
x.match(/^[a-f0-9]{24}$/)
|
||||
)
|
||||
)
|
||||
} else {
|
||||
LIMIT = Number(process.argv[2]) || 1000
|
||||
}
|
||||
|
||||
let shutDownRequested = false
|
||||
const shutDownTimer = setTimeout(function () {
|
||||
logger.log('pack timed out, requesting shutdown')
|
||||
// start the shutdown on the next pack
|
||||
shutDownRequested = true
|
||||
// do a hard shutdown after a further 5 minutes
|
||||
const hardTimeout = setTimeout(function () {
|
||||
logger.error('HARD TIMEOUT in pack archive worker')
|
||||
return process.exit()
|
||||
}, 5 * 60 * 1000)
|
||||
return hardTimeout.unref()
|
||||
}, TIMEOUT)
|
||||
|
||||
logger.log(
|
||||
`checking for updates, limit=${LIMIT}, delay=${DOCUMENT_PACK_DELAY}, timeout=${TIMEOUT}`
|
||||
)
|
||||
|
||||
const finish = function () {
|
||||
if (shutDownTimer != null) {
|
||||
logger.log('cancelling timeout')
|
||||
clearTimeout(shutDownTimer)
|
||||
}
|
||||
logger.log('closing db')
|
||||
callbackify(closeDb)(function () {
|
||||
logger.log('closing LockManager Redis Connection')
|
||||
return LockManager.close(function () {
|
||||
logger.log(
|
||||
{ processedCount: COUNT, allCount: TOTAL },
|
||||
'ready to exit from pack archive worker'
|
||||
)
|
||||
const hardTimeout = setTimeout(function () {
|
||||
logger.error('hard exit from pack archive worker')
|
||||
return process.exit(1)
|
||||
}, 5 * 1000)
|
||||
return hardTimeout.unref()
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
process.on('exit', code => logger.log({ code }, 'pack archive worker exited'))
|
||||
|
||||
const processUpdates = pending =>
|
||||
async.eachSeries(
|
||||
pending,
|
||||
function (result, callback) {
|
||||
let _id
|
||||
;({ _id, project_id, doc_id } = result)
|
||||
COUNT++
|
||||
logger.log({ project_id, doc_id }, `processing ${COUNT}/${TOTAL}`)
|
||||
if (project_id == null || doc_id == null) {
|
||||
logger.log(
|
||||
{ project_id, doc_id },
|
||||
'skipping pack, missing project/doc id'
|
||||
)
|
||||
return callback()
|
||||
}
|
||||
const handler = function (err, result) {
|
||||
if (err != null && err.code === 'InternalError' && err.retryable) {
|
||||
logger.warn(
|
||||
{ err, result },
|
||||
'ignoring S3 error in pack archive worker'
|
||||
)
|
||||
// Ignore any s3 errors due to random problems
|
||||
err = null
|
||||
}
|
||||
if (err != null) {
|
||||
logger.error({ err, result }, 'error in pack archive worker')
|
||||
return callback(err)
|
||||
}
|
||||
if (shutDownRequested) {
|
||||
logger.warn('shutting down pack archive worker')
|
||||
return callback(new Error('shutdown'))
|
||||
}
|
||||
return setTimeout(() => callback(err, result), DOCUMENT_PACK_DELAY)
|
||||
}
|
||||
if (_id == null) {
|
||||
return PackManager.pushOldPacks(project_id, doc_id, handler)
|
||||
} else {
|
||||
return PackManager.processOldPack(project_id, doc_id, _id, handler)
|
||||
}
|
||||
},
|
||||
function (err, results) {
|
||||
if (err != null && err.message !== 'shutdown') {
|
||||
logger.error({ err }, 'error in pack archive worker processUpdates')
|
||||
}
|
||||
return finish()
|
||||
}
|
||||
)
|
||||
// find the packs which can be archived
|
||||
|
||||
const ObjectIdFromDate = function (date) {
|
||||
const id = Math.floor(date.getTime() / 1000).toString(16) + '0000000000000000'
|
||||
return ObjectId(id)
|
||||
}
|
||||
|
||||
// new approach, two passes
|
||||
// find packs to be marked as finalised:true, those which have a newer pack present
|
||||
// then only consider finalised:true packs for archiving
|
||||
|
||||
waitForDb()
|
||||
.then(() => {
|
||||
if (pending != null) {
|
||||
logger.log(`got ${pending.length} entries from ${source}`)
|
||||
processUpdates(pending)
|
||||
} else {
|
||||
processFromOneWeekAgo()
|
||||
}
|
||||
})
|
||||
.catch(err => {
|
||||
logger.fatal({ err }, 'cannot connect to mongo, exiting')
|
||||
process.exit(1)
|
||||
})
|
||||
|
||||
function processFromOneWeekAgo() {
|
||||
const oneWeekAgo = new Date(Date.now() - 7 * DAYS)
|
||||
db.docHistory
|
||||
.find(
|
||||
{
|
||||
expiresAt: { $exists: false },
|
||||
project_id: { $exists: true },
|
||||
v_end: { $exists: true },
|
||||
_id: { $lt: ObjectIdFromDate(oneWeekAgo) },
|
||||
last_checked: { $lt: oneWeekAgo },
|
||||
},
|
||||
{ projection: { _id: 1, doc_id: 1, project_id: 1 } }
|
||||
)
|
||||
.sort({
|
||||
last_checked: 1,
|
||||
})
|
||||
.limit(LIMIT)
|
||||
.toArray(function (err, results) {
|
||||
if (err != null) {
|
||||
logger.log({ err }, 'error checking for updates')
|
||||
finish()
|
||||
return
|
||||
}
|
||||
pending = _.uniq(results, false, result => result.doc_id.toString())
|
||||
TOTAL = pending.length
|
||||
logger.log(`found ${TOTAL} documents to archive`)
|
||||
return processUpdates(pending)
|
||||
})
|
||||
}
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return typeof value !== 'undefined' && value !== null
|
||||
? transform(value)
|
||||
: undefined
|
||||
}
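// Clarifying note (not part of the original file): __guard__ is decaffeinate's
// expansion of CoffeeScript's soak operator. For example,
//   __guard__(update.meta, m => m.end_ts)
// behaves like the modern optional chain update.meta?.end_ts.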
|
113
services/track-changes/app/js/ProjectIterator.js
Normal file
|
@@ -0,0 +1,113 @@
|
|||
/* eslint-disable
|
||||
no-unmodified-loop-condition,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let ProjectIterator
|
||||
const Heap = require('heap')
|
||||
|
||||
module.exports =
|
||||
ProjectIterator =
|
||||
class ProjectIterator {
|
||||
constructor(packs, before, getPackByIdFn) {
|
||||
this.before = before
|
||||
this.getPackByIdFn = getPackByIdFn
|
||||
const byEndTs = (a, b) =>
|
||||
b.meta.end_ts - a.meta.end_ts || a.fromIndex - b.fromIndex
|
||||
this.packs = packs.slice().sort(byEndTs)
|
||||
this.queue = new Heap(byEndTs)
|
||||
}
|
||||
|
||||
next(callback) {
|
||||
// what's up next
|
||||
// console.log ">>> top item", iterator.packs[0]
|
||||
const iterator = this
|
||||
const { before } = this
|
||||
const { queue } = iterator
|
||||
const opsToReturn = []
|
||||
let nextPack = iterator.packs[0]
|
||||
let lowWaterMark =
|
||||
(nextPack != null ? nextPack.meta.end_ts : undefined) || 0
|
||||
let nextItem = queue.peek()
|
||||
|
||||
// console.log "queue empty?", queue.empty()
|
||||
// console.log "nextItem", nextItem
|
||||
// console.log "nextItem.meta.end_ts", nextItem?.meta.end_ts
|
||||
// console.log "lowWaterMark", lowWaterMark
|
||||
|
||||
while (
|
||||
before != null &&
|
||||
(nextPack != null ? nextPack.meta.start_ts : undefined) > before
|
||||
) {
|
||||
// discard pack that is outside range
|
||||
iterator.packs.shift()
|
||||
nextPack = iterator.packs[0]
|
||||
lowWaterMark =
|
||||
(nextPack != null ? nextPack.meta.end_ts : undefined) || 0
|
||||
}
|
||||
|
||||
if (
|
||||
(queue.empty() ||
|
||||
(nextItem != null ? nextItem.meta.end_ts : undefined) <=
|
||||
lowWaterMark) &&
|
||||
nextPack != null
|
||||
) {
|
||||
// retrieve the next pack and populate the queue
|
||||
return this.getPackByIdFn(
|
||||
nextPack.project_id,
|
||||
nextPack.doc_id,
|
||||
nextPack._id,
|
||||
function (err, pack) {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
iterator.packs.shift() // have now retrieved this pack, remove it
|
||||
// console.log "got pack", pack
|
||||
for (const op of Array.from(pack.pack)) {
|
||||
// console.log "adding op", op
|
||||
if (before == null || op.meta.end_ts < before) {
|
||||
op.doc_id = nextPack.doc_id
|
||||
op.project_id = nextPack.project_id
|
||||
queue.push(op)
|
||||
}
|
||||
}
|
||||
// now try again
|
||||
return iterator.next(callback)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
// console.log "nextItem", nextItem, "lowWaterMark", lowWaterMark
|
||||
while (
|
||||
nextItem != null &&
|
||||
(nextItem != null ? nextItem.meta.end_ts : undefined) > lowWaterMark
|
||||
) {
|
||||
opsToReturn.push(nextItem)
|
||||
queue.pop()
|
||||
nextItem = queue.peek()
|
||||
}
|
||||
|
||||
// console.log "queue empty?", queue.empty()
|
||||
// console.log "nextPack", nextPack?
|
||||
|
||||
if (queue.empty() && nextPack == null) {
|
||||
// got everything
|
||||
iterator._done = true
|
||||
}
|
||||
|
||||
return callback(null, opsToReturn)
|
||||
}
|
||||
|
||||
done() {
|
||||
return this._done
|
||||
}
|
||||
}
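// Hypothetical usage sketch (not part of the original file): drain the iterator
// until done(), collecting ops newest-first across all of a project's packs.
// `packIndexEntries` and `getPackById` are assumed stand-ins for the pack index
// rows and the PackManager fetcher that callers normally supply.
//   const it = new ProjectIterator(packIndexEntries, Date.now(), getPackById)
//   const drain = (acc, cb) =>
//     it.next((err, ops) => {
//       if (err) return cb(err)
//       acc.push(...ops)
//       it.done() ? cb(null, acc) : drain(acc, cb)
//     })
//   drain([], (err, allOps) => { /* allOps sorted by end_ts descending */ })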
|
170
services/track-changes/app/js/RedisManager.js
Normal file
|
@@ -0,0 +1,170 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS205: Consider reworking code to avoid use of IIFEs
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let RedisManager
|
||||
const Settings = require('@overleaf/settings')
|
||||
const redis = require('@overleaf/redis-wrapper')
|
||||
const rclient = redis.createClient(Settings.redis.history)
|
||||
const Keys = Settings.redis.history.key_schema
|
||||
const async = require('async')
|
||||
|
||||
module.exports = RedisManager = {
|
||||
getOldestDocUpdates(doc_id, batchSize, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, jsonUpdates) {}
|
||||
}
|
||||
const key = Keys.uncompressedHistoryOps({ doc_id })
|
||||
return rclient.lrange(key, 0, batchSize - 1, callback)
|
||||
},
|
||||
|
||||
expandDocUpdates(jsonUpdates, callback) {
|
||||
let rawUpdates
|
||||
if (callback == null) {
|
||||
callback = function (error, rawUpdates) {}
|
||||
}
|
||||
try {
|
||||
rawUpdates = Array.from(jsonUpdates || []).map(update =>
|
||||
JSON.parse(update)
|
||||
)
|
||||
} catch (e) {
|
||||
return callback(e)
|
||||
}
|
||||
return callback(null, rawUpdates)
|
||||
},
|
||||
|
||||
deleteAppliedDocUpdates(project_id, doc_id, docUpdates, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
const multi = rclient.multi()
|
||||
// Delete all the updates which have been applied (exact match)
|
||||
for (const update of Array.from(docUpdates || [])) {
|
||||
multi.lrem(Keys.uncompressedHistoryOps({ doc_id }), 1, update)
|
||||
}
|
||||
return multi.exec(function (error, results) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
// It's ok to delete the doc_id from the set here. Even though the list
|
||||
// of updates may not be empty, we will continue to process it until it is.
|
||||
return rclient.srem(
|
||||
Keys.docsWithHistoryOps({ project_id }),
|
||||
doc_id,
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return callback(null)
|
||||
}
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
getDocIdsWithHistoryOps(project_id, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, doc_ids) {}
|
||||
}
|
||||
return rclient.smembers(Keys.docsWithHistoryOps({ project_id }), callback)
|
||||
},
|
||||
|
||||
// iterate over keys asynchronously using redis scan (non-blocking)
|
||||
// handle all the cluster nodes or single redis server
|
||||
_getKeys(pattern, callback) {
|
||||
const nodes = (typeof rclient.nodes === 'function'
|
||||
? rclient.nodes('master')
|
||||
: undefined) || [rclient]
|
||||
const doKeyLookupForNode = (node, cb) =>
|
||||
RedisManager._getKeysFromNode(node, pattern, cb)
|
||||
return async.concatSeries(nodes, doKeyLookupForNode, callback)
|
||||
},
|
||||
|
||||
_getKeysFromNode(node, pattern, callback) {
|
||||
let cursor = 0 // redis iterator
|
||||
const keySet = {} // use hash to avoid duplicate results
|
||||
// scan over all keys looking for pattern
|
||||
var doIteration = cb =>
|
||||
node.scan(
|
||||
cursor,
|
||||
'MATCH',
|
||||
pattern,
|
||||
'COUNT',
|
||||
1000,
|
||||
function (error, reply) {
|
||||
let keys
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
;[cursor, keys] = Array.from(reply)
|
||||
for (const key of Array.from(keys)) {
|
||||
keySet[key] = true
|
||||
}
|
||||
if (cursor === '0') {
|
||||
// note: redis returns the cursor as a string, not a number
|
||||
return callback(null, Object.keys(keySet))
|
||||
} else {
|
||||
return doIteration()
|
||||
}
|
||||
}
|
||||
)
|
||||
return doIteration()
|
||||
},
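// Standalone sketch of the same cursor loop (illustrative, not part of the
// original file), assuming the scan(cursor, 'MATCH', pattern, 'COUNT', n, cb)
// signature used above. SCAN can return a key more than once, hence the Set:
//   function scanAll(client, pattern, callback) {
//     const found = new Set()
//     const step = cursor =>
//       client.scan(cursor, 'MATCH', pattern, 'COUNT', 1000, (err, reply) => {
//         if (err) return callback(err)
//         const [nextCursor, keys] = reply
//         for (const key of keys) found.add(key)
//         // redis signals completion with cursor '0' (a string)
//         if (nextCursor === '0') return callback(null, Array.from(found))
//         step(nextCursor)
//       })
//     step('0')
//   }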
|
||||
|
||||
// extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b
|
||||
// or DocsWithHistoryOps:{57fd0b1f53a8396d22b2c24b} (for redis cluster)
|
||||
_extractIds(keyList) {
|
||||
const ids = (() => {
|
||||
const result = []
|
||||
for (const key of Array.from(keyList)) {
|
||||
const m = key.match(/:\{?([0-9a-f]{24})\}?/) // extract object id
|
||||
result.push(m[1])
|
||||
}
|
||||
return result
|
||||
})()
|
||||
return ids
|
||||
},
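// Illustrative check (not part of the original file); the optional braces are
// redis-cluster hash tags, which pin all of a project's keys to one cluster slot:
//   'DocsWithHistoryOps:{57fd0b1f53a8396d22b2c24b}'.match(/:\{?([0-9a-f]{24})\}?/)[1]
//   // => '57fd0b1f53a8396d22b2c24b'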
|
||||
|
||||
getProjectIdsWithHistoryOps(callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, project_ids) {}
|
||||
}
|
||||
return RedisManager._getKeys(
|
||||
Keys.docsWithHistoryOps({ project_id: '*' }),
|
||||
function (error, project_keys) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
const project_ids = RedisManager._extractIds(project_keys)
|
||||
return callback(error, project_ids)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
getAllDocIdsWithHistoryOps(callback) {
|
||||
// return all the docids, to find dangling history entries after
|
||||
// everything is flushed.
|
||||
if (callback == null) {
|
||||
callback = function (error, doc_ids) {}
|
||||
}
|
||||
return RedisManager._getKeys(
|
||||
Keys.uncompressedHistoryOps({ doc_id: '*' }),
|
||||
function (error, doc_keys) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
const doc_ids = RedisManager._extractIds(doc_keys)
|
||||
return callback(error, doc_ids)
|
||||
}
|
||||
)
|
||||
},
|
||||
}
|
48
services/track-changes/app/js/RestoreManager.js
Normal file
|
@@ -0,0 +1,48 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let RestoreManager
|
||||
const DocumentUpdaterManager = require('./DocumentUpdaterManager')
|
||||
const DiffManager = require('./DiffManager')
|
||||
const logger = require('logger-sharelatex')
|
||||
|
||||
module.exports = RestoreManager = {
|
||||
restoreToBeforeVersion(project_id, doc_id, version, user_id, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
logger.log({ project_id, doc_id, version, user_id }, 'restoring document')
|
||||
return DiffManager.getDocumentBeforeVersion(
|
||||
project_id,
|
||||
doc_id,
|
||||
version,
|
||||
function (error, content) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return DocumentUpdaterManager.setDocument(
|
||||
project_id,
|
||||
doc_id,
|
||||
content,
|
||||
user_id,
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return callback()
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
},
|
||||
}
|
340
services/track-changes/app/js/UpdateCompressor.js
Normal file
|
@@ -0,0 +1,340 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
new-cap,
|
||||
no-throw-literal,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS103: Rewrite code to no longer use __guard__
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let oneMinute, twoMegabytes, UpdateCompressor
|
||||
const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos)
|
||||
const strRemove = (s1, pos, length) => s1.slice(0, pos) + s1.slice(pos + length)
|
||||
|
||||
const { diff_match_patch } = require('../lib/diff_match_patch')
|
||||
const dmp = new diff_match_patch()
|
||||
|
||||
module.exports = UpdateCompressor = {
|
||||
NOOP: 'noop',
|
||||
|
||||
// Updates come from the doc updater in format
|
||||
// {
|
||||
// op: [ { ... op1 ... }, { ... op2 ... } ]
|
||||
// meta: { ts: ..., user_id: ... }
|
||||
// }
|
||||
// but it's easier to work with one op per update, so convert these updates to
|
||||
// our compressed format
|
||||
// [{
|
||||
// op: op1
|
||||
// meta: { start_ts: ... , end_ts: ..., user_id: ... }
|
||||
// }, {
|
||||
// op: op2
|
||||
// meta: { start_ts: ... , end_ts: ..., user_id: ... }
|
||||
// }]
|
||||
convertToSingleOpUpdates(updates) {
|
||||
const splitUpdates = []
|
||||
for (const update of Array.from(updates)) {
|
||||
// Reject any ops that are not inserts or deletes, e.g. comments
|
||||
const ops = update.op.filter(o => o.i != null || o.d != null)
|
||||
if (ops.length === 0) {
|
||||
splitUpdates.push({
|
||||
op: UpdateCompressor.NOOP,
|
||||
meta: {
|
||||
start_ts: update.meta.start_ts || update.meta.ts,
|
||||
end_ts: update.meta.end_ts || update.meta.ts,
|
||||
user_id: update.meta.user_id,
|
||||
},
|
||||
v: update.v,
|
||||
})
|
||||
} else {
|
||||
for (const op of Array.from(ops)) {
|
||||
splitUpdates.push({
|
||||
op,
|
||||
meta: {
|
||||
start_ts: update.meta.start_ts || update.meta.ts,
|
||||
end_ts: update.meta.end_ts || update.meta.ts,
|
||||
user_id: update.meta.user_id,
|
||||
},
|
||||
v: update.v,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
return splitUpdates
|
||||
},
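// Worked example (illustrative, not part of the original file): one incoming
// update carrying two ops becomes two single-op updates that share the same
// version and timestamps.
//   in:  { op: [{ i: 'a', p: 0 }, { d: 'b', p: 5 }], meta: { ts: 1000, user_id: 'u1' }, v: 42 }
//   out: [{ op: { i: 'a', p: 0 }, meta: { start_ts: 1000, end_ts: 1000, user_id: 'u1' }, v: 42 },
//         { op: { d: 'b', p: 5 }, meta: { start_ts: 1000, end_ts: 1000, user_id: 'u1' }, v: 42 }]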
|
||||
|
||||
concatUpdatesWithSameVersion(updates) {
|
||||
const concattedUpdates = []
|
||||
for (const update of Array.from(updates)) {
|
||||
const lastUpdate = concattedUpdates[concattedUpdates.length - 1]
|
||||
if (lastUpdate != null && lastUpdate.v === update.v) {
|
||||
if (update.op !== UpdateCompressor.NOOP) {
|
||||
lastUpdate.op.push(update.op)
|
||||
}
|
||||
} else {
|
||||
const nextUpdate = {
|
||||
op: [],
|
||||
meta: update.meta,
|
||||
v: update.v,
|
||||
}
|
||||
if (update.op !== UpdateCompressor.NOOP) {
|
||||
nextUpdate.op.push(update.op)
|
||||
}
|
||||
concattedUpdates.push(nextUpdate)
|
||||
}
|
||||
}
|
||||
return concattedUpdates
|
||||
},
|
||||
|
||||
compressRawUpdates(lastPreviousUpdate, rawUpdates) {
|
||||
if (
|
||||
__guard__(
|
||||
lastPreviousUpdate != null ? lastPreviousUpdate.op : undefined,
|
||||
x => x.length
|
||||
) > 1
|
||||
) {
|
||||
// if the last previous update was an array op, don't compress onto it.
|
||||
// This avoids cases where the array length changes but the version number doesn't
|
||||
return [lastPreviousUpdate].concat(
|
||||
UpdateCompressor.compressRawUpdates(null, rawUpdates)
|
||||
)
|
||||
}
|
||||
if (lastPreviousUpdate != null) {
|
||||
rawUpdates = [lastPreviousUpdate].concat(rawUpdates)
|
||||
}
|
||||
let updates = UpdateCompressor.convertToSingleOpUpdates(rawUpdates)
|
||||
updates = UpdateCompressor.compressUpdates(updates)
|
||||
return UpdateCompressor.concatUpdatesWithSameVersion(updates)
|
||||
},
|
||||
|
||||
compressUpdates(updates) {
|
||||
if (updates.length === 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
let compressedUpdates = [updates.shift()]
|
||||
for (const update of Array.from(updates)) {
|
||||
const lastCompressedUpdate = compressedUpdates.pop()
|
||||
if (lastCompressedUpdate != null) {
|
||||
compressedUpdates = compressedUpdates.concat(
|
||||
UpdateCompressor._concatTwoUpdates(lastCompressedUpdate, update)
|
||||
)
|
||||
} else {
|
||||
compressedUpdates.push(update)
|
||||
}
|
||||
}
|
||||
|
||||
return compressedUpdates
|
||||
},
|
||||
|
||||
MAX_TIME_BETWEEN_UPDATES: (oneMinute = 60 * 1000),
|
||||
MAX_UPDATE_SIZE: (twoMegabytes = 2 * 1024 * 1024),
|
||||
|
||||
_concatTwoUpdates(firstUpdate, secondUpdate) {
|
||||
let offset
|
||||
firstUpdate = {
|
||||
op: firstUpdate.op,
|
||||
meta: {
|
||||
user_id: firstUpdate.meta.user_id || null,
|
||||
start_ts: firstUpdate.meta.start_ts || firstUpdate.meta.ts,
|
||||
end_ts: firstUpdate.meta.end_ts || firstUpdate.meta.ts,
|
||||
},
|
||||
v: firstUpdate.v,
|
||||
}
|
||||
secondUpdate = {
|
||||
op: secondUpdate.op,
|
||||
meta: {
|
||||
user_id: secondUpdate.meta.user_id || null,
|
||||
start_ts: secondUpdate.meta.start_ts || secondUpdate.meta.ts,
|
||||
end_ts: secondUpdate.meta.end_ts || secondUpdate.meta.ts,
|
||||
},
|
||||
v: secondUpdate.v,
|
||||
}
|
||||
|
||||
if (firstUpdate.meta.user_id !== secondUpdate.meta.user_id) {
|
||||
return [firstUpdate, secondUpdate]
|
||||
}
|
||||
|
||||
if (
|
||||
secondUpdate.meta.start_ts - firstUpdate.meta.end_ts >
|
||||
UpdateCompressor.MAX_TIME_BETWEEN_UPDATES
|
||||
) {
|
||||
return [firstUpdate, secondUpdate]
|
||||
}
|
||||
|
||||
const firstOp = firstUpdate.op
|
||||
const secondOp = secondUpdate.op
|
||||
|
||||
const firstSize =
|
||||
(firstOp.i != null ? firstOp.i.length : undefined) ||
|
||||
(firstOp.d != null ? firstOp.d.length : undefined)
|
||||
const secondSize =
|
||||
(secondOp.i != null ? secondOp.i.length : undefined) ||
|
||||
(secondOp.d != null ? secondOp.d.length : undefined)
|
||||
|
||||
// Two inserts
|
||||
if (
|
||||
firstOp.i != null &&
|
||||
secondOp.i != null &&
|
||||
firstOp.p <= secondOp.p &&
|
||||
secondOp.p <= firstOp.p + firstOp.i.length &&
|
||||
firstSize + secondSize < UpdateCompressor.MAX_UPDATE_SIZE
|
||||
) {
|
||||
return [
|
||||
{
|
||||
meta: {
|
||||
start_ts: firstUpdate.meta.start_ts,
|
||||
end_ts: secondUpdate.meta.end_ts,
|
||||
user_id: firstUpdate.meta.user_id,
|
||||
},
|
||||
op: {
|
||||
p: firstOp.p,
|
||||
i: strInject(firstOp.i, secondOp.p - firstOp.p, secondOp.i),
|
||||
},
|
||||
v: secondUpdate.v,
|
||||
},
|
||||
]
|
||||
// Two deletes
|
||||
} else if (
|
||||
firstOp.d != null &&
|
||||
secondOp.d != null &&
|
||||
secondOp.p <= firstOp.p &&
|
||||
firstOp.p <= secondOp.p + secondOp.d.length &&
|
||||
firstSize + secondSize < UpdateCompressor.MAX_UPDATE_SIZE
|
||||
) {
|
||||
return [
|
||||
{
|
||||
meta: {
|
||||
start_ts: firstUpdate.meta.start_ts,
|
||||
end_ts: secondUpdate.meta.end_ts,
|
||||
user_id: firstUpdate.meta.user_id,
|
||||
},
|
||||
op: {
|
||||
p: secondOp.p,
|
||||
d: strInject(secondOp.d, firstOp.p - secondOp.p, firstOp.d),
|
||||
},
|
||||
v: secondUpdate.v,
|
||||
},
|
||||
]
|
||||
// An insert and then a delete
|
||||
} else if (
|
||||
firstOp.i != null &&
|
||||
secondOp.d != null &&
|
||||
firstOp.p <= secondOp.p &&
|
||||
secondOp.p <= firstOp.p + firstOp.i.length
|
||||
) {
|
||||
offset = secondOp.p - firstOp.p
|
||||
const insertedText = firstOp.i.slice(offset, offset + secondOp.d.length)
|
||||
// Only trim the insert when the delete is fully contained within it
|
||||
if (insertedText === secondOp.d) {
|
||||
const insert = strRemove(firstOp.i, offset, secondOp.d.length)
|
||||
return [
|
||||
{
|
||||
meta: {
|
||||
start_ts: firstUpdate.meta.start_ts,
|
||||
end_ts: secondUpdate.meta.end_ts,
|
||||
user_id: firstUpdate.meta.user_id,
|
||||
},
|
||||
op: {
|
||||
p: firstOp.p,
|
||||
i: insert,
|
||||
},
|
||||
v: secondUpdate.v,
|
||||
},
|
||||
]
|
||||
} else {
|
||||
// This will only happen if the delete extends outside the insert
|
||||
return [firstUpdate, secondUpdate]
|
||||
}
|
||||
|
||||
// A delete then an insert at the same place, likely a copy-paste of a chunk of content
|
||||
} else if (
|
||||
firstOp.d != null &&
|
||||
secondOp.i != null &&
|
||||
firstOp.p === secondOp.p
|
||||
) {
|
||||
offset = firstOp.p
|
||||
const diff_ops = this.diffAsShareJsOps(firstOp.d, secondOp.i)
|
||||
if (diff_ops.length === 0) {
|
||||
return [
|
||||
{
|
||||
// Noop
|
||||
meta: {
|
||||
start_ts: firstUpdate.meta.start_ts,
|
||||
end_ts: secondUpdate.meta.end_ts,
|
||||
user_id: firstUpdate.meta.user_id,
|
||||
},
|
||||
op: {
|
||||
p: firstOp.p,
|
||||
i: '',
|
||||
},
|
||||
v: secondUpdate.v,
|
||||
},
|
||||
]
|
||||
} else {
|
||||
return diff_ops.map(function (op) {
|
||||
op.p += offset
|
||||
return {
|
||||
meta: {
|
||||
start_ts: firstUpdate.meta.start_ts,
|
||||
end_ts: secondUpdate.meta.end_ts,
|
||||
user_id: firstUpdate.meta.user_id,
|
||||
},
|
||||
op,
|
||||
v: secondUpdate.v,
|
||||
}
|
||||
})
|
||||
}
|
||||
} else {
|
||||
return [firstUpdate, secondUpdate]
|
||||
}
|
||||
},
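// Worked example (illustrative, not part of the original file): two adjacent
// inserts by the same user, five seconds apart, merge into one op via strInject.
//   first:  { op: { p: 3, i: 'foo' }, meta: { ts: 0 },    v: 1 }   (user u1)
//   second: { op: { p: 6, i: 'bar' }, meta: { ts: 5000 }, v: 2 }   (user u1)
//   result: { op: { p: 3, i: 'foobar' },
//             meta: { start_ts: 0, end_ts: 5000, user_id: 'u1' }, v: 2 }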
|
||||
|
||||
ADDED: 1,
|
||||
REMOVED: -1,
|
||||
UNCHANGED: 0,
|
||||
diffAsShareJsOps(before, after, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, ops) {}
|
||||
}
|
||||
const diffs = dmp.diff_main(before, after)
|
||||
dmp.diff_cleanupSemantic(diffs)
|
||||
|
||||
const ops = []
|
||||
let position = 0
|
||||
for (const diff of Array.from(diffs)) {
|
||||
const type = diff[0]
|
||||
const content = diff[1]
|
||||
if (type === this.ADDED) {
|
||||
ops.push({
|
||||
i: content,
|
||||
p: position,
|
||||
})
|
||||
position += content.length
|
||||
} else if (type === this.REMOVED) {
|
||||
ops.push({
|
||||
d: content,
|
||||
p: position,
|
||||
})
|
||||
} else if (type === this.UNCHANGED) {
|
||||
position += content.length
|
||||
} else {
|
||||
throw 'Unknown type'
|
||||
}
|
||||
}
|
||||
return ops
|
||||
},
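// Illustrative example (not part of the original file): diffAsShareJsOps('the cat',
// 'the hat') should yield [{ d: 'c', p: 4 }, { i: 'h', p: 4 }] -- the unchanged
// prefix advances the position, then a delete and an insert land at position 4.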
|
||||
}
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return typeof value !== 'undefined' && value !== null
|
||||
? transform(value)
|
||||
: undefined
|
||||
}
|
80
services/track-changes/app/js/UpdateTrimmer.js
Normal file
|
@@ -0,0 +1,80 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS103: Rewrite code to no longer use __guard__
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let UpdateTrimmer
|
||||
const MongoManager = require('./MongoManager')
|
||||
const WebApiManager = require('./WebApiManager')
|
||||
const logger = require('logger-sharelatex')
|
||||
|
||||
module.exports = UpdateTrimmer = {
|
||||
shouldTrimUpdates(project_id, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, shouldTrim) {}
|
||||
}
|
||||
return MongoManager.getProjectMetaData(
|
||||
project_id,
|
||||
function (error, metadata) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
if (metadata != null ? metadata.preserveHistory : undefined) {
|
||||
return callback(null, false)
|
||||
} else {
|
||||
return WebApiManager.getProjectDetails(
|
||||
project_id,
|
||||
function (error, details) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
logger.log({ project_id, details }, 'got details')
|
||||
if (
|
||||
__guard__(
|
||||
details != null ? details.features : undefined,
|
||||
x => x.versioning
|
||||
)
|
||||
) {
|
||||
return MongoManager.setProjectMetaData(
|
||||
project_id,
|
||||
{ preserveHistory: true },
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return MongoManager.upgradeHistory(
|
||||
project_id,
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return callback(null, false)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
} else {
|
||||
return callback(null, true)
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
},
|
||||
}
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return typeof value !== 'undefined' && value !== null
|
||||
? transform(value)
|
||||
: undefined
|
||||
}
|
888
services/track-changes/app/js/UpdatesManager.js
Normal file
|
@@ -0,0 +1,888 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS103: Rewrite code to no longer use __guard__
|
||||
* DS205: Consider reworking code to avoid use of IIFEs
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let fiveMinutes, UpdatesManager
|
||||
const MongoManager = require('./MongoManager')
|
||||
const PackManager = require('./PackManager')
|
||||
const RedisManager = require('./RedisManager')
|
||||
const UpdateCompressor = require('./UpdateCompressor')
|
||||
const LockManager = require('./LockManager')
|
||||
const WebApiManager = require('./WebApiManager')
|
||||
const UpdateTrimmer = require('./UpdateTrimmer')
|
||||
const logger = require('logger-sharelatex')
|
||||
const async = require('async')
|
||||
const _ = require('underscore')
|
||||
const Settings = require('@overleaf/settings')
|
||||
const keys = Settings.redis.lock.key_schema
|
||||
|
||||
module.exports = UpdatesManager = {
|
||||
compressAndSaveRawUpdates(
|
||||
project_id,
|
||||
doc_id,
|
||||
rawUpdates,
|
||||
temporary,
|
||||
callback
|
||||
) {
|
||||
let i
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
const { length } = rawUpdates
|
||||
if (length === 0) {
|
||||
return callback()
|
||||
}
|
||||
|
||||
// check that ops are in the correct order
|
||||
for (i = 0; i < rawUpdates.length; i++) {
|
||||
const op = rawUpdates[i]
|
||||
if (i > 0) {
|
||||
const thisVersion = op != null ? op.v : undefined
|
||||
const prevVersion = __guard__(rawUpdates[i - 1], x => x.v)
|
||||
if (!(prevVersion < thisVersion)) {
|
||||
logger.error(
|
||||
{
|
||||
project_id,
|
||||
doc_id,
|
||||
rawUpdates,
|
||||
temporary,
|
||||
thisVersion,
|
||||
prevVersion,
|
||||
},
|
||||
'op versions out of order'
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: we no longer need the lastCompressedUpdate, so change functions not to need it
|
||||
// CORRECTION: we do use it to log the time in case of error
|
||||
return MongoManager.peekLastCompressedUpdate(
|
||||
doc_id,
|
||||
function (error, lastCompressedUpdate, lastVersion) {
|
||||
// lastCompressedUpdate is the most recent update in Mongo, and
|
||||
// lastVersion is its sharejs version number.
|
||||
//
|
||||
// The peekLastCompressedUpdate method may pass the update back
|
||||
// as 'null' (for example if the previous compressed update has
|
||||
// been archived). In this case it can still pass back the
|
||||
// lastVersion from the update to allow us to check consistency.
|
||||
let op
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
|
||||
// Ensure that raw updates start where lastVersion left off
|
||||
if (lastVersion != null) {
|
||||
const discardedUpdates = []
|
||||
rawUpdates = rawUpdates.slice(0)
|
||||
while (rawUpdates[0] != null && rawUpdates[0].v <= lastVersion) {
|
||||
discardedUpdates.push(rawUpdates.shift())
|
||||
}
|
||||
if (discardedUpdates.length) {
|
||||
logger.error(
|
||||
{ project_id, doc_id, discardedUpdates, temporary, lastVersion },
|
||||
'discarded updates already present'
|
||||
)
|
||||
}
|
||||
|
||||
if (rawUpdates[0] != null && rawUpdates[0].v !== lastVersion + 1) {
|
||||
const ts = __guard__(
|
||||
lastCompressedUpdate != null
|
||||
? lastCompressedUpdate.meta
|
||||
: undefined,
|
||||
x1 => x1.end_ts
|
||||
)
|
||||
const last_timestamp = ts != null ? new Date(ts) : 'unknown time'
|
||||
error = new Error(
|
||||
`Tried to apply raw op at version ${rawUpdates[0].v} to last compressed update with version ${lastVersion} from ${last_timestamp}`
|
||||
)
|
||||
logger.error(
|
||||
{
|
||||
err: error,
|
||||
doc_id,
|
||||
project_id,
|
||||
prev_end_ts: ts,
|
||||
temporary,
|
||||
lastCompressedUpdate,
|
||||
},
|
||||
'inconsistent doc versions'
|
||||
)
|
||||
if (
|
||||
(Settings.trackchanges != null
|
||||
? Settings.trackchanges.continueOnError
|
||||
: undefined) &&
|
||||
rawUpdates[0].v > lastVersion + 1
|
||||
) {
|
||||
// we have lost some ops - continue to write into the database, we can't recover at this point
|
||||
lastCompressedUpdate = null
|
||||
} else {
|
||||
return callback(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (rawUpdates.length === 0) {
|
||||
return callback()
|
||||
}
|
||||
|
||||
// some old large ops in redis need to be rejected; they predate
|
||||
// the size limit that now prevents them going through the system
|
||||
const REJECT_LARGE_OP_SIZE = 4 * 1024 * 1024
|
||||
for (var rawUpdate of Array.from(rawUpdates)) {
|
||||
const opSizes = (() => {
|
||||
const result = []
|
||||
for (op of Array.from(
|
||||
(rawUpdate != null ? rawUpdate.op : undefined) || []
|
||||
)) {
|
||||
result.push(
|
||||
(op.i != null ? op.i.length : undefined) ||
|
||||
(op.d != null ? op.d.length : undefined)
|
||||
)
|
||||
}
|
||||
return result
|
||||
})()
|
||||
const size = _.max(opSizes)
|
||||
if (size > REJECT_LARGE_OP_SIZE) {
|
||||
error = new Error(
|
||||
`dropped op exceeding maximum allowed size of ${REJECT_LARGE_OP_SIZE}`
|
||||
)
|
||||
logger.error(
|
||||
{ err: error, doc_id, project_id, size, rawUpdate },
|
||||
'dropped op - too big'
|
||||
)
|
||||
rawUpdate.op = []
|
||||
}
|
||||
}
|
||||
|
||||
const compressedUpdates = UpdateCompressor.compressRawUpdates(
|
||||
null,
|
||||
rawUpdates
|
||||
)
|
||||
return PackManager.insertCompressedUpdates(
|
||||
project_id,
|
||||
doc_id,
|
||||
lastCompressedUpdate,
|
||||
compressedUpdates,
|
||||
temporary,
|
||||
function (error, result) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
if (result != null) {
|
||||
logger.log(
|
||||
{
|
||||
project_id,
|
||||
doc_id,
|
||||
orig_v:
|
||||
lastCompressedUpdate != null
|
||||
? lastCompressedUpdate.v
|
||||
: undefined,
|
||||
new_v: result.v,
|
||||
},
|
||||
'inserted updates into pack'
|
||||
)
|
||||
}
|
||||
return callback()
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
},
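// Worked example of the consistency check above (illustrative, not part of the
// original file): with lastVersion = 10 and incoming versions [9, 10, 11, 12],
// versions 9 and 10 are discarded as already present and compression starts at
// v11. If the first remaining version were 12 instead, the "inconsistent doc
// versions" error is raised, unless trackchanges.continueOnError is set, in
// which case the gap is logged and writing continues.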
|
||||
|
||||
// Check whether the updates are temporary (per-project property)
|
||||
_prepareProjectForUpdates(project_id, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, temporary) {}
|
||||
}
|
||||
return UpdateTrimmer.shouldTrimUpdates(
|
||||
project_id,
|
||||
function (error, temporary) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return callback(null, temporary)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
// Check for project id on document history (per-document property)
|
||||
_prepareDocForUpdates(project_id, doc_id, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
return MongoManager.backportProjectId(project_id, doc_id, function (error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return callback(null)
|
||||
})
|
||||
},
|
||||
|
||||
// Apply updates for specific project/doc after preparing at project and doc level
|
||||
REDIS_READ_BATCH_SIZE: 100,
|
||||
processUncompressedUpdates(project_id, doc_id, temporary, callback) {
|
||||
// get the updates as strings from redis (so we can delete them after they are applied)
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
return RedisManager.getOldestDocUpdates(
|
||||
doc_id,
|
||||
UpdatesManager.REDIS_READ_BATCH_SIZE,
|
||||
function (error, docUpdates) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
const { length } = docUpdates
|
||||
// parse the redis strings into ShareJs updates
|
||||
return RedisManager.expandDocUpdates(
|
||||
docUpdates,
|
||||
function (error, rawUpdates) {
|
||||
if (error != null) {
|
||||
logger.err(
|
||||
{ project_id, doc_id, docUpdates },
|
||||
'failed to parse docUpdates'
|
||||
)
|
||||
return callback(error)
|
||||
}
|
||||
logger.log(
|
||||
{ project_id, doc_id, rawUpdates },
|
||||
'retrieved raw updates from redis'
|
||||
)
|
||||
return UpdatesManager.compressAndSaveRawUpdates(
|
||||
project_id,
|
||||
doc_id,
|
||||
rawUpdates,
|
||||
temporary,
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
logger.log(
|
||||
{ project_id, doc_id },
|
||||
'compressed and saved doc updates'
|
||||
)
|
||||
// delete the applied updates from redis
|
||||
return RedisManager.deleteAppliedDocUpdates(
|
||||
project_id,
|
||||
doc_id,
|
||||
docUpdates,
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
if (length === UpdatesManager.REDIS_READ_BATCH_SIZE) {
|
||||
// There might be more updates
|
||||
logger.log(
|
||||
{ project_id, doc_id },
|
||||
'continuing processing updates'
|
||||
)
|
||||
return setTimeout(
|
||||
() =>
|
||||
UpdatesManager.processUncompressedUpdates(
|
||||
project_id,
|
||||
doc_id,
|
||||
temporary,
|
||||
callback
|
||||
),
|
||||
0
|
||||
)
|
||||
} else {
|
||||
logger.log(
|
||||
{ project_id, doc_id },
|
||||
'all raw updates processed'
|
||||
)
|
||||
return callback()
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
},
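// The batching pattern above in isolation (illustrative sketch, not part of the
// original file): read a fixed-size batch, process it, and recurse via
// setTimeout(..., 0) -- yielding to the event loop -- until a short batch
// signals that the queue is drained.
//   function drainQueue(readBatch, handleBatch, batchSize, callback) {
//     readBatch(batchSize, (err, items) => {
//       if (err) return callback(err)
//       handleBatch(items, err => {
//         if (err) return callback(err)
//         if (items.length < batchSize) return callback() // drained
//         setTimeout(() => drainQueue(readBatch, handleBatch, batchSize, callback), 0)
//       })
//     })
//   }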
|
||||
|
||||
// Process updates for a doc when we flush it individually
|
||||
processUncompressedUpdatesWithLock(project_id, doc_id, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
return UpdatesManager._prepareProjectForUpdates(
|
||||
project_id,
|
||||
function (error, temporary) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return UpdatesManager._processUncompressedUpdatesForDocWithLock(
|
||||
project_id,
|
||||
doc_id,
|
||||
temporary,
|
||||
callback
|
||||
)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
// Process updates for a doc when the whole project is flushed (internal method)
|
||||
_processUncompressedUpdatesForDocWithLock(
|
||||
project_id,
|
||||
doc_id,
|
||||
temporary,
|
||||
callback
|
||||
) {
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
return UpdatesManager._prepareDocForUpdates(
|
||||
project_id,
|
||||
doc_id,
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return LockManager.runWithLock(
|
||||
keys.historyLock({ doc_id }),
|
||||
releaseLock =>
|
||||
UpdatesManager.processUncompressedUpdates(
|
||||
project_id,
|
||||
doc_id,
|
||||
temporary,
|
||||
releaseLock
|
||||
),
|
||||
callback
|
||||
)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
// Process all updates for a project, only check project-level information once
|
||||
processUncompressedUpdatesForProject(project_id, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error) {}
|
||||
}
|
||||
return RedisManager.getDocIdsWithHistoryOps(
|
||||
project_id,
|
||||
function (error, doc_ids) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return UpdatesManager._prepareProjectForUpdates(
|
||||
project_id,
|
||||
function (error, temporary) {
|
||||
const jobs = []
|
||||
for (const doc_id of Array.from(doc_ids)) {
|
||||
;(doc_id =>
|
||||
jobs.push(cb =>
|
||||
UpdatesManager._processUncompressedUpdatesForDocWithLock(
|
||||
project_id,
|
||||
doc_id,
|
||||
temporary,
|
||||
cb
|
||||
)
|
||||
))(doc_id)
|
||||
}
|
||||
return async.parallelLimit(jobs, 5, callback)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
// flush all outstanding changes
|
||||
flushAll(limit, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, result) {}
|
||||
}
|
||||
return RedisManager.getProjectIdsWithHistoryOps(function (
|
||||
error,
|
||||
project_ids
|
||||
) {
|
||||
let project_id
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
logger.log(
|
||||
{
|
||||
count: project_ids != null ? project_ids.length : undefined,
|
||||
project_ids,
|
||||
},
|
||||
'found projects'
|
||||
)
|
||||
const jobs = []
|
||||
project_ids = _.shuffle(project_ids) // randomise to avoid hitting same projects each time
|
||||
const selectedProjects =
|
||||
limit < 0 ? project_ids : project_ids.slice(0, limit)
|
||||
for (project_id of Array.from(selectedProjects)) {
|
||||
;(project_id =>
|
||||
jobs.push(cb =>
|
||||
UpdatesManager.processUncompressedUpdatesForProject(
|
||||
project_id,
|
||||
err => cb(null, { failed: err != null, project_id })
|
||||
)
|
||||
))(project_id)
|
||||
}
|
||||
return async.series(jobs, function (error, result) {
|
||||
let x
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
const failedProjects = (() => {
|
||||
const result1 = []
|
||||
for (x of Array.from(result)) {
|
||||
if (x.failed) {
|
||||
result1.push(x.project_id)
|
||||
}
|
||||
}
|
||||
return result1
|
||||
})()
|
||||
const succeededProjects = (() => {
|
||||
const result2 = []
|
||||
for (x of Array.from(result)) {
|
||||
if (!x.failed) {
|
||||
result2.push(x.project_id)
|
||||
}
|
||||
}
|
||||
return result2
|
||||
})()
|
||||
return callback(null, {
|
||||
failed: failedProjects,
|
||||
succeeded: succeededProjects,
|
||||
all: project_ids,
|
||||
})
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
getDanglingUpdates(callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, doc_ids) {}
|
||||
}
|
||||
return RedisManager.getAllDocIdsWithHistoryOps(function (
|
||||
error,
|
||||
all_doc_ids
|
||||
) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return RedisManager.getProjectIdsWithHistoryOps(function (
|
||||
error,
|
||||
all_project_ids
|
||||
) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
// function to get doc_ids for each project
|
||||
const task = cb =>
|
||||
async.concatSeries(
|
||||
all_project_ids,
|
||||
RedisManager.getDocIdsWithHistoryOps,
|
||||
cb
|
||||
)
|
||||
// find the dangling doc ids
|
||||
return task(function (error, project_doc_ids) {
|
||||
const dangling_doc_ids = _.difference(all_doc_ids, project_doc_ids)
|
||||
logger.log(
|
||||
{ all_doc_ids, all_project_ids, project_doc_ids, dangling_doc_ids },
|
||||
'checking for dangling doc ids'
|
||||
)
|
||||
return callback(null, dangling_doc_ids)
|
||||
})
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
getDocUpdates(project_id, doc_id, options, callback) {
|
||||
if (options == null) {
|
||||
options = {}
|
||||
}
|
||||
if (callback == null) {
|
||||
callback = function (error, updates) {}
|
||||
}
|
||||
return UpdatesManager.processUncompressedUpdatesWithLock(
|
||||
project_id,
|
||||
doc_id,
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
// console.log "options", options
|
||||
return PackManager.getOpsByVersionRange(
|
||||
project_id,
|
||||
doc_id,
|
||||
options.from,
|
||||
options.to,
|
||||
function (error, updates) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return callback(null, updates)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
getDocUpdatesWithUserInfo(project_id, doc_id, options, callback) {
|
||||
if (options == null) {
|
||||
options = {}
|
||||
}
|
||||
if (callback == null) {
|
||||
callback = function (error, updates) {}
|
||||
}
|
||||
return UpdatesManager.getDocUpdates(
|
||||
project_id,
|
||||
doc_id,
|
||||
options,
|
||||
function (error, updates) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return UpdatesManager.fillUserInfo(updates, function (error, updates) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return callback(null, updates)
|
||||
})
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
getSummarizedProjectUpdates(project_id, options, callback) {
|
||||
if (options == null) {
|
||||
options = {}
|
||||
}
|
||||
if (callback == null) {
|
||||
callback = function (error, updates) {}
|
||||
}
|
||||
if (!options.min_count) {
|
||||
options.min_count = 25
|
||||
}
|
||||
let summarizedUpdates = []
|
||||
const { before } = options
|
||||
let nextBeforeTimestamp = null
|
||||
return UpdatesManager.processUncompressedUpdatesForProject(
|
||||
project_id,
|
||||
function (error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return PackManager.makeProjectIterator(
|
||||
project_id,
|
||||
before,
|
||||
function (err, iterator) {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
// repeatedly get updates and pass them through the summariser to get a final output with user info
|
||||
return async.whilst(
|
||||
() =>
|
||||
// console.log "checking iterator.done", iterator.done()
|
||||
summarizedUpdates.length < options.min_count &&
|
||||
!iterator.done(),
|
||||
|
||||
cb =>
|
||||
iterator.next(function (err, partialUpdates) {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
// logger.log {partialUpdates}, 'got partialUpdates'
|
||||
if (partialUpdates.length === 0) {
|
||||
return cb()
|
||||
} // FIXME: should try to avoid this happening
|
||||
nextBeforeTimestamp =
|
||||
partialUpdates[partialUpdates.length - 1].meta.end_ts
|
||||
// add the updates to the summary list
|
||||
summarizedUpdates = UpdatesManager._summarizeUpdates(
|
||||
partialUpdates,
|
||||
summarizedUpdates
|
||||
)
|
||||
return cb()
|
||||
}),
|
||||
|
||||
() =>
|
||||
// finally done all updates
|
||||
// console.log 'summarized Updates', summarizedUpdates
|
||||
UpdatesManager.fillSummarizedUserInfo(
|
||||
summarizedUpdates,
|
||||
function (err, results) {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
return callback(
|
||||
null,
|
||||
results,
|
||||
!iterator.done() ? nextBeforeTimestamp : undefined
|
||||
)
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
exportProject(projectId, consumer) {
|
||||
// Flush anything before collecting updates.
|
||||
UpdatesManager.processUncompressedUpdatesForProject(projectId, err => {
|
||||
if (err) return consumer(err)
|
||||
|
||||
// Fetch all the packs.
|
||||
const before = undefined
|
||||
PackManager.makeProjectIterator(projectId, before, (err, iterator) => {
|
||||
if (err) return consumer(err)
|
||||
|
||||
const accumulatedUserIds = new Set()
|
||||
|
||||
async.whilst(
|
||||
() => !iterator.done(),
|
||||
|
||||
cb =>
|
||||
iterator.next((err, updatesFromASinglePack) => {
|
||||
if (err) return cb(err)
|
||||
|
||||
if (updatesFromASinglePack.length === 0) {
|
||||
// This should not happen when `iterator.done() == false`.
|
||||
// Emitting an empty array would signal the final call to the
|
||||
// consumer.
|
||||
return cb()
|
||||
}
|
||||
updatesFromASinglePack.forEach(update => {
|
||||
accumulatedUserIds.add(
|
||||
// Super defensive access on update details.
|
||||
String(update && update.meta && update.meta.user_id)
|
||||
)
|
||||
})
|
||||
// Emit updates and wait for the consumer.
|
||||
consumer(null, { updates: updatesFromASinglePack }, cb)
|
||||
}),
|
||||
|
||||
err => {
|
||||
if (err) return consumer(err)
|
||||
|
||||
// Adding undefined can happen for broken updates.
|
||||
accumulatedUserIds.delete('undefined')
|
||||
|
||||
consumer(null, {
|
||||
updates: [],
|
||||
userIds: Array.from(accumulatedUserIds).sort(),
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
fetchUserInfo(users, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, fetchedUserInfo) {}
|
||||
}
|
||||
const jobs = []
|
||||
const fetchedUserInfo = {}
|
||||
for (const user_id in users) {
|
||||
;(user_id =>
|
||||
jobs.push(callback =>
|
||||
WebApiManager.getUserInfo(user_id, function (error, userInfo) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
fetchedUserInfo[user_id] = userInfo
|
||||
return callback()
|
||||
})
|
||||
))(user_id)
|
||||
}
|
||||
|
||||
return async.series(jobs, function (err) {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
return callback(null, fetchedUserInfo)
|
||||
})
|
||||
},
|
||||
|
||||
fillUserInfo(updates, callback) {
|
||||
let update, user_id
|
||||
if (callback == null) {
|
||||
callback = function (error, updates) {}
|
||||
}
|
||||
const users = {}
|
||||
for (update of Array.from(updates)) {
|
||||
;({ user_id } = update.meta)
|
||||
if (UpdatesManager._validUserId(user_id)) {
|
||||
users[user_id] = true
|
||||
}
|
||||
}
|
||||
|
||||
return UpdatesManager.fetchUserInfo(
|
||||
users,
|
||||
function (error, fetchedUserInfo) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
for (update of Array.from(updates)) {
|
||||
;({ user_id } = update.meta)
|
||||
delete update.meta.user_id
|
||||
if (UpdatesManager._validUserId(user_id)) {
|
||||
update.meta.user = fetchedUserInfo[user_id]
|
||||
}
|
||||
}
|
||||
return callback(null, updates)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
fillSummarizedUserInfo(updates, callback) {
|
||||
let update, user_id, user_ids
|
||||
if (callback == null) {
|
||||
callback = function (error, updates) {}
|
||||
}
|
||||
const users = {}
|
||||
for (update of Array.from(updates)) {
|
||||
user_ids = update.meta.user_ids || []
|
||||
for (user_id of Array.from(user_ids)) {
|
||||
if (UpdatesManager._validUserId(user_id)) {
|
||||
users[user_id] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return UpdatesManager.fetchUserInfo(
|
||||
users,
|
||||
function (error, fetchedUserInfo) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
for (update of Array.from(updates)) {
|
||||
user_ids = update.meta.user_ids || []
|
||||
update.meta.users = []
|
||||
delete update.meta.user_ids
|
||||
for (user_id of Array.from(user_ids)) {
|
||||
if (UpdatesManager._validUserId(user_id)) {
|
||||
update.meta.users.push(fetchedUserInfo[user_id])
|
||||
} else {
|
||||
update.meta.users.push(null)
|
||||
}
|
||||
}
|
||||
}
|
||||
return callback(null, updates)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
_validUserId(user_id) {
|
||||
if (user_id == null) {
|
||||
return false
|
||||
} else {
|
||||
return !!user_id.match(/^[a-f0-9]{24}$/)
|
||||
}
|
||||
},
|
||||
|
||||
TIME_BETWEEN_DISTINCT_UPDATES: (fiveMinutes = 5 * 60 * 1000),
|
||||
SPLIT_ON_DELETE_SIZE: 16, // characters
|
||||
_summarizeUpdates(updates, existingSummarizedUpdates) {
|
||||
if (existingSummarizedUpdates == null) {
|
||||
existingSummarizedUpdates = []
|
||||
}
|
||||
const summarizedUpdates = existingSummarizedUpdates.slice()
|
||||
let previousUpdateWasBigDelete = false
|
||||
for (const update of Array.from(updates)) {
|
||||
var doc_id
|
||||
const earliestUpdate = summarizedUpdates[summarizedUpdates.length - 1]
|
||||
let shouldConcat = false
|
||||
|
||||
// If a user inserts some text, then deletes a big chunk including that text,
|
||||
// the update we show might concat the insert and delete, and there will be no sign
|
||||
// of that insert having happened, and no way to restore to it (restoring after a big delete is common).
|
||||
// So, we split the summary on 'big' deletes. However, we're stepping backwards in time with
|
||||
// most recent changes considered first, so if this update is a big delete, we want to start
|
||||
// a new summarized update next time, hence we monitor the previous update.
|
||||
if (previousUpdateWasBigDelete) {
|
||||
shouldConcat = false
|
||||
} else if (
|
||||
earliestUpdate &&
|
||||
earliestUpdate.meta.end_ts - update.meta.start_ts <
|
||||
this.TIME_BETWEEN_DISTINCT_UPDATES
|
||||
) {
|
||||
// We're going backwards in time through the updates, so only combine if this update starts less than 5 minutes before
|
||||
// the end of the current summarized block, so no block spans more than 5 minutes.
|
||||
shouldConcat = true
|
||||
}
|
||||
|
||||
let isBigDelete = false
|
||||
for (const op of Array.from(update.op || [])) {
|
||||
if (op.d != null && op.d.length > this.SPLIT_ON_DELETE_SIZE) {
|
||||
isBigDelete = true
|
||||
}
|
||||
}
|
||||
|
||||
previousUpdateWasBigDelete = isBigDelete
|
||||
|
||||
if (shouldConcat) {
|
||||
// check if the user in this update is already present in the earliest update,
|
||||
// if not, add them to the users list of the earliest update
|
||||
earliestUpdate.meta.user_ids = _.union(earliestUpdate.meta.user_ids, [
|
||||
update.meta.user_id,
|
||||
])
|
||||
|
||||
doc_id = update.doc_id.toString()
|
||||
const doc = earliestUpdate.docs[doc_id]
|
||||
if (doc != null) {
|
||||
doc.fromV = Math.min(doc.fromV, update.v)
|
||||
doc.toV = Math.max(doc.toV, update.v)
|
||||
} else {
|
||||
earliestUpdate.docs[doc_id] = {
|
||||
fromV: update.v,
|
||||
toV: update.v,
|
||||
}
|
||||
}
|
||||
|
||||
earliestUpdate.meta.start_ts = Math.min(
|
||||
earliestUpdate.meta.start_ts,
|
||||
update.meta.start_ts
|
||||
)
|
||||
earliestUpdate.meta.end_ts = Math.max(
|
||||
earliestUpdate.meta.end_ts,
|
||||
update.meta.end_ts
|
||||
)
|
||||
} else {
|
||||
const newUpdate = {
|
||||
meta: {
|
||||
user_ids: [],
|
||||
start_ts: update.meta.start_ts,
|
||||
end_ts: update.meta.end_ts,
|
||||
},
|
||||
docs: {},
|
||||
}
|
||||
|
||||
newUpdate.docs[update.doc_id.toString()] = {
|
||||
fromV: update.v,
|
||||
toV: update.v,
|
||||
}
|
||||
newUpdate.meta.user_ids.push(update.meta.user_id)
|
||||
summarizedUpdates.push(newUpdate)
|
||||
}
|
||||
}
|
||||
|
||||
return summarizedUpdates
|
||||
},
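// Worked example (illustrative, not part of the original file): three updates by
// one user on the same doc, each within 5 minutes of the previous summary block,
// collapse into a single entry whose docs[doc_id] spans min(fromV)..max(toV).
// A delete longer than SPLIT_ON_DELETE_SIZE (16 characters) marks the previous
// update as a big delete, so the next (older) update starts a fresh entry and
// the pre-delete state remains visible and restorable.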
|
||||
}
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return typeof value !== 'undefined' && value !== null
|
||||
? transform(value)
|
||||
: undefined
|
||||
}
|
116
services/track-changes/app/js/WebApiManager.js
Normal file
|
@@ -0,0 +1,116 @@
|
|||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let WebApiManager
|
||||
const request = require('requestretry') // allow retry on error https://github.com/FGRibreau/node-request-retry
|
||||
const logger = require('logger-sharelatex')
|
||||
const Settings = require('@overleaf/settings')
|
||||
|
||||
// Don't let HTTP calls hang for a long time
|
||||
const MAX_HTTP_REQUEST_LENGTH = 15000 // 15 seconds
|
||||
|
||||
// DEPRECATED! This method of getting user details via track-changes is deprecated
|
||||
// given the way we lay out our services.
|
||||
// Instead, web should be responsible for collecting the raw data (user_ids) and
|
||||
// filling it out with calls to other services. All API calls should create a
|
||||
// tree-like structure as much as possible, with web as the root.
|
||||
module.exports = WebApiManager = {
|
||||
sendRequest(url, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, body) {}
|
||||
}
|
||||
return request.get(
|
||||
{
|
||||
url: `${Settings.apis.web.url}${url}`,
|
||||
timeout: MAX_HTTP_REQUEST_LENGTH,
|
||||
maxAttempts: 2, // for node-request-retry
|
||||
auth: {
|
||||
user: Settings.apis.web.user,
|
||||
pass: Settings.apis.web.pass,
|
||||
sendImmediately: true,
|
||||
},
|
||||
},
|
||||
function (error, res, body) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
if (res.statusCode === 404) {
|
||||
logger.log({ url }, 'got 404 from web api')
|
||||
return callback(null, null)
|
||||
}
|
||||
if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
return callback(null, body)
|
||||
} else {
|
||||
error = new Error(
|
||||
`web returned a non-success status code: ${res.statusCode} (attempts: ${res.attempts})`
|
||||
)
|
||||
return callback(error)
|
||||
}
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
getUserInfo(user_id, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, userInfo) {}
|
||||
}
|
||||
const url = `/user/${user_id}/personal_info`
|
||||
logger.log({ user_id }, 'getting user info from web')
|
||||
return WebApiManager.sendRequest(url, function (error, body) {
|
||||
let user
|
||||
if (error != null) {
|
||||
logger.error({ err: error, user_id, url }, 'error accessing web')
|
||||
return callback(error)
|
||||
}
|
||||
|
||||
if (body === null) {
|
||||
logger.error({ user_id, url }, 'no user found')
|
||||
return callback(null, null)
|
||||
}
|
||||
try {
|
||||
user = JSON.parse(body)
|
||||
} catch (error1) {
|
||||
error = error1
|
||||
return callback(error)
|
||||
}
|
||||
return callback(null, {
|
||||
id: user.id,
|
||||
email: user.email,
|
||||
first_name: user.first_name,
|
||||
last_name: user.last_name,
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
getProjectDetails(project_id, callback) {
|
||||
if (callback == null) {
|
||||
callback = function (error, details) {}
|
||||
}
|
||||
const url = `/project/${project_id}/details`
|
||||
logger.log({ project_id }, 'getting project details from web')
|
||||
return WebApiManager.sendRequest(url, function (error, body) {
|
||||
let project
|
||||
if (error != null) {
|
||||
logger.error({ err: error, project_id, url }, 'error accessing web')
|
||||
return callback(error)
|
||||
}
|
||||
|
||||
try {
|
||||
project = JSON.parse(body)
|
||||
} catch (error1) {
|
||||
error = error1
|
||||
return callback(error)
|
||||
}
|
||||
return callback(null, project)
|
||||
})
|
||||
},
|
||||
}
|
42
services/track-changes/app/js/mongodb.js
Normal file
|
@@ -0,0 +1,42 @@
|
|||
const Settings = require('@overleaf/settings')
|
||||
const { MongoClient, ObjectId } = require('mongodb')
|
||||
|
||||
const clientPromise = MongoClient.connect(
|
||||
Settings.mongo.url,
|
||||
Settings.mongo.options
|
||||
)
|
||||
|
||||
let setupDbPromise
|
||||
async function waitForDb() {
|
||||
if (!setupDbPromise) {
|
||||
setupDbPromise = setupDb()
|
||||
}
|
||||
await setupDbPromise
|
||||
}
|
||||
|
||||
const db = {}
|
||||
async function setupDb() {
|
||||
const internalDb = (await clientPromise).db()
|
||||
|
||||
db.docHistory = internalDb.collection('docHistory')
|
||||
db.docHistoryIndex = internalDb.collection('docHistoryIndex')
|
||||
db.projectHistoryMetaData = internalDb.collection('projectHistoryMetaData')
|
||||
}
|
||||
|
||||
async function closeDb() {
|
||||
let client
|
||||
try {
|
||||
client = await clientPromise
|
||||
} catch (e) {
|
||||
// there is nothing to close
|
||||
return
|
||||
}
|
||||
return client.close()
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
db,
|
||||
ObjectId,
|
||||
closeDb,
|
||||
waitForDb,
|
||||
}
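// Typical usage sketch (illustrative, not part of the original file): callers
// await waitForDb() once so the lazily populated `db` collections exist before
// any query runs. The require path is hypothetical.
//   const { db, waitForDb } = require('./app/js/mongodb')
//   waitForDb()
//     .then(() => db.docHistory.countDocuments())
//     .then(count => console.log('docHistory size', count))
//     .catch(err => { console.error(err); process.exit(1) })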
|
2339
services/track-changes/app/lib/diff_match_patch.js
Normal file
File diff suppressed because it is too large
8
services/track-changes/buildscript.txt
Normal file
|
@@ -0,0 +1,8 @@
|
|||
track-changes
|
||||
--dependencies=mongo,redis,s3
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=AWS_BUCKET=bucket
|
||||
--env-pass-through=
|
||||
--node-version=12.22.3
|
||||
--public-repo=True
|
||||
--script-version=3.11.0
|
90
services/track-changes/config/settings.defaults.js
Executable file
|
@@ -0,0 +1,90 @@
|
|||
const Path = require('path')
|
||||
const TMP_DIR =
|
||||
process.env.TMP_PATH || Path.resolve(Path.join(__dirname, '../../', 'tmp'))
|
||||
|
||||
module.exports = {
|
||||
mongo: {
|
||||
options: {
|
||||
useUnifiedTopology:
|
||||
(process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true',
|
||||
},
|
||||
url:
|
||||
process.env.MONGO_CONNECTION_STRING ||
|
||||
`mongodb://${process.env.MONGO_HOST || 'localhost'}/sharelatex`,
|
||||
},
|
||||
|
||||
internal: {
|
||||
trackchanges: {
|
||||
port: 3015,
|
||||
host: process.env.LISTEN_ADDRESS || 'localhost',
|
||||
},
|
||||
},
|
||||
apis: {
|
||||
documentupdater: {
|
||||
url: `http://${
|
||||
process.env.DOCUMENT_UPDATER_HOST ||
|
||||
process.env.DOCUPDATER_HOST ||
|
||||
'localhost'
|
||||
}:3003`,
|
||||
},
|
||||
docstore: {
|
||||
url: `http://${process.env.DOCSTORE_HOST || 'localhost'}:3016`,
|
||||
},
|
||||
web: {
|
||||
url: `http://${
|
||||
process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost'
|
||||
}:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
|
||||
user: process.env.WEB_API_USER || 'sharelatex',
|
||||
pass: process.env.WEB_API_PASSWORD || 'password',
|
||||
},
|
||||
},
|
||||
redis: {
|
||||
lock: {
|
||||
host: process.env.REDIS_HOST || 'localhost',
|
||||
port: process.env.REDIS_PORT || 6379,
|
||||
password: process.env.REDIS_PASSWORD || '',
|
||||
key_schema: {
|
||||
historyLock({ doc_id: docId }) {
|
||||
return `HistoryLock:{${docId}}`
|
||||
},
|
||||
historyIndexLock({ project_id: projectId }) {
|
||||
return `HistoryIndexLock:{${projectId}}`
|
||||
},
|
||||
},
|
||||
},
|
||||
history: {
|
||||
host: process.env.REDIS_HOST || 'localhost',
|
||||
port: process.env.REDIS_PORT || 6379,
|
||||
password: process.env.REDIS_PASSWORD || '',
|
||||
key_schema: {
|
||||
uncompressedHistoryOps({ doc_id: docId }) {
|
||||
return `UncompressedHistoryOps:{${docId}}`
|
||||
},
|
||||
docsWithHistoryOps({ project_id: projectId }) {
|
||||
return `DocsWithHistoryOps:{${projectId}}`
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
trackchanges: {
|
||||
s3: {
|
||||
key: process.env.AWS_ACCESS_KEY_ID,
|
||||
secret: process.env.AWS_SECRET_ACCESS_KEY,
|
||||
endpoint: process.env.AWS_S3_ENDPOINT,
|
||||
pathStyle: process.env.AWS_S3_PATH_STYLE === 'true',
|
||||
},
|
||||
stores: {
|
||||
doc_history: process.env.AWS_BUCKET,
|
||||
},
|
||||
continueOnError: process.env.TRACK_CHANGES_CONTINUE_ON_ERROR || false,
|
||||
},
|
||||
|
||||
path: {
|
||||
dumpFolder: Path.join(TMP_DIR, 'dumpFolder'),
|
||||
},
|
||||
|
||||
sentry: {
|
||||
dsn: process.env.SENTRY_DSN,
|
||||
},
|
||||
}
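The key_schema functions centralise Redis key construction; a brief illustrative sketch of how they would be called (the doc id is made up):

const Settings = require('@overleaf/settings')

const lockKey = Settings.redis.lock.key_schema.historyLock({ doc_id: 'doc123' })
// lockKey === 'HistoryLock:{doc123}' -- the braces are a Redis Cluster hash tag,
// so every key derived from one doc id maps to the same cluster slot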
72
services/track-changes/docker-compose.ci.yml
Normal file
@@ -0,0 +1,72 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

version: "2.3"

services:
  test_unit:
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    user: node
    command: npm run test:unit:_run
    environment:
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"


  test_acceptance:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      REDIS_HOST: redis
      QUEUES_REDIS_HOST: redis
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      AWS_S3_ENDPOINT: http://s3:9090
      AWS_S3_PATH_STYLE: 'true'
      AWS_ACCESS_KEY_ID: fake
      AWS_SECRET_ACCESS_KEY: fake
      MOCHA_GREP: ${MOCHA_GREP}
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
      AWS_BUCKET: bucket
    depends_on:
      mongo:
        condition: service_healthy
      redis:
        condition: service_healthy
      s3:
        condition: service_healthy
    user: node
    command: npm run test:acceptance:_run


  tar:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    volumes:
      - ./:/tmp/build/
    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
    user: root
  redis:
    image: redis
    healthcheck:
      test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ]
      interval: 1s
      retries: 20

  mongo:
    image: mongo:4.0
    healthcheck:
      test: "mongo --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'"
      interval: 1s
      retries: 20
  s3:
    image: adobe/s3mock
    environment:
      - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket
    healthcheck:
      test: wget --quiet --output-document=/dev/null http://localhost:9090
      interval: 1s
      retries: 20
71
services/track-changes/docker-compose.yml
Normal file
@@ -0,0 +1,71 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

version: "2.3"

services:
  test_unit:
    image: node:12.22.3
    volumes:
      - .:/app
    working_dir: /app
    environment:
      MOCHA_GREP: ${MOCHA_GREP}
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
    command: npm run --silent test:unit
    user: node

  test_acceptance:
    image: node:12.22.3
    volumes:
      - .:/app
    working_dir: /app
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      REDIS_HOST: redis
      QUEUES_REDIS_HOST: redis
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      AWS_S3_ENDPOINT: http://s3:9090
      AWS_S3_PATH_STYLE: 'true'
      AWS_ACCESS_KEY_ID: fake
      AWS_SECRET_ACCESS_KEY: fake
      MOCHA_GREP: ${MOCHA_GREP}
      LOG_LEVEL: ERROR
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
      AWS_BUCKET: bucket
    user: node
    depends_on:
      mongo:
        condition: service_healthy
      redis:
        condition: service_healthy
      s3:
        condition: service_healthy
    command: npm run --silent test:acceptance

  redis:
    image: redis
    healthcheck:
      test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ]
      interval: 1s
      retries: 20

  mongo:
    image: mongo:4.0
    healthcheck:
      test: "mongo --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'"
      interval: 1s
      retries: 20

  s3:
    image: adobe/s3mock
    environment:
      - initialBuckets=fake_user_files,fake_template_files,fake_public_files,bucket
    healthcheck:
      test: wget --quiet --output-document=/dev/null http://localhost:9090
      interval: 1s
      retries: 20
17
services/track-changes/nodemon.json
Normal file
@@ -0,0 +1,17 @@
{
  "ignore": [
    ".git",
    "node_modules/"
  ],
  "verbose": true,
  "legacyWatch": true,
  "execMap": {
    "js": "npm run start"
  },
  "watch": [
    "app/js/",
    "app.js",
    "config/"
  ],
  "ext": "js"
}
24
services/track-changes/pack.sh
Executable file
@@ -0,0 +1,24 @@
#!/bin/bash -x

# find all the docHistories with unpacked ops and pack them

# need to keep track of docs already done

HOST=${1:-"localhost:3015"}
T=${2:-10}

echo packing all docHistory on $HOST with delay of $T
for n in $(seq 5 -1 1) ; do
  echo starting in $n seconds
  sleep 1
done

# $last_doc is unset on the first pass, so the first request paginates from the
# beginning; each subsequent pass resumes from the last doc id seen
while docs=$(curl "$HOST/doc/list?limit=1000&doc_id=$last_doc"); do
  if [ -z "$docs" ] ; then break ; fi
  for d in $docs ; do
    echo "packing $d"
    curl -X POST "$HOST/doc/$d/pack"
    sleep $T
    last_doc=$d
  done
done
5794
services/track-changes/package-lock.json
generated
Normal file
File diff suppressed because it is too large.
66
services/track-changes/package.json
Normal file
@@ -0,0 +1,66 @@
{
  "name": "history-sharelatex",
  "version": "0.1.4",
  "description": "An API for saving and compressing individual document updates into a browsable history",
  "repository": {
    "type": "git",
    "url": "https://github.com/sharelatex/track-changes-sharelatex.git"
  },
  "scripts": {
    "start": "node $NODE_APP_OPTIONS app.js",
    "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
    "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
    "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
    "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
    "nodemon": "nodemon --config nodemon.json",
    "lint": "eslint --max-warnings 0 --format unix .",
    "format": "prettier --list-different $PWD/'**/*.js'",
    "format:fix": "prettier --write $PWD/'**/*.js'",
    "lint:fix": "eslint --fix ."
  },
  "dependencies": {
    "@overleaf/metrics": "^3.5.1",
    "@overleaf/o-error": "^3.1.0",
    "@overleaf/redis-wrapper": "^2.0.0",
    "@overleaf/settings": "^2.1.1",
    "JSONStream": "^1.3.5",
    "async": "^2.6.3",
    "aws-sdk": "^2.643.0",
    "body-parser": "^1.19.0",
    "bson": "^1.1.5",
    "bunyan": "^1.8.15",
    "byline": "^5.0.0",
    "express": "4.17.1",
    "heap": "^0.2.6",
    "line-reader": "^0.4.0",
    "logger-sharelatex": "^2.2.0",
    "mongo-uri": "^0.1.2",
    "mongodb": "^3.6.0",
    "redis": "~0.10.1",
    "request": "~2.88.2",
    "requestretry": "^4.1.0",
    "s3-streams": "^0.4.0",
    "underscore": "~1.13.1"
  },
  "devDependencies": {
    "chai": "^4.2.0",
    "chai-as-promised": "^7.1.1",
    "cli": "^1.0.1",
    "eslint": "^7.21.0",
    "eslint-config-prettier": "^8.1.0",
    "eslint-config-standard": "^16.0.2",
    "eslint-plugin-chai-expect": "^2.2.0",
    "eslint-plugin-chai-friendly": "^0.6.0",
    "eslint-plugin-import": "^2.22.1",
    "eslint-plugin-mocha": "^8.0.0",
    "eslint-plugin-node": "^11.1.0",
    "eslint-plugin-prettier": "^3.1.2",
    "eslint-plugin-promise": "^4.2.1",
    "memorystream": "0.3.1",
    "mocha": "^8.3.2",
    "prettier": "^2.2.1",
    "sandboxed-module": "~2.0.3",
    "sinon": "~9.0.1",
    "timekeeper": "2.2.0"
  }
}
@@ -0,0 +1,4 @@
FROM adobe/s3mock
RUN apk add --update --no-cache curl
COPY healthcheck.sh /healthcheck.sh
HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090
@@ -0,0 +1,9 @@
#!/bin/sh

# health check to allow 404 status code as valid
STATUSCODE=$(curl --silent --output /dev/null --write-out "%{http_code}" $1)
# will be 000 on non-http error (e.g. connection failure)
if test $STATUSCODE -ge 500 || test $STATUSCODE -lt 200; then
  exit 1
fi
exit 0
@@ -0,0 +1,587 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const { ObjectId } = require('../../../app/js/mongodb')
const Settings = require('@overleaf/settings')
const request = require('request')
const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now

const TrackChangesApp = require('./helpers/TrackChangesApp')
const TrackChangesClient = require('./helpers/TrackChangesClient')
const MockWebApi = require('./helpers/MockWebApi')

describe('Appending doc ops to the history', function () {
  before(function (done) {
    return TrackChangesApp.ensureRunning(done)
  })

  describe('when the history does not exist yet', function () {
    before(function (done) {
      this.project_id = ObjectId().toString()
      this.doc_id = ObjectId().toString()
      this.user_id = ObjectId().toString()
      MockWebApi.projects[this.project_id] = { features: { versioning: false } }
      TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        [
          {
            op: [{ i: 'f', p: 3 }],
            meta: { ts: Date.now(), user_id: this.user_id },
            v: 3,
          },
          {
            op: [{ i: 'o', p: 4 }],
            meta: { ts: Date.now(), user_id: this.user_id },
            v: 4,
          },
          {
            op: [{ i: 'o', p: 5 }],
            meta: { ts: Date.now(), user_id: this.user_id },
            v: 5,
          },
        ],
        error => {
          if (error != null) {
            throw error
          }
          return TrackChangesClient.flushAndGetCompressedUpdates(
            this.project_id,
            this.doc_id,
            (error, updates) => {
              this.updates = updates
              if (error != null) {
                throw error
              }
              return done()
            }
          )
        }
      )
      return null
    })

    it('should insert the compressed op into mongo', function () {
      return expect(this.updates[0].pack[0].op).to.deep.equal([
        {
          p: 3,
          i: 'foo',
        },
      ])
    })

    it('should insert the correct version number into mongo', function () {
      return expect(this.updates[0].v).to.equal(5)
    })

    it('should store the doc id', function () {
      return expect(this.updates[0].doc_id.toString()).to.equal(this.doc_id)
    })

    it('should store the project id', function () {
      return expect(this.updates[0].project_id.toString()).to.equal(
        this.project_id
      )
    })

    return it('should clear the doc from the DocsWithHistoryOps set', function (done) {
      rclient.sismember(
        `DocsWithHistoryOps:${this.project_id}`,
        this.doc_id,
        (error, member) => {
          member.should.equal(0)
          return done()
        }
      )
      return null
    })
  })

  describe('when the history has already been started', function () {
    beforeEach(function (done) {
      this.project_id = ObjectId().toString()
      this.doc_id = ObjectId().toString()
      this.user_id = ObjectId().toString()
      MockWebApi.projects[this.project_id] = { features: { versioning: false } }
      TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        [
          {
            op: [{ i: 'f', p: 3 }],
            meta: { ts: Date.now(), user_id: this.user_id },
            v: 3,
          },
          {
            op: [{ i: 'o', p: 4 }],
            meta: { ts: Date.now(), user_id: this.user_id },
            v: 4,
          },
          {
            op: [{ i: 'o', p: 5 }],
            meta: { ts: Date.now(), user_id: this.user_id },
            v: 5,
          },
        ],
        error => {
          if (error != null) {
            throw error
          }
          return TrackChangesClient.flushAndGetCompressedUpdates(
            this.project_id,
            this.doc_id,
            (error, updates) => {
              if (error != null) {
                throw error
              }
              return done()
            }
          )
        }
      )
      return null
    })

    describe('when the updates are recent and from the same user', function () {
      beforeEach(function (done) {
        TrackChangesClient.pushRawUpdates(
          this.project_id,
          this.doc_id,
          [
            {
              op: [{ i: 'b', p: 6 }],
              meta: { ts: Date.now(), user_id: this.user_id },
              v: 6,
            },
            {
              op: [{ i: 'a', p: 7 }],
              meta: { ts: Date.now(), user_id: this.user_id },
              v: 7,
            },
            {
              op: [{ i: 'r', p: 8 }],
              meta: { ts: Date.now(), user_id: this.user_id },
              v: 8,
            },
          ],
          error => {
            if (error != null) {
              throw error
            }
            return TrackChangesClient.flushAndGetCompressedUpdates(
              this.project_id,
              this.doc_id,
              (error, updates) => {
                this.updates = updates
                if (error != null) {
                  throw error
                }
                return done()
              }
            )
          }
        )
        return null
      })

      it('should combine all the updates into one pack', function () {
        return expect(this.updates[0].pack[1].op).to.deep.equal([
          {
            p: 6,
            i: 'bar',
          },
        ])
      })

      return it('should insert the correct version number into mongo', function () {
        return expect(this.updates[0].v_end).to.equal(8)
      })
    })

    return describe('when the updates are far apart', function () {
      beforeEach(function (done) {
        const oneDay = 24 * 60 * 60 * 1000
        TrackChangesClient.pushRawUpdates(
          this.project_id,
          this.doc_id,
          [
            {
              op: [{ i: 'b', p: 6 }],
              meta: { ts: Date.now() + oneDay, user_id: this.user_id },
              v: 6,
            },
            {
              op: [{ i: 'a', p: 7 }],
              meta: { ts: Date.now() + oneDay, user_id: this.user_id },
              v: 7,
            },
            {
              op: [{ i: 'r', p: 8 }],
              meta: { ts: Date.now() + oneDay, user_id: this.user_id },
              v: 8,
            },
          ],
          error => {
            if (error != null) {
              throw error
            }
            return TrackChangesClient.flushAndGetCompressedUpdates(
              this.project_id,
              this.doc_id,
              (error, updates) => {
                this.updates = updates
                if (error != null) {
                  throw error
                }
                return done()
              }
            )
          }
        )
        return null
      })

      return it('should combine the updates into one pack', function () {
        expect(this.updates[0].pack[0].op).to.deep.equal([
          {
            p: 3,
            i: 'foo',
          },
        ])
        return expect(this.updates[0].pack[1].op).to.deep.equal([
          {
            p: 6,
            i: 'bar',
          },
        ])
      })
    })
  })

  describe('when the updates need processing in batches', function () {
    before(function (done) {
      this.project_id = ObjectId().toString()
      this.doc_id = ObjectId().toString()
      this.user_id = ObjectId().toString()
      MockWebApi.projects[this.project_id] = { features: { versioning: false } }
      const updates = []
      this.expectedOp = [{ p: 0, i: '' }]
      for (let i = 0; i <= 250; i++) {
        updates.push({
          op: [{ i: 'a', p: 0 }],
          meta: { ts: Date.now(), user_id: this.user_id },
          v: i,
        })
        this.expectedOp[0].i = `a${this.expectedOp[0].i}`
      }

      TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        updates,
        error => {
          if (error != null) {
            throw error
          }
          return TrackChangesClient.flushAndGetCompressedUpdates(
            this.project_id,
            this.doc_id,
            (error, updates1) => {
              this.updates = updates1
              if (error != null) {
                throw error
              }
              return done()
            }
          )
        }
      )
      return null
    })

    it('should concat the compressed op into mongo', function () {
      return expect(this.updates[0].pack.length).to.deep.equal(3)
    }) // batch size is 100

    return it('should insert the correct version number into mongo', function () {
      return expect(this.updates[0].v_end).to.equal(250)
    })
  })
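  // Editorial note (not part of the original file): the before-hook pushes 251 raw
  // updates (i = 0..250), and with the batch size of 100 noted above they are
  // compressed in Math.ceil(251 / 100) = 3 passes, leaving 3 concatenated ops in
  // the single pack and a final version v_end of 250.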

  describe('when there are multiple ops in each update', function () {
    before(function (done) {
      this.project_id = ObjectId().toString()
      this.doc_id = ObjectId().toString()
      this.user_id = ObjectId().toString()
      MockWebApi.projects[this.project_id] = { features: { versioning: false } }
      const oneDay = 24 * 60 * 60 * 1000
      TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        [
          {
            op: [
              { i: 'f', p: 3 },
              { i: 'o', p: 4 },
              { i: 'o', p: 5 },
            ],
            meta: { ts: Date.now(), user_id: this.user_id },
            v: 3,
          },
          {
            op: [
              { i: 'b', p: 6 },
              { i: 'a', p: 7 },
              { i: 'r', p: 8 },
            ],
            meta: { ts: Date.now() + oneDay, user_id: this.user_id },
            v: 4,
          },
        ],
        error => {
          if (error != null) {
            throw error
          }
          return TrackChangesClient.flushAndGetCompressedUpdates(
            this.project_id,
            this.doc_id,
            (error, updates) => {
              this.updates = updates
              if (error != null) {
                throw error
              }
              return done()
            }
          )
        }
      )
      return null
    })

    it('should insert the compressed ops into mongo', function () {
      expect(this.updates[0].pack[0].op).to.deep.equal([
        {
          p: 3,
          i: 'foo',
        },
      ])
      return expect(this.updates[0].pack[1].op).to.deep.equal([
        {
          p: 6,
          i: 'bar',
        },
      ])
    })

    return it('should insert the correct version numbers into mongo', function () {
      expect(this.updates[0].pack[0].v).to.equal(3)
      return expect(this.updates[0].pack[1].v).to.equal(4)
    })
  })

  describe('when there is a no-op update', function () {
    before(function (done) {
      this.project_id = ObjectId().toString()
      this.doc_id = ObjectId().toString()
      this.user_id = ObjectId().toString()
      MockWebApi.projects[this.project_id] = { features: { versioning: false } }
      const oneDay = 24 * 60 * 60 * 1000
      TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        [
          {
            op: [],
            meta: { ts: Date.now(), user_id: this.user_id },
            v: 3,
          },
          {
            op: [{ i: 'foo', p: 3 }],
            meta: { ts: Date.now() + oneDay, user_id: this.user_id },
            v: 4,
          },
        ],
        error => {
          if (error != null) {
            throw error
          }
          return TrackChangesClient.flushAndGetCompressedUpdates(
            this.project_id,
            this.doc_id,
            (error, updates) => {
              this.updates = updates
              if (error != null) {
                throw error
              }
              return done()
            }
          )
        }
      )
      return null
    })

    it('should insert the compressed no-op into mongo', function () {
      return expect(this.updates[0].pack[0].op).to.deep.equal([])
    })

    it('should insert the compressed next update into mongo', function () {
      return expect(this.updates[0].pack[1].op).to.deep.equal([
        {
          p: 3,
          i: 'foo',
        },
      ])
    })

    return it('should insert the correct version numbers into mongo', function () {
      expect(this.updates[0].pack[0].v).to.equal(3)
      return expect(this.updates[0].pack[1].v).to.equal(4)
    })
  })

  describe('when there is a comment update', function () {
    before(function (done) {
      this.project_id = ObjectId().toString()
      this.doc_id = ObjectId().toString()
      this.user_id = ObjectId().toString()
      MockWebApi.projects[this.project_id] = { features: { versioning: false } }
      TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        [
          {
            op: [
              { c: 'foo', p: 3 },
              { d: 'bar', p: 6 },
            ],
            meta: { ts: Date.now(), user_id: this.user_id },
            v: 3,
          },
        ],
        error => {
          if (error != null) {
            throw error
          }
          return TrackChangesClient.flushAndGetCompressedUpdates(
            this.project_id,
            this.doc_id,
            (error, updates) => {
              this.updates = updates
              if (error != null) {
                throw error
              }
              return done()
            }
          )
        }
      )
      return null
    })

    it('should ignore the comment op', function () {
      return expect(this.updates[0].pack[0].op).to.deep.equal([
        { d: 'bar', p: 6 },
      ])
    })

    return it('should insert the correct version numbers into mongo', function () {
      return expect(this.updates[0].pack[0].v).to.equal(3)
    })
  })
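  // Editorial note (not part of the original file): the assertion above shows that
  // comment ops ({ c: ... }) are stripped during compression while the delete op
  // ({ d: 'bar', p: 6 }) from the same update survives into the pack.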

  describe('when the project has versioning enabled', function () {
    before(function (done) {
      this.project_id = ObjectId().toString()
      this.doc_id = ObjectId().toString()
      this.user_id = ObjectId().toString()
      MockWebApi.projects[this.project_id] = { features: { versioning: true } }

      TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        [
          {
            op: [{ i: 'f', p: 3 }],
            meta: { ts: Date.now(), user_id: this.user_id },
            v: 3,
          },
        ],
        error => {
          if (error != null) {
            throw error
          }
          return TrackChangesClient.flushAndGetCompressedUpdates(
            this.project_id,
            this.doc_id,
            (error, updates) => {
              this.updates = updates
              if (error != null) {
                throw error
              }
              return done()
            }
          )
        }
      )
      return null
    })

    return it('should not add a expiresAt entry in the update in mongo', function () {
      return expect(this.updates[0].expiresAt).to.be.undefined
    })
  })

  return describe('when the project does not have versioning enabled', function () {
    before(function (done) {
      this.project_id = ObjectId().toString()
      this.doc_id = ObjectId().toString()
      this.user_id = ObjectId().toString()
      MockWebApi.projects[this.project_id] = { features: { versioning: false } }

      TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        [
          {
            op: [{ i: 'f', p: 3 }],
            meta: { ts: Date.now(), user_id: this.user_id },
            v: 3,
          },
        ],
        error => {
          if (error != null) {
            throw error
          }
          return TrackChangesClient.flushAndGetCompressedUpdates(
            this.project_id,
            this.doc_id,
            (error, updates) => {
              this.updates = updates
              if (error != null) {
                throw error
              }
              return done()
            }
          )
        }
      )
      return null
    })

    return it('should add a expiresAt entry in the update in mongo', function () {
      return expect(this.updates[0].expiresAt).to.exist
    })
  })
})

@@ -0,0 +1,322 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-undef,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS202: Simplify dynamic range loops
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const { db, ObjectId } = require('../../../app/js/mongodb')
const Settings = require('@overleaf/settings')
const request = require('request')
const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now

const TrackChangesApp = require('./helpers/TrackChangesApp')
const TrackChangesClient = require('./helpers/TrackChangesClient')
const MockDocStoreApi = require('./helpers/MockDocStoreApi')
const MockWebApi = require('./helpers/MockWebApi')

describe('Archiving updates', function () {
  before(function (done) {
    if (
      __guard__(
        __guard__(
          Settings != null ? Settings.trackchanges : undefined,
          x1 => x1.s3
        ),
        x => x.key.length
      ) < 1
    ) {
      const message = new Error('s3 keys not setup, this test setup will fail')
      return done(message)
    }

    return TrackChangesClient.waitForS3(done)
  })

  before(function (done) {
    this.now = Date.now()
    this.to = this.now
    this.user_id = ObjectId().toString()
    this.user_id_2 = ObjectId().toString()
    this.doc_id = ObjectId().toString()
    this.project_id = ObjectId().toString()

    this.minutes = 60 * 1000
    this.hours = 60 * this.minutes

    MockWebApi.projects[this.project_id] = {
      features: {
        versioning: true,
      },
    }
    sinon.spy(MockWebApi, 'getProjectDetails')

    MockWebApi.users[this.user_id] = this.user = {
      email: 'user@sharelatex.com',
      first_name: 'Leo',
      last_name: 'Lion',
      id: this.user_id,
    }
    sinon.spy(MockWebApi, 'getUserInfo')

    MockDocStoreApi.docs[this.doc_id] = this.doc = {
      _id: this.doc_id,
      project_id: this.project_id,
    }
    sinon.spy(MockDocStoreApi, 'getAllDoc')

    this.updates = []
    for (
      let i = 0, end = 512 + 10, asc = end >= 0;
      asc ? i <= end : i >= end;
      asc ? i++ : i--
    ) {
      this.updates.push({
        op: [{ i: 'a', p: 0 }],
        meta: { ts: this.now + (i - 2048) * this.hours, user_id: this.user_id },
        v: 2 * i + 1,
      })
      this.updates.push({
        op: [{ i: 'b', p: 0 }],
        meta: {
          ts: this.now + (i - 2048) * this.hours + 10 * this.minutes,
          user_id: this.user_id_2,
        },
        v: 2 * i + 2,
      })
    }
    TrackChangesApp.ensureRunning(() => {
      return TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        this.updates,
        error => {
          if (error != null) {
            throw error
          }
          return TrackChangesClient.flushDoc(
            this.project_id,
            this.doc_id,
            error => {
              if (error != null) {
                throw error
              }
              return done()
            }
          )
        }
      )
    })
    return null
  })

  after(function (done) {
    MockWebApi.getUserInfo.restore()
    return db.docHistory.deleteMany(
      { project_id: ObjectId(this.project_id) },
      () => {
        return db.docHistoryIndex.remove(
          { project_id: ObjectId(this.project_id) },
          () => {
            return TrackChangesClient.removeS3Doc(
              this.project_id,
              this.doc_id,
              done
            )
          }
        )
      }
    )
  })

  function testExportFeature() {
    describe('exporting the project', function () {
      before('fetch export', function (done) {
        TrackChangesClient.exportProject(
          this.project_id,
          (error, updates, userIds) => {
            if (error) {
              return done(error)
            }
            this.exportedUpdates = updates
            this.exportedUserIds = userIds
            done()
          }
        )
      })

      it('should include all the imported updates, with ids, sorted by timestamp', function () {
        // Add a safe guard for an empty array matching an empty export.
        expect(this.updates).to.have.length(1024 + 22)

        const expectedExportedUpdates = this.updates
          .slice()
          .reverse()
          .map(update => {
            // clone object, updates are created once in before handler
            const exportedUpdate = Object.assign({}, update)
            exportedUpdate.meta = Object.assign({}, update.meta)

            exportedUpdate.doc_id = this.doc_id
            exportedUpdate.project_id = this.project_id

            // This is for merged updates, which does not apply here.
            exportedUpdate.meta.start_ts = exportedUpdate.meta.end_ts =
              exportedUpdate.meta.ts
            delete exportedUpdate.meta.ts
            return exportedUpdate
          })
        expect(this.exportedUpdates).to.deep.equal(expectedExportedUpdates)
        expect(this.exportedUserIds).to.deep.equal([
          this.user_id,
          this.user_id_2,
        ])
      })
    })
  }

  describe("before archiving a doc's updates", function () {
    testExportFeature()
  })

  describe("archiving a doc's updates", function () {
    before(function (done) {
      TrackChangesClient.pushDocHistory(this.project_id, this.doc_id, error => {
        if (error != null) {
          throw error
        }
        return done()
      })
      return null
    })

    it('should have one cached pack', function (done) {
      return db.docHistory.count(
        { doc_id: ObjectId(this.doc_id), expiresAt: { $exists: true } },
        (error, count) => {
          if (error != null) {
            throw error
          }
          count.should.equal(1)
          return done()
        }
      )
    })

    it('should have one remaining pack after cache is expired', function (done) {
      return db.docHistory.deleteMany(
        {
          doc_id: ObjectId(this.doc_id),
          expiresAt: { $exists: true },
        },
        (error, result) => {
          if (error != null) {
            throw error
          }
          return db.docHistory.count(
            { doc_id: ObjectId(this.doc_id) },
            (error, count) => {
              if (error != null) {
                throw error
              }
              count.should.equal(1)
              return done()
            }
          )
        }
      )
    })

    it('should have a docHistoryIndex entry marked as inS3', function (done) {
      return db.docHistoryIndex.findOne(
        { _id: ObjectId(this.doc_id) },
        (error, index) => {
          if (error != null) {
            throw error
          }
          index.packs[0].inS3.should.equal(true)
          return done()
        }
      )
    })

    it('should have a docHistoryIndex entry with the last version', function (done) {
      return db.docHistoryIndex.findOne(
        { _id: ObjectId(this.doc_id) },
        (error, index) => {
          if (error != null) {
            throw error
          }
          index.packs[0].v_end.should.equal(1024)
          return done()
        }
      )
    })

    it('should store 1024 doc changes in S3 in one pack', function (done) {
      return db.docHistoryIndex.findOne(
        { _id: ObjectId(this.doc_id) },
        (error, index) => {
          if (error != null) {
            throw error
          }
          const pack_id = index.packs[0]._id
          return TrackChangesClient.getS3Doc(
            this.project_id,
            this.doc_id,
            pack_id,
            (error, doc) => {
              doc.n.should.equal(1024)
              doc.pack.length.should.equal(1024)
              return done()
            }
          )
        }
      )
    })

    testExportFeature()
  })

  return describe("unarchiving a doc's updates", function () {
    before(function (done) {
      TrackChangesClient.pullDocHistory(this.project_id, this.doc_id, error => {
        if (error != null) {
          throw error
        }
        return done()
      })
      return null
    })

    return it('should restore both packs', function (done) {
      return db.docHistory.count(
        { doc_id: ObjectId(this.doc_id) },
        (error, count) => {
          if (error != null) {
            throw error
          }
          count.should.equal(2)
          return done()
        }
      )
    })
  })
})

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
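// Editorial note (not part of the original file): the before-hook above pushes
// 2 * 523 = 1046 raw updates (i = 0..512 + 10, two per iteration), i.e. 1024 + 22.
// The assertions are consistent with archiving moving one full pack of 1024 ops
// (v_end === 1024, inS3 === true) to S3 while the remaining 22 ops stay in mongo
// in a second pack, and with unarchiving restoring both packs.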

@@ -0,0 +1,34 @@
const { expect } = require('chai')
const { ObjectId } = require('../../../app/js/mongodb')

const TrackChangesApp = require('./helpers/TrackChangesApp')
const TrackChangesClient = require('./helpers/TrackChangesClient')

describe('ExportProject', function () {
  before('start app', function (done) {
    TrackChangesApp.ensureRunning(done)
  })

  describe('when there are no updates', function () {
    before('fetch export', function (done) {
      TrackChangesClient.exportProject(
        ObjectId(),
        (error, updates, userIds) => {
          if (error) {
            return done(error)
          }
          this.exportedUpdates = updates
          this.exportedUserIds = userIds
          done()
        }
      )
    })

    it('should export an empty array', function () {
      expect(this.exportedUpdates).to.deep.equal([])
      expect(this.exportedUserIds).to.deep.equal([])
    })
  })

  // see ArchivingUpdatesTests for tests with data in mongo/s3
})

@@ -0,0 +1,273 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const { ObjectId } = require('../../../app/js/mongodb')
const Settings = require('@overleaf/settings')
const request = require('request')
const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now

const TrackChangesApp = require('./helpers/TrackChangesApp')
const TrackChangesClient = require('./helpers/TrackChangesClient')
const MockWebApi = require('./helpers/MockWebApi')

describe('Flushing updates', function () {
  before(function (done) {
    return TrackChangesApp.ensureRunning(done)
  })

  describe("flushing a doc's updates", function () {
    before(function (done) {
      this.project_id = ObjectId().toString()
      this.doc_id = ObjectId().toString()
      this.user_id = ObjectId().toString()
      MockWebApi.projects[this.project_id] = { features: { versioning: true } }

      TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        [
          {
            op: [{ i: 'f', p: 3 }],
            meta: { ts: Date.now(), user_id: this.user_id },
            v: 3,
          },
        ],
        error => {
          if (error != null) {
            throw error
          }
          return TrackChangesClient.flushDoc(
            this.project_id,
            this.doc_id,
            error => {
              if (error != null) {
                throw error
              }
              return done()
            }
          )
        }
      )
      return null
    })

    return it('should flush the op into mongo', function (done) {
      TrackChangesClient.getCompressedUpdates(this.doc_id, (error, updates) => {
        expect(updates[0].pack[0].op).to.deep.equal([
          {
            p: 3,
            i: 'f',
          },
        ])
        return done()
      })
      return null
    })
  })

  return describe("flushing a project's updates", function () {
    describe('with versioning enabled', function () {
      before(function (done) {
        this.project_id = ObjectId().toString()
        this.doc_id = ObjectId().toString()
        this.user_id = ObjectId().toString()

        this.weeks = 7 * 24 * 60 * 60 * 1000

        MockWebApi.projects[this.project_id] = {
          features: {
            versioning: true,
          },
        }

        TrackChangesClient.pushRawUpdates(
          this.project_id,
          this.doc_id,
          [
            {
              op: [{ i: 'g', p: 2 }],
              meta: { ts: Date.now() - 2 * this.weeks, user_id: this.user_id },
              v: 2,
            },
            {
              op: [{ i: 'f', p: 3 }],
              meta: { ts: Date.now(), user_id: this.user_id },
              v: 3,
            },
          ],
          error => {
            if (error != null) {
              throw error
            }
            return TrackChangesClient.flushProject(this.project_id, error => {
              if (error != null) {
                throw error
              }
              return done()
            })
          }
        )
        return null
      })

      it('should not mark the updates for deletion', function (done) {
        TrackChangesClient.getCompressedUpdates(
          this.doc_id,
          (error, updates) => {
            expect(updates[0].expiresAt).to.not.exist
            return done()
          }
        )
        return null
      })

      return it('should preserve history forever', function (done) {
        TrackChangesClient.getProjectMetaData(
          this.project_id,
          (error, project) => {
            expect(project.preserveHistory).to.equal(true)
            return done()
          }
        )
        return null
      })
    })

    describe('without versioning enabled', function () {
      before(function (done) {
        this.project_id = ObjectId().toString()
        this.doc_id = ObjectId().toString()
        this.user_id = ObjectId().toString()

        this.weeks = 7 * 24 * 60 * 60 * 1000

        MockWebApi.projects[this.project_id] = {
          features: {
            versioning: false,
          },
        }

        TrackChangesClient.pushRawUpdates(
          this.project_id,
          this.doc_id,
          [
            {
              op: [{ i: 'g', p: 2 }],
              meta: { ts: Date.now() - 2 * this.weeks, user_id: this.user_id },
              v: 2,
            },
            {
              op: [{ i: 'f', p: 3 }],
              meta: { ts: Date.now(), user_id: this.user_id },
              v: 3,
            },
          ],
          error => {
            if (error != null) {
              throw error
            }
            return TrackChangesClient.flushProject(this.project_id, error => {
              if (error != null) {
                throw error
              }
              return done()
            })
          }
        )
        return null
      })

      return it('should mark the updates for deletion', function (done) {
        TrackChangesClient.getCompressedUpdates(
          this.doc_id,
          (error, updates) => {
            expect(updates[0].expiresAt).to.exist
            return done()
          }
        )
        return null
      })
    })

    return describe('without versioning enabled but with preserveHistory set to true', function () {
      before(function (done) {
        this.project_id = ObjectId().toString()
        this.doc_id = ObjectId().toString()
        this.user_id = ObjectId().toString()

        this.weeks = 7 * 24 * 60 * 60 * 1000

        MockWebApi.projects[this.project_id] = {
          features: {
            versioning: false,
          },
        }

        TrackChangesClient.setPreserveHistoryForProject(
          this.project_id,
          error => {
            if (error != null) {
              throw error
            }
            return TrackChangesClient.pushRawUpdates(
              this.project_id,
              this.doc_id,
              [
                {
                  op: [{ i: 'g', p: 2 }],
                  meta: {
                    ts: Date.now() - 2 * this.weeks,
                    user_id: this.user_id,
                  },
                  v: 2,
                },
                {
                  op: [{ i: 'f', p: 3 }],
                  meta: { ts: Date.now(), user_id: this.user_id },
                  v: 3,
                },
              ],
              error => {
                if (error != null) {
                  throw error
                }
                return TrackChangesClient.flushProject(
                  this.project_id,
                  error => {
                    if (error != null) {
                      throw error
                    }
                    return done()
                  }
                )
              }
            )
          }
        )
        return null
      })

      return it('should not mark the updates for deletion', function (done) {
        TrackChangesClient.getCompressedUpdates(
          this.doc_id,
          (error, updates) => {
            expect(updates[0].expiresAt).to.not.exist
            return done()
          }
        )
        return null
      })
    })
  })
})
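// Editorial summary (not part of the original file) of the expiry behaviour the
// three cases above assert: updates get an expiresAt marker only when the project
// has versioning disabled, and setting preserveHistory on the project's metadata
// suppresses the marker even without versioning.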
127
services/track-changes/test/acceptance/js/GettingADiffTests.js
Normal file
@@ -0,0 +1,127 @@
/* eslint-disable
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const { ObjectId } = require('../../../app/js/mongodb')
const Settings = require('@overleaf/settings')

const TrackChangesApp = require('./helpers/TrackChangesApp')
const TrackChangesClient = require('./helpers/TrackChangesClient')
const MockDocUpdaterApi = require('./helpers/MockDocUpdaterApi')
const MockWebApi = require('./helpers/MockWebApi')

describe('Getting a diff', function () {
  beforeEach(function (done) {
    sinon.spy(MockDocUpdaterApi, 'getDoc')

    this.now = Date.now()
    this.from = this.now - 100000000
    this.to = this.now
    this.user_id = ObjectId().toString()
    this.doc_id = ObjectId().toString()
    this.project_id = ObjectId().toString()
    MockWebApi.projects[this.project_id] = { features: { versioning: true } }

    MockWebApi.users[this.user_id] = this.user = {
      email: 'user@sharelatex.com',
      first_name: 'Leo',
      last_name: 'Lion',
      id: this.user_id,
    }
    sinon.spy(MockWebApi, 'getUserInfo')

    const twoMinutes = 2 * 60 * 1000

    this.updates = [
      {
        op: [{ i: 'one ', p: 0 }],
        meta: { ts: this.from - twoMinutes, user_id: this.user_id },
        v: 3,
      },
      {
        op: [{ i: 'two ', p: 4 }],
        meta: { ts: this.from + twoMinutes, user_id: this.user_id },
        v: (this.fromVersion = 4),
      },
      {
        op: [{ i: 'three ', p: 8 }],
        meta: { ts: this.to - twoMinutes, user_id: this.user_id },
        v: (this.toVersion = 5),
      },
      {
        op: [{ i: 'four', p: 14 }],
        meta: { ts: this.to + twoMinutes, user_id: this.user_id },
        v: 6,
      },
    ]
    this.lines = ['one two three four']
    this.expected_diff = [
      { u: 'one ' },
      {
        i: 'two three ',
        meta: {
          start_ts: this.from + twoMinutes,
          end_ts: this.to - twoMinutes,
          user: this.user,
        },
      },
    ]

    MockDocUpdaterApi.docs[this.doc_id] = {
      lines: this.lines,
      version: 7,
    }
    TrackChangesApp.ensureRunning(() => {
      return TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        this.updates,
        error => {
          if (error != null) {
            throw error
          }
          return TrackChangesClient.getDiff(
            this.project_id,
            this.doc_id,
            this.fromVersion,
            this.toVersion,
            (error, diff) => {
              if (error != null) {
                throw error
              }
              this.diff = diff.diff
              return done()
            }
          )
        }
      )
    })
    return null
  })

  afterEach(function () {
    MockDocUpdaterApi.getDoc.restore()
    MockWebApi.getUserInfo.restore()
    return null
  })

  it('should return the diff', function () {
    return expect(this.diff).to.deep.equal(this.expected_diff)
  })

  return it('should get the doc from the doc updater', function () {
    MockDocUpdaterApi.getDoc
      .calledWith(this.project_id, this.doc_id)
      .should.equal(true)
    return null
  })
})
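// Editorial note (not part of the original file): in the expected diff above, `u`
// entries appear to carry text that predates fromVersion (unchanged), while `i`
// entries carry text inserted between fromVersion and toVersion together with its
// time range and user; the v6 insert ('four'), which falls outside the requested
// window, is omitted from the diff entirely.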
185
services/track-changes/test/acceptance/js/GettingUpdatesTests.js
Normal file
@@ -0,0 +1,185 @@
/* eslint-disable
    chai-friendly/no-unused-expressions,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const { ObjectId } = require('../../../app/js/mongodb')
const Settings = require('@overleaf/settings')

const TrackChangesApp = require('./helpers/TrackChangesApp')
const TrackChangesClient = require('./helpers/TrackChangesClient')
const MockWebApi = require('./helpers/MockWebApi')

describe('Getting updates', function () {
  before(function (done) {
    this.now = Date.now()
    this.to = this.now
    this.user_id = ObjectId().toString()
    this.deleted_user_id = 'deleted_user'
    this.doc_id = ObjectId().toString()
    this.project_id = ObjectId().toString()

    this.minutes = 60 * 1000
    this.hours = 60 * this.minutes

    MockWebApi.projects[this.project_id] = {
      features: {
        versioning: true,
      },
    }

    MockWebApi.users[this.user_id] = this.user = {
      email: 'user@sharelatex.com',
      first_name: 'Leo',
      last_name: 'Lion',
      id: this.user_id,
    }
    sinon.spy(MockWebApi, 'getUserInfo')

    this.updates = []
    for (let i = 0; i <= 9; i++) {
      this.updates.push({
        op: [{ i: 'a', p: 0 }],
        meta: {
          ts: this.now - (9 - i) * this.hours - 2 * this.minutes,
          user_id: this.user_id,
        },
        v: 2 * i + 1,
      })
      this.updates.push({
        op: [{ i: 'b', p: 0 }],
        meta: { ts: this.now - (9 - i) * this.hours, user_id: this.user_id },
        v: 2 * i + 2,
      })
    }
    this.updates[0].meta.user_id = this.deleted_user_id

    TrackChangesApp.ensureRunning(() => {
      return TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        this.updates,
        error => {
          if (error != null) {
            throw error
          }
          return done()
        }
      )
    })
    return null
  })

  after(function () {
    MockWebApi.getUserInfo.restore()
    return null
  })

  describe('getting updates up to the limit', function () {
    before(function (done) {
      TrackChangesClient.getUpdates(
        this.project_id,
        { before: this.to + 1, min_count: 3 },
        (error, body) => {
          if (error != null) {
            throw error
          }
          this.updates = body.updates
          return done()
        }
      )
      return null
    })

    it('should fetch the user details from the web api', function () {
      return MockWebApi.getUserInfo.calledWith(this.user_id).should.equal(true)
    })

    return it('should return at least the min_count number of summarized updates', function () {
      const docs1 = {}
      docs1[this.doc_id] = { toV: 20, fromV: 19 }
      const docs2 = {}
      docs2[this.doc_id] = { toV: 18, fromV: 17 }
      const docs3 = {}
      docs3[this.doc_id] = { toV: 16, fromV: 15 }
      return expect(this.updates.slice(0, 3)).to.deep.equal([
        {
          docs: docs1,
          meta: {
            start_ts: this.to - 2 * this.minutes,
            end_ts: this.to,
            users: [this.user],
          },
        },
        {
          docs: docs2,
          meta: {
            start_ts: this.to - 1 * this.hours - 2 * this.minutes,
            end_ts: this.to - 1 * this.hours,
            users: [this.user],
          },
        },
        {
          docs: docs3,
          meta: {
            start_ts: this.to - 2 * this.hours - 2 * this.minutes,
            end_ts: this.to - 2 * this.hours,
            users: [this.user],
          },
        },
      ])
    })
  })

  return describe('getting updates beyond the end of the database', function () {
    before(function (done) {
      TrackChangesClient.getUpdates(
        this.project_id,
        { before: this.to - 8 * this.hours + 1, min_count: 30 },
        (error, body) => {
          if (error != null) {
            throw error
          }
          this.updates = body.updates
          return done()
        }
      )
      return null
    })

    return it('should return as many updates as it can', function () {
      const docs1 = {}
      docs1[this.doc_id] = { toV: 4, fromV: 3 }
      const docs2 = {}
      docs2[this.doc_id] = { toV: 2, fromV: 1 }
      return expect(this.updates).to.deep.equal([
        {
          docs: docs1,
          meta: {
            start_ts: this.to - 8 * this.hours - 2 * this.minutes,
            end_ts: this.to - 8 * this.hours,
            users: [this.user],
          },
        },
        {
          docs: docs2,
          meta: {
            start_ts: this.to - 9 * this.hours - 2 * this.minutes,
            end_ts: this.to - 9 * this.hours,
            users: [this.user, null],
          },
        },
      ])
    })
  })
})
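// Editorial note (not part of the original file): the fixture pushes two updates
// per hour for ten hours (versions 1..20), so each summarized entry covers one
// hour's pair -- e.g. { toV: 20, fromV: 19 } -- and min_count: 3 yields at least
// the three most recent summaries. The oldest entry lists a null user because the
// first update was attributed to the 'deleted_user' id, which the web api cannot
// resolve.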

@@ -0,0 +1,62 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const Settings = require('@overleaf/settings')
const LockManager = require('../../../app/js/LockManager')
const rclient = require('redis').createClient(Settings.redis.history) // Only works locally for now
const TrackChangesApp = require('./helpers/TrackChangesApp')

describe('Locking document', function () {
  before(function (done) {
    TrackChangesApp.ensureRunning(done)
    return null
  })

  return describe('when the lock has expired in redis', function () {
    before(function (done) {
      LockManager.LOCK_TTL = 1 // second
      LockManager.runWithLock(
        'doc123',
        releaseA => {
          // we create a lock A and allow it to expire in redis
          return setTimeout(
            () =>
              // now we create a new lock B and try to release A
              LockManager.runWithLock(
                'doc123',
                releaseB => {
                  return releaseA()
                }, // try to release lock A to see if it wipes out lock B
                error => {}
              ),

            // we never release lock B so nothing should happen here
            1500
          )
        }, // enough time to wait until the lock has expired
        error =>
          // we get here after trying to release lock A
          done()
      )
      return null
    })

    return it('the new lock should not be removed by the expired locker', function (done) {
      LockManager.checkLock('doc123', (err, isFree) => {
        expect(isFree).to.equal(false)
        return done()
      })
      return null
    })
  })
})
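// A minimal sketch (editorial, not part of the original file) of the locking
// pattern exercised above, assuming only the signatures this test already uses:
//
//   LockManager.runWithLock(
//     'doc123',
//     releaseLock => {
//       // ... do the work that needs the lock ...
//       releaseLock() // a release from an expired holder must not clobber a newer lock
//     },
//     error => {
//       // called once the lock has been released (or acquisition failed)
//     }
//   )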
116
services/track-changes/test/acceptance/js/RestoringVersions.js
Normal file
@@ -0,0 +1,116 @@
/* eslint-disable
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const { ObjectId } = require('../../../app/js/mongodb')
const Settings = require('@overleaf/settings')

const TrackChangesApp = require('./helpers/TrackChangesApp')
const TrackChangesClient = require('./helpers/TrackChangesClient')
const MockDocUpdaterApi = require('./helpers/MockDocUpdaterApi')
const MockWebApi = require('./helpers/MockWebApi')

describe('Restoring a version', function () {
  before(function (done) {
    sinon.spy(MockDocUpdaterApi, 'setDoc')

    this.now = Date.now()
    this.user_id = ObjectId().toString()
    this.doc_id = ObjectId().toString()
    this.project_id = ObjectId().toString()
    MockWebApi.projects[this.project_id] = { features: { versioning: true } }

    const minutes = 60 * 1000

    this.updates = [
      {
        op: [{ i: 'one ', p: 0 }],
        meta: { ts: this.now - 6 * minutes, user_id: this.user_id },
        v: 3,
      },
      {
        op: [{ i: 'two ', p: 4 }],
        meta: { ts: this.now - 4 * minutes, user_id: this.user_id },
        v: 4,
      },
      {
        op: [{ i: 'three ', p: 8 }],
        meta: { ts: this.now - 2 * minutes, user_id: this.user_id },
        v: 5,
      },
      {
        op: [{ i: 'four', p: 14 }],
        meta: { ts: this.now, user_id: this.user_id },
        v: 6,
      },
    ]
    this.lines = ['one two three four']
    this.restored_lines = ['one two ']
    this.beforeVersion = 5

    MockWebApi.users[this.user_id] = this.user = {
      email: 'user@sharelatex.com',
      first_name: 'Leo',
      last_name: 'Lion',
      id: this.user_id,
    }

    MockDocUpdaterApi.docs[this.doc_id] = {
      lines: this.lines,
      version: 7,
    }

    TrackChangesApp.ensureRunning(() => {
      return TrackChangesClient.pushRawUpdates(
        this.project_id,
        this.doc_id,
        this.updates,
        error => {
          if (error != null) {
            throw error
          }
          return TrackChangesClient.restoreDoc(
            this.project_id,
            this.doc_id,
            this.beforeVersion,
            this.user_id,
            error => {
              if (error != null) {
                throw error
              }
              return done()
            }
          )
        }
      )
    })
    return null
  })

  after(function () {
    MockDocUpdaterApi.setDoc.restore()
    return null
  })

  return it('should set the doc in the doc updater', function () {
    MockDocUpdaterApi.setDoc
      .calledWith(
        this.project_id,
        this.doc_id,
        this.restored_lines,
        this.user_id,
        true
      )
      .should.equal(true)
    return null
  })
})
|
|
@ -0,0 +1,54 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let MockDocUpdaterApi
const express = require('express')
const app = express()

module.exports = MockDocUpdaterApi = {
  docs: {},

  getAllDoc(project_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return callback(null, this.docs)
  },

  run() {
    app.get('/project/:project_id/doc', (req, res, next) => {
      return this.getAllDoc(req.params.project_id, (error, docs) => {
        if (error != null) {
          res.sendStatus(500)
        }
        if (docs == null) {
          return res.sendStatus(404)
        } else {
          return res.send(JSON.stringify(docs))
        }
      })
    })

    return app
      .listen(3016, error => {
        if (error != null) {
          throw error
        }
      })
      .on('error', error => {
        console.error('error starting MockDocStoreApi:', error.message)
        return process.exit(1)
      })
  },
}

MockDocUpdaterApi.run()
@ -0,0 +1,89 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-undef,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let MockDocUpdaterApi
const express = require('express')
const bodyParser = require('body-parser')
const app = express()
app.use(bodyParser.json())

module.exports = MockDocUpdaterApi = {
  docs: {},

  getDoc(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return callback(null, this.docs[doc_id])
  },

  setDoc(project_id, doc_id, lines, user_id, undoing, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    if (!this.docs[doc_id]) {
      this.docs[doc_id] = {}
    }
    this.docs[doc_id].lines = lines
    return callback()
  },

  run() {
    app.get('/project/:project_id/doc/:doc_id', (req, res, next) => {
      return this.getDoc(
        req.params.project_id,
        req.params.doc_id,
        (error, doc) => {
          if (error != null) {
            res.sendStatus(500)
          }
          if (doc == null) {
            return res.sendStatus(404)
          } else {
            return res.send(JSON.stringify(doc))
          }
        }
      )
    })

    app.post('/project/:project_id/doc/:doc_id', (req, res, next) => {
      return this.setDoc(
        req.params.project_id,
        req.params.doc_id,
        req.body.lines,
        req.body.user_id,
        req.body.undoing,
        (error, doc) => {
          if (typeof error !== 'undefined' && error !== null) {
            return res.sendStatus(500)
          } else {
            return res.sendStatus(204)
          }
        }
      )
    })

    return app
      .listen(3003, error => {
        if (error != null) {
          throw error
        }
      })
      .on('error', error => {
        console.error('error starting MockDocUpdaterApi:', error.message)
        return process.exit(1)
      })
  },
}

MockDocUpdaterApi.run()
@ -0,0 +1,76 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let MockWebApi
const express = require('express')
const app = express()

module.exports = MockWebApi = {
  users: {},

  projects: {},

  getUserInfo(user_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return callback(null, this.users[user_id] || null)
  },

  getProjectDetails(project_id, callback) {
    if (callback == null) {
      callback = function (error, project) {}
    }
    return callback(null, this.projects[project_id])
  },

  run() {
    app.get('/user/:user_id/personal_info', (req, res, next) => {
      return this.getUserInfo(req.params.user_id, (error, user) => {
        if (error != null) {
          res.sendStatus(500)
        }
        if (user == null) {
          return res.sendStatus(404)
        } else {
          return res.send(JSON.stringify(user))
        }
      })
    })

    app.get('/project/:project_id/details', (req, res, next) => {
      return this.getProjectDetails(req.params.project_id, (error, project) => {
        if (error != null) {
          res.sendStatus(500)
        }
        if (project == null) {
          return res.sendStatus(404)
        } else {
          return res.send(JSON.stringify(project))
        }
      })
    })

    return app
      .listen(3000, error => {
        if (error != null) {
          throw error
        }
      })
      .on('error', error => {
        console.error('error starting MockWebApiServer:', error.message)
        return process.exit(1)
      })
  },
}

MockWebApi.run()
@ -0,0 +1,67 @@
/* eslint-disable
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const app = require('../../../../app')
const { waitForDb } = require('../../../../app/js/mongodb')
const logger = require('logger-sharelatex')
const Settings = require('@overleaf/settings')

module.exports = {
  running: false,
  initing: false,
  callbacks: [],
  ensureRunning(callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    if (this.running) {
      return callback()
    } else if (this.initing) {
      return this.callbacks.push(callback)
    }
    this.initing = true
    this.callbacks.push(callback)
    waitForDb().then(() => {
      return app.listen(
        __guard__(
          Settings.internal != null ? Settings.internal.trackchanges : undefined,
          x => x.port
        ),
        'localhost',
        error => {
          if (error != null) {
            throw error
          }
          this.running = true
          logger.log('track changes running in dev mode')

          return (() => {
            const result = []
            for (callback of Array.from(this.callbacks)) {
              result.push(callback())
            }
            return result
          })()
        }
      )
    })
  },
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
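The __guard__ helper above is decaffeinate's null-safe property access. On Node versions with optional chaining the same lookup collapses to a single expression (an equivalent rewrite, shown for clarity):

// equivalent to the __guard__ call above
app.listen(Settings.internal?.trackchanges?.port, 'localhost', error => {
  // ... unchanged
})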
@ -0,0 +1,301 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let TrackChangesClient
const async = require('async')
const zlib = require('zlib')
const request = require('request')
const Settings = require('@overleaf/settings')
const rclient = require('@overleaf/redis-wrapper').createClient(
  Settings.redis.history
) // Only works locally for now
const Keys = Settings.redis.history.key_schema
const { db, ObjectId } = require('../../../../app/js/mongodb')

const aws = require('aws-sdk')
const s3 = new aws.S3({
  accessKeyId: Settings.trackchanges.s3.key,
  secretAccessKey: Settings.trackchanges.s3.secret,
  endpoint: Settings.trackchanges.s3.endpoint,
  s3ForcePathStyle: Settings.trackchanges.s3.pathStyle,
})
const S3_BUCKET = Settings.trackchanges.stores.doc_history

module.exports = TrackChangesClient = {
  flushAndGetCompressedUpdates(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (error, updates) {}
    }
    return TrackChangesClient.flushDoc(project_id, doc_id, error => {
      if (error != null) {
        return callback(error)
      }
      return TrackChangesClient.getCompressedUpdates(doc_id, callback)
    })
  },

  flushDoc(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return request.post(
      {
        url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/flush`,
      },
      (error, response, body) => {
        response.statusCode.should.equal(204)
        return callback(error)
      }
    )
  },

  flushProject(project_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return request.post(
      {
        url: `http://localhost:3015/project/${project_id}/flush`,
      },
      (error, response, body) => {
        response.statusCode.should.equal(204)
        return callback(error)
      }
    )
  },

  getCompressedUpdates(doc_id, callback) {
    if (callback == null) {
      callback = function (error, updates) {}
    }
    return db.docHistory
      .find({ doc_id: ObjectId(doc_id) })
      .sort({ 'meta.end_ts': 1 })
      .toArray(callback)
  },

  getProjectMetaData(project_id, callback) {
    if (callback == null) {
      callback = function (error, updates) {}
    }
    return db.projectHistoryMetaData.findOne(
      {
        project_id: ObjectId(project_id),
      },
      callback
    )
  },

  setPreserveHistoryForProject(project_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return db.projectHistoryMetaData.updateOne(
      {
        project_id: ObjectId(project_id),
      },
      {
        $set: { preserveHistory: true },
      },
      {
        upsert: true,
      },
      callback
    )
  },

  pushRawUpdates(project_id, doc_id, updates, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return rclient.sadd(
      Keys.docsWithHistoryOps({ project_id }),
      doc_id,
      error => {
        if (error != null) {
          return callback(error)
        }
        return rclient.rpush(
          Keys.uncompressedHistoryOps({ doc_id }),
          ...Array.from(Array.from(updates).map(u => JSON.stringify(u))),
          callback
        )
      }
    )
  },

  getDiff(project_id, doc_id, from, to, callback) {
    if (callback == null) {
      callback = function (error, diff) {}
    }
    return request.get(
      {
        url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/diff?from=${from}&to=${to}`,
      },
      (error, response, body) => {
        response.statusCode.should.equal(200)
        return callback(null, JSON.parse(body))
      }
    )
  },

  getUpdates(project_id, options, callback) {
    if (callback == null) {
      callback = function (error, body) {}
    }
    return request.get(
      {
        url: `http://localhost:3015/project/${project_id}/updates?before=${options.before}&min_count=${options.min_count}`,
      },
      (error, response, body) => {
        response.statusCode.should.equal(200)
        return callback(null, JSON.parse(body))
      }
    )
  },

  exportProject(project_id, callback) {
    request.get(
      { url: `http://localhost:3015/project/${project_id}/export`, json: true },
      (error, response, updates) => {
        if (error) return callback(error)
        response.statusCode.should.equal(200)
        callback(null, updates, JSON.parse(response.trailers['x-user-ids']))
      }
    )
  },

  restoreDoc(project_id, doc_id, version, user_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return request.post(
      {
        url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/version/${version}/restore`,
        headers: {
          'X-User-Id': user_id,
        },
      },
      (error, response, body) => {
        response.statusCode.should.equal(204)
        return callback(null)
      }
    )
  },

  pushDocHistory(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return request.post(
      {
        url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/push`,
      },
      (error, response, body) => {
        response.statusCode.should.equal(204)
        return callback(error)
      }
    )
  },

  pullDocHistory(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return request.post(
      {
        url: `http://localhost:3015/project/${project_id}/doc/${doc_id}/pull`,
      },
      (error, response, body) => {
        response.statusCode.should.equal(204)
        return callback(error)
      }
    )
  },

  waitForS3(done, retries) {
    if (retries == null) {
      retries = 42
    }
    if (!Settings.trackchanges.s3.endpoint) {
      return done()
    }

    return request.get(`${Settings.trackchanges.s3.endpoint}/`, (err, res) => {
      if (res && res.statusCode < 500) {
        return done()
      }

      if (retries === 0) {
        return done(err || new Error(`s3 returned ${res.statusCode}`))
      }

      return setTimeout(
        () => TrackChangesClient.waitForS3(done, --retries),
        1000
      )
    })
  },

  getS3Doc(project_id, doc_id, pack_id, callback) {
    if (callback == null) {
      callback = function (error, body) {}
    }
    const params = {
      Bucket: S3_BUCKET,
      Key: `${project_id}/changes-${doc_id}/pack-${pack_id}`,
    }

    return s3.getObject(params, (error, data) => {
      if (error != null) {
        return callback(error)
      }
      const body = data.Body
      if (body == null) {
        return callback(new Error('empty response from s3'))
      }
      return zlib.gunzip(body, (err, result) => {
        if (err != null) {
          return callback(err)
        }
        return callback(null, JSON.parse(result.toString()))
      })
    })
  },

  removeS3Doc(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (error, res, body) {}
    }
    let params = {
      Bucket: S3_BUCKET,
      Prefix: `${project_id}/changes-${doc_id}`,
    }

    return s3.listObjects(params, (error, data) => {
      if (error != null) {
        return callback(error)
      }

      params = {
        Bucket: S3_BUCKET,
        Delete: {
          Objects: data.Contents.map(s3object => ({ Key: s3object.Key })),
        },
      }

      return s3.deleteObjects(params, callback)
    })
  },
}
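A typical flow through the client helper above in an acceptance test: push raw updates into redis, flush them through the service's HTTP API, then read the compressed packs back out of mongo. A sketch, with project_id, doc_id and updates standing in for real test fixtures:

TrackChangesClient.pushRawUpdates(project_id, doc_id, updates, error => {
  if (error != null) {
    throw error
  }
  // flushes via the HTTP API, then reads db.docHistory sorted by meta.end_ts
  TrackChangesClient.flushAndGetCompressedUpdates(
    project_id,
    doc_id,
    (error, packs) => {
      if (error != null) {
        throw error
      }
      // assert on the compressed packs here
    }
  )
})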
21
services/track-changes/test/setup.js
Normal file
@ -0,0 +1,21 @@
const chai = require('chai')
const SandboxedModule = require('sandboxed-module')

// Chai configuration
chai.should()

// SandboxedModule configuration
SandboxedModule.configure({
  requires: {
    'logger-sharelatex': {
      debug() {},
      log() {},
      info() {},
      warn() {},
      err() {},
      error() {},
      fatal() {},
    },
  },
  globals: { Buffer, JSON, console, process },
})
@ -0,0 +1,456 @@
/* eslint-disable
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/DiffGenerator.js'
const SandboxedModule = require('sandboxed-module')

describe('DiffGenerator', function () {
  beforeEach(function () {
    this.DiffGenerator = SandboxedModule.require(modulePath, {})
    this.ts = Date.now()
    this.user_id = 'mock-user-id'
    this.user_id_2 = 'mock-user-id-2'
    return (this.meta = {
      start_ts: this.ts,
      end_ts: this.ts,
      user_id: this.user_id,
    })
  })

  describe('rewindOp', function () {
    describe('rewinding an insert', function () {
      return it('should undo the insert', function () {
        const content = 'hello world'
        const rewoundContent = this.DiffGenerator.rewindOp(content, {
          p: 6,
          i: 'wo',
        })
        return rewoundContent.should.equal('hello rld')
      })
    })

    describe('rewinding a delete', function () {
      return it('should undo the delete', function () {
        const content = 'hello rld'
        const rewoundContent = this.DiffGenerator.rewindOp(content, {
          p: 6,
          d: 'wo',
        })
        return rewoundContent.should.equal('hello world')
      })
    })

    describe('with an inconsistent update', function () {
      return it('should throw an error', function () {
        const content = 'hello world'
        return expect(() => {
          return this.DiffGenerator.rewindOp(content, { p: 6, i: 'foo' })
        }).to.throw(this.DiffGenerator.ConsistencyError)
      })
    })

    return describe('with an update which is beyond the length of the content', function () {
      return it('should undo the insert as if it were at the end of the content', function () {
        const content = 'foobar'
        const rewoundContent = this.DiffGenerator.rewindOp(content, {
          p: 4,
          i: 'bar',
        })
        return rewoundContent.should.equal('foo')
      })
    })
  })

  describe('rewindUpdate', function () {
    return it('should rewind ops in reverse', function () {
      const content = 'aaabbbccc'
      const update = {
        op: [
          { p: 3, i: 'bbb' },
          { p: 6, i: 'ccc' },
        ],
      }
      const rewoundContent = this.DiffGenerator.rewindUpdate(content, update)
      return rewoundContent.should.equal('aaa')
    })
  })

  describe('rewindUpdates', function () {
    return it('should rewind updates in reverse', function () {
      const content = 'aaabbbccc'
      const updates = [
        { op: [{ p: 3, i: 'bbb' }] },
        { op: [{ p: 6, i: 'ccc' }] },
      ]
      const rewoundContent = this.DiffGenerator.rewindUpdates(content, updates)
      return rewoundContent.should.equal('aaa')
    })
  })

  describe('buildDiff', function () {
    beforeEach(function () {
      this.diff = [{ u: 'mock-diff' }]
      this.content = 'Hello world'
      this.updates = [
        { i: 'mock-update-1' },
        { i: 'mock-update-2' },
        { i: 'mock-update-3' },
      ]
      this.DiffGenerator.applyUpdateToDiff = sinon.stub().returns(this.diff)
      this.DiffGenerator.compressDiff = sinon.stub().returns(this.diff)
      return (this.result = this.DiffGenerator.buildDiff(
        this.content,
        this.updates
      ))
    })

    it('should return the diff', function () {
      return this.result.should.deep.equal(this.diff)
    })

    it('should build the content into an initial diff', function () {
      return this.DiffGenerator.applyUpdateToDiff
        .calledWith(
          [
            {
              u: this.content,
            },
          ],
          this.updates[0]
        )
        .should.equal(true)
    })

    it('should apply each update', function () {
      return Array.from(this.updates).map(update =>
        this.DiffGenerator.applyUpdateToDiff
          .calledWith(sinon.match.any, update)
          .should.equal(true)
      )
    })

    return it('should compress the diff', function () {
      return this.DiffGenerator.compressDiff
        .calledWith(this.diff)
        .should.equal(true)
    })
  })

  describe('compressDiff', function () {
    describe('with adjacent inserts with the same user_id', function () {
      return it('should create one update with combined meta data and min/max timestamps', function () {
        const diff = this.DiffGenerator.compressDiff([
          {
            i: 'foo',
            meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } },
          },
          {
            i: 'bar',
            meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id } },
          },
        ])
        return expect(diff).to.deep.equal([
          {
            i: 'foobar',
            meta: { start_ts: 5, end_ts: 20, user: { id: this.user_id } },
          },
        ])
      })
    })

    describe('with adjacent inserts with different user_ids', function () {
      return it('should leave the inserts unchanged', function () {
        const input = [
          {
            i: 'foo',
            meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } },
          },
          {
            i: 'bar',
            meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id_2 } },
          },
        ]
        const output = this.DiffGenerator.compressDiff(input)
        return expect(output).to.deep.equal(input)
      })
    })

    describe('with adjacent deletes with the same user_id', function () {
      return it('should create one update with combined meta data and min/max timestamps', function () {
        const diff = this.DiffGenerator.compressDiff([
          {
            d: 'foo',
            meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } },
          },
          {
            d: 'bar',
            meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id } },
          },
        ])
        return expect(diff).to.deep.equal([
          {
            d: 'foobar',
            meta: { start_ts: 5, end_ts: 20, user: { id: this.user_id } },
          },
        ])
      })
    })

    return describe('with adjacent deletes with different user_ids', function () {
      return it('should leave the deletes unchanged', function () {
        const input = [
          {
            d: 'foo',
            meta: { start_ts: 10, end_ts: 20, user: { id: this.user_id } },
          },
          {
            d: 'bar',
            meta: { start_ts: 5, end_ts: 15, user: { id: this.user_id_2 } },
          },
        ]
        const output = this.DiffGenerator.compressDiff(input)
        return expect(output).to.deep.equal(input)
      })
    })
  })

  return describe('applyUpdateToDiff', function () {
    describe('an insert', function () {
      it('should insert into the middle of (u)nchanged text', function () {
        const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
          op: [{ p: 3, i: 'baz' }],
          meta: this.meta,
        })
        return expect(diff).to.deep.equal([
          { u: 'foo' },
          { i: 'baz', meta: this.meta },
          { u: 'bar' },
        ])
      })

      it('should insert into the start of (u)nchanged text', function () {
        const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
          op: [{ p: 0, i: 'baz' }],
          meta: this.meta,
        })
        return expect(diff).to.deep.equal([
          { i: 'baz', meta: this.meta },
          { u: 'foobar' },
        ])
      })

      it('should insert into the end of (u)nchanged text', function () {
        const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
          op: [{ p: 6, i: 'baz' }],
          meta: this.meta,
        })
        return expect(diff).to.deep.equal([
          { u: 'foobar' },
          { i: 'baz', meta: this.meta },
        ])
      })

      it('should insert into the middle of (i)nserted text', function () {
        const diff = this.DiffGenerator.applyUpdateToDiff(
          [{ i: 'foobar', meta: this.meta }],
          { op: [{ p: 3, i: 'baz' }], meta: this.meta }
        )
        return expect(diff).to.deep.equal([
          { i: 'foo', meta: this.meta },
          { i: 'baz', meta: this.meta },
          { i: 'bar', meta: this.meta },
        ])
      })

      return it('should not count deletes in the running length total', function () {
        const diff = this.DiffGenerator.applyUpdateToDiff(
          [{ d: 'deleted', meta: this.meta }, { u: 'foobar' }],
          { op: [{ p: 3, i: 'baz' }], meta: this.meta }
        )
        return expect(diff).to.deep.equal([
          { d: 'deleted', meta: this.meta },
          { u: 'foo' },
          { i: 'baz', meta: this.meta },
          { u: 'bar' },
        ])
      })
    })

    return describe('a delete', function () {
      describe('deleting unchanged text', function () {
        it('should delete from the middle of (u)nchanged text', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foobazbar' }],
            { op: [{ p: 3, d: 'baz' }], meta: this.meta }
          )
          return expect(diff).to.deep.equal([
            { u: 'foo' },
            { d: 'baz', meta: this.meta },
            { u: 'bar' },
          ])
        })

        it('should delete from the start of (u)nchanged text', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foobazbar' }],
            { op: [{ p: 0, d: 'foo' }], meta: this.meta }
          )
          return expect(diff).to.deep.equal([
            { d: 'foo', meta: this.meta },
            { u: 'bazbar' },
          ])
        })

        it('should delete from the end of (u)nchanged text', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foobazbar' }],
            { op: [{ p: 6, d: 'bar' }], meta: this.meta }
          )
          return expect(diff).to.deep.equal([
            { u: 'foobaz' },
            { d: 'bar', meta: this.meta },
          ])
        })

        return it('should delete across multiple (u)nchanged text parts', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foo' }, { u: 'baz' }, { u: 'bar' }],
            { op: [{ p: 2, d: 'obazb' }], meta: this.meta }
          )
          return expect(diff).to.deep.equal([
            { u: 'fo' },
            { d: 'o', meta: this.meta },
            { d: 'baz', meta: this.meta },
            { d: 'b', meta: this.meta },
            { u: 'ar' },
          ])
        })
      })

      describe('deleting inserts', function () {
        it('should delete from the middle of (i)nserted text', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ i: 'foobazbar', meta: this.meta }],
            { op: [{ p: 3, d: 'baz' }], meta: this.meta }
          )
          return expect(diff).to.deep.equal([
            { i: 'foo', meta: this.meta },
            { i: 'bar', meta: this.meta },
          ])
        })

        it('should delete from the start of (i)nserted text', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ i: 'foobazbar', meta: this.meta }],
            { op: [{ p: 0, d: 'foo' }], meta: this.meta }
          )
          return expect(diff).to.deep.equal([{ i: 'bazbar', meta: this.meta }])
        })

        it('should delete from the end of (i)nserted text', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ i: 'foobazbar', meta: this.meta }],
            { op: [{ p: 6, d: 'bar' }], meta: this.meta }
          )
          return expect(diff).to.deep.equal([{ i: 'foobaz', meta: this.meta }])
        })

        return it('should delete across multiple (u)nchanged and (i)nserted text parts', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foo' }, { i: 'baz', meta: this.meta }, { u: 'bar' }],
            { op: [{ p: 2, d: 'obazb' }], meta: this.meta }
          )
          return expect(diff).to.deep.equal([
            { u: 'fo' },
            { d: 'o', meta: this.meta },
            { d: 'b', meta: this.meta },
            { u: 'ar' },
          ])
        })
      })

      describe('deleting over existing deletes', function () {
        return it('should delete across multiple (u)nchanged and (d)eleted text parts', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foo' }, { d: 'baz', meta: this.meta }, { u: 'bar' }],
            { op: [{ p: 2, d: 'ob' }], meta: this.meta }
          )
          return expect(diff).to.deep.equal([
            { u: 'fo' },
            { d: 'o', meta: this.meta },
            { d: 'baz', meta: this.meta },
            { d: 'b', meta: this.meta },
            { u: 'ar' },
          ])
        })
      })

      describe("deleting when the text doesn't match", function () {
        it('should throw an error when deleting from the middle of (u)nchanged text', function () {
          return expect(() =>
            this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
              op: [{ p: 3, d: 'xxx' }],
              meta: this.meta,
            })
          ).to.throw(this.DiffGenerator.ConsistencyError)
        })

        it('should throw an error when deleting from the start of (u)nchanged text', function () {
          return expect(() =>
            this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
              op: [{ p: 0, d: 'xxx' }],
              meta: this.meta,
            })
          ).to.throw(this.DiffGenerator.ConsistencyError)
        })

        return it('should throw an error when deleting from the end of (u)nchanged text', function () {
          return expect(() =>
            this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
              op: [{ p: 6, d: 'xxx' }],
              meta: this.meta,
            })
          ).to.throw(this.DiffGenerator.ConsistencyError)
        })
      })

      describe('when the last update in the existing diff is a delete', function () {
        return it('should insert the new update before the delete', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ u: 'foo' }, { d: 'bar', meta: this.meta }],
            { op: [{ p: 3, i: 'baz' }], meta: this.meta }
          )
          return expect(diff).to.deep.equal([
            { u: 'foo' },
            { i: 'baz', meta: this.meta },
            { d: 'bar', meta: this.meta },
          ])
        })
      })

      return describe('when the only update in the existing diff is a delete', function () {
        return it('should insert the new update after the delete', function () {
          const diff = this.DiffGenerator.applyUpdateToDiff(
            [{ d: 'bar', meta: this.meta }],
            { op: [{ p: 0, i: 'baz' }], meta: this.meta }
          )
          return expect(diff).to.deep.equal([
            { d: 'bar', meta: this.meta },
            { i: 'baz', meta: this.meta },
          ])
        })
      })
    })
  })
})
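The rewindOp expectations above fully determine its behaviour on these inputs. A minimal sketch consistent with them (illustrative only; the real implementation and its ConsistencyError live in app/js/DiffGenerator.js):

class ConsistencyError extends Error {}

function rewindOp(content, op) {
  if (op.i != null) {
    // undo an insert: the inserted text must still be present at p
    // (clamped to the end of the content for out-of-range positions)
    const p = Math.min(op.p, content.length - op.i.length)
    if (content.slice(p, p + op.i.length) !== op.i) {
      throw new ConsistencyError('insert does not match content')
    }
    return content.slice(0, p) + content.slice(p + op.i.length)
  } else if (op.d != null) {
    // undo a delete: re-insert the deleted text at p
    return content.slice(0, op.p) + op.d + content.slice(op.p)
  }
  return content
}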
@ -0,0 +1,444 @@
/* eslint-disable
    camelcase,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/DiffManager.js'
const SandboxedModule = require('sandboxed-module')

describe('DiffManager', function () {
  beforeEach(function () {
    this.DiffManager = SandboxedModule.require(modulePath, {
      requires: {
        './UpdatesManager': (this.UpdatesManager = {}),
        './DocumentUpdaterManager': (this.DocumentUpdaterManager = {}),
        './DiffGenerator': (this.DiffGenerator = {}),
      },
    })
    this.callback = sinon.stub()
    this.from = new Date()
    this.to = new Date(Date.now() + 10000)
    this.project_id = 'mock-project-id'
    return (this.doc_id = 'mock-doc-id')
  })

  describe('getLatestDocAndUpdates', function () {
    beforeEach(function () {
      this.content = 'hello world'
      this.version = 42
      this.updates = ['mock-update-1', 'mock-update-2']

      this.DocumentUpdaterManager.getDocument = sinon
        .stub()
        .callsArgWith(2, null, this.content, this.version)
      return (this.UpdatesManager.getDocUpdatesWithUserInfo = sinon
        .stub()
        .callsArgWith(3, null, this.updates))
    })

    describe('with a fromVersion', function () {
      beforeEach(function () {
        return this.DiffManager.getLatestDocAndUpdates(
          this.project_id,
          this.doc_id,
          this.from,
          this.callback
        )
      })

      it('should get the latest version of the doc', function () {
        return this.DocumentUpdaterManager.getDocument
          .calledWith(this.project_id, this.doc_id)
          .should.equal(true)
      })

      it('should get the latest updates', function () {
        return this.UpdatesManager.getDocUpdatesWithUserInfo
          .calledWith(this.project_id, this.doc_id, { from: this.from })
          .should.equal(true)
      })

      return it('should call the callback with the content, version and updates', function () {
        return this.callback
          .calledWith(null, this.content, this.version, this.updates)
          .should.equal(true)
      })
    })

    return describe('with no fromVersion', function () {
      beforeEach(function () {
        return this.DiffManager.getLatestDocAndUpdates(
          this.project_id,
          this.doc_id,
          null,
          this.callback
        )
      })

      it('should get the latest version of the doc', function () {
        return this.DocumentUpdaterManager.getDocument
          .calledWith(this.project_id, this.doc_id)
          .should.equal(true)
      })

      it('should not get the latest updates', function () {
        return this.UpdatesManager.getDocUpdatesWithUserInfo.called.should.equal(
          false
        )
      })

      return it('should call the callback with the content, version and blank updates', function () {
        return this.callback
          .calledWith(null, this.content, this.version, [])
          .should.equal(true)
      })
    })
  })

  describe('getDiff', function () {
    beforeEach(function () {
      this.content = 'hello world'
      // Op versions are the version they were applied to, so the doc is
      // always one version ahead.
      this.version = 43
      this.updates = [
        {
          op: 'mock-4',
          v: 42,
          meta: { start_ts: new Date(this.to.getTime() + 20) },
        },
        {
          op: 'mock-3',
          v: 41,
          meta: { start_ts: new Date(this.to.getTime() + 10) },
        },
        {
          op: 'mock-2',
          v: 40,
          meta: { start_ts: new Date(this.to.getTime() - 10) },
        },
        {
          op: 'mock-1',
          v: 39,
          meta: { start_ts: new Date(this.to.getTime() - 20) },
        },
      ]
      this.fromVersion = 39
      this.toVersion = 40
      this.diffed_updates = this.updates.slice(2)
      this.rewound_content = 'rewound-content'
      return (this.diff = [{ u: 'mock-diff' }])
    })

    describe('with matching versions', function () {
      beforeEach(function () {
        this.DiffManager.getDocumentBeforeVersion = sinon
          .stub()
          .callsArgWith(3, null, this.rewound_content, this.updates)
        this.DiffGenerator.buildDiff = sinon.stub().returns(this.diff)
        return this.DiffManager.getDiff(
          this.project_id,
          this.doc_id,
          this.fromVersion,
          this.toVersion,
          this.callback
        )
      })

      it('should get the latest doc and version with all recent updates', function () {
        return this.DiffManager.getDocumentBeforeVersion
          .calledWith(this.project_id, this.doc_id, this.fromVersion)
          .should.equal(true)
      })

      it('should generate the diff', function () {
        return this.DiffGenerator.buildDiff
          .calledWith(
            this.rewound_content,
            this.diffed_updates.slice().reverse()
          )
          .should.equal(true)
      })

      return it('should call the callback with the diff', function () {
        return this.callback.calledWith(null, this.diff).should.equal(true)
      })
    })

    describe('when the updates are inconsistent', function () {
      beforeEach(function () {
        this.DiffManager.getLatestDocAndUpdates = sinon
          .stub()
          .callsArgWith(3, null, this.content, this.version, this.updates)
        this.DiffGenerator.buildDiff = sinon
          .stub()
          .throws((this.error = new Error('inconsistent!')))
        this.DiffGenerator.rewindUpdates = sinon.stub()
        this.DiffManager.getDiff(
          this.project_id,
          this.doc_id,
          this.fromVersion,
          this.toVersion,
          this.callback
        )
      })

      it('should call the callback with an error', function () {
        this.callback.calledWith(sinon.match(Error)).should.equal(true)
        const errorObj = this.callback.args[0][0]
        expect(errorObj.message).to.include('inconsistent!')
      })
    })
  })

  describe('getDocumentBeforeVersion', function () {
    beforeEach(function () {
      this.DiffManager._tryGetDocumentBeforeVersion = sinon.stub()
      this.document = 'mock-documents'
      return (this.rewound_updates = 'mock-rewound-updates')
    })

    describe('successfully', function () {
      beforeEach(function () {
        this.DiffManager._tryGetDocumentBeforeVersion.yields(
          null,
          this.document,
          this.rewound_updates
        )
        return this.DiffManager.getDocumentBeforeVersion(
          this.project_id,
          this.doc_id,
          this.version,
          this.callback
        )
      })

      it('should call _tryGetDocumentBeforeVersion', function () {
        return this.DiffManager._tryGetDocumentBeforeVersion
          .calledWith(this.project_id, this.doc_id, this.version)
          .should.equal(true)
      })

      return it('should call the callback with the response', function () {
        return this.callback
          .calledWith(null, this.document, this.rewound_updates)
          .should.equal(true)
      })
    })

    describe('with a retry needed', function () {
      beforeEach(function () {
        let retried = false
        this.DiffManager._tryGetDocumentBeforeVersion = (
          project_id,
          doc_id,
          version,
          callback
        ) => {
          if (!retried) {
            retried = true
            const error = new Error()
            error.retry = true
            return callback(error)
          } else {
            return callback(null, this.document, this.rewound_updates)
          }
        }
        sinon.spy(this.DiffManager, '_tryGetDocumentBeforeVersion')
        return this.DiffManager.getDocumentBeforeVersion(
          this.project_id,
          this.doc_id,
          this.version,
          this.callback
        )
      })

      it('should call _tryGetDocumentBeforeVersion twice', function () {
        return this.DiffManager._tryGetDocumentBeforeVersion.calledTwice.should.equal(
          true
        )
      })

      return it('should call the callback with the response', function () {
        return this.callback
          .calledWith(null, this.document, this.rewound_updates)
          .should.equal(true)
      })
    })

    describe('with a non-retriable error', function () {
      beforeEach(function () {
        this.error = new Error('oops')
        this.DiffManager._tryGetDocumentBeforeVersion.yields(this.error)
        return this.DiffManager.getDocumentBeforeVersion(
          this.project_id,
          this.doc_id,
          this.version,
          this.callback
        )
      })

      it('should call _tryGetDocumentBeforeVersion once', function () {
        return this.DiffManager._tryGetDocumentBeforeVersion.calledOnce.should.equal(
          true
        )
      })

      return it('should call the callback with the error', function () {
        return this.callback.calledWith(this.error).should.equal(true)
      })
    })

    return describe('when retry limit is matched', function () {
      beforeEach(function () {
        this.error = new Error('oops')
        this.error.retry = true
        this.DiffManager._tryGetDocumentBeforeVersion.yields(this.error)
        return this.DiffManager.getDocumentBeforeVersion(
          this.project_id,
          this.doc_id,
          this.version,
          this.callback
        )
      })

      it('should call _tryGetDocumentBeforeVersion three times (max retries)', function () {
        return this.DiffManager._tryGetDocumentBeforeVersion.calledThrice.should.equal(
          true
        )
      })

      return it('should call the callback with the error', function () {
        return this.callback.calledWith(this.error).should.equal(true)
      })
    })
  })

  return describe('_tryGetDocumentBeforeVersion', function () {
    beforeEach(function () {
      this.content = 'hello world'
      // Op versions are the version they were applied to, so the doc is
      // always one version ahead.
      this.version = 43
      this.updates = [
        {
          op: 'mock-4',
          v: 42,
          meta: { start_ts: new Date(this.to.getTime() + 20) },
        },
        {
          op: 'mock-3',
          v: 41,
          meta: { start_ts: new Date(this.to.getTime() + 10) },
        },
        {
          op: 'mock-2',
          v: 40,
          meta: { start_ts: new Date(this.to.getTime() - 10) },
        },
        {
          op: 'mock-1',
          v: 39,
          meta: { start_ts: new Date(this.to.getTime() - 20) },
        },
      ]
      this.fromVersion = 39
      this.rewound_content = 'rewound-content'
      return (this.diff = [{ u: 'mock-diff' }])
    })

    describe('with matching versions', function () {
      beforeEach(function () {
        this.DiffManager.getLatestDocAndUpdates = sinon
          .stub()
          .callsArgWith(3, null, this.content, this.version, this.updates)
        this.DiffGenerator.rewindUpdates = sinon.spy((content, updates) => {
          // the rewindUpdates method reverses the 'updates' array
          updates.reverse()
          return this.rewound_content
        })
        this.rewindUpdatesWithArgs = this.DiffGenerator.rewindUpdates.withArgs(
          this.content,
          this.updates.slice().reverse()
        )
        return this.DiffManager._tryGetDocumentBeforeVersion(
          this.project_id,
          this.doc_id,
          this.fromVersion,
          this.callback
        )
      })

      it('should get the latest doc and version with all recent updates', function () {
        return this.DiffManager.getLatestDocAndUpdates
          .calledWith(this.project_id, this.doc_id, this.fromVersion)
          .should.equal(true)
      })

      it('should rewind the diff', function () {
        return sinon.assert.calledOnce(this.rewindUpdatesWithArgs)
      })

      return it('should call the callback with the rewound document and updates', function () {
        return this.callback
          .calledWith(null, this.rewound_content, this.updates)
          .should.equal(true)
      })
    })

    describe('with mismatching versions', function () {
      beforeEach(function () {
        this.version = 50
        this.updates = [
          { op: 'mock-1', v: 40 },
          { op: 'mock-1', v: 39 },
        ]
        this.DiffManager.getLatestDocAndUpdates = sinon
          .stub()
          .callsArgWith(3, null, this.content, this.version, this.updates)
        return this.DiffManager._tryGetDocumentBeforeVersion(
          this.project_id,
          this.doc_id,
          this.fromVersion,
          this.callback
        )
      })

      return it('should call the callback with an error with retry = true set', function () {
        this.callback.calledOnce.should.equal(true)
        const error = this.callback.args[0][0]
        return expect(error.retry).to.equal(true)
      })
    })

    return describe('when the updates are inconsistent', function () {
      beforeEach(function () {
        this.DiffManager.getLatestDocAndUpdates = sinon
          .stub()
          .callsArgWith(3, null, this.content, this.version, this.updates)
        this.DiffGenerator.rewindUpdates = sinon
          .stub()
          .throws((this.error = new Error('inconsistent!')))
        return this.DiffManager.getDocumentBeforeVersion(
          this.project_id,
          this.doc_id,
          this.fromVersion,
          this.callback
        )
      })

      return it('should call the callback with an error', function () {
        return this.callback.calledWith(this.error).should.equal(true)
      })
    })
  })
})
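Taken together, the getDocumentBeforeVersion cases above pin down a simple retry policy: up to three attempts, retrying only when the error carries retry = true. A sketch of that wrapper, assuming _tryGetDocumentBeforeVersion as the single-attempt primitive (illustrative, not the actual DiffManager source):

const MAX_ATTEMPTS = 3

function getDocumentBeforeVersion(project_id, doc_id, version, callback, attempt = 1) {
  DiffManager._tryGetDocumentBeforeVersion(
    project_id,
    doc_id,
    version,
    (error, document, rewoundUpdates) => {
      if (error != null && error.retry && attempt < MAX_ATTEMPTS) {
        // retriable failure: try again, up to MAX_ATTEMPTS in total
        return getDocumentBeforeVersion(
          project_id,
          doc_id,
          version,
          callback,
          attempt + 1
        )
      }
      return callback(error, document, rewoundUpdates)
    }
  )
}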
108
services/track-changes/test/unit/js/DocArchive/MongoAWS.js
Normal file
@ -0,0 +1,108 @@
/* eslint-disable
    handle-callback-err,
    no-return-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const modulePath = '../../../../app/js/MongoAWS.js'
const SandboxedModule = require('sandboxed-module')
const { ObjectId } = require('mongodb')
const MemoryStream = require('memorystream')
const zlib = require('zlib')

describe('MongoAWS', function () {
  beforeEach(function () {
    this.MongoAWS = SandboxedModule.require(modulePath, {
      singleOnly: true,
      requires: {
        '@overleaf/settings': (this.settings = {
          trackchanges: {
            s3: {
              secret: 's3-secret',
              key: 's3-key',
            },
            stores: {
              doc_history: 's3-bucket',
            },
          },
        }),
        child_process: (this.child_process = {}),
        'mongo-uri': (this.mongouri = {}),
        'aws-sdk': (this.awssdk = {}),
        fs: (this.fs = {}),
        's3-streams': (this.S3S = {}),
        './mongodb': { db: (this.db = {}), ObjectId },
        JSONStream: (this.JSONStream = {}),
        'readline-stream': (this.readline = sinon.stub()),
        '@overleaf/metrics': { inc() {} },
      },
    })

    this.project_id = ObjectId().toString()
    this.doc_id = ObjectId().toString()
    this.pack_id = ObjectId()
    this.update = { v: 123 }
    return (this.callback = sinon.stub())
  })

  describe('archivePack', function () {
    beforeEach(function (done) {
      this.awssdk.config = { update: sinon.stub() }
      this.awssdk.S3 = sinon.stub()
      this.S3S.WriteStream = () => MemoryStream.createWriteStream()
      this.db.docHistory = {}
      this.db.docHistory.findOne = sinon
        .stub()
        .callsArgWith(1, null, { pack: 'hello' })

      return this.MongoAWS.archivePack(
        this.project_id,
        this.doc_id,
        this.pack_id,
        (err, result) => {
          this.callback()
          return done()
        }
      )
    })

    return it('should call the callback', function () {
      return this.callback.called.should.equal(true)
    })
  })

  return describe('unArchivePack', function () {
    beforeEach(function (done) {
      return zlib.gzip('{"pack":"123"}', (err, zbuf) => {
        this.awssdk.config = { update: sinon.stub() }
        this.awssdk.S3 = sinon.stub()
        this.S3S.ReadStream = () =>
          MemoryStream.createReadStream(zbuf, { readable: true })
        this.db.docHistory = {}
        this.db.docHistory.insertOne = sinon
          .stub()
          .yields(null, { insertedId: ObjectId() })

        return this.MongoAWS.unArchivePack(
          this.project_id,
          this.doc_id,
          this.pack_id,
          (err, result) => {
            this.callback()
            return done()
          }
        )
      })
    })

    return it('should call db.docHistory.insert', function () {
      return this.db.docHistory.insertOne.called.should.equal(true)
    })
  })
})
@ -0,0 +1,199 @@
/* eslint-disable
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/DocumentUpdaterManager.js'
const SandboxedModule = require('sandboxed-module')

describe('DocumentUpdaterManager', function () {
  beforeEach(function () {
    this.DocumentUpdaterManager = SandboxedModule.require(modulePath, {
      requires: {
        request: (this.request = {}),
        '@overleaf/settings': (this.settings = {
          apis: { documentupdater: { url: 'http://example.com' } },
        }),
      },
    })
    this.callback = sinon.stub()
    this.lines = ['one', 'two', 'three']
    return (this.version = 42)
  })

  describe('getDocument', function () {
    describe('successfully', function () {
      beforeEach(function () {
        this.body = JSON.stringify({
          lines: this.lines,
          version: this.version,
          ops: [],
        })
        this.request.get = sinon
          .stub()
          .callsArgWith(1, null, { statusCode: 200 }, this.body)
        return this.DocumentUpdaterManager.getDocument(
          this.project_id,
          this.doc_id,
          this.callback
        )
      })

      it('should get the document from the document updater', function () {
        const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}/doc/${this.doc_id}`
        return this.request.get.calledWith(url).should.equal(true)
      })

      return it('should call the callback with the content and version', function () {
        return this.callback
          .calledWith(null, this.lines.join('\n'), this.version)
          .should.equal(true)
      })
    })

    describe('when the document updater API returns an error', function () {
      beforeEach(function () {
        this.request.get = sinon
          .stub()
          .callsArgWith(
            1,
            (this.error = new Error('something went wrong')),
            null,
            null
          )
        return this.DocumentUpdaterManager.getDocument(
          this.project_id,
          this.doc_id,
          this.callback
        )
      })

      return it('should return an error to the callback', function () {
        return this.callback.calledWith(this.error).should.equal(true)
      })
    })

    return describe('when the document updater returns a failure error code', function () {
      beforeEach(function () {
        this.request.get = sinon
          .stub()
          .callsArgWith(1, null, { statusCode: 500 }, '')
        return this.DocumentUpdaterManager.getDocument(
          this.project_id,
          this.doc_id,
          this.callback
        )
      })

      return it('should return the callback with an error', function () {
        return this.callback
          .calledWith(
            sinon.match.has(
              'message',
              'doc updater returned a non-success status code: 500'
            )
          )
          .should.equal(true)
      })
    })
  })

  return describe('setDocument', function () {
    beforeEach(function () {
      this.content = 'mock content'
      return (this.user_id = 'user-id-123')
    })

    describe('successfully', function () {
      beforeEach(function () {
        this.request.post = sinon
          .stub()
          .callsArgWith(1, null, { statusCode: 200 })
        return this.DocumentUpdaterManager.setDocument(
          this.project_id,
          this.doc_id,
          this.content,
          this.user_id,
          this.callback
        )
      })

      it('should set the document in the document updater', function () {
        const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}/doc/${this.doc_id}`
        return this.request.post
          .calledWith({
            url,
            json: {
              lines: this.content.split('\n'),
              source: 'restore',
              user_id: this.user_id,
              undoing: true,
            },
          })
          .should.equal(true)
      })

      return it('should call the callback', function () {
        return this.callback.calledWith(null).should.equal(true)
      })
    })

    describe('when the document updater API returns an error', function () {
      beforeEach(function () {
        this.request.post = sinon
          .stub()
          .callsArgWith(
            1,
            (this.error = new Error('something went wrong')),
            null,
            null
          )
        return this.DocumentUpdaterManager.setDocument(
          this.project_id,
          this.doc_id,
          this.content,
          this.user_id,
          this.callback
        )
      })

      return it('should return an error to the callback', function () {
        return this.callback.calledWith(this.error).should.equal(true)
      })
    })

    return describe('when the document updater returns a failure error code', function () {
      beforeEach(function () {
        this.request.post = sinon
          .stub()
          .callsArgWith(1, null, { statusCode: 500 }, '')
        return this.DocumentUpdaterManager.setDocument(
          this.project_id,
          this.doc_id,
          this.content,
          this.user_id,
          this.callback
        )
      })

      return it('should return the callback with an error', function () {
        return this.callback
          .calledWith(
            sinon.match.has(
              'message',
              'doc updater returned a non-success status code: 500'
            )
          )
          .should.equal(true)
      })
    })
  })
})
@@ -0,0 +1,199 @@
/* eslint-disable
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/HttpController.js'
const SandboxedModule = require('sandboxed-module')

describe('HttpController', function () {
  beforeEach(function () {
    this.HttpController = SandboxedModule.require(modulePath, {
      singleOnly: true,
      requires: {
        './UpdatesManager': (this.UpdatesManager = {}),
        './DiffManager': (this.DiffManager = {}),
        './RestoreManager': (this.RestoreManager = {}),
        './PackManager': (this.PackManager = {}),
        './DocArchiveManager': (this.DocArchiveManager = {}),
        './HealthChecker': (this.HealthChecker = {}),
      },
    })
    this.doc_id = 'doc-id-123'
    this.project_id = 'project-id-123'
    this.next = sinon.stub()
    this.user_id = 'mock-user-123'
    return (this.now = Date.now())
  })

  describe('flushDoc', function () {
    beforeEach(function () {
      this.req = {
        params: {
          doc_id: this.doc_id,
          project_id: this.project_id,
        },
      }
      this.res = { sendStatus: sinon.stub() }
      this.UpdatesManager.processUncompressedUpdatesWithLock = sinon
        .stub()
        .callsArg(2)
      return this.HttpController.flushDoc(this.req, this.res, this.next)
    })

    it('should process the updates', function () {
      return this.UpdatesManager.processUncompressedUpdatesWithLock
        .calledWith(this.project_id, this.doc_id)
        .should.equal(true)
    })

    return it('should return a success code', function () {
      return this.res.sendStatus.calledWith(204).should.equal(true)
    })
  })

  describe('flushProject', function () {
    beforeEach(function () {
      this.req = {
        params: {
          project_id: this.project_id,
        },
      }
      this.res = { sendStatus: sinon.stub() }
      this.UpdatesManager.processUncompressedUpdatesForProject = sinon
        .stub()
        .callsArg(1)
      return this.HttpController.flushProject(this.req, this.res, this.next)
    })

    it('should process the updates', function () {
      return this.UpdatesManager.processUncompressedUpdatesForProject
        .calledWith(this.project_id)
        .should.equal(true)
    })

    return it('should return a success code', function () {
      return this.res.sendStatus.calledWith(204).should.equal(true)
    })
  })

  describe('getDiff', function () {
    beforeEach(function () {
      this.from = 42
      this.to = 45
      this.req = {
        params: {
          doc_id: this.doc_id,
          project_id: this.project_id,
        },
        query: {
          from: this.from.toString(),
          to: this.to.toString(),
        },
      }
      this.res = { json: sinon.stub() }
      this.diff = [{ u: 'mock-diff' }]
      this.DiffManager.getDiff = sinon.stub().callsArgWith(4, null, this.diff)
      return this.HttpController.getDiff(this.req, this.res, this.next)
    })

    it('should get the diff', function () {
      return this.DiffManager.getDiff
        .calledWith(
          this.project_id,
          this.doc_id,
          parseInt(this.from, 10),
          parseInt(this.to, 10)
        )
        .should.equal(true)
    })

    return it('should return the diff', function () {
      return this.res.json.calledWith({ diff: this.diff }).should.equal(true)
    })
  })

  describe('getUpdates', function () {
    beforeEach(function () {
      this.before = Date.now()
      this.nextBeforeTimestamp = this.before - 100
      this.min_count = 10
      this.req = {
        params: {
          project_id: this.project_id,
        },
        query: {
          before: this.before.toString(),
          min_count: this.min_count.toString(),
        },
      }
      this.res = { json: sinon.stub() }
      this.updates = ['mock-summarized-updates']
      this.UpdatesManager.getSummarizedProjectUpdates = sinon
        .stub()
        .callsArgWith(2, null, this.updates, this.nextBeforeTimestamp)
      return this.HttpController.getUpdates(this.req, this.res, this.next)
    })

    it('should get the updates', function () {
      return this.UpdatesManager.getSummarizedProjectUpdates
        .calledWith(this.project_id, {
          before: this.before,
          min_count: this.min_count,
        })
        .should.equal(true)
    })

    return it('should return the formatted updates', function () {
      return this.res.json
        .calledWith({
          updates: this.updates,
          nextBeforeTimestamp: this.nextBeforeTimestamp,
        })
        .should.equal(true)
    })
  })

  return describe('RestoreManager', function () {
    beforeEach(function () {
      this.version = '42'
      this.req = {
        params: {
          doc_id: this.doc_id,
          project_id: this.project_id,
          version: this.version,
        },
        headers: {
          'x-user-id': this.user_id,
        },
      }
      this.res = { sendStatus: sinon.stub() }

      this.RestoreManager.restoreToBeforeVersion = sinon.stub().callsArg(4)
      return this.HttpController.restore(this.req, this.res, this.next)
    })

    it('should restore the document', function () {
      return this.RestoreManager.restoreToBeforeVersion
        .calledWith(
          this.project_id,
          this.doc_id,
          parseInt(this.version, 10),
          this.user_id
        )
        .should.equal(true)
    })

    return it('should return a success code', function () {
      return this.res.sendStatus.calledWith(204).should.equal(true)
    })
  })
})
@@ -0,0 +1,317 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    mocha/no-nested-tests,
    no-return-assign,
    no-undef,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/LockManager.js'
const SandboxedModule = require('sandboxed-module')

describe('LockManager', function () {
  beforeEach(function () {
    this.Settings = {
      redis: {
        lock: {},
      },
    }
    this.LockManager = SandboxedModule.require(modulePath, {
      requires: {
        '@overleaf/redis-wrapper': {
          createClient: () => {
            return (this.rclient = { auth: sinon.stub() })
          },
        },
        '@overleaf/settings': this.Settings,
      },
    })

    this.key = 'lock-key'
    return (this.callback = sinon.stub())
  })

  describe('checkLock', function () {
    describe('when the lock is taken', function () {
      beforeEach(function () {
        this.rclient.exists = sinon.stub().callsArgWith(1, null, '1')
        return this.LockManager.checkLock(this.key, this.callback)
      })

      it('should check the lock in redis', function () {
        return this.rclient.exists.calledWith(this.key).should.equal(true)
      })

      return it('should return the callback with false', function () {
        return this.callback.calledWith(null, false).should.equal(true)
      })
    })

    return describe('when the lock is free', function () {
      beforeEach(function () {
        this.rclient.exists = sinon.stub().callsArgWith(1, null, '0')
        return this.LockManager.checkLock(this.key, this.callback)
      })

      return it('should return the callback with true', function () {
        return this.callback.calledWith(null, true).should.equal(true)
      })
    })
  })
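  // The stub below mirrors the atomic Redis lock pattern these tests expect:
  // a single `SET key value EX <ttl> NX` call, which only sets the key (and
  // its expiry) when it does not already exist, so a null reply means the
  // lock was already taken and an 'OK' reply means it was acquired.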
  describe('tryLock', function () {
    describe('when the lock is taken', function () {
      beforeEach(function () {
        this.rclient.set = sinon.stub().callsArgWith(5, null, null)
        this.LockManager.randomLock = sinon
          .stub()
          .returns('locked-random-value')
        return this.LockManager.tryLock(this.key, this.callback)
      })

      it('should check the lock in redis', function () {
        return this.rclient.set
          .calledWith(
            this.key,
            'locked-random-value',
            'EX',
            this.LockManager.LOCK_TTL,
            'NX'
          )
          .should.equal(true)
      })

      return it('should return the callback with false', function () {
        return this.callback.calledWith(null, false).should.equal(true)
      })
    })

    return describe('when the lock is free', function () {
      beforeEach(function () {
        this.rclient.set = sinon.stub().callsArgWith(5, null, 'OK')
        return this.LockManager.tryLock(this.key, this.callback)
      })

      return it('should return the callback with true', function () {
        return this.callback.calledWith(null, true).should.equal(true)
      })
    })
  })

  describe('deleteLock', function () {
    beforeEach(function () {
      this.rclient.del = sinon.stub().callsArg(1)
      return this.LockManager.deleteLock(this.key, this.callback)
    })

    it('should delete the lock in redis', function () {
      return this.rclient.del.calledWith(this.key).should.equal(true)
    })

    return it('should call the callback', function () {
      return this.callback.called.should.equal(true)
    })
  })

  describe('getLock', function () {
    describe('when the lock is not taken', function () {
      beforeEach(function (done) {
        this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, true)
        return this.LockManager.getLock(this.key, (...args) => {
          this.callback(...Array.from(args || []))
          return done()
        })
      })

      it('should try to get the lock', function () {
        return this.LockManager.tryLock.calledWith(this.key).should.equal(true)
      })

      it('should only need to try once', function () {
        return this.LockManager.tryLock.callCount.should.equal(1)
      })

      return it('should return the callback', function () {
        return this.callback.calledWith(null).should.equal(true)
      })
    })

    describe('when the lock is initially set', function () {
      beforeEach(function (done) {
        const startTime = Date.now()
        this.LockManager.LOCK_TEST_INTERVAL = 5
        this.LockManager.tryLock = function (doc_id, callback) {
          if (callback == null) {
            callback = function (error, isFree) {}
          }
          if (Date.now() - startTime < 100) {
            return callback(null, false)
          } else {
            return callback(null, true)
          }
        }
        sinon.spy(this.LockManager, 'tryLock')

        return this.LockManager.getLock(this.key, (...args) => {
          this.callback(...Array.from(args || []))
          return done()
        })
      })

      it('should call tryLock multiple times until free', function () {
        return (this.LockManager.tryLock.callCount > 1).should.equal(true)
      })

      return it('should return the callback', function () {
        return this.callback.calledWith(null).should.equal(true)
      })
    })

    return describe('when the lock times out', function () {
      beforeEach(function (done) {
        const time = Date.now()
        this.LockManager.MAX_LOCK_WAIT_TIME = 5
        this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, false)
        return this.LockManager.getLock(this.key, (...args) => {
          this.callback(...Array.from(args || []))
          return done()
        })
      })

      return it('should return the callback with an error', function () {
        return this.callback
          .calledWith(sinon.match.instanceOf(Error))
          .should.equal(true)
      })
    })
  })

  return describe('runWithLock', function () {
    describe('with successful run', function () {
      beforeEach(function () {
        this.runner = function (releaseLock) {
          if (releaseLock == null) {
            releaseLock = function (error) {}
          }
          return releaseLock()
        }
        sinon.spy(this, 'runner')
        this.LockManager.getLock = sinon.stub().callsArg(1)
        this.LockManager.releaseLock = sinon.stub().callsArg(2)
        return this.LockManager.runWithLock(
          this.key,
          this.runner,
          this.callback
        )
      })

      it('should get the lock', function () {
        return this.LockManager.getLock.calledWith(this.key).should.equal(true)
      })

      it('should run the passed function', function () {
        return this.runner.called.should.equal(true)
      })

      it('should release the lock', function () {
        return this.LockManager.releaseLock
          .calledWith(this.key)
          .should.equal(true)
      })

      return it('should call the callback', function () {
        return this.callback.called.should.equal(true)
      })
    })

    describe('when the runner function returns an error', function () {
      beforeEach(function () {
        this.error = new Error('oops')
        this.runner = releaseLock => {
          if (releaseLock == null) {
            releaseLock = function (error) {}
          }
          return releaseLock(this.error)
        }
        sinon.spy(this, 'runner')
        this.LockManager.getLock = sinon.stub().callsArg(1)
        this.LockManager.releaseLock = sinon.stub().callsArg(2)
        return this.LockManager.runWithLock(
          this.key,
          this.runner,
          this.callback
        )
      })

      it('should release the lock', function () {
        return this.LockManager.releaseLock
          .calledWith(this.key)
          .should.equal(true)
      })

      return it('should call the callback with the error', function () {
        return this.callback.calledWith(this.error).should.equal(true)
      })
    })

    return describe('releaseLock', function () {
      describe('when the lock is current', function () {
        beforeEach(function () {
          this.rclient.eval = sinon.stub().yields(null, 1)
          return this.LockManager.releaseLock(
            this.key,
            this.lockValue,
            this.callback
          )
        })

        it('should clear the data from redis', function () {
          return this.rclient.eval
            .calledWith(
              this.LockManager.unlockScript,
              1,
              this.key,
              this.lockValue
            )
            .should.equal(true)
        })

        return it('should call the callback', function () {
          return this.callback.called.should.equal(true)
        })
      })

      return describe('when the lock has expired', function () {
        beforeEach(function () {
          this.rclient.eval = sinon.stub().yields(null, 0)
          return this.LockManager.releaseLock(
            this.key,
            this.lockValue,
            this.callback
          )
        })

        return it('should return an error if the lock has expired', function () {
          return this.callback
            .calledWith(
              sinon.match.has('message', 'tried to release timed out lock')
            )
            .should.equal(true)
        })
      })
    })
  })
})
@@ -0,0 +1,239 @@
/* eslint-disable
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/MongoManager.js'
const packModulePath = '../../../../app/js/PackManager.js'
const SandboxedModule = require('sandboxed-module')
const { ObjectId } = require('mongodb')
const tk = require('timekeeper')

describe('MongoManager', function () {
  beforeEach(function () {
    tk.freeze(new Date())
    this.MongoManager = SandboxedModule.require(modulePath, {
      requires: {
        './mongodb': { db: (this.db = {}), ObjectId },
        './PackManager': (this.PackManager = {}),
        '@overleaf/metrics': { timeAsyncMethod() {} },
      },
    })
    this.callback = sinon.stub()
    this.doc_id = ObjectId().toString()
    return (this.project_id = ObjectId().toString())
  })

  afterEach(function () {
    return tk.reset()
  })
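  // The chained stubs below model the mongo cursor API as these tests use it:
  // find/sort/limit each return the same fake cursor object, and toArray
  // finally yields the stored updates, so the assertions can check each step
  // of the find({ doc_id }).sort({ v: -1 }).limit(1) query individually.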
  describe('getLastCompressedUpdate', function () {
    beforeEach(function () {
      this.update = 'mock-update'
      this.db.docHistory = {}
      this.db.docHistory.find = sinon.stub().returns(this.db.docHistory)
      this.db.docHistory.findOne = sinon.stub().returns(this.db.docHistory)
      this.db.docHistory.sort = sinon.stub().returns(this.db.docHistory)
      this.db.docHistory.limit = sinon.stub().returns(this.db.docHistory)
      this.db.docHistory.toArray = sinon
        .stub()
        .callsArgWith(0, null, [this.update])

      return this.MongoManager.getLastCompressedUpdate(
        this.doc_id,
        this.callback
      )
    })

    it('should find the updates for the doc', function () {
      return this.db.docHistory.find
        .calledWith({ doc_id: ObjectId(this.doc_id) })
        .should.equal(true)
    })

    it('should limit to one result', function () {
      return this.db.docHistory.limit.calledWith(1).should.equal(true)
    })

    it('should sort in descending version order', function () {
      return this.db.docHistory.sort.calledWith({ v: -1 }).should.equal(true)
    })

    return it('should call the callback with the update', function () {
      return this.callback.calledWith(null, this.update).should.equal(true)
    })
  })

  describe('peekLastCompressedUpdate', function () {
    describe('when there is no last update', function () {
      beforeEach(function () {
        this.PackManager.getLastPackFromIndex = sinon
          .stub()
          .callsArgWith(1, null, null)
        this.MongoManager.getLastCompressedUpdate = sinon
          .stub()
          .callsArgWith(1, null, null)
        return this.MongoManager.peekLastCompressedUpdate(
          this.doc_id,
          this.callback
        )
      })

      it('should get the last update', function () {
        return this.MongoManager.getLastCompressedUpdate
          .calledWith(this.doc_id)
          .should.equal(true)
      })

      return it('should call the callback with no update', function () {
        return this.callback.calledWith(null, null).should.equal(true)
      })
    })

    describe('when there is an update', function () {
      beforeEach(function () {
        this.update = { _id: Object() }
        this.MongoManager.getLastCompressedUpdate = sinon
          .stub()
          .callsArgWith(1, null, this.update)
        return this.MongoManager.peekLastCompressedUpdate(
          this.doc_id,
          this.callback
        )
      })

      it('should get the last update', function () {
        return this.MongoManager.getLastCompressedUpdate
          .calledWith(this.doc_id)
          .should.equal(true)
      })

      return it('should call the callback with the update', function () {
        return this.callback.calledWith(null, this.update).should.equal(true)
      })
    })

    return describe('when there is a last update in S3', function () {
      beforeEach(function () {
        this.update = { _id: Object(), v: 12345, v_end: 12345, inS3: true }
        this.PackManager.getLastPackFromIndex = sinon
          .stub()
          .callsArgWith(1, null, this.update)
        this.MongoManager.getLastCompressedUpdate = sinon
          .stub()
          .callsArgWith(1, null)
        return this.MongoManager.peekLastCompressedUpdate(
          this.doc_id,
          this.callback
        )
      })

      it('should get the last update', function () {
        return this.MongoManager.getLastCompressedUpdate
          .calledWith(this.doc_id)
          .should.equal(true)
      })

      return it('should call the callback with a null update and the correct version', function () {
        return this.callback
          .calledWith(null, null, this.update.v_end)
          .should.equal(true)
      })
    })
  })

  describe('backportProjectId', function () {
    beforeEach(function () {
      this.db.docHistory = { updateMany: sinon.stub().yields() }
      return this.MongoManager.backportProjectId(
        this.project_id,
        this.doc_id,
        this.callback
      )
    })

    it("should insert the project_id into all entries for the doc_id which don't have it set", function () {
      return this.db.docHistory.updateMany
        .calledWith(
          {
            doc_id: ObjectId(this.doc_id),
            project_id: { $exists: false },
          },
          {
            $set: { project_id: ObjectId(this.project_id) },
          }
        )
        .should.equal(true)
    })

    return it('should call the callback', function () {
      return this.callback.called.should.equal(true)
    })
  })

  describe('getProjectMetaData', function () {
    beforeEach(function () {
      this.metadata = { mock: 'metadata' }
      this.db.projectHistoryMetaData = {
        findOne: sinon.stub().callsArgWith(1, null, this.metadata),
      }
      return this.MongoManager.getProjectMetaData(
        this.project_id,
        this.callback
      )
    })

    it('should look up the meta data in the db', function () {
      return this.db.projectHistoryMetaData.findOne
        .calledWith({ project_id: ObjectId(this.project_id) })
        .should.equal(true)
    })

    return it('should return the metadata', function () {
      return this.callback.calledWith(null, this.metadata).should.equal(true)
    })
  })

  return describe('setProjectMetaData', function () {
    beforeEach(function () {
      this.metadata = { mock: 'metadata' }
      this.db.projectHistoryMetaData = {
        updateOne: sinon.stub().yields(),
      }
      return this.MongoManager.setProjectMetaData(
        this.project_id,
        this.metadata,
        this.callback
      )
    })

    it('should upsert the metadata into the DB', function () {
      return this.db.projectHistoryMetaData.updateOne
        .calledWith(
          {
            project_id: ObjectId(this.project_id),
          },
          {
            $set: this.metadata,
          },
          {
            upsert: true,
          }
        )
        .should.equal(true)
    })

    return it('should call the callback', function () {
      return this.callback.called.should.equal(true)
    })
  })
})
@@ -0,0 +1,704 @@
/* eslint-disable
    mocha/no-identical-title,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { assert, expect } = require('chai')
const modulePath = '../../../../app/js/PackManager.js'
const SandboxedModule = require('sandboxed-module')
const { ObjectId } = require('mongodb')
const _ = require('underscore')

const tk = require('timekeeper')

describe('PackManager', function () {
  beforeEach(function () {
    tk.freeze(new Date())
    this.PackManager = SandboxedModule.require(modulePath, {
      requires: {
        bson: require('bson'),
        './mongodb': { db: (this.db = {}), ObjectId },
        './LockManager': {},
        './MongoAWS': {},
        '@overleaf/metrics': { inc() {} },
        './ProjectIterator': require('../../../../app/js/ProjectIterator.js'), // Cache for speed
        '@overleaf/settings': {
          redis: { lock: { key_schema: {} } },
        },
      },
    })
    this.callback = sinon.stub()
    this.doc_id = ObjectId().toString()
    this.project_id = ObjectId().toString()
    return (this.PackManager.MAX_COUNT = 512)
  })

  afterEach(function () {
    return tk.reset()
  })

  describe('insertCompressedUpdates', function () {
    beforeEach(function () {
      this.lastUpdate = {
        _id: '12345',
        pack: [
          { op: 'op-1', meta: 'meta-1', v: 1 },
          { op: 'op-2', meta: 'meta-2', v: 2 },
        ],
        n: 2,
        sz: 100,
      }
      this.newUpdates = [
        { op: 'op-3', meta: 'meta-3', v: 3 },
        { op: 'op-4', meta: 'meta-4', v: 4 },
      ]
      return (this.db.docHistory = {
        insertOne: sinon.stub().yields(),
        insert: sinon.stub().callsArg(1),
        updateOne: sinon.stub().yields(),
        findAndModify: sinon.stub().callsArg(1),
      })
    })

    describe('with no last update', function () {
      beforeEach(function () {
        this.PackManager.insertUpdatesIntoNewPack = sinon.stub().callsArg(4)
        return this.PackManager.insertCompressedUpdates(
          this.project_id,
          this.doc_id,
          null,
          this.newUpdates,
          true,
          this.callback
        )
      })

      describe('for a small update', function () {
        it('should insert the update into a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(this.project_id, this.doc_id, this.newUpdates, true)
            .should.equal(true)
        })

        return it('should call the callback', function () {
          return this.callback.called.should.equal(true)
        })
      })

      return describe('for many small updates', function () {
        beforeEach(function () {
          this.newUpdates = __range__(0, 2048, true).map(i => ({
            op: `op-${i}`,
            meta: `meta-${i}`,
            v: i,
          }))
          return this.PackManager.insertCompressedUpdates(
            this.project_id,
            this.doc_id,
            null,
            this.newUpdates,
            false,
            this.callback
          )
        })

        it('should insert the initial updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(0, 512),
              false
            )
            .should.equal(true)
        })

        it('should insert the first set of remaining updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(512, 1024),
              false
            )
            .should.equal(true)
        })

        it('should insert the second set of remaining updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(1024, 1536),
              false
            )
            .should.equal(true)
        })

        it('should insert the third set of remaining updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(1536, 2048),
              false
            )
            .should.equal(true)
        })

        it('should insert the final set of remaining updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(2048, 2049),
              false
            )
            .should.equal(true)
        })

        return it('should call the callback', function () {
          return this.callback.called.should.equal(true)
        })
      })
    })

    describe('with an existing pack as the last update', function () {
      beforeEach(function () {
        this.PackManager.appendUpdatesToExistingPack = sinon.stub().callsArg(5)
        this.PackManager.insertUpdatesIntoNewPack = sinon.stub().callsArg(4)
        return this.PackManager.insertCompressedUpdates(
          this.project_id,
          this.doc_id,
          this.lastUpdate,
          this.newUpdates,
          false,
          this.callback
        )
      })

      describe('for a small update', function () {
        it('should append the update to the existing pack', function () {
          return this.PackManager.appendUpdatesToExistingPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.lastUpdate,
              this.newUpdates,
              false
            )
            .should.equal(true)
        })
        it('should not insert any new packs', function () {
          return this.PackManager.insertUpdatesIntoNewPack.called.should.equal(
            false
          )
        })
        return it('should call the callback', function () {
          return this.callback.called.should.equal(true)
        })
      })

      describe('for many small updates', function () {
        beforeEach(function () {
          this.newUpdates = __range__(0, 2048, true).map(i => ({
            op: `op-${i}`,
            meta: `meta-${i}`,
            v: i,
          }))
          return this.PackManager.insertCompressedUpdates(
            this.project_id,
            this.doc_id,
            this.lastUpdate,
            this.newUpdates,
            false,
            this.callback
          )
        })

        it('should append the initial updates to the existing pack', function () {
          return this.PackManager.appendUpdatesToExistingPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.lastUpdate,
              this.newUpdates.slice(0, 510),
              false
            )
            .should.equal(true)
        })

        it('should insert the first set of remaining updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(510, 1022),
              false
            )
            .should.equal(true)
        })

        it('should insert the second set of remaining updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(1022, 1534),
              false
            )
            .should.equal(true)
        })

        it('should insert the third set of remaining updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(1534, 2046),
              false
            )
            .should.equal(true)
        })

        it('should insert the final set of remaining updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(2046, 2049),
              false
            )
            .should.equal(true)
        })

        return it('should call the callback', function () {
          return this.callback.called.should.equal(true)
        })
      })

      return describe('for many big updates', function () {
        beforeEach(function () {
          const longString = __range__(
            0,
            0.75 * this.PackManager.MAX_SIZE,
            true
          )
            .map(j => 'a')
            .join('')
          this.newUpdates = [0, 1, 2, 3, 4].map(i => ({
            op: `op-${i}-${longString}`,
            meta: `meta-${i}`,
            v: i,
          }))
          return this.PackManager.insertCompressedUpdates(
            this.project_id,
            this.doc_id,
            this.lastUpdate,
            this.newUpdates,
            false,
            this.callback
          )
        })

        it('should append the initial updates to the existing pack', function () {
          return this.PackManager.appendUpdatesToExistingPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.lastUpdate,
              this.newUpdates.slice(0, 1),
              false
            )
            .should.equal(true)
        })

        it('should insert the first set of remaining updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(1, 2),
              false
            )
            .should.equal(true)
        })

        it('should insert the second set of remaining updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(2, 3),
              false
            )
            .should.equal(true)
        })

        it('should insert the third set of remaining updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(3, 4),
              false
            )
            .should.equal(true)
        })

        it('should insert the final set of remaining updates as a new pack', function () {
          return this.PackManager.insertUpdatesIntoNewPack
            .calledWith(
              this.project_id,
              this.doc_id,
              this.newUpdates.slice(4, 5),
              false
            )
            .should.equal(true)
        })

        return it('should call the callback', function () {
          return this.callback.called.should.equal(true)
        })
      })
    })
  })
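    // The cases below cover the three paths these tests expect from
    // flushCompressedUpdates: no previous pack (insert a new one via
    // insertOne), a recent unexpired pack (append via updateOne with $push),
    // and an old or expired pack (start a fresh pack). The final boolean
    // argument controls whether a 7-day expiresAt is set on the pack.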
    describe('flushCompressedUpdates', function () {
      return describe('when there is no previous update', function () {
        beforeEach(function () {
          return this.PackManager.flushCompressedUpdates(
            this.project_id,
            this.doc_id,
            null,
            this.newUpdates,
            true,
            this.callback
          )
        })

        return describe('for a small update that will expire', function () {
          it('should insert the update into mongo', function () {
            return this.db.docHistory.insertOne
              .calledWithMatch({
                pack: this.newUpdates,
                project_id: ObjectId(this.project_id),
                doc_id: ObjectId(this.doc_id),
                n: this.newUpdates.length,
                v: this.newUpdates[0].v,
                v_end: this.newUpdates[this.newUpdates.length - 1].v,
              })
              .should.equal(true)
          })

          it('should set an expiry time in the future', function () {
            return this.db.docHistory.insertOne
              .calledWithMatch({
                expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000),
              })
              .should.equal(true)
          })

          return it('should call the callback', function () {
            return this.callback.called.should.equal(true)
          })
        })
      })
    })

    describe('when there is a recent previous update in mongo that expires', function () {
      beforeEach(function () {
        this.lastUpdate = {
          _id: '12345',
          pack: [
            { op: 'op-1', meta: 'meta-1', v: 1 },
            { op: 'op-2', meta: 'meta-2', v: 2 },
          ],
          n: 2,
          sz: 100,
          meta: { start_ts: Date.now() - 6 * 3600 * 1000 },
          expiresAt: new Date(Date.now()),
        }

        return this.PackManager.flushCompressedUpdates(
          this.project_id,
          this.doc_id,
          this.lastUpdate,
          this.newUpdates,
          true,
          this.callback
        )
      })

      return describe('for a small update that will expire', function () {
        it('should append the update in mongo', function () {
          return this.db.docHistory.updateOne
            .calledWithMatch(
              { _id: this.lastUpdate._id },
              {
                $push: { pack: { $each: this.newUpdates } },
                $set: { v_end: this.newUpdates[this.newUpdates.length - 1].v },
              }
            )
            .should.equal(true)
        })

        it('should set an expiry time in the future', function () {
          return this.db.docHistory.updateOne
            .calledWithMatch(sinon.match.any, {
              $set: { expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000) },
            })
            .should.equal(true)
        })

        return it('should call the callback', function () {
          return this.callback.called.should.equal(true)
        })
      })
    })

    describe('when there is a recent previous update in mongo that does not expire', function () {
      beforeEach(function () {
        this.PackManager.updateIndex = sinon.stub().callsArg(2)

        this.lastUpdate = {
          _id: '12345',
          pack: [
            { op: 'op-1', meta: 'meta-1', v: 1 },
            { op: 'op-2', meta: 'meta-2', v: 2 },
          ],
          n: 2,
          sz: 100,
          meta: { start_ts: Date.now() - 6 * 3600 * 1000 },
          expiresAt: new Date(Date.now()),
        }

        return this.PackManager.flushCompressedUpdates(
          this.project_id,
          this.doc_id,
          this.lastUpdate,
          this.newUpdates,
          false,
          this.callback
        )
      })

      return describe('for a small update that will not expire', function () {
        it('should insert the update into mongo', function () {
          return this.db.docHistory.insertOne
            .calledWithMatch({
              pack: this.newUpdates,
              project_id: ObjectId(this.project_id),
              doc_id: ObjectId(this.doc_id),
              n: this.newUpdates.length,
              v: this.newUpdates[0].v,
              v_end: this.newUpdates[this.newUpdates.length - 1].v,
            })
            .should.equal(true)
        })

        it('should not set any expiry time', function () {
          return this.db.docHistory.insertOne
            .neverCalledWithMatch(sinon.match.has('expiresAt'))
            .should.equal(true)
        })

        return it('should call the callback', function () {
          return this.callback.called.should.equal(true)
        })
      })
    })

    return describe('when there is an old previous update in mongo', function () {
      beforeEach(function () {
        this.lastUpdate = {
          _id: '12345',
          pack: [
            { op: 'op-1', meta: 'meta-1', v: 1 },
            { op: 'op-2', meta: 'meta-2', v: 2 },
          ],
          n: 2,
          sz: 100,
          meta: { start_ts: Date.now() - 30 * 24 * 3600 * 1000 },
          expiresAt: new Date(Date.now() - 30 * 24 * 3600 * 1000),
        }

        return this.PackManager.flushCompressedUpdates(
          this.project_id,
          this.doc_id,
          this.lastUpdate,
          this.newUpdates,
          true,
          this.callback
        )
      })

      return describe('for a small update that will expire', function () {
        it('should insert the update into mongo', function () {
          return this.db.docHistory.insertOne
            .calledWithMatch({
              pack: this.newUpdates,
              project_id: ObjectId(this.project_id),
              doc_id: ObjectId(this.doc_id),
              n: this.newUpdates.length,
              v: this.newUpdates[0].v,
              v_end: this.newUpdates[this.newUpdates.length - 1].v,
            })
            .should.equal(true)
        })

        it('should set an expiry time in the future', function () {
          return this.db.docHistory.insertOne
            .calledWithMatch({
              expiresAt: new Date(Date.now() + 7 * 24 * 3600 * 1000),
            })
            .should.equal(true)
        })

        return it('should call the callback', function () {
          return this.callback.called.should.equal(true)
        })
      })
    })
  })

  describe('getOpsByVersionRange', function () {})

  describe('loadPacksByVersionRange', function () {})

  describe('fetchPacksIfNeeded', function () {})

  describe('makeProjectIterator', function () {})

  describe('getPackById', function () {})

  describe('increaseTTL', function () {})

  describe('getIndex', function () {})

  describe('getPackFromIndex', function () {})
  // getLastPackFromIndex:
  // getIndexWithKeys
  // initialiseIndex
  // updateIndex
  // findCompletedPacks
  // findUnindexedPacks
  // insertPacksIntoIndexWithLock
  // _insertPacksIntoIndex
  // archivePack
  // checkArchivedPack
  // processOldPack
  // updateIndexIfNeeded
  // findUnarchivedPacks

  return describe('checkArchiveNotInProgress', function () {
    describe('when an archive is in progress', function () {
      beforeEach(function () {
        this.db.docHistoryIndex = {
          findOne: sinon.stub().callsArgWith(2, null, { inS3: false }),
        }
        return this.PackManager.checkArchiveNotInProgress(
          this.project_id,
          this.doc_id,
          this.pack_id,
          this.callback
        )
      })
      it('should call the callback', function () {
        return this.callback.called.should.equal(true)
      })
      return it('should return an error', function () {
        return this.callback
          .calledWith(sinon.match.has('message'))
          .should.equal(true)
      })
    })

    describe('when an archive is completed', function () {
      beforeEach(function () {
        this.db.docHistoryIndex = {
          findOne: sinon.stub().callsArgWith(2, null, { inS3: true }),
        }
        return this.PackManager.checkArchiveNotInProgress(
          this.project_id,
          this.doc_id,
          this.pack_id,
          this.callback
        )
      })
      it('should call the callback', function () {
        return this.callback.called.should.equal(true)
      })
      return it('should return an error', function () {
        return this.callback
          .calledWith(sinon.match.has('message'))
          .should.equal(true)
      })
    })

    return describe('when the archive has not started or completed', function () {
      beforeEach(function () {
        this.db.docHistoryIndex = {
          findOne: sinon.stub().callsArgWith(2, null, {}),
        }
        return this.PackManager.checkArchiveNotInProgress(
          this.project_id,
          this.doc_id,
          this.pack_id,
          this.callback
        )
      })
      it('should call the callback with no error', function () {
        return this.callback.called.should.equal(true)
      })
      return it('should return with no error', function () {
        return (typeof this.callback.lastCall.args[0]).should.equal('undefined')
      })
    })
  })
})

// describe "setTTLOnArchivedPack", ->
//   beforeEach ->
//     @pack_id = "somepackid"
//     @onedayinms = 86400000
//     @db.docHistory =
//       findAndModify : sinon.stub().callsArgWith(1)

//   it "should set expires to 1 day", (done)->
//     #@PackManager._getOneDayInFutureWithRandomDelay = sinon.stub().returns(@onedayinms)
//     @PackManager.setTTLOnArchivedPack @project_id, @doc_id, @pack_id, =>
//       args = @db.docHistory.findAndModify.args[0][0]
//       args.query._id.should.equal @pack_id
//       args.update['$set'].expiresAt.should.equal @onedayinms
//       done()

// describe "_getOneDayInFutureWithRandomDelay", ->
//   beforeEach ->
//     @onedayinms = 86400000
//     @thirtyMins = 1000 * 60 * 30

//   it "should give 1 day + 30 mins random time", (done)->
//     loops = 10000
//     while --loops > 0
//       randomDelay = @PackManager._getOneDayInFutureWithRandomDelay() - new Date(Date.now() + @onedayinms)
//       randomDelay.should.be.above(0)
//       randomDelay.should.be.below(@thirtyMins + 1)
//     done()
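// `__range__` is the helper decaffeinate generates to emulate CoffeeScript's
// range syntax: __range__(0, 4, true) returns [0, 1, 2, 3, 4] (inclusive),
// while an exclusive range stops one short. The tests above use it to build
// arrays of sequential mock updates and long filler strings.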
function __range__(left, right, inclusive) {
  const range = []
  const ascending = left < right
  const end = !inclusive ? right : ascending ? right + 1 : right - 1
  for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) {
    range.push(i)
  }
  return range
}
@@ -0,0 +1,164 @@
/* eslint-disable
    camelcase,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/RedisManager.js'
const SandboxedModule = require('sandboxed-module')

describe('RedisManager', function () {
  beforeEach(function () {
    this.RedisManager = SandboxedModule.require(modulePath, {
      requires: {
        '@overleaf/redis-wrapper': {
          createClient: () => {
            return (this.rclient = {
              auth: sinon.stub(),
              multi: () => this.rclient,
            })
          },
        },
        '@overleaf/settings': {
          redis: {
            history: {
              key_schema: {
                uncompressedHistoryOps({ doc_id }) {
                  return `UncompressedHistoryOps:${doc_id}`
                },
                docsWithHistoryOps({ project_id }) {
                  return `DocsWithHistoryOps:${project_id}`
                },
              },
            },
          },
        },
      },
    })
    this.doc_id = 'doc-id-123'
    this.project_id = 'project-id-123'
    this.batchSize = 100
    return (this.callback = sinon.stub())
  })

  describe('getOldestDocUpdates', function () {
    beforeEach(function () {
      this.rawUpdates = [
        { v: 42, op: 'mock-op-42' },
        { v: 45, op: 'mock-op-45' },
      ]
      this.jsonUpdates = Array.from(this.rawUpdates).map(update =>
        JSON.stringify(update)
      )
      this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonUpdates)
      return this.RedisManager.getOldestDocUpdates(
        this.doc_id,
        this.batchSize,
        this.callback
      )
    })

    it('should read the updates from redis', function () {
      return this.rclient.lrange
        .calledWith(
          `UncompressedHistoryOps:${this.doc_id}`,
          0,
          this.batchSize - 1
        )
        .should.equal(true)
    })

    it('should call the callback with the unparsed ops', function () {
      return this.callback.calledWith(null, this.jsonUpdates).should.equal(true)
    })

    describe('expandDocUpdates', function () {
      beforeEach(function () {
        return this.RedisManager.expandDocUpdates(
          this.jsonUpdates,
          this.callback
        )
      })

      return it('should call the callback with the parsed ops', function () {
        return this.callback
          .calledWith(null, this.rawUpdates)
          .should.equal(true)
      })
    })

    return describe('deleteAppliedDocUpdates', function () {
      beforeEach(function () {
        this.rclient.lrem = sinon.stub()
        this.rclient.srem = sinon.stub()
        this.rclient.exec = sinon.stub().callsArgWith(0)
        return this.RedisManager.deleteAppliedDocUpdates(
          this.project_id,
          this.doc_id,
          this.jsonUpdates,
          this.callback
        )
      })

      it('should delete the first update from redis', function () {
        return this.rclient.lrem
          .calledWith(
            `UncompressedHistoryOps:${this.doc_id}`,
            1,
            this.jsonUpdates[0]
          )
          .should.equal(true)
      })

      it('should delete the second update from redis', function () {
        return this.rclient.lrem
          .calledWith(
            `UncompressedHistoryOps:${this.doc_id}`,
            1,
            this.jsonUpdates[1]
          )
          .should.equal(true)
      })

      it('should delete the doc from the set of docs with history ops', function () {
        return this.rclient.srem
          .calledWith(`DocsWithHistoryOps:${this.project_id}`, this.doc_id)
          .should.equal(true)
      })

      return it('should call the callback', function () {
        return this.callback.called.should.equal(true)
      })
    })
  })

  return describe('getDocIdsWithHistoryOps', function () {
    beforeEach(function () {
      this.doc_ids = ['mock-id-1', 'mock-id-2']
      this.rclient.smembers = sinon.stub().callsArgWith(1, null, this.doc_ids)
      return this.RedisManager.getDocIdsWithHistoryOps(
        this.project_id,
        this.callback
      )
    })

    it('should read the doc_ids from redis', function () {
      return this.rclient.smembers
        .calledWith(`DocsWithHistoryOps:${this.project_id}`)
        .should.equal(true)
    })

    return it('should call the callback with the doc_ids', function () {
      return this.callback.calledWith(null, this.doc_ids).should.equal(true)
    })
  })
})
@@ -0,0 +1,64 @@
/* eslint-disable
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/RestoreManager.js'
const SandboxedModule = require('sandboxed-module')

describe('RestoreManager', function () {
  beforeEach(function () {
    this.RestoreManager = SandboxedModule.require(modulePath, {
      requires: {
        './DocumentUpdaterManager': (this.DocumentUpdaterManager = {}),
        './DiffManager': (this.DiffManager = {}),
      },
    })
    this.callback = sinon.stub()
    this.project_id = 'mock-project-id'
    this.doc_id = 'mock-doc-id'
    this.user_id = 'mock-user-id'
    return (this.version = 42)
  })

  return describe('restoreToBeforeVersion', function () {
    beforeEach(function () {
      this.content = 'mock content'
      this.DocumentUpdaterManager.setDocument = sinon.stub().callsArg(4)
      this.DiffManager.getDocumentBeforeVersion = sinon
        .stub()
        .callsArgWith(3, null, this.content)
      return this.RestoreManager.restoreToBeforeVersion(
        this.project_id,
        this.doc_id,
        this.version,
        this.user_id,
        this.callback
      )
    })

    it('should get the content before the requested version', function () {
      return this.DiffManager.getDocumentBeforeVersion
        .calledWith(this.project_id, this.doc_id, this.version)
        .should.equal(true)
    })

    it('should set the document in the document updater', function () {
      return this.DocumentUpdaterManager.setDocument
        .calledWith(this.project_id, this.doc_id, this.content, this.user_id)
        .should.equal(true)
    })

    return it('should call the callback', function () {
      return this.callback.called.should.equal(true)
    })
  })
})
@@ -0,0 +1,848 @@
/* eslint-disable
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/UpdateCompressor.js'
const SandboxedModule = require('sandboxed-module')

const bigstring = __range__(0, 2 * 1024 * 1024, true)
  .map(i => 'a')
  .join('')
const mediumstring = __range__(0, 1024 * 1024, true)
  .map(j => 'a')
  .join('')
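// bigstring and mediumstring are ~2MB and ~1MB strings of 'a', sized to
// exercise the compressor's pack-size limits. __range__ is the decaffeinate
// range helper (presumably hoisted from a definition later in this file,
// matching the one in PackManagerTests above).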
describe('UpdateCompressor', function () {
|
||||
beforeEach(function () {
|
||||
this.UpdateCompressor = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'../lib/diff_match_patch': require('../../../../app/lib/diff_match_patch'),
|
||||
},
|
||||
})
|
||||
this.user_id = 'user-id-1'
|
||||
this.other_user_id = 'user-id-2'
|
||||
this.ts1 = Date.now()
|
||||
return (this.ts2 = Date.now() + 1000)
|
||||
})
|
||||
|
||||
  describe('convertToSingleOpUpdates', function () {
    it('should split grouped updates into individual updates', function () {
      return expect(
        this.UpdateCompressor.convertToSingleOpUpdates([
          {
            op: [
              (this.op1 = { p: 0, i: 'Foo' }),
              (this.op2 = { p: 6, i: 'bar' }),
            ],
            meta: { ts: this.ts1, user_id: this.user_id },
            v: 42,
          },
          {
            op: [(this.op3 = { p: 10, i: 'baz' })],
            meta: { ts: this.ts2, user_id: this.other_user_id },
            v: 43,
          },
        ])
      ).to.deep.equal([
        {
          op: this.op1,
          meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
          v: 42,
        },
        {
          op: this.op2,
          meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
          v: 42,
        },
        {
          op: this.op3,
          meta: {
            start_ts: this.ts2,
            end_ts: this.ts2,
            user_id: this.other_user_id,
          },
          v: 43,
        },
      ])
    })

    it('should return no-op updates when the op list is empty', function () {
      return expect(
        this.UpdateCompressor.convertToSingleOpUpdates([
          {
            op: [],
            meta: { ts: this.ts1, user_id: this.user_id },
            v: 42,
          },
        ])
      ).to.deep.equal([
        {
          op: this.UpdateCompressor.NOOP,
          meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
          v: 42,
        },
      ])
    })

    return it('should ignore comment ops', function () {
      return expect(
        this.UpdateCompressor.convertToSingleOpUpdates([
          {
            op: [
              (this.op1 = { p: 0, i: 'Foo' }),
              (this.op2 = { p: 9, c: 'baz' }),
              (this.op3 = { p: 6, i: 'bar' }),
            ],
            meta: { ts: this.ts1, user_id: this.user_id },
            v: 42,
          },
        ])
      ).to.deep.equal([
        {
          op: this.op1,
          meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
          v: 42,
        },
        {
          op: this.op3,
          meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
          v: 42,
        },
      ])
    })
  })

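  // concatUpdatesWithSameVersion is the inverse regrouping step: single-op
  // updates sharing a version are gathered back into one update with an op
  // array, and a NOOP becomes an empty array.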
  describe('concatUpdatesWithSameVersion', function () {
    it('should concat updates with the same version', function () {
      return expect(
        this.UpdateCompressor.concatUpdatesWithSameVersion([
          {
            op: (this.op1 = { p: 0, i: 'Foo' }),
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
          {
            op: (this.op2 = { p: 6, i: 'bar' }),
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
          {
            op: (this.op3 = { p: 10, i: 'baz' }),
            meta: {
              start_ts: this.ts2,
              end_ts: this.ts2,
              user_id: this.other_user_id,
            },
            v: 43,
          },
        ])
      ).to.deep.equal([
        {
          op: [this.op1, this.op2],
          meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
          v: 42,
        },
        {
          op: [this.op3],
          meta: {
            start_ts: this.ts2,
            end_ts: this.ts2,
            user_id: this.other_user_id,
          },
          v: 43,
        },
      ])
    })

    return it('should turn a noop into an empty op', function () {
      return expect(
        this.UpdateCompressor.concatUpdatesWithSameVersion([
          {
            op: this.UpdateCompressor.NOOP,
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
        ])
      ).to.deep.equal([
        {
          op: [],
          meta: { start_ts: this.ts1, end_ts: this.ts1, user_id: this.user_id },
          v: 42,
        },
      ])
    })
  })

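  // compressUpdates merges adjacent single-op updates from the same user where
  // the ops are contiguous in the document; a merged update keeps the first
  // op's start_ts, the second op's end_ts, and the later version, as the
  // cases below show.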
  describe('compress', function () {
    describe('insert - insert', function () {
      it('should append one insert to the other', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, i: 'foo' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 6, i: 'bar' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, i: 'foobar' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })

      it('should insert one insert inside the other', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, i: 'foo' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 5, i: 'bar' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, i: 'fobaro' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })

      it('should not append separated inserts', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, i: 'foo' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 9, i: 'bar' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, i: 'foo' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
          {
            op: { p: 9, i: 'bar' },
            meta: {
              start_ts: this.ts2,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })

      it('should not append inserts that are too big (second op)', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, i: 'foo' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 6, i: bigstring },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, i: 'foo' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
          {
            op: { p: 6, i: bigstring },
            meta: {
              start_ts: this.ts2,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })

      it('should not append inserts that are too big (first op)', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, i: bigstring },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 3 + bigstring.length, i: 'bar' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, i: bigstring },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
          {
            op: { p: 3 + bigstring.length, i: 'bar' },
            meta: {
              start_ts: this.ts2,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })

      return it('should not append inserts that are too big (first and second op)', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, i: mediumstring },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 3 + mediumstring.length, i: mediumstring },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, i: mediumstring },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
          {
            op: { p: 3 + mediumstring.length, i: mediumstring },
            meta: {
              start_ts: this.ts2,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })
    })

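    // Note: in the delete - delete cases the second op's position is relative
    // to the document *after* the first delete, which is why two deletes at
    // p: 3 concatenate into one longer delete at p: 3.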
    describe('delete - delete', function () {
      it('should append one delete to the other', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, d: 'foo' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 3, d: 'bar' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, d: 'foobar' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })

      it('should insert one delete inside the other', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, d: 'foo' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 1, d: 'bar' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 1, d: 'bafoor' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })

      return it('should not append separated deletes', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, d: 'foo' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 9, d: 'bar' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, d: 'foo' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
          {
            op: { p: 9, d: 'bar' },
            meta: {
              start_ts: this.ts2,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })
    })

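    // A delete that falls entirely inside a preceding insert is simply
    // subtracted from the inserted text; if the delete reaches beyond the end
    // of the insert, the two updates are left unmerged.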
    describe('insert - delete', function () {
      it('should undo a previous insert', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, i: 'foo' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 5, d: 'o' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, i: 'fo' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })

      it('should remove part of an insert from the middle', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, i: 'fobaro' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 5, d: 'bar' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, i: 'foo' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })

      it('should cancel out two opposite updates', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, i: 'foo' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 3, d: 'foo' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, i: '' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })

      it('should not combine separated updates', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, i: 'foo' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 9, d: 'bar' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, i: 'foo' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
          {
            op: { p: 9, d: 'bar' },
            meta: {
              start_ts: this.ts2,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })

      return it('should not combine updates with overlap beyond the end', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, i: 'foobar' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 6, d: 'bardle' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, i: 'foobar' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
          {
            op: { p: 6, d: 'bardle' },
            meta: {
              start_ts: this.ts2,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })
    })

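    // A delete followed by an insert at the same position is compressed by
    // diffing the two texts (via diff_match_patch, required above), so only
    // the changed fragment survives as a d/i pair sharing the same version.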
    describe('delete - insert', function () {
      it('should do a diff of the content', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, d: 'one two three four five six seven eight' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 3, i: 'one 2 three four five six seven eight' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 7, d: 'two' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
          {
            op: { p: 7, i: '2' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })

      return it('should return a no-op if the delete and insert are the same', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: { p: 3, d: 'one two three four five six seven eight' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 3, i: 'one two three four five six seven eight' },
              meta: { ts: this.ts2, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: { p: 3, i: '' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })
    })

    describe('noop - insert', function () {
      return it('should leave them untouched', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: this.UpdateCompressor.NOOP,
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 6, i: 'bar' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: this.UpdateCompressor.NOOP,
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
          {
            op: { p: 6, i: 'bar' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })
    })

    return describe('noop - delete', function () {
      return it('should leave them untouched', function () {
        return expect(
          this.UpdateCompressor.compressUpdates([
            {
              op: this.UpdateCompressor.NOOP,
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 42,
            },
            {
              op: { p: 6, d: 'bar' },
              meta: { ts: this.ts1, user_id: this.user_id },
              v: 43,
            },
          ])
        ).to.deep.equal([
          {
            op: this.UpdateCompressor.NOOP,
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
          {
            op: { p: 6, d: 'bar' },
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })
    })
  })

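  // compressRawUpdates takes the last previously-stored update plus the new
  // raw updates; the case below checks that an existing update whose op is
  // still an array is passed through untouched rather than merged in place.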
  return describe('compressRawUpdates', function () {
    return describe('merging in-place with an array op', function () {
      return it('should not change the existing last updates', function () {
        return expect(
          this.UpdateCompressor.compressRawUpdates(
            {
              op: [
                { p: 1000, d: 'hello' },
                { p: 1000, i: 'HELLO()' },
              ],
              meta: {
                start_ts: this.ts1,
                end_ts: this.ts1,
                user_id: this.user_id,
              },
              v: 42,
            },
            [
              {
                op: [{ p: 1006, i: 'WORLD' }],
                meta: { ts: this.ts2, user_id: this.user_id },
                v: 43,
              },
            ]
          )
        ).to.deep.equal([
          {
            op: [
              { p: 1000, d: 'hello' },
              { p: 1000, i: 'HELLO()' },
            ],
            meta: {
              start_ts: this.ts1,
              end_ts: this.ts1,
              user_id: this.user_id,
            },
            v: 42,
          },
          {
            op: [{ p: 1006, i: 'WORLD' }],
            meta: {
              start_ts: this.ts2,
              end_ts: this.ts2,
              user_id: this.user_id,
            },
            v: 43,
          },
        ])
      })
    })
  })
})

function __range__(left, right, inclusive) {
  const range = []
  const ascending = left < right
  const end = !inclusive ? right : ascending ? right + 1 : right - 1
  for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) {
    range.push(i)
  }
  return range
}
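// __range__ is decaffeinate's rendering of a CoffeeScript range literal; the
// string fixtures above could equally be written as 'a'.repeat(n + 1).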

@@ -0,0 +1,182 @@
/* eslint-disable
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/UpdateTrimmer.js'
const SandboxedModule = require('sandboxed-module')
const tk = require('timekeeper')

describe('UpdateTrimmer', function () {
  beforeEach(function () {
    this.now = new Date()
    tk.freeze(this.now)

    this.UpdateTrimmer = SandboxedModule.require(modulePath, {
      requires: {
        './WebApiManager': (this.WebApiManager = {}),
        './MongoManager': (this.MongoManager = {}),
      },
    })

    this.callback = sinon.stub()
    return (this.project_id = 'mock-project-id')
  })

  afterEach(function () {
    return tk.reset()
  })

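  // tk.freeze above pins the clock to this.now, so anything that reads the
  // current time during these tests sees a deterministic value.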
  return describe('shouldTrimUpdates', function () {
    beforeEach(function () {
      this.metadata = {}
      this.details = { features: {} }
      this.MongoManager.getProjectMetaData = sinon
        .stub()
        .callsArgWith(1, null, this.metadata)
      this.MongoManager.setProjectMetaData = sinon.stub().callsArgWith(2)
      this.MongoManager.upgradeHistory = sinon.stub().callsArgWith(1)
      return (this.WebApiManager.getProjectDetails = sinon
        .stub()
        .callsArgWith(1, null, this.details))
    })

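    // Decision logic exercised below: preserveHistory in the project metadata
    // short-circuits to "don't trim"; otherwise the web API is consulted, and
    // projects with the versioning feature are upgraded to preserved history,
    // while projects without it have their updates trimmed.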
    describe('with preserveHistory set in the project meta data', function () {
      beforeEach(function () {
        this.metadata.preserveHistory = true
        return this.UpdateTrimmer.shouldTrimUpdates(
          this.project_id,
          this.callback
        )
      })

      it('should look up the meta data', function () {
        return this.MongoManager.getProjectMetaData
          .calledWith(this.project_id)
          .should.equal(true)
      })

      it('should not look up the project details', function () {
        return this.WebApiManager.getProjectDetails.called.should.equal(false)
      })

      return it('should return false', function () {
        return this.callback.calledWith(null, false).should.equal(true)
      })
    })

    describe('without preserveHistory set in the project meta data', function () {
      beforeEach(function () {
        return (this.metadata.preserveHistory = false)
      })

      describe('when the project has the versioning feature', function () {
        beforeEach(function () {
          this.details.features.versioning = true
          return this.UpdateTrimmer.shouldTrimUpdates(
            this.project_id,
            this.callback
          )
        })

        it('should look up the meta data', function () {
          return this.MongoManager.getProjectMetaData
            .calledWith(this.project_id)
            .should.equal(true)
        })

        it('should look up the project details', function () {
          return this.WebApiManager.getProjectDetails
            .calledWith(this.project_id)
            .should.equal(true)
        })

        it('should insert preserveHistory into the metadata', function () {
          return this.MongoManager.setProjectMetaData
            .calledWith(this.project_id, { preserveHistory: true })
            .should.equal(true)
        })

        it('should upgrade any existing history', function () {
          return this.MongoManager.upgradeHistory
            .calledWith(this.project_id)
            .should.equal(true)
        })

        return it('should return false', function () {
          return this.callback.calledWith(null, false).should.equal(true)
        })
      })

      return describe('when the project does not have the versioning feature', function () {
        beforeEach(function () {
          this.details.features.versioning = false
          return this.UpdateTrimmer.shouldTrimUpdates(
            this.project_id,
            this.callback
          )
        })

        return it('should return true', function () {
          return this.callback.calledWith(null, true).should.equal(true)
        })
      })
    })

    return describe('without any meta data', function () {
      beforeEach(function () {
        return (this.MongoManager.getProjectMetaData = sinon
          .stub()
          .callsArgWith(1, null, null))
      })

      describe('when the project has the versioning feature', function () {
        beforeEach(function () {
          this.details.features.versioning = true
          return this.UpdateTrimmer.shouldTrimUpdates(
            this.project_id,
            this.callback
          )
        })

        it('should insert preserveHistory into the metadata', function () {
          return this.MongoManager.setProjectMetaData
            .calledWith(this.project_id, { preserveHistory: true })
            .should.equal(true)
        })

        it('should upgrade any existing history', function () {
          return this.MongoManager.upgradeHistory
            .calledWith(this.project_id)
            .should.equal(true)
        })

        return it('should return false', function () {
          return this.callback.calledWith(null, false).should.equal(true)
        })
      })

      return describe('when the project does not have the versioning feature', function () {
        beforeEach(function () {
          this.details.features.versioning = false
          return this.UpdateTrimmer.shouldTrimUpdates(
            this.project_id,
            this.callback
          )
        })

        return it('should return true', function () {
          return this.callback.calledWith(null, true).should.equal(true)
        })
      })
    })
  })
})

File diff suppressed because it is too large

@@ -0,0 +1,208 @@
/* eslint-disable
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../../app/js/WebApiManager.js'
const SandboxedModule = require('sandboxed-module')

describe('WebApiManager', function () {
  beforeEach(function () {
    this.WebApiManager = SandboxedModule.require(modulePath, {
      requires: {
        requestretry: (this.request = {}),
        '@overleaf/settings': (this.settings = {
          apis: {
            web: {
              url: 'http://example.com',
              user: 'sharelatex',
              pass: 'password',
            },
          },
        }),
      },
    })
    this.callback = sinon.stub()
    this.user_id = 'mock-user-id'
    this.project_id = 'mock-project-id'
    this.user_info = {
      email: 'leo@sharelatex.com',
      id: this.user_id,
      first_name: 'Leo',
      last_name: 'Lion',
      extra_param: 'blah',
    }
    return (this.project = { features: 'mock-features' })
  })

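  // getUserInfo is expected to whitelist the personal_info response down to
  // id, email, first_name and last_name; extra_param above must not leak
  // through to the callback.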
  describe('getUserInfo', function () {
    describe('successfully', function () {
      beforeEach(function () {
        this.body = JSON.stringify(this.user_info)
        this.request.get = sinon
          .stub()
          .callsArgWith(1, null, { statusCode: 200 }, this.body)
        return this.WebApiManager.getUserInfo(this.user_id, this.callback)
      })

      it('should get the user from the web api', function () {
        return this.request.get
          .calledWithMatch({
            url: `${this.settings.apis.web.url}/user/${this.user_id}/personal_info`,
            auth: {
              user: this.settings.apis.web.user,
              pass: this.settings.apis.web.pass,
              sendImmediately: true,
            },
          })
          .should.equal(true)
      })

      return it('should call the callback with only the email, id and names', function () {
        return this.callback
          .calledWith(null, {
            id: this.user_id,
            email: this.user_info.email,
            first_name: this.user_info.first_name,
            last_name: this.user_info.last_name,
          })
          .should.equal(true)
      })
    })

    describe('when the web API returns an error', function () {
      beforeEach(function () {
        this.request.get = sinon
          .stub()
          .callsArgWith(
            1,
            (this.error = new Error('something went wrong')),
            null,
            null
          )
        return this.WebApiManager.getUserInfo(this.user_id, this.callback)
      })

      return it('should return an error to the callback', function () {
        return this.callback.calledWith(this.error).should.equal(true)
      })
    })

    describe('when the web returns a failure error code', function () {
      beforeEach(function () {
        this.request.get = sinon
          .stub()
          .callsArgWith(1, null, { statusCode: 500, attempts: 42 }, '')
        return this.WebApiManager.getUserInfo(this.user_id, this.callback)
      })

      return it('should return the callback with an error', function () {
        return this.callback
          .calledWith(
            sinon.match.has(
              'message',
              'web returned a non-success status code: 500 (attempts: 42)'
            )
          )
          .should.equal(true)
      })
    })

    return describe('when the user cannot be found', function () {
      beforeEach(function () {
        this.request.get = sinon
          .stub()
          .callsArgWith(1, null, { statusCode: 404 }, 'nothing')
        return this.WebApiManager.getUserInfo(this.user_id, this.callback)
      })

      return it('should return a null value', function () {
        return this.callback.calledWith(null, null).should.equal(true)
      })
    })
  })

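  // getProjectDetails parses the JSON body and hands the project back
  // unfiltered; the error handling (request errors, non-success status codes
  // with the requestretry attempt count) mirrors getUserInfo above.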
  return describe('getProjectDetails', function () {
    describe('successfully', function () {
      beforeEach(function () {
        this.body = JSON.stringify(this.project)
        this.request.get = sinon
          .stub()
          .callsArgWith(1, null, { statusCode: 200 }, this.body)
        return this.WebApiManager.getProjectDetails(
          this.project_id,
          this.callback
        )
      })

      it('should get the project from the web api', function () {
        return this.request.get
          .calledWithMatch({
            url: `${this.settings.apis.web.url}/project/${this.project_id}/details`,
            auth: {
              user: this.settings.apis.web.user,
              pass: this.settings.apis.web.pass,
              sendImmediately: true,
            },
          })
          .should.equal(true)
      })

      return it('should call the callback with the project', function () {
        return this.callback.calledWith(null, this.project).should.equal(true)
      })
    })

    describe('when the web API returns an error', function () {
      beforeEach(function () {
        this.request.get = sinon
          .stub()
          .callsArgWith(
            1,
            (this.error = new Error('something went wrong')),
            null,
            null
          )
        return this.WebApiManager.getProjectDetails(
          this.project_id,
          this.callback
        )
      })

      return it('should return an error to the callback', function () {
        return this.callback.calledWith(this.error).should.equal(true)
      })
    })

    return describe('when the web returns a failure error code', function () {
      beforeEach(function () {
        this.request.get = sinon
          .stub()
          .callsArgWith(1, null, { statusCode: 500, attempts: 42 }, '')
        return this.WebApiManager.getProjectDetails(
          this.project_id,
          this.callback
        )
      })

      return it('should return the callback with an error', function () {
        return this.callback
          .calledWith(
            sinon.match.has(
              'message',
              'web returned a non-success status code: 500 (attempts: 42)'
            )
          )
          .should.equal(true)
      })
    })
  })
})