mirror of https://github.com/overleaf/overleaf.git, synced 2024-11-21 20:47:08 -05:00

commit eef39ad009: merge multiple repositories into an existing monorepo

- merged using: 'monorepo_add.sh services-docstore:services/docstore' - see https://github.com/shopsys/monorepo-tools

48 changed files with 14,377 additions and 0 deletions
7  services/docstore/.dockerignore  Normal file
@@ -0,0 +1,7 @@
node_modules/*
gitrev
.git
.gitignore
.npm
.nvmrc
nodemon.json
86  services/docstore/.eslintrc  Normal file
@@ -0,0 +1,86 @@
// this file was auto-generated, do not edit it directly.
// instead run bin/update_build_scripts from
// https://github.com/sharelatex/sharelatex-dev-environment
{
  "extends": [
    "eslint:recommended",
    "standard",
    "prettier"
  ],
  "parserOptions": {
    "ecmaVersion": 2018
  },
  "plugins": [
    "mocha",
    "chai-expect",
    "chai-friendly"
  ],
  "env": {
    "node": true,
    "mocha": true
  },
  "rules": {
    // TODO(das7pad): remove overrides after fixing all the violations manually (https://github.com/overleaf/issues/issues/3882#issuecomment-878999671)
    // START of temporary overrides
    "array-callback-return": "off",
    "no-dupe-else-if": "off",
    "no-var": "off",
    "no-empty": "off",
    "node/handle-callback-err": "off",
    "no-loss-of-precision": "off",
    "node/no-callback-literal": "off",
    "node/no-path-concat": "off",
    "prefer-regex-literals": "off",
    // END of temporary overrides

    // Swap the no-unused-expressions rule with a more chai-friendly one
    "no-unused-expressions": 0,
    "chai-friendly/no-unused-expressions": "error",

    // Do not allow importing of implicit dependencies.
    "import/no-extraneous-dependencies": "error"
  },
  "overrides": [
    {
      // Test specific rules
      "files": ["test/**/*.js"],
      "globals": {
        "expect": true
      },
      "rules": {
        // mocha-specific rules
        "mocha/handle-done-callback": "error",
        "mocha/no-exclusive-tests": "error",
        "mocha/no-global-tests": "error",
        "mocha/no-identical-title": "error",
        "mocha/no-nested-tests": "error",
        "mocha/no-pending-tests": "error",
        "mocha/no-skipped-tests": "error",
        "mocha/no-mocha-arrows": "error",

        // chai-specific rules
        "chai-expect/missing-assertion": "error",
        "chai-expect/terminating-properties": "error",

        // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests.
        // we don't enforce this at the top-level - just in tests to manage `this` scope
        // based on mocha's context mechanism
        "mocha/prefer-arrow-callback": "error"
      }
    },
    {
      // Backend specific rules
      "files": ["app/**/*.js", "app.js", "index.js"],
      "rules": {
        // don't allow console.log in backend code
        "no-console": "error",

        // Do not allow importing of implicit dependencies.
        "import/no-extraneous-dependencies": ["error", {
          // Do not allow importing of devDependencies.
          "devDependencies": false
        }]
      }
    }
  ]
}
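The override near the top of the `rules` block trades the core `no-unused-expressions` rule for `chai-friendly/no-unused-expressions` because chai's `expect` assertions are bare expression statements. A minimal sketch of the pattern involved (the test itself is illustrative, not part of this commit):

```js
const { expect } = require('chai')

describe('no-unused-expressions example', function () {
  it('accepts chai property assertions', function () {
    const doc = { deleted: false }

    // The assertion below is a plain expression statement: the `.false`
    // property getter performs the check. Core eslint `no-unused-expressions`
    // would flag it as a statement with no effect; the chai-friendly variant
    // recognises it as an assertion and allows it.
    expect(doc.deleted).to.be.false
  })
})
```

The `function () {}` callbacks (rather than arrows) match the `mocha/no-mocha-arrows` rule in the test override above, which keeps mocha's `this` context mechanism available.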
38  services/docstore/.github/ISSUE_TEMPLATE.md  vendored  Normal file
@@ -0,0 +1,38 @@
<!-- BUG REPORT TEMPLATE -->

## Steps to Reproduce
<!-- Describe the steps leading up to when / where you found the bug. -->
<!-- Screenshots may be helpful here. -->

1.
2.
3.

## Expected Behaviour
<!-- What should have happened when you completed the steps above? -->

## Observed Behaviour
<!-- What actually happened when you completed the steps above? -->
<!-- Screenshots may be helpful here. -->

## Context
<!-- How has this issue affected you? What were you trying to accomplish? -->

## Technical Info
<!-- Provide any technical details that may be applicable (or N/A if not applicable). -->

* URL:
* Browser Name and version:
* Operating System and version (desktop or mobile):
* Signed in as:
* Project and/or file:

## Analysis
<!--- Optionally, document investigation of / suggest a fix for the bug, e.g. 'comes from this line / commit' -->

## Who Needs to Know?
<!-- If you want to bring this to the attention of particular people, @-mention them below. -->
<!-- If a user reported this bug and should be notified when it is fixed, provide the Front conversation link. -->

-
-
48  services/docstore/.github/PULL_REQUEST_TEMPLATE.md  vendored  Normal file
@@ -0,0 +1,48 @@

<!-- ** This is an Overleaf public repository ** -->

<!-- Please review https://github.com/overleaf/overleaf/blob/master/CONTRIBUTING.md for guidance on what is expected of a contribution. -->

### Description



#### Screenshots



#### Related Issues / PRs



### Review



#### Potential Impact



#### Manual Testing Performed

- [ ]
- [ ]

#### Accessibility



### Deployment



#### Deployment Checklist

- [ ] Update documentation not included in the PR (if any)
- [ ]

#### Metrics and Monitoring



#### Who Needs to Know?
23  services/docstore/.github/dependabot.yml  vendored  Normal file
@@ -0,0 +1,23 @@
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "daily"

    pull-request-branch-name:
      # Separate sections of the branch name with a hyphen
      # Docker images use the branch name and do not support slashes in tags
      # https://github.com/overleaf/google-ops/issues/822
      # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#pull-request-branch-nameseparator
      separator: "-"

    # Block informal upgrades -- security upgrades use a separate queue.
    # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit
    open-pull-requests-limit: 0

    # currently assign team-magma to all dependabot PRs - this may change in
    # future if we reorganise teams
    labels:
      - "dependencies"
      - "type:maintenance"
5  services/docstore/.gitignore  vendored  Normal file
@@ -0,0 +1,5 @@
node_modules
forever

# managed by dev-environment$ bin/update_build_scripts
.npmrc
3  services/docstore/.mocharc.json  Normal file
@@ -0,0 +1,3 @@
{
  "require": "test/setup.js"
}
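The `.mocharc.json` above makes mocha load `test/setup.js` before the suites run. That file is not shown in this diff; a hypothetical sketch of what such a setup file would need to contain to satisfy the `"globals": { "expect": true }` declaration in the `.eslintrc` test override:

```js
// Hypothetical test/setup.js -- not the actual file from this commit.
const chai = require('chai')

// Expose chai's `expect` as a global so tests can use it without requiring
// chai in every file, matching the `expect` global declared in .eslintrc.
global.expect = chai.expect
```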
1  services/docstore/.nvmrc  Normal file
@@ -0,0 +1 @@
12.22.3
11  services/docstore/.prettierrc  Normal file
@@ -0,0 +1,11 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
{
  "arrowParens": "avoid",
  "semi": false,
  "singleQuote": true,
  "trailingComma": "es5",
  "tabWidth": 2,
  "useTabs": false
}
23  services/docstore/Dockerfile  Normal file
@@ -0,0 +1,23 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

FROM node:12.22.3 as base

WORKDIR /app

FROM base as app

#wildcard as some files may not be in all repos
COPY package*.json npm-shrink*.json /app/

RUN npm ci --quiet

COPY . /app

FROM base

COPY --from=app /app /app
USER node

CMD ["node", "--expose-gc", "app.js"]
662  services/docstore/LICENSE  Normal file
@@ -0,0 +1,662 @@
[Unmodified full text of the GNU AFFERO GENERAL PUBLIC LICENSE, Version 3, 19 November 2007, Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>, from the Preamble through "END OF TERMS AND CONDITIONS" and the standard "How to Apply These Terms to Your New Programs" appendix ending with <http://www.gnu.org/licenses/> -- 662 lines.]
90  services/docstore/Makefile  Normal file
@@ -0,0 +1,90 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = docstore
BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]')

DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
	BRANCH_NAME=$(BRANCH_NAME) \
	PROJECT_NAME=$(PROJECT_NAME) \
	MOCHA_GREP=${MOCHA_GREP} \
	docker-compose ${DOCKER_COMPOSE_FLAGS}

DOCKER_COMPOSE_TEST_ACCEPTANCE = \
	COMPOSE_PROJECT_NAME=test_acceptance_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)

DOCKER_COMPOSE_TEST_UNIT = \
	COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)

clean:
	-docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	-docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	-$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local
	-$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local

format:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format

format_fix:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format:fix

lint:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent lint

test: format lint test_unit test_acceptance

test_unit:
ifneq (,$(wildcard test/unit))
	$(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit
	$(MAKE) test_unit_clean
endif

test_clean: test_unit_clean
test_unit_clean:
ifneq (,$(wildcard test/unit))
	$(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0
endif

test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run
	$(MAKE) test_acceptance_clean

test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug
	$(MAKE) test_acceptance_clean

test_acceptance_run:
ifneq (,$(wildcard test/acceptance))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance
endif

test_acceptance_run_debug:
ifneq (,$(wildcard test/acceptance))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk
endif

test_clean: test_acceptance_clean
test_acceptance_clean:
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0

test_acceptance_pre_run:
ifneq (,$(wildcard test/acceptance/js/scripts/pre-run))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
endif

build:
	docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		.

tar:
	$(DOCKER_COMPOSE) up tar

publish:

	docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)


.PHONY: clean test test_unit test_acceptance test_clean build publish
11  services/docstore/README.md  Normal file
@@ -0,0 +1,11 @@
overleaf/docstore
===================

A CRUD API for storing and updating text documents in projects

License
-------

The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file.

Copyright (c) Overleaf, 2014-2019.
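The README's one-line description corresponds to the routes registered in `app.js` below. A hedged sketch of a client call against a locally running docstore (the port and both ids are made up; the ids must match the 24-hex-character pattern the `app.param` validators enforce):

```js
// Hypothetical ids and port; docstore's real host/port come from
// @overleaf/settings at runtime.
const projectId = 'aaaaaaaaaaaaaaaaaaaaaaaa' // must match /^[0-9a-f]{24}$/
const docId = 'bbbbbbbbbbbbbbbbbbbbbbbb'

// Fetch the raw text of a single doc (Node 18+ global fetch).
fetch(`http://localhost:3016/project/${projectId}/doc/${docId}/raw`)
  .then(res => res.text())
  .then(text => console.log(text))
  .catch(err => console.error(err))
```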
123
services/docstore/app.js
Normal file

@ -0,0 +1,123 @@
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Metrics = require('@overleaf/metrics')
Metrics.initialize('docstore')
const Settings = require('@overleaf/settings')
const logger = require('logger-sharelatex')
const express = require('express')
const bodyParser = require('body-parser')
const {
  celebrate: validate,
  Joi,
  errors: handleValidationErrors,
} = require('celebrate')
const mongodb = require('./app/js/mongodb')
const Errors = require('./app/js/Errors')
const HttpController = require('./app/js/HttpController')

logger.initialize('docstore')
if (Metrics.event_loop != null) {
  Metrics.event_loop.monitor(logger)
}

const app = express()

app.use(Metrics.http.monitor(logger))

Metrics.injectMetricsRoute(app)

app.param('project_id', function (req, res, next, projectId) {
  if (projectId != null ? projectId.match(/^[0-9a-f]{24}$/) : undefined) {
    return next()
  } else {
    return next(new Error('invalid project id'))
  }
})

app.param('doc_id', function (req, res, next, docId) {
  if (docId != null ? docId.match(/^[0-9a-f]{24}$/) : undefined) {
    return next()
  } else {
    return next(new Error('invalid doc id'))
  }
})

app.get('/project/:project_id/doc-deleted', HttpController.getAllDeletedDocs)
app.get('/project/:project_id/doc', HttpController.getAllDocs)
app.get('/project/:project_id/ranges', HttpController.getAllRanges)
app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc)
app.get('/project/:project_id/doc/:doc_id/deleted', HttpController.isDocDeleted)
app.get('/project/:project_id/doc/:doc_id/raw', HttpController.getRawDoc)
app.get('/project/:project_id/doc/:doc_id/peek', HttpController.peekDoc)
// Add 64kb overhead for the JSON encoding, and double the size to allow for ranges in the json payload
app.post(
  '/project/:project_id/doc/:doc_id',
  bodyParser.json({ limit: (Settings.max_doc_length + 64 * 1024) * 2 }),
  HttpController.updateDoc
)
app.patch(
  '/project/:project_id/doc/:doc_id',
  bodyParser.json(),
  validate({
    body: {
      deleted: Joi.boolean(),
      name: Joi.string().when('deleted', { is: true, then: Joi.required() }),
      deletedAt: Joi.date().when('deleted', { is: true, then: Joi.required() }),
    },
  }),
  HttpController.patchDoc
)
app.delete('/project/:project_id/doc/:doc_id', (req, res) => {
  res.status(500).send('DELETE-ing a doc is DEPRECATED. PATCH the doc instead.')
})

app.post('/project/:project_id/archive', HttpController.archiveAllDocs)
app.post('/project/:project_id/doc/:doc_id/archive', HttpController.archiveDoc)
app.post('/project/:project_id/unarchive', HttpController.unArchiveAllDocs)
app.post('/project/:project_id/destroy', HttpController.destroyAllDocs)

app.get('/health_check', HttpController.healthCheck)

app.get('/status', (req, res) => res.send('docstore is alive'))

app.use(handleValidationErrors())
app.use(function (error, req, res, next) {
  logger.error({ err: error, req }, 'request errored')
  if (error instanceof Errors.NotFoundError) {
    return res.sendStatus(404)
  } else if (error instanceof Errors.DocModifiedError) {
    return res.sendStatus(409)
  } else {
    return res.status(500).send('Oops, something went wrong')
  }
})

const { port } = Settings.internal.docstore
const { host } = Settings.internal.docstore

if (!module.parent) {
  // Called directly
  mongodb
    .waitForDb()
    .then(() => {
      app.listen(port, host, function (err) {
        if (err) {
          logger.fatal({ err }, `Cannot bind to ${host}:${port}. Exiting.`)
          process.exit(1)
        }
        return logger.info(`Docstore starting up, listening on ${host}:${port}`)
      })
    })
    .catch(err => {
      logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
      process.exit(1)
    })
}

module.exports = app
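The body-parser limit on the update route follows the comment above it. With the default max_doc_length of 2 MB from config/settings.defaults.js, the arithmetic works out as below; this is a worked example of the existing expression, not additional configuration.

// Worked example of the update-doc JSON body limit, using the default setting.
const maxDocLength = 2 * 1024 * 1024 // Settings.max_doc_length default (2 MB)
const jsonOverhead = 64 * 1024 // allowance for the JSON encoding
const limit = (maxDocLength + jsonOverhead) * 2 // doubled to leave room for ranges
console.log(limit) // 4325376 bytes, i.e. ~4.125 MB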
261
services/docstore/app/js/DocArchiveManager.js
Normal file

@ -0,0 +1,261 @@
const { callbackify } = require('util')
const MongoManager = require('./MongoManager').promises
const Errors = require('./Errors')
const logger = require('logger-sharelatex')
const settings = require('@overleaf/settings')
const crypto = require('crypto')
const Streamifier = require('streamifier')
const RangeManager = require('./RangeManager')
const PersistorManager = require('./PersistorManager')
const pMap = require('p-map')

const PARALLEL_JOBS = settings.parallelArchiveJobs
const ARCHIVE_BATCH_SIZE = settings.archiveBatchSize
const UN_ARCHIVE_BATCH_SIZE = settings.unArchiveBatchSize
const DESTROY_BATCH_SIZE = settings.destroyBatchSize
const DESTROY_RETRY_COUNT = settings.destroyRetryCount

module.exports = {
  archiveAllDocs: callbackify(archiveAllDocs),
  archiveDocById: callbackify(archiveDocById),
  archiveDoc: callbackify(archiveDoc),
  unArchiveAllDocs: callbackify(unArchiveAllDocs),
  unarchiveDoc: callbackify(unarchiveDoc),
  destroyAllDocs: callbackify(destroyAllDocs),
  destroyDoc: callbackify(destroyDoc),
  getDoc: callbackify(getDoc),
  promises: {
    archiveAllDocs,
    archiveDocById,
    archiveDoc,
    unArchiveAllDocs,
    unarchiveDoc,
    destroyAllDocs,
    destroyDoc,
    getDoc,
  },
}

async function archiveAllDocs(projectId) {
  while (true) {
    const docs = await MongoManager.getNonArchivedProjectDocs(
      projectId,
      ARCHIVE_BATCH_SIZE
    )
    if (!docs || docs.length === 0) {
      break
    }

    await pMap(docs, doc => archiveDoc(projectId, doc), {
      concurrency: PARALLEL_JOBS,
    })
  }
}

async function archiveDocById(projectId, docId) {
  const doc = await MongoManager.findDoc(projectId, docId, {
    lines: true,
    ranges: true,
    rev: true,
    inS3: true,
  })

  if (!doc) {
    throw new Errors.NotFoundError(
      `Cannot find doc ${docId} in project ${projectId}`
    )
  }

  // TODO(das7pad): consider refactoring MongoManager.findDoc to take a query
  if (doc.inS3) return
  return archiveDoc(projectId, doc)
}

async function archiveDoc(projectId, doc) {
  logger.log(
    { project_id: projectId, doc_id: doc._id },
    'sending doc to persistor'
  )
  const key = `${projectId}/${doc._id}`

  if (doc.lines == null) {
    throw new Error('doc has no lines')
  }

  const json = JSON.stringify({
    lines: doc.lines,
    ranges: doc.ranges,
    schema_v: 1,
  })

  // this should never happen, but protects against memory-corruption errors that
  // have happened in the past
  if (json.indexOf('\u0000') > -1) {
    const error = new Error('null bytes detected')
    logger.err({ err: error, doc }, error.message)
    throw error
  }

  const md5 = crypto.createHash('md5').update(json).digest('hex')
  const stream = Streamifier.createReadStream(json)
  await PersistorManager.sendStream(settings.docstore.bucket, key, stream, {
    sourceMd5: md5,
  })
  await MongoManager.markDocAsArchived(doc._id, doc.rev)
}

async function unArchiveAllDocs(projectId) {
  while (true) {
    let docs
    if (settings.docstore.keepSoftDeletedDocsArchived) {
      docs = await MongoManager.getNonDeletedArchivedProjectDocs(
        projectId,
        UN_ARCHIVE_BATCH_SIZE
      )
    } else {
      docs = await MongoManager.getArchivedProjectDocs(
        projectId,
        UN_ARCHIVE_BATCH_SIZE
      )
    }
    if (!docs || docs.length === 0) {
      break
    }
    await pMap(docs, doc => unarchiveDoc(projectId, doc._id), {
      concurrency: PARALLEL_JOBS,
    })
  }
}

// get the doc from the PersistorManager without storing it in mongo
async function getDoc(projectId, docId) {
  const key = `${projectId}/${docId}`
  const sourceMd5 = await PersistorManager.getObjectMd5Hash(
    settings.docstore.bucket,
    key
  )
  const stream = await PersistorManager.getObjectStream(
    settings.docstore.bucket,
    key
  )
  stream.resume()
  const json = await _streamToString(stream)
  const md5 = crypto.createHash('md5').update(json).digest('hex')
  if (sourceMd5 !== md5) {
    throw new Errors.Md5MismatchError('md5 mismatch when downloading doc', {
      key,
      sourceMd5,
      md5,
    })
  }

  const doc = JSON.parse(json)

  const mongoDoc = {}
  if (doc.schema_v === 1 && doc.lines != null) {
    mongoDoc.lines = doc.lines
    if (doc.ranges != null) {
      mongoDoc.ranges = RangeManager.jsonRangesToMongo(doc.ranges)
    }
  } else if (Array.isArray(doc)) {
    mongoDoc.lines = doc
  } else {
    throw new Error("I don't understand the doc format in s3")
  }

  return mongoDoc
}

// get the doc and unarchive it to mongo
async function unarchiveDoc(projectId, docId) {
  logger.log(
    { project_id: projectId, doc_id: docId },
    'getting doc from persistor'
  )
  const key = `${projectId}/${docId}`
  const originalDoc = await MongoManager.findDoc(projectId, docId, { inS3: 1 })
  if (!originalDoc.inS3) {
    // return if it's not actually in S3 as there's nothing to do
    return
  }
  let mongoDoc
  try {
    mongoDoc = await getDoc(projectId, docId)
  } catch (err) {
    // if we get a 404, we could be in a race and something else has unarchived the doc already
    if (err instanceof Errors.NotFoundError) {
      const doc = await MongoManager.findDoc(projectId, docId, { inS3: 1 })
      if (!doc.inS3) {
        // the doc has been archived while we were looking for it, so no error
        return
      }
    }
    throw err
  }
  await MongoManager.upsertIntoDocCollection(projectId, docId, mongoDoc)
  await PersistorManager.deleteObject(settings.docstore.bucket, key)
}

async function destroyAllDocs(projectId) {
  while (true) {
    const docs = await MongoManager.getProjectsDocs(
      projectId,
      { include_deleted: true, limit: DESTROY_BATCH_SIZE },
      { _id: 1 }
    )
    if (!docs || docs.length === 0) {
      break
    }
    await pMap(docs, doc => destroyDoc(projectId, doc._id), {
      concurrency: PARALLEL_JOBS,
    })
  }
}

async function destroyDoc(projectId, docId) {
  logger.log(
    { project_id: projectId, doc_id: docId },
    'removing doc from mongo and persistor'
  )
  const doc = await MongoManager.findDoc(projectId, docId, {
    inS3: 1,
  })
  if (!doc) {
    throw new Errors.NotFoundError('Doc not found in Mongo')
  }

  if (doc.inS3) {
    await destroyArchiveWithRetry(projectId, docId)
  }
  await MongoManager.destroyDoc(docId)
}

async function destroyArchiveWithRetry(projectId, docId) {
  let attempt = 0
  let lastError
  while (attempt++ <= DESTROY_RETRY_COUNT) {
    try {
      await PersistorManager.deleteObject(
        settings.docstore.bucket,
        `${projectId}/${docId}`
      )
      return
    } catch (err) {
      lastError = err
      logger.warn(
        { projectId, docId, err, attempt },
        'destroying archive failed'
      )
    }
  }
  throw lastError
}

async function _streamToString(stream) {
  const chunks = []
  return new Promise((resolve, reject) => {
    stream.on('data', chunk => chunks.push(chunk))
    stream.on('error', reject)
    stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')))
  })
}
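archiveDoc and getDoc above guard the round trip with an md5 digest computed over the serialized JSON. A minimal sketch of that integrity check in isolation follows; the payload is made up for illustration.

// Sketch of the integrity check used by archiveDoc/getDoc (hypothetical payload).
const crypto = require('crypto')

const json = JSON.stringify({ lines: ['hello world'], ranges: {}, schema_v: 1 })
const sourceMd5 = crypto.createHash('md5').update(json).digest('hex')

// On download, recomputing the digest over the fetched bytes must match,
// otherwise the manager raises Errors.Md5MismatchError.
const downloaded = json // stand-in for the object fetched from the bucket
const md5 = crypto.createHash('md5').update(downloaded).digest('hex')
console.log(md5 === sourceMd5) // true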
378
services/docstore/app/js/DocManager.js
Normal file

@ -0,0 +1,378 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-dupe-keys,
    no-undef,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let DocManager
const MongoManager = require('./MongoManager')
const Errors = require('./Errors')
const logger = require('logger-sharelatex')
const _ = require('underscore')
const DocArchive = require('./DocArchiveManager')
const RangeManager = require('./RangeManager')
const Settings = require('@overleaf/settings')

module.exports = DocManager = {
  // TODO: For historical reasons, the doc version is currently stored in the docOps
  // collection (which is all that this collection contains). In future, we should
  // migrate this version property to be part of the docs collection, to guarantee
  // consistency between lines and version when writing/reading, and for a simpler schema.
  _getDoc(project_id, doc_id, filter, callback) {
    if (filter == null) {
      filter = {}
    }
    if (callback == null) {
      callback = function (error, doc) {}
    }
    if (filter.inS3 !== true) {
      return callback('must include inS3 when getting doc')
    }

    return MongoManager.findDoc(
      project_id,
      doc_id,
      filter,
      function (err, doc) {
        if (err != null) {
          return callback(err)
        } else if (doc == null) {
          return callback(
            new Errors.NotFoundError(
              `No such doc: ${doc_id} in project ${project_id}`
            )
          )
        } else if (doc != null ? doc.inS3 : undefined) {
          return DocArchive.unarchiveDoc(project_id, doc_id, function (err) {
            if (err != null) {
              logger.err({ err, project_id, doc_id }, 'error unarchiving doc')
              return callback(err)
            }
            return DocManager._getDoc(project_id, doc_id, filter, callback)
          })
        } else {
          if (filter.version) {
            return MongoManager.getDocVersion(
              doc_id,
              function (error, version) {
                if (error != null) {
                  return callback(error)
                }
                doc.version = version
                return callback(err, doc)
              }
            )
          } else {
            return callback(err, doc)
          }
        }
      }
    )
  },

  isDocDeleted(projectId, docId, callback) {
    MongoManager.findDoc(
      projectId,
      docId,
      { deleted: true },
      function (err, doc) {
        if (err) {
          return callback(err)
        }
        if (!doc) {
          return callback(
            new Errors.NotFoundError(
              `No such project/doc: ${projectId}/${docId}`
            )
          )
        }
        // `doc.deleted` is `undefined` for non deleted docs
        callback(null, Boolean(doc.deleted))
      }
    )
  },

  getFullDoc(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (err, doc) {}
    }
    return DocManager._getDoc(
      project_id,
      doc_id,
      {
        lines: true,
        rev: true,
        deleted: true,
        version: true,
        ranges: true,
        inS3: true,
      },
      function (err, doc) {
        if (err != null) {
          return callback(err)
        }
        return callback(err, doc)
      }
    )
  },

  // returns the doc without any version information
  _peekRawDoc(project_id, doc_id, callback) {
    MongoManager.findDoc(
      project_id,
      doc_id,
      {
        lines: true,
        rev: true,
        deleted: true,
        version: true,
        ranges: true,
        inS3: true,
      },
      (err, doc) => {
        if (err) return callback(err)
        if (doc == null) {
          return callback(
            new Errors.NotFoundError(
              `No such doc: ${doc_id} in project ${project_id}`
            )
          )
        }
        if (doc && !doc.inS3) {
          return callback(null, doc)
        }
        // skip the unarchiving to mongo when getting a doc
        DocArchive.getDoc(project_id, doc_id, function (err, archivedDoc) {
          if (err != null) {
            logger.err(
              { err, project_id, doc_id },
              'error getting doc from archive'
            )
            return callback(err)
          }
          doc = _.extend(doc, archivedDoc)
          callback(null, doc)
        })
      }
    )
  },

  // get the doc from mongo if possible, or from the persistent store otherwise,
  // without unarchiving it (avoids unnecessary writes to mongo)
  peekDoc(project_id, doc_id, callback) {
    DocManager._peekRawDoc(project_id, doc_id, (err, doc) => {
      if (err) {
        return callback(err)
      }
      MongoManager.withRevCheck(
        doc,
        MongoManager.getDocVersion,
        function (error, version) {
          // If the doc has been modified while we were retrieving it, we
          // will get a DocModified error
          if (error != null) {
            return callback(error)
          }
          doc.version = version
          return callback(err, doc)
        }
      )
    })
  },

  getDocLines(project_id, doc_id, callback) {
    if (callback == null) {
      callback = function (err, doc) {}
    }
    return DocManager._getDoc(
      project_id,
      doc_id,
      { lines: true, inS3: true },
      function (err, doc) {
        if (err != null) {
          return callback(err)
        }
        return callback(err, doc)
      }
    )
  },

  getAllDeletedDocs(project_id, filter, callback) {
    MongoManager.getProjectsDeletedDocs(project_id, filter, callback)
  },

  getAllNonDeletedDocs(project_id, filter, callback) {
    if (callback == null) {
      callback = function (error, docs) {}
    }
    return DocArchive.unArchiveAllDocs(project_id, function (error) {
      if (error != null) {
        return callback(error)
      }
      return MongoManager.getProjectsDocs(
        project_id,
        { include_deleted: false },
        filter,
        function (error, docs) {
          if (error != null) {
            return callback(error)
          } else if (docs == null) {
            return callback(
              new Errors.NotFoundError(`No docs for project ${project_id}`)
            )
          } else {
            return callback(null, docs)
          }
        }
      )
    })
  },

  updateDoc(project_id, doc_id, lines, version, ranges, callback) {
    if (callback == null) {
      callback = function (error, modified, rev) {}
    }
    if (lines == null || version == null || ranges == null) {
      return callback(new Error('no lines, version or ranges provided'))
    }

    return DocManager._getDoc(
      project_id,
      doc_id,
      {
        version: true,
        rev: true,
        lines: true,
        ranges: true,
        inS3: true,
      },
      function (err, doc) {
        let updateLines, updateRanges, updateVersion
        if (err != null && !(err instanceof Errors.NotFoundError)) {
          logger.err(
            { project_id, doc_id, err },
            'error getting document for update'
          )
          return callback(err)
        }

        ranges = RangeManager.jsonRangesToMongo(ranges)

        if (doc == null) {
          // If the document doesn't exist, we'll make sure to create/update all parts of it.
          updateLines = true
          updateVersion = true
          updateRanges = true
        } else {
          updateLines = !_.isEqual(doc.lines, lines)
          updateVersion = doc.version !== version
          updateRanges = RangeManager.shouldUpdateRanges(doc.ranges, ranges)
        }

        let modified = false
        let rev = (doc != null ? doc.rev : undefined) || 0

        const updateLinesAndRangesIfNeeded = function (cb) {
          if (updateLines || updateRanges) {
            const update = {}
            if (updateLines) {
              update.lines = lines
            }
            if (updateRanges) {
              update.ranges = ranges
            }
            logger.log({ project_id, doc_id }, 'updating doc lines and ranges')

            modified = true
            rev += 1 // rev will be incremented in mongo by MongoManager.upsertIntoDocCollection
            return MongoManager.upsertIntoDocCollection(
              project_id,
              doc_id,
              update,
              cb
            )
          } else {
            logger.log(
              { project_id, doc_id },
              'doc lines have not changed - not updating'
            )
            return cb()
          }
        }

        const updateVersionIfNeeded = function (cb) {
          if (updateVersion) {
            logger.log(
              {
                project_id,
                doc_id,
                oldVersion: doc != null ? doc.version : undefined,
                newVersion: version,
              },
              'updating doc version'
            )
            modified = true
            return MongoManager.setDocVersion(doc_id, version, cb)
          } else {
            logger.log(
              { project_id, doc_id, version },
              'doc version has not changed - not updating'
            )
            return cb()
          }
        }

        return updateLinesAndRangesIfNeeded(function (error) {
          if (error != null) {
            return callback(error)
          }
          return updateVersionIfNeeded(function (error) {
            if (error != null) {
              return callback(error)
            }
            return callback(null, modified, rev)
          })
        })
      }
    )
  },

  patchDoc(project_id, doc_id, meta, callback) {
    const projection = { _id: 1, deleted: true }
    MongoManager.findDoc(project_id, doc_id, projection, (error, doc) => {
      if (error != null) {
        return callback(error)
      }
      if (!doc) {
        return callback(
          new Errors.NotFoundError(
            `No such project/doc to delete: ${project_id}/${doc_id}`
          )
        )
      }

      if (meta.deleted && Settings.docstore.archiveOnSoftDelete) {
        // The user will not read this doc anytime soon. Flush it out of mongo.
        DocArchive.archiveDocById(project_id, doc_id, err => {
          if (err) {
            logger.warn(
              { project_id, doc_id, err },
              'archiving a single doc in the background failed'
            )
          }
        })
      }

      MongoManager.patchDoc(project_id, doc_id, meta, callback)
    })
  },
}
13
services/docstore/app/js/Errors.js
Normal file

@ -0,0 +1,13 @@
// import Errors from object-persistor to pass instanceof checks
const OError = require('@overleaf/o-error')
const { Errors } = require('@overleaf/object-persistor')

class Md5MismatchError extends OError {}

class DocModifiedError extends OError {}

module.exports = {
  Md5MismatchError,
  DocModifiedError,
  ...Errors,
}
67
services/docstore/app/js/HealthChecker.js
Normal file

@ -0,0 +1,67 @@
/* eslint-disable
    camelcase,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const { db, ObjectId } = require('./mongodb')
const request = require('request')
const async = require('async')
const _ = require('underscore')
const crypto = require('crypto')
const settings = require('@overleaf/settings')
const { port } = settings.internal.docstore
const logger = require('logger-sharelatex')

module.exports = {
  check(callback) {
    const doc_id = ObjectId()
    const project_id = ObjectId(settings.docstore.healthCheck.project_id)
    const url = `http://localhost:${port}/project/${project_id}/doc/${doc_id}`
    const lines = [
      'smoke test - delete me',
      `${crypto.randomBytes(32).toString('hex')}`,
    ]
    const getOpts = () => ({
      url,
      timeout: 3000,
    })
    logger.log({ lines, url, doc_id, project_id }, 'running health check')
    const jobs = [
      function (cb) {
        const opts = getOpts()
        opts.json = { lines, version: 42, ranges: {} }
        return request.post(opts, cb)
      },
      function (cb) {
        const opts = getOpts()
        opts.json = true
        return request.get(opts, function (err, res, body) {
          if (err != null) {
            logger.err({ err }, 'docstore returned an error in health check get')
            return cb(err)
          } else if (res == null) {
            return cb('no response from docstore with get check')
          } else if ((res != null ? res.statusCode : undefined) !== 200) {
            return cb(`status code not 200, it's ${res.statusCode}`)
          } else if (
            _.isEqual(body != null ? body.lines : undefined, lines) &&
            (body != null ? body._id : undefined) === doc_id.toString()
          ) {
            return cb()
          } else {
            return cb(`health check lines not equal ${body.lines} != ${lines}`)
          }
        })
      },
      cb => db.docs.deleteOne({ _id: doc_id, project_id }, cb),
      cb => db.docOps.deleteOne({ doc_id }, cb),
    ]
    return async.series(jobs, callback)
  },
}
326
services/docstore/app/js/HttpController.js
Normal file

@ -0,0 +1,326 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    valid-typeof,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let HttpController
const DocManager = require('./DocManager')
const logger = require('logger-sharelatex')
const DocArchive = require('./DocArchiveManager')
const HealthChecker = require('./HealthChecker')
const Settings = require('@overleaf/settings')

module.exports = HttpController = {
  getDoc(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    const { doc_id } = req.params
    const include_deleted =
      (req.query != null ? req.query.include_deleted : undefined) === 'true'
    logger.log({ project_id, doc_id }, 'getting doc')
    return DocManager.getFullDoc(project_id, doc_id, function (error, doc) {
      if (error != null) {
        return next(error)
      }
      logger.log({ doc_id, project_id }, 'got doc')
      if (doc == null) {
        return res.sendStatus(404)
      } else if (doc.deleted && !include_deleted) {
        return res.sendStatus(404)
      } else {
        return res.json(HttpController._buildDocView(doc))
      }
    })
  },

  peekDoc(req, res, next) {
    const { project_id } = req.params
    const { doc_id } = req.params
    logger.log({ project_id, doc_id }, 'peeking doc')
    DocManager.peekDoc(project_id, doc_id, function (error, doc) {
      if (error) {
        return next(error)
      }
      if (doc == null) {
        return res.sendStatus(404)
      } else {
        res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active')
        return res.json(HttpController._buildDocView(doc))
      }
    })
  },

  isDocDeleted(req, res, next) {
    const { doc_id: docId, project_id: projectId } = req.params
    DocManager.isDocDeleted(projectId, docId, function (error, deleted) {
      if (error) {
        return next(error)
      }
      res.json({ deleted })
    })
  },

  getRawDoc(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    const { doc_id } = req.params
    logger.log({ project_id, doc_id }, 'getting raw doc')
    return DocManager.getDocLines(project_id, doc_id, function (error, doc) {
      if (error != null) {
        return next(error)
      }
      if (doc == null) {
        return res.sendStatus(404)
      } else {
        res.setHeader('content-type', 'text/plain')
        return res.send(HttpController._buildRawDocView(doc))
      }
    })
  },

  getAllDocs(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    logger.log({ project_id }, 'getting all docs')
    return DocManager.getAllNonDeletedDocs(
      project_id,
      { lines: true, rev: true },
      function (error, docs) {
        if (docs == null) {
          docs = []
        }
        if (error != null) {
          return next(error)
        }
        return res.json(HttpController._buildDocsArrayView(project_id, docs))
      }
    )
  },

  getAllDeletedDocs(req, res, next) {
    const { project_id } = req.params
    logger.log({ project_id }, 'getting all deleted docs')
    DocManager.getAllDeletedDocs(
      project_id,
      { name: true },
      function (error, docs) {
        if (error) {
          return next(error)
        }
        res.json(
          docs.map(doc => {
            return { _id: doc._id.toString(), name: doc.name }
          })
        )
      }
    )
  },

  getAllRanges(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    logger.log({ project_id }, 'getting all ranges')
    return DocManager.getAllNonDeletedDocs(
      project_id,
      { ranges: true },
      function (error, docs) {
        if (docs == null) {
          docs = []
        }
        if (error != null) {
          return next(error)
        }
        return res.json(HttpController._buildDocsArrayView(project_id, docs))
      }
    )
  },

  updateDoc(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    const { doc_id } = req.params
    const lines = req.body != null ? req.body.lines : undefined
    const version = req.body != null ? req.body.version : undefined
    const ranges = req.body != null ? req.body.ranges : undefined

    if (lines == null || !(lines instanceof Array)) {
      logger.error({ project_id, doc_id }, 'no doc lines provided')
      res.sendStatus(400) // Bad Request
      return
    }

    if (version == null || typeof version !== 'number') {
      logger.error({ project_id, doc_id }, 'no doc version provided')
      res.sendStatus(400) // Bad Request
      return
    }

    if (ranges == null) {
      logger.error({ project_id, doc_id }, 'no doc ranges provided')
      res.sendStatus(400) // Bad Request
      return
    }

    const bodyLength = lines.reduce((len, line) => line.length + len, 0)
    if (bodyLength > Settings.max_doc_length) {
      logger.error(
        { project_id, doc_id, bodyLength },
        'document body too large'
      )
      res.status(413).send('document body too large')
      return
    }

    logger.log({ project_id, doc_id }, 'got http request to update doc')
    return DocManager.updateDoc(
      project_id,
      doc_id,
      lines,
      version,
      ranges,
      function (error, modified, rev) {
        if (error != null) {
          return next(error)
        }
        return res.json({
          modified,
          rev,
        })
      }
    )
  },

  patchDoc(req, res, next) {
    const { project_id, doc_id } = req.params
    logger.log({ project_id, doc_id }, 'patching doc')

    const allowedFields = ['deleted', 'deletedAt', 'name']
    const meta = {}
    Object.entries(req.body).forEach(([field, value]) => {
      if (allowedFields.includes(field)) {
        meta[field] = value
      } else {
        logger.fatal({ field }, 'joi validation for patchDoc is broken')
      }
    })
    DocManager.patchDoc(project_id, doc_id, meta, function (error) {
      if (error) {
        return next(error)
      }
      res.sendStatus(204)
    })
  },

  _buildDocView(doc) {
    const doc_view = { _id: doc._id != null ? doc._id.toString() : undefined }
    for (const attribute of ['lines', 'rev', 'version', 'ranges', 'deleted']) {
      if (doc[attribute] != null) {
        doc_view[attribute] = doc[attribute]
      }
    }
    return doc_view
  },

  _buildRawDocView(doc) {
    return ((doc != null ? doc.lines : undefined) || []).join('\n')
  },

  _buildDocsArrayView(project_id, docs) {
    const docViews = []
    for (const doc of Array.from(docs)) {
      if (doc != null) {
        // There can end up being null docs for some reason :( (probably a race condition)
        docViews.push(HttpController._buildDocView(doc))
      } else {
        logger.error(
          { err: new Error('null doc'), project_id },
          'encountered null doc'
        )
      }
    }
    return docViews
  },

  archiveAllDocs(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    logger.log({ project_id }, 'archiving all docs')
    return DocArchive.archiveAllDocs(project_id, function (error) {
      if (error != null) {
        return next(error)
      }
      return res.sendStatus(204)
    })
  },

  archiveDoc(req, res, next) {
    const { project_id, doc_id } = req.params
    logger.log({ project_id, doc_id }, 'archiving a doc')
    DocArchive.archiveDocById(project_id, doc_id, function (error) {
      if (error) {
        return next(error)
      }
      res.sendStatus(204)
    })
  },

  unArchiveAllDocs(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    logger.log({ project_id }, 'unarchiving all docs')
    return DocArchive.unArchiveAllDocs(project_id, function (error) {
      if (error != null) {
        return next(error)
      }
      return res.sendStatus(200)
    })
  },

  destroyAllDocs(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { project_id } = req.params
    logger.log({ project_id }, 'destroying all docs')
    return DocArchive.destroyAllDocs(project_id, function (error) {
      if (error != null) {
        return next(error)
      }
      return res.sendStatus(204)
    })
  },

  healthCheck(req, res) {
    return HealthChecker.check(function (err) {
      if (err != null) {
        logger.err({ err }, 'error performing health check')
        return res.sendStatus(500)
      } else {
        return res.sendStatus(200)
      }
    })
  },
}
247
services/docstore/app/js/MongoManager.js
Normal file

@ -0,0 +1,247 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let MongoManager
const { db, ObjectId } = require('./mongodb')
const logger = require('logger-sharelatex')
const metrics = require('@overleaf/metrics')
const Settings = require('@overleaf/settings')
const Errors = require('./Errors')
const { promisify } = require('util')

module.exports = MongoManager = {
  findDoc(project_id, doc_id, filter, callback) {
    if (callback == null) {
      callback = function (error, doc) {}
    }
    db.docs.findOne(
      {
        _id: ObjectId(doc_id.toString()),
        project_id: ObjectId(project_id.toString()),
      },
      {
        projection: filter,
      },
      callback
    )
  },

  getProjectsDeletedDocs(project_id, filter, callback) {
    db.docs
      .find(
        {
          project_id: ObjectId(project_id.toString()),
          deleted: true,
        },
        {
          projection: filter,
          sort: { deletedAt: -1 },
          limit: Settings.max_deleted_docs,
        }
      )
      .toArray(callback)
  },

  getProjectsDocs(project_id, options, filter, callback) {
    const query = { project_id: ObjectId(project_id.toString()) }
    if (!options.include_deleted) {
      query.deleted = { $ne: true }
    }
    const queryOptions = {
      projection: filter,
    }
    if (options.limit) {
      queryOptions.limit = options.limit
    }
    db.docs.find(query, queryOptions).toArray(callback)
  },

  getArchivedProjectDocs(project_id, maxResults, callback) {
    const query = {
      project_id: ObjectId(project_id.toString()),
      inS3: true,
    }
    db.docs
      .find(query, { projection: { _id: 1 }, limit: maxResults })
      .toArray(callback)
  },

  getNonArchivedProjectDocs(project_id, maxResults, callback) {
    const query = {
      project_id: ObjectId(project_id.toString()),
      inS3: { $ne: true },
    }
    db.docs.find(query, { limit: maxResults }).toArray(callback)
  },

  getNonDeletedArchivedProjectDocs(project_id, maxResults, callback) {
    const query = {
      project_id: ObjectId(project_id.toString()),
      deleted: { $ne: true },
      inS3: true,
    }
    db.docs
      .find(query, { projection: { _id: 1 }, limit: maxResults })
      .toArray(callback)
  },

  upsertIntoDocCollection(project_id, doc_id, updates, callback) {
    const update = {
      $set: updates,
      $inc: {
        rev: 1,
      },
      $unset: {
        inS3: true,
      },
    }
    update.$set.project_id = ObjectId(project_id)
    db.docs.updateOne(
      { _id: ObjectId(doc_id) },
      update,
      { upsert: true },
      callback
    )
  },

  patchDoc(project_id, doc_id, meta, callback) {
    db.docs.updateOne(
      {
        _id: ObjectId(doc_id),
        project_id: ObjectId(project_id),
      },
      { $set: meta },
      callback
    )
  },

  markDocAsArchived(doc_id, rev, callback) {
    const update = {
      $set: {},
      $unset: {},
    }
    update.$set.inS3 = true
    update.$unset.lines = true
    update.$unset.ranges = true
    const query = {
      _id: doc_id,
      rev,
    }
    db.docs.updateOne(query, update, callback)
  },

  getDocVersion(doc_id, callback) {
    if (callback == null) {
      callback = function (error, version) {}
    }
    db.docOps.findOne(
      {
        doc_id: ObjectId(doc_id),
      },
      {
        projection: {
          version: 1,
        },
      },
      function (error, doc) {
        if (error != null) {
          return callback(error)
        }
        callback(null, (doc && doc.version) || 0)
      }
    )
  },

  setDocVersion(doc_id, version, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    db.docOps.updateOne(
      {
        doc_id: ObjectId(doc_id),
      },
      {
        $set: { version },
      },
      {
        upsert: true,
      },
      callback
    )
  },

  getDocRev(doc_id, callback) {
    db.docs.findOne(
      {
        _id: ObjectId(doc_id.toString()),
      },
      {
        projection: { rev: 1 },
      },
      function (err, doc) {
        if (err != null) {
          return callback(err)
        }
        callback(null, doc && doc.rev)
      }
    )
  },

  // Helper method to support optimistic locking. Call the provided method for
  // an existing doc and return the result if the rev in mongo is unchanged when
  // checked afterwards. If the rev has changed, return a DocModifiedError.
  withRevCheck(doc, method, callback) {
    method(doc._id, function (err, result) {
      if (err) return callback(err)
      MongoManager.getDocRev(doc._id, function (err, currentRev) {
        if (err) return callback(err)
        if (doc.rev !== currentRev) {
          return callback(
            new Errors.DocModifiedError('doc rev has changed', {
              doc_id: doc._id,
              rev: doc.rev,
              currentRev,
            })
          )
        }
        return callback(null, result)
      })
    })
  },

  destroyDoc(doc_id, callback) {
    db.docs.deleteOne(
      {
        _id: ObjectId(doc_id),
      },
      function (err) {
        if (err != null) {
          return callback(err)
        }
        db.docOps.deleteOne(
          {
            doc_id: ObjectId(doc_id),
          },
          callback
        )
      }
    )
  },
}

const methods = Object.getOwnPropertyNames(MongoManager)

module.exports.promises = {}
for (const method of methods) {
  metrics.timeAsyncMethod(MongoManager, method, 'mongo.MongoManager', logger)
  module.exports.promises[method] = promisify(module.exports[method])
}
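The withRevCheck comment above describes an optimistic-locking pattern: perform the read, then re-check the doc's rev afterwards. A minimal sketch of calling it follows; the doc id and rev are hypothetical, standing in for values read from db.docs.

// Hypothetical caller of the optimistic-locking helper above.
const { ObjectId } = require('./mongodb')
const MongoManager = require('./MongoManager')

// rev as it was read alongside the doc (made-up values for illustration)
const doc = { _id: ObjectId('cccccccccccccccccccccccc'), rev: 3 }

MongoManager.withRevCheck(doc, MongoManager.getDocVersion, (err, version) => {
  if (err) {
    // a DocModifiedError here means the rev changed between read and check
    return console.error(err)
  }
  console.log('version read consistently at rev 3:', version)
})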
9
services/docstore/app/js/PersistorManager.js
Normal file

@ -0,0 +1,9 @@
const settings = require('@overleaf/settings')

const persistorSettings = settings.docstore
persistorSettings.Metrics = require('@overleaf/metrics')

const ObjectPersistor = require('@overleaf/object-persistor')
const persistor = ObjectPersistor(persistorSettings)

module.exports = persistor
69
services/docstore/app/js/RangeManager.js
Normal file

@ -0,0 +1,69 @@
/* eslint-disable
    camelcase,
    no-return-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let RangeManager
const _ = require('underscore')
const { ObjectId } = require('./mongodb')

module.exports = RangeManager = {
  shouldUpdateRanges(doc_ranges, incoming_ranges) {
    if (incoming_ranges == null) {
      throw new Error('expected incoming_ranges')
    }

    // If the ranges are empty, we don't store them in the DB, so set
    // doc_ranges to an empty object as default, since this is what the
    // incoming_ranges will be for an empty range set.
    if (doc_ranges == null) {
      doc_ranges = {}
    }

    return !_.isEqual(doc_ranges, incoming_ranges)
  },

  jsonRangesToMongo(ranges) {
    if (ranges == null) {
      return null
    }

    const updateMetadata = function (metadata) {
      if ((metadata != null ? metadata.ts : undefined) != null) {
        metadata.ts = new Date(metadata.ts)
      }
      if ((metadata != null ? metadata.user_id : undefined) != null) {
        return (metadata.user_id = RangeManager._safeObjectId(metadata.user_id))
      }
    }

    for (const change of Array.from(ranges.changes || [])) {
      change.id = RangeManager._safeObjectId(change.id)
      updateMetadata(change.metadata)
    }
    for (const comment of Array.from(ranges.comments || [])) {
      comment.id = RangeManager._safeObjectId(comment.id)
      if ((comment.op != null ? comment.op.t : undefined) != null) {
        comment.op.t = RangeManager._safeObjectId(comment.op.t)
      }
      updateMetadata(comment.metadata)
    }
    return ranges
  },

  _safeObjectId(data) {
    try {
      return ObjectId(data)
    } catch (error) {
      return data
    }
  },
}
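To illustrate what jsonRangesToMongo does in practice: string ids become ObjectIds and timestamp strings become Dates. The ranges payload below is made up purely for illustration.

// Hypothetical ranges payload as it might arrive in a JSON request body.
const RangeManager = require('./RangeManager')

const ranges = RangeManager.jsonRangesToMongo({
  changes: [
    {
      id: 'dddddddddddddddddddddddd', // converted to an ObjectId
      metadata: {
        user_id: 'eeeeeeeeeeeeeeeeeeeeeeee', // also converted
        ts: '2021-01-01T00:00:00Z', // converted to a Date
      },
    },
  ],
  comments: [],
})
console.log(typeof ranges.changes[0].id) // 'object' (ObjectId), was 'string'
console.log(ranges.changes[0].metadata.ts instanceof Date) // true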
36
services/docstore/app/js/mongodb.js
Normal file

@ -0,0 +1,36 @@
const Settings = require('@overleaf/settings')
const { MongoClient, ObjectId } = require('mongodb')

const clientPromise = MongoClient.connect(
  Settings.mongo.url,
  Settings.mongo.options
)

let setupDbPromise
async function waitForDb() {
  if (!setupDbPromise) {
    setupDbPromise = setupDb()
  }
  await setupDbPromise
}

const db = {}
async function setupDb() {
  const internalDb = (await clientPromise).db()

  db.docs = internalDb.collection('docs')
  db.docOps = internalDb.collection('docOps')
}

async function addCollection(name) {
  await waitForDb()
  const internalDb = (await clientPromise).db()

  db[name] = internalDb.collection(name)
}

module.exports = {
  db,
  ObjectId,
  addCollection,
  waitForDb,
}
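The module above exposes a lazily-initialized connection: callers must await waitForDb() before touching the db handles, which is exactly what app.js does at startup. A minimal sketch of consuming it (the query itself is illustrative):

// Illustrative consumer of the shared mongodb module.
const { db, waitForDb, ObjectId } = require('./mongodb')

async function countDocsInProject(projectId) {
  await waitForDb() // ensures db.docs/db.docOps are populated exactly once
  return db.docs.countDocuments({ project_id: ObjectId(projectId) })
}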
8
services/docstore/buildscript.txt
Normal file

@ -0,0 +1,8 @@
docstore
--dependencies=mongo,gcs
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=
--node-version=12.22.3
--public-repo=True
--script-version=3.11.0
88
services/docstore/config/settings.defaults.js
Normal file

@ -0,0 +1,88 @@
const http = require('http')
http.globalAgent.maxSockets = 300

const Settings = {
  internal: {
    docstore: {
      port: 3016,
      host: process.env.LISTEN_ADDRESS || 'localhost',
    },
  },

  mongo: {
    options: {
      useUnifiedTopology:
        (process.env.MONGO_USE_UNIFIED_TOPOLOGY || 'true') === 'true',
    },
  },

  docstore: {
    archiveOnSoftDelete: process.env.ARCHIVE_ON_SOFT_DELETE === 'true',
    keepSoftDeletedDocsArchived:
      process.env.KEEP_SOFT_DELETED_DOCS_ARCHIVED === 'true',

    backend: process.env.BACKEND || 's3',
    healthCheck: {
      project_id: process.env.HEALTH_CHECK_PROJECT_ID,
    },
    bucket: process.env.BUCKET_NAME || process.env.AWS_BUCKET || 'bucket',
    gcs: {
      unlockBeforeDelete: process.env.GCS_UNLOCK_BEFORE_DELETE === 'true',
      deletedBucketSuffix: process.env.GCS_DELETED_BUCKET_SUFFIX,
      deleteConcurrency: parseInt(process.env.GCS_DELETE_CONCURRENCY) || 50,
    },
  },

  max_deleted_docs: parseInt(process.env.MAX_DELETED_DOCS, 10) || 2000,

  max_doc_length: parseInt(process.env.MAX_DOC_LENGTH) || 2 * 1024 * 1024, // 2mb

  archiveBatchSize: parseInt(process.env.ARCHIVE_BATCH_SIZE, 10) || 50,
  unArchiveBatchSize: parseInt(process.env.UN_ARCHIVE_BATCH_SIZE, 10) || 50,
  destroyBatchSize: parseInt(process.env.DESTROY_BATCH_SIZE, 10) || 2000,
  destroyRetryCount: parseInt(process.env.DESTROY_RETRY_COUNT || '3', 10),
  parallelArchiveJobs: parseInt(process.env.PARALLEL_ARCHIVE_JOBS, 10) || 5,
}

if (process.env.MONGO_CONNECTION_STRING) {
  Settings.mongo.url = process.env.MONGO_CONNECTION_STRING
} else if (process.env.MONGO_HOST) {
  Settings.mongo.url = `mongodb://${process.env.MONGO_HOST}/sharelatex`
} else {
  Settings.mongo.url = 'mongodb://127.0.0.1/sharelatex'
}

if (
  process.env.AWS_ACCESS_KEY_ID &&
  process.env.AWS_SECRET_ACCESS_KEY &&
  process.env.AWS_BUCKET
) {
  Settings.docstore.s3 = {
    key: process.env.AWS_ACCESS_KEY_ID,
    secret: process.env.AWS_SECRET_ACCESS_KEY,
    bucket: process.env.AWS_BUCKET,
    endpoint: process.env.AWS_S3_ENDPOINT,
    pathStyle: process.env.AWS_S3_PATH_STYLE,
    partSize: parseInt(process.env.AWS_S3_PARTSIZE) || 100 * 1024 * 1024,
  }
}

if (process.env.GCS_API_ENDPOINT) {
  Settings.docstore.gcs.endpoint = {
    apiEndpoint: process.env.GCS_API_ENDPOINT,
    apiScheme: process.env.GCS_API_SCHEME,
    projectId: process.env.GCS_PROJECT_ID,
  }
}

if (process.env.FALLBACK_BACKEND) {
  Settings.docstore.fallback = {
    backend: process.env.FALLBACK_BACKEND,
    // mapping of bucket names on the fallback, to bucket names on the primary.
    // e.g. { myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' }
    buckets: JSON.parse(process.env.FALLBACK_BUCKET_MAPPING || '{}'),
    copyOnMiss: process.env.COPY_ON_MISS === 'true',
  }
}

module.exports = Settings
14
services/docstore/docker-compose-config.yml
Normal file

@ -0,0 +1,14 @@
version: "2.3"

services:
  dev:
    environment:
      - AWS_BUCKET
      - AWS_ACCESS_KEY_ID
      - AWS_SECRET_ACCESS_KEY

  ci:
    environment:
      - AWS_BUCKET
      - AWS_ACCESS_KEY_ID
      - AWS_SECRET_ACCESS_KEY
64
services/docstore/docker-compose.ci.yml
Normal file

@ -0,0 +1,64 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

version: "2.3"

services:
  test_unit:
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    user: node
    command: npm run test:unit:_run
    environment:
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"

  test_acceptance:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    extends:
      file: docker-compose-config.yml
      service: ci
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      REDIS_HOST: redis
      QUEUES_REDIS_HOST: redis
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      GCS_API_ENDPOINT: gcs:9090
      GCS_API_SCHEME: http
      GCS_PROJECT_ID: fake
      STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1
      MOCHA_GREP: ${MOCHA_GREP}
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
    depends_on:
      mongo:
        condition: service_healthy
      gcs:
        condition: service_healthy
    user: node
    command: npm run test:acceptance:_run

  tar:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    volumes:
      - ./:/tmp/build/
    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
    user: root
  mongo:
    image: mongo:4.0
    healthcheck:
      test: "mongo --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'"
      interval: 1s
      retries: 20
  gcs:
    image: fsouza/fake-gcs-server:v1.21.2
    command: ["--port=9090", "--scheme=http"]
    healthcheck:
      test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b
      interval: 1s
      retries: 20
63
services/docstore/docker-compose.yml
Normal file
@@ -0,0 +1,63 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

version: "2.3"

services:
  test_unit:
    image: node:12.22.3
    volumes:
      - .:/app
    working_dir: /app
    environment:
      MOCHA_GREP: ${MOCHA_GREP}
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
    command: npm run --silent test:unit
    user: node

  test_acceptance:
    image: node:12.22.3
    volumes:
      - .:/app
    working_dir: /app
    extends:
      file: docker-compose-config.yml
      service: dev
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      REDIS_HOST: redis
      QUEUES_REDIS_HOST: redis
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      GCS_API_ENDPOINT: gcs:9090
      GCS_API_SCHEME: http
      GCS_PROJECT_ID: fake
      STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1
      MOCHA_GREP: ${MOCHA_GREP}
      LOG_LEVEL: ERROR
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
    user: node
    depends_on:
      mongo:
        condition: service_healthy
      gcs:
        condition: service_healthy
    command: npm run --silent test:acceptance

  mongo:
    image: mongo:4.0
    healthcheck:
      test: "mongo --quiet localhost/test --eval 'quit(db.runCommand({ ping: 1 }).ok ? 0 : 1)'"
      interval: 1s
      retries: 20

  gcs:
    image: fsouza/fake-gcs-server:v1.21.2
    command: ["--port=9090", "--scheme=http"]
    healthcheck:
      test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b
      interval: 1s
      retries: 20
17
services/docstore/nodemon.json
Normal file
@@ -0,0 +1,17 @@
{
  "ignore": [
    ".git",
    "node_modules/"
  ],
  "verbose": true,
  "legacyWatch": true,
  "execMap": {
    "js": "npm run start"
  },
  "watch": [
    "app/js/",
    "app.js",
    "config/"
  ],
  "ext": "js"
}
6039
services/docstore/package-lock.json
generated
Normal file
File diff suppressed because it is too large
62
services/docstore/package.json
Normal file
@@ -0,0 +1,62 @@
{
  "name": "docstore-sharelatex",
  "version": "0.1.2",
  "description": "A CRUD API for handling text documents in projects",
  "author": "ShareLaTeX <team@sharelatex>",
  "repository": {
    "type": "git",
    "url": "https://github.com/sharelatex/docstore-sharelatex.git"
  },
  "scripts": {
    "start": "node $NODE_APP_OPTIONS app.js",
    "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
    "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
    "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
    "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
    "nodemon": "nodemon --config nodemon.json",
    "lint": "eslint --max-warnings 0 --format unix .",
    "format": "prettier --list-different $PWD/'**/*.js'",
    "format:fix": "prettier --write $PWD/'**/*.js'",
    "lint:fix": "eslint --fix ."
  },
  "dependencies": {
    "@overleaf/metrics": "^3.5.1",
    "@overleaf/o-error": "^3.0.0",
    "@overleaf/object-persistor": "https://github.com/overleaf/object-persistor/archive/4ca62157a2beb747e9a56da3ce1569124b90378a.tar.gz",
    "@overleaf/settings": "^2.1.1",
    "async": "^2.6.3",
    "body-parser": "^1.19.0",
    "bunyan": "^1.8.15",
    "celebrate": "^13.0.4",
    "express": "^4.17.1",
    "logger-sharelatex": "^2.2.0",
    "mongodb": "^3.6.0",
    "p-map": "^4.0.0",
    "request": "^2.88.2",
    "streamifier": "^0.1.1",
    "underscore": "~1.12.1"
  },
  "devDependencies": {
    "@google-cloud/storage": "^5.1.2",
    "chai": "^4.2.0",
    "chai-as-promised": "^7.1.1",
    "eslint": "^7.21.0",
    "eslint-config-prettier": "^8.1.0",
    "eslint-config-standard": "^16.0.2",
    "eslint-plugin-chai-expect": "^2.2.0",
    "eslint-plugin-chai-friendly": "^0.6.0",
    "eslint-plugin-import": "^2.22.1",
    "eslint-plugin-mocha": "^8.0.0",
    "eslint-plugin-node": "^11.1.0",
    "eslint-plugin-prettier": "^3.1.2",
    "eslint-plugin-promise": "^4.2.1",
    "mocha": "^8.3.2",
    "prettier": "^2.2.1",
    "sandboxed-module": "~2.0.4",
    "sinon": "~9.0.2",
    "sinon-chai": "^3.5.0"
  },
  "engines": {
    "node": "~6.14.1"
  }
}
@@ -0,0 +1,5 @@
FROM fsouza/fake-gcs-server:latest
RUN apk add --update --no-cache curl
COPY healthcheck.sh /healthcheck.sh
HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090
CMD ["--port=9090", "--scheme=http"]
9
services/docstore/test/acceptance/deps/healthcheck.sh
Normal file
@@ -0,0 +1,9 @@
#!/bin/sh

# health check to allow 404 status code as valid
STATUSCODE=$(curl --silent --output /dev/null --write-out "%{http_code}" $1)
# will be 000 on non-http error (e.g. connection failure)
if test $STATUSCODE -ge 500 || test $STATUSCODE -lt 200; then
  exit 1
fi
exit 0
1232
services/docstore/test/acceptance/js/ArchiveDocsTests.js
Normal file
File diff suppressed because it is too large
470
services/docstore/test/acceptance/js/DeletingDocsTests.js
Normal file
@@ -0,0 +1,470 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const { db, ObjectId } = require('../../../app/js/mongodb')
const { expect } = require('chai')
const DocstoreApp = require('./helpers/DocstoreApp')
const Errors = require('../../../app/js/Errors')
const Settings = require('@overleaf/settings')

const DocstoreClient = require('./helpers/DocstoreClient')

function deleteTestSuite(deleteDoc) {
  beforeEach(function (done) {
    this.project_id = ObjectId()
    this.doc_id = ObjectId()
    this.lines = ['original', 'lines']
    this.version = 42
    this.ranges = []
    return DocstoreApp.ensureRunning(() => {
      return DocstoreClient.createDoc(
        this.project_id,
        this.doc_id,
        this.lines,
        this.version,
        this.ranges,
        error => {
          if (error != null) {
            throw error
          }
          return done()
        }
      )
    })
  })

  it('should show as not deleted on /deleted', function (done) {
    DocstoreClient.isDocDeleted(
      this.project_id,
      this.doc_id,
      (error, res, body) => {
        if (error) return done(error)
        expect(res.statusCode).to.equal(200)
        expect(body).to.have.property('deleted').to.equal(false)
        done()
      }
    )
  })

  describe('when the doc exists', function () {
    beforeEach(function (done) {
      deleteDoc(this.project_id, this.doc_id, (error, res, doc) => {
        this.res = res
        return done()
      })
    })

    afterEach(function (done) {
      return db.docs.remove({ _id: this.doc_id }, done)
    })

    it('should mark the doc as deleted on /deleted', function (done) {
      DocstoreClient.isDocDeleted(
        this.project_id,
        this.doc_id,
        (error, res, body) => {
          if (error) return done(error)
          expect(res.statusCode).to.equal(200)
          expect(body).to.have.property('deleted').to.equal(true)
          done()
        }
      )
    })

    it('should insert a deleted doc into the docs collection', function (done) {
      return db.docs.find({ _id: this.doc_id }).toArray((error, docs) => {
        docs[0]._id.should.deep.equal(this.doc_id)
        docs[0].lines.should.deep.equal(this.lines)
        docs[0].deleted.should.equal(true)
        return done()
      })
    })

    it('should not export the doc to s3', function (done) {
      setTimeout(() => {
        DocstoreClient.getS3Doc(this.project_id, this.doc_id, error => {
          expect(error).to.be.instanceOf(Errors.NotFoundError)
          done()
        })
      }, 1000)
    })
  })

  describe('when archiveOnSoftDelete is enabled', function () {
    let archiveOnSoftDelete
    beforeEach('overwrite settings', function () {
      archiveOnSoftDelete = Settings.docstore.archiveOnSoftDelete
      Settings.docstore.archiveOnSoftDelete = true
    })
    afterEach('restore settings', function () {
      Settings.docstore.archiveOnSoftDelete = archiveOnSoftDelete
    })

    beforeEach('delete Doc', function (done) {
      deleteDoc(this.project_id, this.doc_id, (error, res) => {
        this.res = res
        done()
      })
    })

    beforeEach(function waitForBackgroundFlush(done) {
      setTimeout(done, 500)
    })

    afterEach(function cleanupDoc(done) {
      db.docs.remove({ _id: this.doc_id }, done)
    })

    it('should set the deleted flag in the doc', function (done) {
      db.docs.findOne({ _id: this.doc_id }, (error, doc) => {
        if (error) {
          return done(error)
        }
        expect(doc.deleted).to.equal(true)
        done()
      })
    })

    it('should set inS3 and unset lines and ranges in the doc', function (done) {
      db.docs.findOne({ _id: this.doc_id }, (error, doc) => {
        if (error) {
          return done(error)
        }
        expect(doc.lines).to.not.exist
        expect(doc.ranges).to.not.exist
        expect(doc.inS3).to.equal(true)
        done()
      })
    })

    it('should set the doc in s3 correctly', function (done) {
      DocstoreClient.getS3Doc(this.project_id, this.doc_id, (error, s3_doc) => {
        if (error) {
          return done(error)
        }
        expect(s3_doc.lines).to.deep.equal(this.lines)
        expect(s3_doc.ranges).to.deep.equal(this.ranges)
        done()
      })
    })
  })

  describe('when the doc exists in another project', function () {
    const otherProjectId = ObjectId()

    it('should show as not existing on /deleted', function (done) {
      DocstoreClient.isDocDeleted(otherProjectId, this.doc_id, (error, res) => {
        if (error) return done(error)
        expect(res.statusCode).to.equal(404)
        done()
      })
    })

    it('should return a 404 when trying to delete', function (done) {
      deleteDoc(otherProjectId, this.doc_id, (error, res) => {
        if (error) return done(error)
        expect(res.statusCode).to.equal(404)
        done()
      })
    })
  })

  return describe('when the doc does not exist', function () {
    it('should show as not existing on /deleted', function (done) {
      const missing_doc_id = ObjectId()
      DocstoreClient.isDocDeleted(
        this.project_id,
        missing_doc_id,
        (error, res) => {
          if (error) return done(error)
          expect(res.statusCode).to.equal(404)
          done()
        }
      )
    })

    return it('should return a 404', function (done) {
      const missing_doc_id = ObjectId()
      deleteDoc(this.project_id, missing_doc_id, (error, res, doc) => {
        res.statusCode.should.equal(404)
        return done()
      })
    })
  })
}

describe('Delete via PATCH', function () {
  deleteTestSuite(DocstoreClient.deleteDoc)

  describe('when providing a custom doc name in the delete request', function () {
    beforeEach(function (done) {
      DocstoreClient.deleteDocWithName(
        this.project_id,
        this.doc_id,
        'wombat.tex',
        done
      )
    })

    it('should insert the doc name into the docs collection', function (done) {
      db.docs.find({ _id: this.doc_id }).toArray((error, docs) => {
        if (error) return done(error)
        expect(docs[0].name).to.equal('wombat.tex')
        done()
      })
    })
  })

  describe('when providing a custom deletedAt date in the delete request', function () {
    beforeEach('record date and delay', function (done) {
      this.deletedAt = new Date()
      setTimeout(done, 5)
    })

    beforeEach('perform deletion with past date', function (done) {
      DocstoreClient.deleteDocWithDate(
        this.project_id,
        this.doc_id,
        this.deletedAt,
        done
      )
    })

    it('should insert the date into the docs collection', function (done) {
      db.docs.find({ _id: this.doc_id }).toArray((error, docs) => {
        if (error) return done(error)
        expect(docs[0].deletedAt.toISOString()).to.equal(
          this.deletedAt.toISOString()
        )
        done()
      })
    })
  })

  describe('when providing no doc name in the delete request', function () {
    beforeEach(function (done) {
      DocstoreClient.deleteDocWithName(
        this.project_id,
        this.doc_id,
        '',
        (error, res) => {
          this.res = res
          done(error)
        }
      )
    })

    it('should reject the request', function () {
      expect(this.res.statusCode).to.equal(400)
    })
  })

  describe('when providing no date in the delete request', function () {
    beforeEach(function (done) {
      DocstoreClient.deleteDocWithDate(
        this.project_id,
        this.doc_id,
        '',
        (error, res) => {
          this.res = res
          done(error)
        }
      )
    })

    it('should reject the request', function () {
      expect(this.res.statusCode).to.equal(400)
    })
  })

  describe('before deleting anything', function () {
    it('should show nothing in deleted docs response', function (done) {
      DocstoreClient.getAllDeletedDocs(
        this.project_id,
        (error, deletedDocs) => {
          if (error) return done(error)
          expect(deletedDocs).to.deep.equal([])
          done()
        }
      )
    })
  })

  describe('when the doc gets a name on delete', function () {
    beforeEach(function (done) {
      DocstoreClient.deleteDoc(this.project_id, this.doc_id, done)
    })

    it('should show the doc in deleted docs response', function (done) {
      DocstoreClient.getAllDeletedDocs(
        this.project_id,
        (error, deletedDocs) => {
          if (error) return done(error)
          expect(deletedDocs).to.deep.equal([
            { _id: this.doc_id.toString(), name: 'main.tex' },
          ])
          done()
        }
      )
    })

    describe('after deleting multiple docs', function () {
      beforeEach('create doc2', function (done) {
        this.doc_id2 = ObjectId()
        DocstoreClient.createDoc(
          this.project_id,
          this.doc_id2,
          this.lines,
          this.version,
          this.ranges,
          done
        )
      })
      beforeEach('delete doc2', function (done) {
        DocstoreClient.deleteDocWithName(
          this.project_id,
          this.doc_id2,
          'two.tex',
          done
        )
      })
      beforeEach('create doc3', function (done) {
        this.doc_id3 = ObjectId()
        DocstoreClient.createDoc(
          this.project_id,
          this.doc_id3,
          this.lines,
          this.version,
          this.ranges,
          done
        )
      })
      beforeEach('delete doc3', function (done) {
        DocstoreClient.deleteDocWithName(
          this.project_id,
          this.doc_id3,
          'three.tex',
          done
        )
      })
      it('should show all the docs as deleted', function (done) {
        DocstoreClient.getAllDeletedDocs(
          this.project_id,
          (error, deletedDocs) => {
            if (error) return done(error)

            expect(deletedDocs).to.deep.equal([
              { _id: this.doc_id3.toString(), name: 'three.tex' },
              { _id: this.doc_id2.toString(), name: 'two.tex' },
              { _id: this.doc_id.toString(), name: 'main.tex' },
            ])
            done()
          }
        )
      })

      describe('with one more than max_deleted_docs permits', function () {
        let maxDeletedDocsBefore
        beforeEach(function () {
          maxDeletedDocsBefore = Settings.max_deleted_docs
          Settings.max_deleted_docs = 2
        })
        afterEach(function () {
          Settings.max_deleted_docs = maxDeletedDocsBefore
        })

        it('should omit the first deleted doc', function (done) {
          DocstoreClient.getAllDeletedDocs(
            this.project_id,
            (error, deletedDocs) => {
              if (error) return done(error)

              expect(deletedDocs).to.deep.equal([
                { _id: this.doc_id3.toString(), name: 'three.tex' },
                { _id: this.doc_id2.toString(), name: 'two.tex' },
                // dropped main.tex
              ])
              done()
            }
          )
        })
      })
    })
  })
})

describe("Destroying a project's documents", function () {
  describe('when the doc exists', function () {
    beforeEach(function (done) {
      return db.docOps.insert(
        { doc_id: ObjectId(this.doc_id), version: 1 },
        function (err) {
          if (err != null) {
            return done(err)
          }
          return DocstoreClient.destroyAllDoc(this.project_id, done)
        }
      )
    })

    it('should remove the doc from the docs collection', function (done) {
      return db.docs.find({ _id: this.doc_id }).toArray((err, docs) => {
        expect(err).not.to.exist
        expect(docs).to.deep.equal([])
        return done()
      })
    })

    return it('should remove the docOps from the docOps collection', function (done) {
      return db.docOps.find({ doc_id: this.doc_id }).toArray((err, docOps) => {
        expect(err).not.to.exist
        expect(docOps).to.deep.equal([])
        return done()
      })
    })
  })

  return describe('when the doc is archived', function () {
    beforeEach(function (done) {
      return DocstoreClient.archiveAllDoc(this.project_id, err => {
        if (err != null) {
          return done(err)
        }
        return DocstoreClient.destroyAllDoc(this.project_id, done)
      })
    })

    it('should remove the doc from the docs collection', function (done) {
      return db.docs.find({ _id: this.doc_id }).toArray((err, docs) => {
        expect(err).not.to.exist
        expect(docs).to.deep.equal([])
        return done()
      })
    })

    it('should remove the docOps from the docOps collection', function (done) {
      return db.docOps.find({ doc_id: this.doc_id }).toArray((err, docOps) => {
        expect(err).not.to.exist
        expect(docOps).to.deep.equal([])
        return done()
      })
    })

    return it('should remove the doc contents from s3', function (done) {
      return DocstoreClient.getS3Doc(this.project_id, this.doc_id, error => {
        expect(error).to.be.instanceOf(Errors.NotFoundError)
        done()
      })
    })
  })
})
112
services/docstore/test/acceptance/js/GettingAllDocsTests.js
Normal file
@@ -0,0 +1,112 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { ObjectId } = require('mongodb')
const async = require('async')
const DocstoreApp = require('./helpers/DocstoreApp')

const DocstoreClient = require('./helpers/DocstoreClient')

describe('Getting all docs', function () {
  beforeEach(function (done) {
    this.project_id = ObjectId()
    this.docs = [
      {
        _id: ObjectId(),
        lines: ['one', 'two', 'three'],
        ranges: { mock: 'one' },
        rev: 2,
      },
      {
        _id: ObjectId(),
        lines: ['aaa', 'bbb', 'ccc'],
        ranges: { mock: 'two' },
        rev: 4,
      },
      {
        _id: ObjectId(),
        lines: ['111', '222', '333'],
        ranges: { mock: 'three' },
        rev: 6,
      },
    ]
    this.deleted_doc = {
      _id: ObjectId(),
      lines: ['deleted'],
      ranges: { mock: 'four' },
      rev: 8,
    }
    const version = 42
    const jobs = Array.from(this.docs).map(doc =>
      (doc => {
        return callback => {
          return DocstoreClient.createDoc(
            this.project_id,
            doc._id,
            doc.lines,
            version,
            doc.ranges,
            callback
          )
        }
      })(doc)
    )
    jobs.push(cb => {
      return DocstoreClient.createDoc(
        this.project_id,
        this.deleted_doc._id,
        this.deleted_doc.lines,
        version,
        this.deleted_doc.ranges,
        err => {
          return DocstoreClient.deleteDoc(
            this.project_id,
            this.deleted_doc._id,
            cb
          )
        }
      )
    })
    jobs.unshift(cb => DocstoreApp.ensureRunning(cb))
    return async.series(jobs, done)
  })

  it('getAllDocs should return all the (non-deleted) docs', function (done) {
    return DocstoreClient.getAllDocs(this.project_id, (error, res, docs) => {
      if (error != null) {
        throw error
      }
      docs.length.should.equal(this.docs.length)
      for (let i = 0; i < docs.length; i++) {
        const doc = docs[i]
        doc.lines.should.deep.equal(this.docs[i].lines)
      }
      return done()
    })
  })

  return it('getAllRanges should return all the (non-deleted) doc ranges', function (done) {
    return DocstoreClient.getAllRanges(this.project_id, (error, res, docs) => {
      if (error != null) {
        throw error
      }
      docs.length.should.equal(this.docs.length)
      for (let i = 0; i < docs.length; i++) {
        const doc = docs[i]
        doc.ranges.should.deep.equal(this.docs[i].ranges)
      }
      return done()
    })
  })
})
@@ -0,0 +1,127 @@
const Settings = require('@overleaf/settings')
const { ObjectId } = require('../../../app/js/mongodb')
const DocstoreApp = require('./helpers/DocstoreApp')
const DocstoreClient = require('./helpers/DocstoreClient')
const { Storage } = require('@google-cloud/storage')

describe('Getting A Doc from Archive', function () {
  before(function (done) {
    return DocstoreApp.ensureRunning(done)
  })

  before(async function () {
    const storage = new Storage(Settings.docstore.gcs.endpoint)
    await storage.createBucket(Settings.docstore.bucket)
    await storage.createBucket(`${Settings.docstore.bucket}-deleted`)
  })

  describe('for an archived doc', function () {
    before(function (done) {
      this.project_id = ObjectId()
      this.timeout(1000 * 30)
      this.doc = {
        _id: ObjectId(),
        lines: ['foo', 'bar'],
        ranges: {},
        version: 2,
      }
      DocstoreClient.createDoc(
        this.project_id,
        this.doc._id,
        this.doc.lines,
        this.doc.version,
        this.doc.ranges,
        error => {
          if (error) {
            return done(error)
          }
          DocstoreClient.archiveDocById(
            this.project_id,
            this.doc._id,
            (error, res) => {
              this.res = res
              if (error) {
                return done(error)
              }
              done()
            }
          )
        }
      )
    })

    it('should successfully archive the doc', function (done) {
      this.res.statusCode.should.equal(204)
      done()
    })

    it('should return the doc lines and version from persistent storage', function (done) {
      return DocstoreClient.peekDoc(
        this.project_id,
        this.doc._id,
        {},
        (error, res, doc) => {
          res.statusCode.should.equal(200)
          res.headers['x-doc-status'].should.equal('archived')
          doc.lines.should.deep.equal(this.doc.lines)
          doc.version.should.equal(this.doc.version)
          doc.ranges.should.deep.equal(this.doc.ranges)
          return done()
        }
      )
    })

    it('should return the doc lines and version from persistent storage on subsequent requests', function (done) {
      return DocstoreClient.peekDoc(
        this.project_id,
        this.doc._id,
        {},
        (error, res, doc) => {
          res.statusCode.should.equal(200)
          res.headers['x-doc-status'].should.equal('archived')
          doc.lines.should.deep.equal(this.doc.lines)
          doc.version.should.equal(this.doc.version)
          doc.ranges.should.deep.equal(this.doc.ranges)
          return done()
        }
      )
    })

    describe('for a non-archived doc', function () {
      before(function (done) {
        this.project_id = ObjectId()
        this.timeout(1000 * 30)
        this.doc = {
          _id: ObjectId(),
          lines: ['foo', 'bar'],
          ranges: {},
          version: 2,
        }
        DocstoreClient.createDoc(
          this.project_id,
          this.doc._id,
          this.doc.lines,
          this.doc.version,
          this.doc.ranges,
          done
        )
      })

      it('should return the doc lines and version from mongo', function (done) {
        return DocstoreClient.peekDoc(
          this.project_id,
          this.doc._id,
          {},
          (error, res, doc) => {
            res.statusCode.should.equal(200)
            res.headers['x-doc-status'].should.equal('active')
            doc.lines.should.deep.equal(this.doc.lines)
            doc.version.should.equal(this.doc.version)
            doc.ranges.should.deep.equal(this.doc.ranges)
            return done()
          }
        )
      })
    })
  })
})
135
services/docstore/test/acceptance/js/GettingDocsTests.js
Normal file
@@ -0,0 +1,135 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { ObjectId } = require('mongodb')
const DocstoreApp = require('./helpers/DocstoreApp')

const DocstoreClient = require('./helpers/DocstoreClient')

describe('Getting a doc', function () {
  beforeEach(function (done) {
    this.project_id = ObjectId()
    this.doc_id = ObjectId()
    this.lines = ['original', 'lines']
    this.version = 42
    this.ranges = {
      changes: [
        {
          id: ObjectId().toString(),
          op: { i: 'foo', p: 3 },
          meta: {
            user_id: ObjectId().toString(),
            ts: new Date().toString(),
          },
        },
      ],
    }
    return DocstoreApp.ensureRunning(() => {
      return DocstoreClient.createDoc(
        this.project_id,
        this.doc_id,
        this.lines,
        this.version,
        this.ranges,
        error => {
          if (error != null) {
            throw error
          }
          return done()
        }
      )
    })
  })

  describe('when the doc exists', function () {
    return it('should get the doc lines and version', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.doc_id,
        {},
        (error, res, doc) => {
          doc.lines.should.deep.equal(this.lines)
          doc.version.should.equal(this.version)
          doc.ranges.should.deep.equal(this.ranges)
          return done()
        }
      )
    })
  })

  describe('when the doc does not exist', function () {
    return it('should return a 404', function (done) {
      const missing_doc_id = ObjectId()
      return DocstoreClient.getDoc(
        this.project_id,
        missing_doc_id,
        {},
        (error, res, doc) => {
          res.statusCode.should.equal(404)
          return done()
        }
      )
    })
  })

  return describe('when the doc is a deleted doc', function () {
    beforeEach(function (done) {
      this.deleted_doc_id = ObjectId()
      return DocstoreClient.createDoc(
        this.project_id,
        this.deleted_doc_id,
        this.lines,
        this.version,
        this.ranges,
        error => {
          if (error != null) {
            throw error
          }
          return DocstoreClient.deleteDoc(
            this.project_id,
            this.deleted_doc_id,
            done
          )
        }
      )
    })

    it('should return the doc', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.deleted_doc_id,
        { include_deleted: true },
        (error, res, doc) => {
          doc.lines.should.deep.equal(this.lines)
          doc.version.should.equal(this.version)
          doc.ranges.should.deep.equal(this.ranges)
          doc.deleted.should.equal(true)
          return done()
        }
      )
    })

    return it('should return a 404 when the query string is not set', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.deleted_doc_id,
        {},
        (error, res, doc) => {
          res.statusCode.should.equal(404)
          return done()
        }
      )
    })
  })
})
497
services/docstore/test/acceptance/js/UpdatingDocsTests.js
Normal file
@@ -0,0 +1,497 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const sinon = require('sinon')
const { ObjectId } = require('mongodb')
const DocstoreApp = require('./helpers/DocstoreApp')

const DocstoreClient = require('./helpers/DocstoreClient')

describe('Applying updates to a doc', function () {
  beforeEach(function (done) {
    this.project_id = ObjectId()
    this.doc_id = ObjectId()
    this.originalLines = ['original', 'lines']
    this.newLines = ['new', 'lines']
    this.originalRanges = {
      changes: [
        {
          id: ObjectId().toString(),
          op: { i: 'foo', p: 3 },
          meta: {
            user_id: ObjectId().toString(),
            ts: new Date().toString(),
          },
        },
      ],
    }
    this.newRanges = {
      changes: [
        {
          id: ObjectId().toString(),
          op: { i: 'bar', p: 6 },
          meta: {
            user_id: ObjectId().toString(),
            ts: new Date().toString(),
          },
        },
      ],
    }
    this.version = 42
    return DocstoreApp.ensureRunning(() => {
      return DocstoreClient.createDoc(
        this.project_id,
        this.doc_id,
        this.originalLines,
        this.version,
        this.originalRanges,
        error => {
          if (error != null) {
            throw error
          }
          return done()
        }
      )
    })
  })

  describe('when nothing has been updated', function () {
    beforeEach(function (done) {
      return DocstoreClient.updateDoc(
        this.project_id,
        this.doc_id,
        this.originalLines,
        this.version,
        this.originalRanges,
        (error, res, body) => {
          this.body = body
          return done()
        }
      )
    })

    it('should return modified = false', function () {
      return this.body.modified.should.equal(false)
    })

    return it('should not update the doc in the API', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.doc_id,
        {},
        (error, res, doc) => {
          doc.lines.should.deep.equal(this.originalLines)
          doc.version.should.equal(this.version)
          doc.ranges.should.deep.equal(this.originalRanges)
          return done()
        }
      )
    })
  })

  describe('when the lines have changed', function () {
    beforeEach(function (done) {
      return DocstoreClient.updateDoc(
        this.project_id,
        this.doc_id,
        this.newLines,
        this.version,
        this.originalRanges,
        (error, res, body) => {
          this.body = body
          return done()
        }
      )
    })

    it('should return modified = true', function () {
      return this.body.modified.should.equal(true)
    })

    it('should return the rev', function () {
      return this.body.rev.should.equal(2)
    })

    return it('should update the doc in the API', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.doc_id,
        {},
        (error, res, doc) => {
          doc.lines.should.deep.equal(this.newLines)
          doc.version.should.equal(this.version)
          doc.ranges.should.deep.equal(this.originalRanges)
          return done()
        }
      )
    })
  })

  describe('when the version has changed', function () {
    beforeEach(function (done) {
      return DocstoreClient.updateDoc(
        this.project_id,
        this.doc_id,
        this.originalLines,
        this.version + 1,
        this.originalRanges,
        (error, res, body) => {
          this.body = body
          return done()
        }
      )
    })

    it('should return modified = true', function () {
      return this.body.modified.should.equal(true)
    })

    it('should return the rev', function () {
      return this.body.rev.should.equal(1)
    })

    return it('should update the doc in the API', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.doc_id,
        {},
        (error, res, doc) => {
          doc.lines.should.deep.equal(this.originalLines)
          doc.version.should.equal(this.version + 1)
          doc.ranges.should.deep.equal(this.originalRanges)
          return done()
        }
      )
    })
  })

  describe('when the ranges have changed', function () {
    beforeEach(function (done) {
      return DocstoreClient.updateDoc(
        this.project_id,
        this.doc_id,
        this.originalLines,
        this.version,
        this.newRanges,
        (error, res, body) => {
          this.body = body
          return done()
        }
      )
    })

    it('should return modified = true', function () {
      return this.body.modified.should.equal(true)
    })

    it('should return the rev', function () {
      return this.body.rev.should.equal(2)
    })

    return it('should update the doc in the API', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.doc_id,
        {},
        (error, res, doc) => {
          doc.lines.should.deep.equal(this.originalLines)
          doc.version.should.equal(this.version)
          doc.ranges.should.deep.equal(this.newRanges)
          return done()
        }
      )
    })
  })

  describe('when the doc does not exist', function () {
    beforeEach(function (done) {
      this.missing_doc_id = ObjectId()
      return DocstoreClient.updateDoc(
        this.project_id,
        this.missing_doc_id,
        this.originalLines,
        0,
        this.originalRanges,
        (error, res, body) => {
          this.res = res
          this.body = body
          return done()
        }
      )
    })

    it('should create the doc', function () {
      return this.body.rev.should.equal(1)
    })

    return it('should be retrievable', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.missing_doc_id,
        {},
        (error, res, doc) => {
          doc.lines.should.deep.equal(this.originalLines)
          doc.version.should.equal(0)
          doc.ranges.should.deep.equal(this.originalRanges)
          return done()
        }
      )
    })
  })

  describe('when malformed doc lines are provided', function () {
    describe('when the lines are not an array', function () {
      beforeEach(function (done) {
        return DocstoreClient.updateDoc(
          this.project_id,
          this.doc_id,
          { foo: 'bar' },
          this.version,
          this.originalRanges,
          (error, res, body) => {
            this.res = res
            this.body = body
            return done()
          }
        )
      })

      it('should return 400', function () {
        return this.res.statusCode.should.equal(400)
      })

      return it('should not update the doc in the API', function (done) {
        return DocstoreClient.getDoc(
          this.project_id,
          this.doc_id,
          {},
          (error, res, doc) => {
            doc.lines.should.deep.equal(this.originalLines)
            return done()
          }
        )
      })
    })

    return describe('when the lines are not present', function () {
      beforeEach(function (done) {
        return DocstoreClient.updateDoc(
          this.project_id,
          this.doc_id,
          null,
          this.version,
          this.originalRanges,
          (error, res, body) => {
            this.res = res
            this.body = body
            return done()
          }
        )
      })

      it('should return 400', function () {
        return this.res.statusCode.should.equal(400)
      })

      return it('should not update the doc in the API', function (done) {
        return DocstoreClient.getDoc(
          this.project_id,
          this.doc_id,
          {},
          (error, res, doc) => {
            doc.lines.should.deep.equal(this.originalLines)
            return done()
          }
        )
      })
    })
  })

  describe('when no version is provided', function () {
    beforeEach(function (done) {
      return DocstoreClient.updateDoc(
        this.project_id,
        this.doc_id,
        this.originalLines,
        null,
        this.originalRanges,
        (error, res, body) => {
          this.res = res
          this.body = body
          return done()
        }
      )
    })

    it('should return 400', function () {
      return this.res.statusCode.should.equal(400)
    })

    return it('should not update the doc in the API', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.doc_id,
        {},
        (error, res, doc) => {
          doc.lines.should.deep.equal(this.originalLines)
          doc.version.should.equal(this.version)
          return done()
        }
      )
    })
  })

  describe('when the content is large', function () {
    beforeEach(function (done) {
      const line = new Array(1025).join('x') // 1kb
      this.largeLines = Array.apply(null, Array(1024)).map(() => line) // 1mb
      return DocstoreClient.updateDoc(
        this.project_id,
        this.doc_id,
        this.largeLines,
        this.version,
        this.originalRanges,
        (error, res, body) => {
          this.body = body
          return done()
        }
      )
    })

    it('should return modified = true', function () {
      return this.body.modified.should.equal(true)
    })

    return it('should update the doc in the API', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.doc_id,
        {},
        (error, res, doc) => {
          doc.lines.should.deep.equal(this.largeLines)
          return done()
        }
      )
    })
  })

  describe('when there is a large json payload', function () {
    beforeEach(function (done) {
      const line = new Array(1025).join('x') // 1kb
      this.largeLines = Array.apply(null, Array(1024)).map(() => line) // 1mb
      this.originalRanges.padding = Array.apply(null, Array(2049)).map(
        () => line
      ) // 2mb + 1kb
      return DocstoreClient.updateDoc(
        this.project_id,
        this.doc_id,
        this.largeLines,
        this.version,
        this.originalRanges,
        (error, res, body) => {
          this.res = res
          this.body = body
          return done()
        }
      )
    })

    it('should return modified = true', function () {
      return this.body.modified.should.equal(true)
    })

    return it('should update the doc in the API', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.doc_id,
        {},
        (error, res, doc) => {
          doc.lines.should.deep.equal(this.largeLines)
          return done()
        }
      )
    })
  })

  describe('when the document body is too large', function () {
    beforeEach(function (done) {
      const line = new Array(1025).join('x') // 1kb
      this.largeLines = Array.apply(null, Array(2049)).map(() => line) // 2mb + 1kb
      return DocstoreClient.updateDoc(
        this.project_id,
        this.doc_id,
        this.largeLines,
        this.version,
        this.originalRanges,
        (error, res, body) => {
          this.res = res
          this.body = body
          return done()
        }
      )
    })

    it('should return 413', function () {
      return this.res.statusCode.should.equal(413)
    })

    it('should report body too large', function () {
      return this.res.body.should.equal('document body too large')
    })

    return it('should not update the doc in the API', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.doc_id,
        {},
        (error, res, doc) => {
          doc.lines.should.deep.equal(this.originalLines)
          return done()
        }
      )
    })
  })

  return describe('when the json payload is too large', function () {
    beforeEach(function (done) {
      const line = new Array(1025).join('x') // 1kb
      this.largeLines = Array.apply(null, Array(1024)).map(() => line) // 1mb
      this.originalRanges.padding = Array.apply(null, Array(4096)).map(
        () => line
      ) // 4mb
      return DocstoreClient.updateDoc(
        this.project_id,
        this.doc_id,
        this.largeLines,
        this.version,
        this.originalRanges,
        (error, res, body) => {
          this.res = res
          this.body = body
          return done()
        }
      )
    })

    return it('should not update the doc in the API', function (done) {
      return DocstoreClient.getDoc(
        this.project_id,
        this.doc_id,
        {},
        (error, res, doc) => {
          doc.lines.should.deep.equal(this.originalLines)
          return done()
        }
      )
    })
  })
})
37
services/docstore/test/acceptance/js/helpers/DocstoreApp.js
Normal file
@@ -0,0 +1,37 @@
const app = require('../../../../app')
const { waitForDb } = require('../../../../app/js/mongodb')
require('logger-sharelatex').logger.level('error')
const settings = require('@overleaf/settings')

module.exports = {
  running: false,
  initing: false,
  callbacks: [],
  ensureRunning(callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    if (this.running) {
      return callback()
    } else if (this.initing) {
      return this.callbacks.push(callback)
    }
    this.initing = true
    this.callbacks.push(callback)
    waitForDb().then(() => {
      return app.listen(settings.internal.docstore.port, 'localhost', error => {
        if (error != null) {
          throw error
        }
        this.running = true
        return (() => {
          const result = []
          for (callback of Array.from(this.callbacks)) {
            result.push(callback())
          }
          return result
        })()
      })
    })
  },
}
195
services/docstore/test/acceptance/js/helpers/DocstoreClient.js
Normal file
|
@ -0,0 +1,195 @@
let DocstoreClient
const request = require('request').defaults({ jar: false })
const settings = require('@overleaf/settings')
const Persistor = require('../../../../app/js/PersistorManager')

async function streamToString(stream) {
  const chunks = []
  return new Promise((resolve, reject) => {
    stream.on('data', chunk => chunks.push(chunk))
    stream.on('error', reject)
    stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')))
  })
}

async function getStringFromPersistor(persistor, bucket, key) {
  const stream = await persistor.getObjectStream(bucket, key, {})
  stream.resume()
  return streamToString(stream)
}

module.exports = DocstoreClient = {
  createDoc(projectId, docId, lines, version, ranges, callback) {
    return DocstoreClient.updateDoc(
      projectId,
      docId,
      lines,
      version,
      ranges,
      callback
    )
  },

  getDoc(projectId, docId, qs, callback) {
    request.get(
      {
        url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
        json: true,
        qs,
      },
      callback
    )
  },

  peekDoc(projectId, docId, qs, callback) {
    request.get(
      {
        url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/peek`,
        json: true,
        qs,
      },
      callback
    )
  },

  isDocDeleted(projectId, docId, callback) {
    request.get(
      {
        url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/deleted`,
        json: true,
      },
      callback
    )
  },

  getAllDocs(projectId, callback) {
    request.get(
      {
        url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc`,
        json: true,
      },
      (error, res, body) => {
        callback(error, res, body)
      }
    )
  },

  getAllDeletedDocs(projectId, callback) {
    request.get(
      {
        url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc-deleted`,
        json: true,
      },
      (error, res, body) => {
        if (error) return callback(error)
        if (res.statusCode !== 200) {
          return callback(new Error('unexpected statusCode'))
        }
        callback(null, body)
      }
    )
  },

  getAllRanges(projectId, callback) {
    request.get(
      {
        url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/ranges`,
        json: true,
      },
      callback
    )
  },

  updateDoc(projectId, docId, lines, version, ranges, callback) {
    return request.post(
      {
        url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
        json: {
          lines,
          version,
          ranges,
        },
      },
      callback
    )
  },

  deleteDoc(projectId, docId, callback) {
    DocstoreClient.deleteDocWithDateAndName(
      projectId,
      docId,
      new Date(),
      'main.tex',
      callback
    )
  },

  deleteDocWithDate(projectId, docId, date, callback) {
    DocstoreClient.deleteDocWithDateAndName(
      projectId,
      docId,
      date,
      'main.tex',
      callback
    )
  },

  deleteDocWithName(projectId, docId, name, callback) {
    DocstoreClient.deleteDocWithDateAndName(
      projectId,
      docId,
      new Date(),
      name,
      callback
    )
  },

  deleteDocWithDateAndName(projectId, docId, deletedAt, name, callback) {
    request.patch(
      {
        url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
        json: { name, deleted: true, deletedAt },
      },
      callback
    )
  },

  archiveAllDoc(projectId, callback) {
    request.post(
      {
        url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/archive`,
      },
      callback
    )
  },

  archiveDocById(projectId, docId, callback) {
    request.post(
      {
        url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/archive`,
      },
      callback
    )
  },

  destroyAllDoc(projectId, callback) {
    request.post(
      {
        url: `http://localhost:${settings.internal.docstore.port}/project/${projectId}/destroy`,
      },
      callback
    )
  },

  getS3Doc(projectId, docId, callback) {
    getStringFromPersistor(
      Persistor,
      settings.docstore.bucket,
      `${projectId}/${docId}`
    )
      .then(data => {
        callback(null, JSON.parse(data))
      })
      .catch(callback)
  },
}
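
A typical call into this helper from an acceptance test looks like the sketch below; the ids and line content are made up for illustration:

const { ObjectId } = require('mongodb')
const DocstoreClient = require('./helpers/DocstoreClient')

// Hypothetical usage: create a doc, then read it back with an empty query string.
const projectId = ObjectId()
const docId = ObjectId()
DocstoreClient.createDoc(projectId, docId, ['hello world'], 1, {}, error => {
  if (error) throw error
  DocstoreClient.getDoc(projectId, docId, {}, (error, res, doc) => {
    if (error) throw error
    console.log(res.statusCode, doc.lines)
  })
})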
23
services/docstore/test/acceptance/scripts/full-test.sh
Executable file
@@ -0,0 +1,23 @@
#! /usr/bin/env bash

# npm rebuild

echo ">> Starting server..."

grunt --no-color forever:app:start

echo ">> Server started"

sleep 20

echo ">> Running acceptance tests..."
grunt --no-color mochaTest:acceptance
_test_exit_code=$?

echo ">> Killing server"

grunt --no-color forever:app:stop

echo ">> Done"

exit $_test_exit_code
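
One caveat: the fixed sleep 20 is a blunt wait for startup. A more robust hypothetical variant polls the service until it answers; the port and /status path below are assumptions, not taken from this diff:

# Hypothetical replacement for the fixed sleep: poll until the server responds.
for _ in $(seq 1 30); do
  curl --silent --fail "http://localhost:3016/status" > /dev/null && break
  sleep 1
done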
42
services/docstore/test/setup.js
Normal file
@@ -0,0 +1,42 @@
const chai = require('chai')
const sinon = require('sinon')
const sinonChai = require('sinon-chai')
const chaiAsPromised = require('chai-as-promised')
const SandboxedModule = require('sandboxed-module')

process.env.BACKEND = 'gcs'

// Chai configuration
chai.should()
chai.use(sinonChai)
chai.use(chaiAsPromised)

// Global stubs
const sandbox = sinon.createSandbox()
const stubs = {
  logger: {
    log: sandbox.stub(),
    warn: sandbox.stub(),
    err: sandbox.stub(),
    error: sandbox.stub(),
    fatal: sandbox.stub(),
  },
}

// SandboxedModule configuration
SandboxedModule.configure({
  requires: {
    'logger-sharelatex': stubs.logger,
  },
  globals: { Buffer, JSON, console, process },
})

exports.mochaHooks = {
  beforeEach() {
    this.logger = stubs.logger
  },

  afterEach() {
    sandbox.reset()
  },
}
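
exports.mochaHooks makes this file a Mocha root hook plugin, so it only takes effect when Mocha is told to load it. A minimal sketch of that wiring, assuming a .mocharc.js at the service root (the config file itself is not part of this diff):

// .mocharc.js (hypothetical): load test/setup.js as a root hook plugin
module.exports = {
  require: 'test/setup.js',
}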
595
services/docstore/test/unit/js/DocArchiveManagerTests.js
Normal file
@@ -0,0 +1,595 @@
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = '../../../app/js/DocArchiveManager.js'
const SandboxedModule = require('sandboxed-module')
const { ObjectId } = require('mongodb')
const Errors = require('../../../app/js/Errors')

describe('DocArchiveManager', function () {
  let DocArchiveManager,
    PersistorManager,
    MongoManager,
    RangeManager,
    Settings,
    Crypto,
    Streamifier,
    HashDigest,
    HashUpdate,
    archivedDocs,
    mongoDocs,
    docJson,
    md5Sum,
    projectId,
    readStream,
    stream

  beforeEach(function () {
    md5Sum = 'decafbad'

    RangeManager = {
      jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }),
    }
    Settings = {
      docstore: {
        bucket: 'wombat',
      },
      parallelArchiveJobs: 3,
      destroyBatchSize: 10,
      destroyRetryCount: 3,
    }
    HashDigest = sinon.stub().returns(md5Sum)
    HashUpdate = sinon.stub().returns({ digest: HashDigest })
    Crypto = {
      createHash: sinon.stub().returns({ update: HashUpdate }),
    }
    Streamifier = {
      createReadStream: sinon.stub().returns({ stream: 'readStream' }),
    }

    projectId = ObjectId()
    archivedDocs = [
      {
        _id: ObjectId(),
        inS3: true,
        rev: 2,
      },
      {
        _id: ObjectId(),
        inS3: true,
        rev: 4,
      },
      {
        _id: ObjectId(),
        inS3: true,
        rev: 6,
      },
    ]
    mongoDocs = [
      {
        _id: ObjectId(),
        lines: ['one', 'two', 'three'],
        rev: 2,
      },
      {
        _id: ObjectId(),
        lines: ['aaa', 'bbb', 'ccc'],
        rev: 4,
      },
      {
        _id: ObjectId(),
        inS3: true,
        rev: 6,
      },
      {
        _id: ObjectId(),
        inS3: true,
        rev: 6,
      },
      {
        _id: ObjectId(),
        lines: ['111', '222', '333'],
        rev: 6,
      },
    ]

    docJson = JSON.stringify({
      lines: mongoDocs[0].lines,
      ranges: mongoDocs[0].ranges,
      schema_v: 1,
    })

    stream = {
      on: sinon.stub(),
      resume: sinon.stub(),
    }
    stream.on.withArgs('data').yields(Buffer.from(docJson, 'utf8'))
    stream.on.withArgs('end').yields()

    readStream = {
      stream: 'readStream',
    }

    PersistorManager = {
      getObjectStream: sinon.stub().resolves(stream),
      sendStream: sinon.stub().resolves(),
      getObjectMd5Hash: sinon.stub().resolves(md5Sum),
      deleteObject: sinon.stub().resolves(),
    }

    const getNonArchivedProjectDocs = sinon.stub()
    getNonArchivedProjectDocs
      .onCall(0)
      .resolves(mongoDocs.filter(doc => !doc.inS3))
    getNonArchivedProjectDocs.onCall(1).resolves([])

    const getArchivedProjectDocs = sinon.stub()
    getArchivedProjectDocs.onCall(0).resolves(archivedDocs)
    getArchivedProjectDocs.onCall(1).resolves([])

    MongoManager = {
      promises: {
        markDocAsArchived: sinon.stub().resolves(),
        upsertIntoDocCollection: sinon.stub().resolves(),
        getProjectsDocs: sinon.stub().resolves(mongoDocs),
        getNonDeletedArchivedProjectDocs: getArchivedProjectDocs,
        getNonArchivedProjectDocs,
        getArchivedProjectDocs,
        findDoc: sinon.stub().rejects(new Errors.NotFoundError()),
        destroyDoc: sinon.stub().resolves(),
      },
    }
    for (const mongoDoc of mongoDocs.concat(archivedDocs)) {
      MongoManager.promises.findDoc
        .withArgs(projectId, mongoDoc._id, sinon.match.any)
        .resolves(mongoDoc)
    }

    DocArchiveManager = SandboxedModule.require(modulePath, {
      requires: {
        '@overleaf/settings': Settings,
        crypto: Crypto,
        streamifier: Streamifier,
        './MongoManager': MongoManager,
        './RangeManager': RangeManager,
        './PersistorManager': PersistorManager,
        './Errors': Errors,
      },
    })
  })

  describe('archiveDoc', function () {
    it('should resolve when passed a valid document', async function () {
      await expect(
        DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])
      ).to.eventually.be.fulfilled
    })

    it('should throw an error if the doc has no lines', async function () {
      const doc = mongoDocs[0]
      doc.lines = null

      await expect(
        DocArchiveManager.promises.archiveDoc(projectId, doc)
      ).to.eventually.be.rejectedWith('doc has no lines')
    })

    it('should add the schema version', async function () {
      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1])
      expect(Streamifier.createReadStream).to.have.been.calledWith(
        sinon.match(/"schema_v":1/)
      )
    })

    it('should calculate the hex md5 sum of the content', async function () {
      const json = JSON.stringify({
        lines: mongoDocs[0].lines,
        ranges: mongoDocs[0].ranges,
        schema_v: 1,
      })

      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])

      expect(Crypto.createHash).to.have.been.calledWith('md5')
      expect(HashUpdate).to.have.been.calledWith(json)
      expect(HashDigest).to.have.been.calledWith('hex')
    })

    it('should pass the md5 hash to the object persistor for verification', async function () {
      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])

      expect(PersistorManager.sendStream).to.have.been.calledWith(
        sinon.match.any,
        sinon.match.any,
        sinon.match.any,
        { sourceMd5: md5Sum }
      )
    })

    it('should pass the correct bucket and key to the persistor', async function () {
      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])

      expect(PersistorManager.sendStream).to.have.been.calledWith(
        Settings.docstore.bucket,
        `${projectId}/${mongoDocs[0]._id}`
      )
    })

    it('should create a stream from the encoded json and send it', async function () {
      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])
      expect(Streamifier.createReadStream).to.have.been.calledWith(docJson)
      expect(PersistorManager.sendStream).to.have.been.calledWith(
        sinon.match.any,
        sinon.match.any,
        readStream
      )
    })

    it('should mark the doc as archived', async function () {
      await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])
      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
        mongoDocs[0]._id,
        mongoDocs[0].rev
      )
    })

    describe('with null bytes in the result', function () {
      const _stringify = JSON.stringify

      beforeEach(function () {
        JSON.stringify = sinon.stub().returns('{"bad": "\u0000"}')
      })

      afterEach(function () {
        JSON.stringify = _stringify
      })

      it('should return an error', async function () {
        await expect(
          DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0])
        ).to.eventually.be.rejectedWith('null bytes detected')
      })
    })
  })

  describe('unarchiveDoc', function () {
    let docId

    describe('when the doc is in S3', function () {
      beforeEach(function () {
        MongoManager.promises.findDoc = sinon.stub().resolves({ inS3: true })
        docId = mongoDocs[0]._id
      })

      it('should resolve when passed a valid document', async function () {
        await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
          .to.eventually.be.fulfilled
      })

      it('should throw an error if the md5 does not match', async function () {
        PersistorManager.getObjectMd5Hash.resolves('badf00d')
        await expect(
          DocArchiveManager.promises.unarchiveDoc(projectId, docId)
        ).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError)
      })

      it('should update the doc lines in mongo', async function () {
        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
        expect(
          MongoManager.promises.upsertIntoDocCollection
        ).to.have.been.calledWith(projectId, docId, {
          lines: mongoDocs[0].lines,
        })
      })

      it('should delete the doc in s3', async function () {
        await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
        expect(PersistorManager.deleteObject).to.have.been.calledWith(
          Settings.docstore.bucket,
          `${projectId}/${docId}`
        )
      })

      describe('doc contents', function () {
        let mongoDoc, s3Doc

        describe('when the doc has the old schema', function () {
          beforeEach(function () {
            mongoDoc = {
              lines: ['doc', 'lines'],
            }
            s3Doc = ['doc', 'lines']
            docJson = JSON.stringify(s3Doc)
            stream.on.withArgs('data').yields(Buffer.from(docJson, 'utf8'))
          })

          it('should return the docs lines', async function () {
            await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
            expect(
              MongoManager.promises.upsertIntoDocCollection
            ).to.have.been.calledWith(projectId, docId, mongoDoc)
          })
        })

        describe('with the new schema and ranges', function () {
          beforeEach(function () {
            s3Doc = {
              lines: ['doc', 'lines'],
              ranges: { json: 'ranges' },
              schema_v: 1,
            }
            mongoDoc = {
              lines: ['doc', 'lines'],
              ranges: { mongo: 'ranges' },
            }
            docJson = JSON.stringify(s3Doc)
            stream.on.withArgs('data').yields(Buffer.from(docJson, 'utf8'))
          })

          it('should return the doc lines and ranges', async function () {
            await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
            expect(
              MongoManager.promises.upsertIntoDocCollection
            ).to.have.been.calledWith(projectId, docId, mongoDoc)
          })
        })

        describe('with the new schema and no ranges', function () {
          beforeEach(function () {
            s3Doc = {
              lines: ['doc', 'lines'],
              schema_v: 1,
            }
            mongoDoc = {
              lines: ['doc', 'lines'],
            }
            docJson = JSON.stringify(s3Doc)
            stream.on.withArgs('data').yields(Buffer.from(docJson, 'utf8'))
          })

          it('should return only the doc lines', async function () {
            await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
            expect(
              MongoManager.promises.upsertIntoDocCollection
            ).to.have.been.calledWith(projectId, docId, mongoDoc)
          })
        })

        describe('with an unrecognised schema', function () {
          beforeEach(function () {
            s3Doc = {
              lines: ['doc', 'lines'],
              schema_v: 2,
            }
            docJson = JSON.stringify(s3Doc)
            stream.on.withArgs('data').yields(Buffer.from(docJson, 'utf8'))
          })

          it('should throw an error', async function () {
            await expect(
              DocArchiveManager.promises.unarchiveDoc(projectId, docId)
            ).to.eventually.be.rejectedWith(
              "I don't understand the doc format in s3"
            )
          })
        })
      })
    })

    it('should not do anything if the file is already unarchived', async function () {
      MongoManager.promises.findDoc.resolves({ inS3: false })
      await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
      expect(PersistorManager.getObjectStream).not.to.have.been.called
    })

    describe('when the file is removed while we are processing it', function () {
      beforeEach(function () {
        MongoManager.promises.findDoc = sinon.stub().resolves({ inS3: true })
        MongoManager.promises.findDoc.onSecondCall().resolves({ inS3: false })
      })

      it('should not throw an error if the file is unarchived before we get its hash', async function () {
        PersistorManager.getObjectMd5Hash = sinon
          .stub()
          .rejects(new Errors.NotFoundError())
        await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
          .to.eventually.be.fulfilled
        expect(PersistorManager.getObjectStream).not.to.have.been.called
      })

      it('should not throw an error if the file is unarchived before we download it', async function () {
        PersistorManager.getObjectStream = sinon
          .stub()
          .rejects(new Errors.NotFoundError())
        await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
          .to.eventually.be.fulfilled
        expect(MongoManager.promises.upsertIntoDocCollection).not.to.have.been
          .called
      })
    })

    it('should throw an error if the file is not found but is still listed as archived', async function () {
      PersistorManager.getObjectStream = sinon
        .stub()
        .rejects(new Errors.NotFoundError())
      await expect(
        DocArchiveManager.promises.unarchiveDoc(projectId, docId)
      ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError)
    })
  })

  describe('destroyDoc', function () {
    let docId

    beforeEach(function () {
      docId = mongoDocs[0]._id
    })

    it('should resolve when passed a valid document', async function () {
      await expect(DocArchiveManager.promises.destroyDoc(projectId, docId)).to
        .eventually.be.fulfilled
    })

    it('should throw a not found error when there is no document', async function () {
      await expect(
        DocArchiveManager.promises.destroyDoc(projectId, 'wombat')
      ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError)
    })

    describe('when the doc is in s3', function () {
      beforeEach(function () {
        mongoDocs[0].inS3 = true
      })

      it('should delete the document from s3, if it is in s3', async function () {
        await DocArchiveManager.promises.destroyDoc(projectId, docId)
        expect(PersistorManager.deleteObject).to.have.been.calledWith(
          Settings.docstore.bucket,
          `${projectId}/${docId}`
        )
      })

      it('should delete the doc in mongo', async function () {
        await DocArchiveManager.promises.destroyDoc(projectId, docId)
      })

      describe('when the destroy request errors', function () {
        beforeEach(function () {
          mongoDocs[0].inS3 = true
          PersistorManager.deleteObject.onFirstCall().rejects(new Error('1'))
          PersistorManager.deleteObject.onSecondCall().rejects(new Error('2'))
          PersistorManager.deleteObject.onThirdCall().resolves()
        })

        it('should retry', async function () {
          await DocArchiveManager.promises.destroyDoc(projectId, docId)
          expect(PersistorManager.deleteObject).to.have.been.calledWith(
            Settings.docstore.bucket,
            `${projectId}/${docId}`
          )
          expect(PersistorManager.deleteObject.callCount).to.equal(3)
        })
      })

      describe('when the destroy request errors permanently', function () {
        beforeEach(function () {
          mongoDocs[0].inS3 = true
          PersistorManager.deleteObject.rejects(new Error('permanent'))
        })

        it('should retry and fail eventually', async function () {
          await expect(DocArchiveManager.promises.destroyDoc(projectId, docId))
            .to.eventually.be.rejected
          expect(PersistorManager.deleteObject).to.have.been.calledWith(
            Settings.docstore.bucket,
            `${projectId}/${docId}`
          )
          expect(PersistorManager.deleteObject.callCount).to.equal(4)
        })
      })
    })

    describe('when the doc is not in s3', function () {
      beforeEach(function () {
        mongoDocs[0].inS3 = false
      })

      it('should not delete the document from s3, if it is not in s3', async function () {
        await DocArchiveManager.promises.destroyDoc(projectId, docId)
        expect(PersistorManager.deleteObject).not.to.have.been.called
      })

      it('should delete the doc in mongo', async function () {
        await DocArchiveManager.promises.destroyDoc(projectId, docId)
      })
    })
  })

  describe('archiveAllDocs', function () {
    it('should resolve with valid arguments', async function () {
      await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to
        .eventually.be.fulfilled
    })

    it('should archive all project docs which are not in s3', async function () {
      await DocArchiveManager.promises.archiveAllDocs(projectId)
      // not inS3
      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
        mongoDocs[0]._id
      )
      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
        mongoDocs[1]._id
      )
      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
        mongoDocs[4]._id
      )

      // inS3
      expect(
        MongoManager.promises.markDocAsArchived
      ).not.to.have.been.calledWith(mongoDocs[2]._id)
      expect(
        MongoManager.promises.markDocAsArchived
      ).not.to.have.been.calledWith(mongoDocs[3]._id)
    })
  })

  describe('unArchiveAllDocs', function () {
    it('should resolve with valid arguments', async function () {
      await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to
        .eventually.be.fulfilled
    })

    it('should unarchive all inS3 docs', async function () {
      await DocArchiveManager.promises.unArchiveAllDocs(projectId)

      for (const doc of archivedDocs) {
        expect(PersistorManager.getObjectStream).to.have.been.calledWith(
          Settings.docstore.bucket,
          `${projectId}/${doc._id}`
        )
      }
    })
  })

  describe('destroyAllDocs', function () {
    beforeEach(function () {
      MongoManager.promises.getProjectsDocs.onCall(0).resolves(mongoDocs)
      MongoManager.promises.getProjectsDocs.onCall(1).resolves([])
    })

    it('should resolve with valid arguments', async function () {
      await expect(DocArchiveManager.promises.destroyAllDocs(projectId)).to
        .eventually.be.fulfilled
    })

    it('should delete all docs that are in s3 from s3', async function () {
      await DocArchiveManager.promises.destroyAllDocs(projectId)

      // not inS3
      for (const index of [0, 1, 4]) {
        expect(PersistorManager.deleteObject).not.to.have.been.calledWith(
          Settings.docstore.bucket,
          `${projectId}/${mongoDocs[index]._id}`
        )
      }

      // inS3
      for (const index of [2, 3]) {
        expect(PersistorManager.deleteObject).to.have.been.calledWith(
          Settings.docstore.bucket,
          `${projectId}/${mongoDocs[index]._id}`
        )
      }
    })

    it('should destroy all docs in mongo', async function () {
      await DocArchiveManager.promises.destroyAllDocs(projectId)

      for (const mongoDoc of mongoDocs) {
        expect(MongoManager.promises.destroyDoc).to.have.been.calledWith(
          mongoDoc._id
        )
      }
    })
  })
})
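
For reference, the Crypto stubs above mirror Node's real hashing chain; the tests assert exactly the createHash('md5').update(json).digest('hex') sequence. A minimal sketch of that computation, reconstructed from the assertions rather than from the production DocArchiveManager:

const crypto = require('crypto')

// md5 of the archived JSON payload, matching the stubbed call sequence.
function md5HexOfDoc(lines, ranges) {
  const json = JSON.stringify({ lines, ranges, schema_v: 1 })
  return crypto.createHash('md5').update(json).digest('hex')
}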
863
services/docstore/test/unit/js/DocManagerTests.js
Normal file
@@ -0,0 +1,863 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { assert, expect } = require('chai')
const modulePath = require('path').join(__dirname, '../../../app/js/DocManager')
const { ObjectId } = require('mongodb')
const Errors = require('../../../app/js/Errors')

describe('DocManager', function () {
  beforeEach(function () {
    this.DocManager = SandboxedModule.require(modulePath, {
      requires: {
        './MongoManager': (this.MongoManager = {}),
        './DocArchiveManager': (this.DocArchiveManager = {}),
        './RangeManager': (this.RangeManager = {
          jsonRangesToMongo(r) {
            return r
          },
          shouldUpdateRanges: sinon.stub().returns(false),
        }),
        '@overleaf/settings': (this.settings = { docstore: {} }),
        './Errors': Errors,
      },
    })
    this.doc_id = ObjectId().toString()
    this.project_id = ObjectId().toString()
    this.another_project_id = ObjectId().toString()
    this.callback = sinon.stub()
    return (this.stubbedError = new Error('blew up'))
  })

  describe('getFullDoc', function () {
    beforeEach(function () {
      this.DocManager._getDoc = sinon.stub()
      return (this.doc = {
        _id: this.doc_id,
        lines: ['2134'],
      })
    })

    it('should call get doc with a quick filter', function (done) {
      this.DocManager._getDoc.callsArgWith(3, null, this.doc)
      return this.DocManager.getFullDoc(
        this.project_id,
        this.doc_id,
        (err, doc) => {
          doc.should.equal(this.doc)
          this.DocManager._getDoc
            .calledWith(this.project_id, this.doc_id, {
              lines: true,
              rev: true,
              deleted: true,
              version: true,
              ranges: true,
              inS3: true,
            })
            .should.equal(true)
          return done()
        }
      )
    })

    return it('should return error when get doc errors', function (done) {
      this.DocManager._getDoc.callsArgWith(3, 'error')
      return this.DocManager.getFullDoc(
        this.project_id,
        this.doc_id,
        (err, exist) => {
          err.should.equal('error')
          return done()
        }
      )
    })
  })

  describe('getRawDoc', function () {
    beforeEach(function () {
      this.DocManager._getDoc = sinon.stub()
      return (this.doc = { lines: ['2134'] })
    })

    it('should call get doc with a quick filter', function (done) {
      this.DocManager._getDoc.callsArgWith(3, null, this.doc)
      return this.DocManager.getDocLines(
        this.project_id,
        this.doc_id,
        (err, doc) => {
          doc.should.equal(this.doc)
          this.DocManager._getDoc
            .calledWith(this.project_id, this.doc_id, {
              lines: true,
              inS3: true,
            })
            .should.equal(true)
          return done()
        }
      )
    })

    return it('should return error when get doc errors', function (done) {
      this.DocManager._getDoc.callsArgWith(3, 'error')
      return this.DocManager.getDocLines(
        this.project_id,
        this.doc_id,
        (err, exist) => {
          err.should.equal('error')
          return done()
        }
      )
    })
  })

  describe('getDoc', function () {
    beforeEach(function () {
      this.project = { name: 'mock-project' }
      this.doc = {
        _id: this.doc_id,
        project_id: this.project_id,
        lines: ['mock-lines'],
      }
      this.version = 42
      this.MongoManager.findDoc = sinon.stub()
      return (this.MongoManager.getDocVersion = sinon
        .stub()
        .yields(null, this.version))
    })

    describe('when using a filter', function () {
      beforeEach(function () {
        return this.MongoManager.findDoc.yields(null, this.doc)
      })

      it('should error if inS3 is not set to true', function (done) {
        return this.DocManager._getDoc(
          this.project_id,
          this.doc_id,
          { inS3: false },
          err => {
            expect(err).to.exist
            return done()
          }
        )
      })

      it('should always get inS3 even when no filter is passed', function (done) {
        return this.DocManager._getDoc(
          this.project_id,
          this.doc_id,
          undefined,
          err => {
            this.MongoManager.findDoc.called.should.equal(false)
            expect(err).to.exist
            return done()
          }
        )
      })

      return it('should not error if inS3 is set to true', function (done) {
        return this.DocManager._getDoc(
          this.project_id,
          this.doc_id,
          { inS3: true },
          err => {
            expect(err).to.not.exist
            return done()
          }
        )
      })
    })

    describe('when the doc is in the doc collection', function () {
      beforeEach(function () {
        this.MongoManager.findDoc.yields(null, this.doc)
        return this.DocManager._getDoc(
          this.project_id,
          this.doc_id,
          { version: true, inS3: true },
          this.callback
        )
      })

      it('should get the doc from the doc collection', function () {
        return this.MongoManager.findDoc
          .calledWith(this.project_id, this.doc_id)
          .should.equal(true)
      })

      it('should get the doc version from the docOps collection', function () {
        return this.MongoManager.getDocVersion
          .calledWith(this.doc_id)
          .should.equal(true)
      })

      return it('should return the callback with the doc with the version', function () {
        this.callback.called.should.equal(true)
        const doc = this.callback.args[0][1]
        doc.lines.should.equal(this.doc.lines)
        return doc.version.should.equal(this.version)
      })
    })

    describe('without the version filter', function () {
      beforeEach(function () {
        this.MongoManager.findDoc.yields(null, this.doc)
        return this.DocManager._getDoc(
          this.project_id,
          this.doc_id,
          { version: false, inS3: true },
          this.callback
        )
      })

      return it('should not get the doc version from the docOps collection', function () {
        return this.MongoManager.getDocVersion.called.should.equal(false)
      })
    })

    describe('when MongoManager.findDoc errors', function () {
      beforeEach(function () {
        this.MongoManager.findDoc.yields(this.stubbedError)
        return this.DocManager._getDoc(
          this.project_id,
          this.doc_id,
          { version: true, inS3: true },
          this.callback
        )
      })

      return it('should return the error', function () {
        return this.callback.calledWith(this.stubbedError).should.equal(true)
      })
    })

    describe('when the doc is archived', function () {
      beforeEach(function () {
        this.doc = {
          _id: this.doc_id,
          project_id: this.project_id,
          lines: ['mock-lines'],
          inS3: true,
        }
        this.MongoManager.findDoc.yields(null, this.doc)
        this.DocArchiveManager.unarchiveDoc = (
          project_id,
          doc_id,
          callback
        ) => {
          this.doc.inS3 = false
          return callback()
        }
        sinon.spy(this.DocArchiveManager, 'unarchiveDoc')
        return this.DocManager._getDoc(
          this.project_id,
          this.doc_id,
          { version: true, inS3: true },
          this.callback
        )
      })

      it('should call the DocArchive to unarchive the doc', function () {
        return this.DocArchiveManager.unarchiveDoc
          .calledWith(this.project_id, this.doc_id)
          .should.equal(true)
      })

      it('should look up the doc twice', function () {
        return this.MongoManager.findDoc.calledTwice.should.equal(true)
      })

      return it('should return the doc', function () {
        return this.callback.calledWith(null, this.doc).should.equal(true)
      })
    })

    return describe('when the doc does not exist in the docs collection', function () {
      beforeEach(function () {
        this.MongoManager.findDoc = sinon.stub().yields(null, null)
        return this.DocManager._getDoc(
          this.project_id,
          this.doc_id,
          { version: true, inS3: true },
          this.callback
        )
      })

      return it('should return a NotFoundError', function () {
        return this.callback
          .calledWith(
            sinon.match.has(
              'message',
              `No such doc: ${this.doc_id} in project ${this.project_id}`
            )
          )
          .should.equal(true)
      })
    })
  })

  describe('getAllNonDeletedDocs', function () {
    describe('when the project exists', function () {
      beforeEach(function () {
        this.docs = [
          {
            _id: this.doc_id,
            project_id: this.project_id,
            lines: ['mock-lines'],
          },
        ]
        this.MongoManager.getProjectsDocs = sinon
          .stub()
          .callsArgWith(3, null, this.docs)
        this.DocArchiveManager.unArchiveAllDocs = sinon
          .stub()
          .callsArgWith(1, null, this.docs)
        this.filter = { lines: true }
        return this.DocManager.getAllNonDeletedDocs(
          this.project_id,
          this.filter,
          this.callback
        )
      })

      it('should get the project from the database', function () {
        return this.MongoManager.getProjectsDocs
          .calledWith(this.project_id, { include_deleted: false }, this.filter)
          .should.equal(true)
      })

      return it('should return the docs', function () {
        return this.callback.calledWith(null, this.docs).should.equal(true)
      })
    })

    return describe('when there are no docs for the project', function () {
      beforeEach(function () {
        this.MongoManager.getProjectsDocs = sinon
          .stub()
          .callsArgWith(3, null, null)
        this.DocArchiveManager.unArchiveAllDocs = sinon
          .stub()
          .callsArgWith(1, null)
        return this.DocManager.getAllNonDeletedDocs(
          this.project_id,
          this.filter,
          this.callback
        )
      })

      return it('should return a NotFoundError', function () {
        return this.callback
          .calledWith(
            sinon.match.has('message', `No docs for project ${this.project_id}`)
          )
          .should.equal(true)
      })
    })
  })

  describe('patchDoc', function () {
    describe('when the doc exists', function () {
      beforeEach(function () {
        this.lines = ['mock', 'doc', 'lines']
        this.rev = 77
        this.MongoManager.findDoc = sinon
          .stub()
          .yields(null, { _id: ObjectId(this.doc_id) })
        this.MongoManager.patchDoc = sinon.stub().yields(null)
        this.DocArchiveManager.archiveDocById = sinon.stub().yields(null)
        this.meta = {}
      })

      describe('standard path', function () {
        beforeEach(function (done) {
          this.callback = sinon.stub().callsFake(done)
          this.DocManager.patchDoc(
            this.project_id,
            this.doc_id,
            this.meta,
            this.callback
          )
        })

        it('should get the doc', function () {
          expect(this.MongoManager.findDoc).to.have.been.calledWith(
            this.project_id,
            this.doc_id
          )
        })

        it('should persist the meta', function () {
          expect(this.MongoManager.patchDoc).to.have.been.calledWith(
            this.project_id,
            this.doc_id,
            this.meta
          )
        })

        it('should return the callback', function () {
          expect(this.callback).to.have.been.calledWith(null)
        })
      })

      describe('background flush disabled and deleting a doc', function () {
        beforeEach(function (done) {
          this.settings.docstore.archiveOnSoftDelete = false
          this.meta.deleted = true

          this.callback = sinon.stub().callsFake(done)
          this.DocManager.patchDoc(
            this.project_id,
            this.doc_id,
            this.meta,
            this.callback
          )
        })

        it('should not flush the doc out of mongo', function () {
          expect(this.DocArchiveManager.archiveDocById).to.not.have.been.called
        })
      })

      describe('background flush enabled and not deleting a doc', function () {
        beforeEach(function (done) {
          this.settings.docstore.archiveOnSoftDelete = true
          this.meta.deleted = false
          this.callback = sinon.stub().callsFake(done)
          this.DocManager.patchDoc(
            this.project_id,
            this.doc_id,
            this.meta,
            this.callback
          )
        })

        it('should not flush the doc out of mongo', function () {
          expect(this.DocArchiveManager.archiveDocById).to.not.have.been.called
        })
      })

      describe('background flush enabled and deleting a doc', function () {
        beforeEach(function () {
          this.settings.docstore.archiveOnSoftDelete = true
          this.meta.deleted = true
        })

        describe('when the background flush succeeds', function () {
          beforeEach(function (done) {
            this.DocArchiveManager.archiveDocById = sinon.stub().yields(null)
            this.callback = sinon.stub().callsFake(done)
            this.DocManager.patchDoc(
              this.project_id,
              this.doc_id,
              this.meta,
              this.callback
            )
          })

          it('should not log a warning', function () {
            expect(this.logger.warn).to.not.have.been.called
          })

          it('should flush the doc out of mongo', function () {
            expect(
              this.DocArchiveManager.archiveDocById
            ).to.have.been.calledWith(this.project_id, this.doc_id)
          })
        })

        describe('when the background flush fails', function () {
          beforeEach(function (done) {
            this.err = new Error('foo')
            this.DocArchiveManager.archiveDocById = sinon
              .stub()
              .yields(this.err)
            this.callback = sinon.stub().callsFake(done)
            this.DocManager.patchDoc(
              this.project_id,
              this.doc_id,
              this.meta,
              this.callback
            )
          })

          it('should log a warning', function () {
            expect(this.logger.warn).to.have.been.calledWith(
              sinon.match({
                project_id: this.project_id,
                doc_id: this.doc_id,
                err: this.err,
              }),
              'archiving a single doc in the background failed'
            )
          })

          it('should not fail the delete process', function () {
            expect(this.callback).to.have.been.calledWith(null)
          })
        })
      })
    })

    describe('when the doc does not exist', function () {
      beforeEach(function () {
        this.MongoManager.findDoc = sinon.stub().yields(null)
        this.DocManager.patchDoc(
          this.project_id,
          this.doc_id,
          {},
          this.callback
        )
      })

      it('should return a NotFoundError', function () {
        expect(this.callback).to.have.been.calledWith(
          sinon.match.has(
            'message',
            `No such project/doc to delete: ${this.project_id}/${this.doc_id}`
          )
        )
      })
    })
  })

  return describe('updateDoc', function () {
    beforeEach(function () {
      this.oldDocLines = ['old', 'doc', 'lines']
      this.newDocLines = ['new', 'doc', 'lines']
      this.originalRanges = {
        changes: [
          {
            id: ObjectId().toString(),
            op: { i: 'foo', p: 3 },
            meta: {
              user_id: ObjectId().toString(),
              ts: new Date().toString(),
            },
          },
        ],
      }
      this.newRanges = {
        changes: [
          {
            id: ObjectId().toString(),
            op: { i: 'bar', p: 6 },
            meta: {
              user_id: ObjectId().toString(),
              ts: new Date().toString(),
            },
          },
        ],
      }
      this.version = 42
      this.doc = {
        _id: this.doc_id,
        project_id: this.project_id,
        lines: this.oldDocLines,
        rev: (this.rev = 5),
        version: this.version,
        ranges: this.originalRanges,
      }

      this.MongoManager.upsertIntoDocCollection = sinon.stub().callsArg(3)
      this.MongoManager.setDocVersion = sinon.stub().yields()
      return (this.DocManager._getDoc = sinon.stub())
    })

    describe('when only the doc lines have changed', function () {
      beforeEach(function () {
        this.DocManager._getDoc = sinon.stub().callsArgWith(3, null, this.doc)
        return this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.newDocLines,
          this.version,
          this.originalRanges,
          this.callback
        )
      })

      it('should get the existing doc', function () {
        return this.DocManager._getDoc
          .calledWith(this.project_id, this.doc_id, {
            version: true,
            rev: true,
            lines: true,
            ranges: true,
            inS3: true,
          })
          .should.equal(true)
      })

      it('should upsert the document to the doc collection', function () {
        return this.MongoManager.upsertIntoDocCollection
          .calledWith(this.project_id, this.doc_id, { lines: this.newDocLines })
          .should.equal(true)
      })

      it('should not update the version', function () {
        return this.MongoManager.setDocVersion.called.should.equal(false)
      })

      return it('should return the callback with the new rev', function () {
        return this.callback
          .calledWith(null, true, this.rev + 1)
          .should.equal(true)
      })
    })

    describe('when the doc ranges have changed', function () {
      beforeEach(function () {
        this.DocManager._getDoc = sinon.stub().callsArgWith(3, null, this.doc)
        this.RangeManager.shouldUpdateRanges.returns(true)
        return this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.oldDocLines,
          this.version,
          this.newRanges,
          this.callback
        )
      })

      it('should upsert the ranges', function () {
        return this.MongoManager.upsertIntoDocCollection
          .calledWith(this.project_id, this.doc_id, { ranges: this.newRanges })
          .should.equal(true)
      })

      it('should not update the version', function () {
        return this.MongoManager.setDocVersion.called.should.equal(false)
      })

      return it('should return the callback with the new rev', function () {
        return this.callback
          .calledWith(null, true, this.rev + 1)
          .should.equal(true)
      })
    })

    describe('when only the version has changed', function () {
      beforeEach(function () {
        this.DocManager._getDoc = sinon.stub().callsArgWith(3, null, this.doc)
        return this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.oldDocLines,
          this.version + 1,
          this.originalRanges,
          this.callback
        )
      })

      it('should not change the lines or ranges', function () {
        return this.MongoManager.upsertIntoDocCollection.called.should.equal(
          false
        )
      })

      it('should update the version', function () {
        return this.MongoManager.setDocVersion
          .calledWith(this.doc_id, this.version + 1)
          .should.equal(true)
      })

      return it('should return the callback with the old rev', function () {
        return this.callback.calledWith(null, true, this.rev).should.equal(true)
      })
    })

    describe('when the doc has not changed at all', function () {
      beforeEach(function () {
        this.DocManager._getDoc = sinon.stub().callsArgWith(3, null, this.doc)
        return this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.oldDocLines,
          this.version,
          this.originalRanges,
          this.callback
        )
      })

      it('should not update the ranges or lines', function () {
        return this.MongoManager.upsertIntoDocCollection.called.should.equal(
          false
        )
      })

      it('should not update the version', function () {
        return this.MongoManager.setDocVersion.called.should.equal(false)
      })

      return it('should return the callback with the old rev and modified == false', function () {
        return this.callback
          .calledWith(null, false, this.rev)
          .should.equal(true)
      })
    })

    describe('when the version is null', function () {
      beforeEach(function () {
        return this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.newDocLines,
          null,
          this.originalRanges,
          this.callback
        )
      })

      return it('should return an error', function () {
        return this.callback
          .calledWith(
            sinon.match.has('message', 'no lines, version or ranges provided')
          )
          .should.equal(true)
      })
    })

    describe('when the lines are null', function () {
      beforeEach(function () {
        return this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          null,
          this.version,
          this.originalRanges,
          this.callback
        )
      })

      return it('should return an error', function () {
        return this.callback
          .calledWith(
            sinon.match.has('message', 'no lines, version or ranges provided')
          )
          .should.equal(true)
      })
    })

    describe('when the ranges are null', function () {
      beforeEach(function () {
        return this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.newDocLines,
          this.version,
          null,
          this.callback
        )
      })

      return it('should return an error', function () {
        return this.callback
          .calledWith(
            sinon.match.has('message', 'no lines, version or ranges provided')
          )
          .should.equal(true)
      })
    })

    describe('when there is a generic error getting the doc', function () {
      beforeEach(function () {
        this.error = new Error('doc could not be found')
        this.DocManager._getDoc = sinon
          .stub()
          .callsArgWith(3, this.error, null, null)
        return this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.newDocLines,
          this.version,
          this.originalRanges,
          this.callback
        )
      })

      it('should not upsert the document to the doc collection', function () {
        return this.MongoManager.upsertIntoDocCollection.called.should.equal(
          false
        )
      })

      return it('should return the callback with the error', function () {
        return this.callback.calledWith(this.error).should.equal(true)
      })
    })

    describe('when the doc lines have not changed', function () {
      beforeEach(function () {
        this.DocManager._getDoc = sinon.stub().callsArgWith(3, null, this.doc)
        return this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.oldDocLines.slice(),
          this.version,
          this.originalRanges,
          this.callback
        )
      })

      it('should not update the doc', function () {
        return this.MongoManager.upsertIntoDocCollection.called.should.equal(
          false
        )
      })

      return it('should return the callback with the existing rev', function () {
        return this.callback
          .calledWith(null, false, this.rev)
          .should.equal(true)
      })
    })

    return describe('when the doc does not exist', function () {
      beforeEach(function () {
        this.DocManager._getDoc = sinon.stub().callsArgWith(3, null, null, null)
        return this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.newDocLines,
          this.version,
          this.originalRanges,
          this.callback
        )
      })

      it('should upsert the document to the doc collection', function () {
        return this.MongoManager.upsertIntoDocCollection
          .calledWith(this.project_id, this.doc_id, {
            lines: this.newDocLines,
            ranges: this.originalRanges,
          })
          .should.equal(true)
      })

      it('should set the version', function () {
        return this.MongoManager.setDocVersion
          .calledWith(this.doc_id, this.version)
          .should.equal(true)
      })

      return it('should return the callback with the new rev', function () {
        return this.callback.calledWith(null, true, 1).should.equal(true)
      })
    })
  })
})
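
Taken together, the updateDoc cases above form a small decision table. A hypothetical condensation of that branching, reconstructed only from the assertions (the real DocManager internals are stubbed out in these tests):

// Outcomes implied by the tests; not the production implementation.
function classifyUpdate(doc, lines, version, ranges, rangesChanged) {
  if (lines == null || version == null || ranges == null) {
    return 'error: no lines, version or ranges provided'
  }
  if (doc == null) return 'upsert lines + ranges, set version, rev becomes 1'
  const linesChanged = JSON.stringify(lines) !== JSON.stringify(doc.lines)
  if (linesChanged || rangesChanged) return 'upsert changes, rev + 1'
  if (version !== doc.version) return 'set version only, keep rev'
  return 'no write, callback(null, false, rev)'
}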
513
services/docstore/test/unit/js/HttpControllerTests.js
Normal file
@@ -0,0 +1,513 @@
|
||||||
|
/* eslint-disable
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { assert, expect } = require('chai')
const modulePath = require('path').join(
  __dirname,
  '../../../app/js/HttpController'
)
const { ObjectId } = require('mongodb')

describe('HttpController', function () {
  beforeEach(function () {
    const settings = {
      max_doc_length: 2 * 1024 * 1024,
    }
    this.HttpController = SandboxedModule.require(modulePath, {
      requires: {
        './DocManager': (this.DocManager = {}),
        './DocArchiveManager': (this.DocArchiveManager = {}),
        '@overleaf/settings': settings,
        './HealthChecker': {},
      },
    })
    this.res = {
      send: sinon.stub(),
      sendStatus: sinon.stub(),
      json: sinon.stub(),
      setHeader: sinon.stub(),
    }
    this.res.status = sinon.stub().returns(this.res)
    this.req = { query: {} }
    this.next = sinon.stub()
    this.project_id = 'mock-project-id'
    this.doc_id = 'mock-doc-id'
    this.doc = {
      _id: this.doc_id,
      lines: ['mock', 'lines', ' here', '', '', ' spaces '],
      version: 42,
      rev: 5,
    }
    return (this.deletedDoc = {
      deleted: true,
      _id: this.doc_id,
      lines: ['mock', 'lines', ' here', '', '', ' spaces '],
      version: 42,
      rev: 5,
    })
  })
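
  // SandboxedModule.require loads HttpController with its require() calls
  // intercepted: each dependency above is replaced by the bare object
  // assigned inline (e.g. this.DocManager = {}), and individual tests attach
  // sinon stubs for just the methods they exercise.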

  describe('getDoc', function () {
    describe('without deleted docs', function () {
      beforeEach(function () {
        this.req.params = {
          project_id: this.project_id,
          doc_id: this.doc_id,
        }
        this.DocManager.getFullDoc = sinon
          .stub()
          .callsArgWith(2, null, this.doc)
        return this.HttpController.getDoc(this.req, this.res, this.next)
      })

      it('should get the document with the version (including deleted)', function () {
        return this.DocManager.getFullDoc
          .calledWith(this.project_id, this.doc_id)
          .should.equal(true)
      })

      return it('should return the doc as JSON', function () {
        return this.res.json
          .calledWith({
            _id: this.doc_id,
            lines: this.doc.lines,
            rev: this.doc.rev,
            version: this.doc.version,
          })
          .should.equal(true)
      })
    })

    return describe('which is deleted', function () {
      beforeEach(function () {
        this.req.params = {
          project_id: this.project_id,
          doc_id: this.doc_id,
        }
        return (this.DocManager.getFullDoc = sinon
          .stub()
          .callsArgWith(2, null, this.deletedDoc))
      })

      it('should get the doc from the doc manager', function () {
        this.HttpController.getDoc(this.req, this.res, this.next)
        return this.DocManager.getFullDoc
          .calledWith(this.project_id, this.doc_id)
          .should.equal(true)
      })

      it('should return 404 if the query string include_deleted is not set', function () {
        this.HttpController.getDoc(this.req, this.res, this.next)
        return this.res.sendStatus.calledWith(404).should.equal(true)
      })

      return it('should return the doc as JSON if include_deleted is set to true', function () {
        this.req.query.include_deleted = 'true'
        this.HttpController.getDoc(this.req, this.res, this.next)
        return this.res.json
          .calledWith({
            _id: this.doc_id,
            lines: this.doc.lines,
            rev: this.doc.rev,
            deleted: true,
            version: this.doc.version,
          })
          .should.equal(true)
      })
    })
  })

  describe('getRawDoc', function () {
    beforeEach(function () {
      this.req.params = {
        project_id: this.project_id,
        doc_id: this.doc_id,
      }
      this.DocManager.getDocLines = sinon.stub().callsArgWith(2, null, this.doc)
      return this.HttpController.getRawDoc(this.req, this.res, this.next)
    })

    it('should get the document without the version', function () {
      return this.DocManager.getDocLines
        .calledWith(this.project_id, this.doc_id)
        .should.equal(true)
    })

    it('should set the content type header', function () {
      return this.res.setHeader
        .calledWith('content-type', 'text/plain')
        .should.equal(true)
    })

    return it('should send the raw version of the doc', function () {
      return assert.deepEqual(
        this.res.send.args[0][0],
        `${this.doc.lines[0]}\n${this.doc.lines[1]}\n${this.doc.lines[2]}\n${this.doc.lines[3]}\n${this.doc.lines[4]}\n${this.doc.lines[5]}`
      )
    })
  })
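
  // i.e. the raw endpoint is expected to send this.doc.lines.join('\n'),
  // preserving the empty strings and padded entries in the fixture verbatim.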

  describe('getAllDocs', function () {
    describe('normally', function () {
      beforeEach(function () {
        this.req.params = { project_id: this.project_id }
        this.docs = [
          {
            _id: ObjectId(),
            lines: ['mock', 'lines', 'one'],
            rev: 2,
          },
          {
            _id: ObjectId(),
            lines: ['mock', 'lines', 'two'],
            rev: 4,
          },
        ]
        this.DocManager.getAllNonDeletedDocs = sinon
          .stub()
          .callsArgWith(2, null, this.docs)
        return this.HttpController.getAllDocs(this.req, this.res, this.next)
      })

      it('should get all the (non-deleted) docs', function () {
        return this.DocManager.getAllNonDeletedDocs
          .calledWith(this.project_id, { lines: true, rev: true })
          .should.equal(true)
      })

      return it('should return the doc as JSON', function () {
        return this.res.json
          .calledWith([
            {
              _id: this.docs[0]._id.toString(),
              lines: this.docs[0].lines,
              rev: this.docs[0].rev,
            },
            {
              _id: this.docs[1]._id.toString(),
              lines: this.docs[1].lines,
              rev: this.docs[1].rev,
            },
          ])
          .should.equal(true)
      })
    })

    return describe('with a null doc', function () {
      beforeEach(function () {
        this.req.params = { project_id: this.project_id }
        this.docs = [
          {
            _id: ObjectId(),
            lines: ['mock', 'lines', 'one'],
            rev: 2,
          },
          null,
          {
            _id: ObjectId(),
            lines: ['mock', 'lines', 'two'],
            rev: 4,
          },
        ]
        this.DocManager.getAllNonDeletedDocs = sinon
          .stub()
          .callsArgWith(2, null, this.docs)
        return this.HttpController.getAllDocs(this.req, this.res, this.next)
      })

      it('should return the non null docs as JSON', function () {
        return this.res.json
          .calledWith([
            {
              _id: this.docs[0]._id.toString(),
              lines: this.docs[0].lines,
              rev: this.docs[0].rev,
            },
            {
              _id: this.docs[2]._id.toString(),
              lines: this.docs[2].lines,
              rev: this.docs[2].rev,
            },
          ])
          .should.equal(true)
      })

      return it('should log out an error', function () {
        return this.logger.error
          .calledWith(
            {
              err: sinon.match.has('message', 'null doc'),
              project_id: this.project_id,
            },
            'encountered null doc'
          )
          .should.equal(true)
      })
    })
  })
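
  // sinon.match.has('message', 'null doc') is a partial matcher: the logged
  // err only needs a message property equal to 'null doc', not strict
  // equality of the whole error object.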

  describe('getAllRanges', function () {
    return describe('normally', function () {
      beforeEach(function () {
        this.req.params = { project_id: this.project_id }
        this.docs = [
          {
            _id: ObjectId(),
            ranges: { mock_ranges: 'one' },
          },
          {
            _id: ObjectId(),
            ranges: { mock_ranges: 'two' },
          },
        ]
        this.DocManager.getAllNonDeletedDocs = sinon
          .stub()
          .callsArgWith(2, null, this.docs)
        return this.HttpController.getAllRanges(this.req, this.res, this.next)
      })

      it('should get all the (non-deleted) doc ranges', function () {
        return this.DocManager.getAllNonDeletedDocs
          .calledWith(this.project_id, { ranges: true })
          .should.equal(true)
      })

      return it('should return the doc as JSON', function () {
        return this.res.json
          .calledWith([
            {
              _id: this.docs[0]._id.toString(),
              ranges: this.docs[0].ranges,
            },
            {
              _id: this.docs[1]._id.toString(),
              ranges: this.docs[1].ranges,
            },
          ])
          .should.equal(true)
      })
    })
  })

  describe('updateDoc', function () {
    beforeEach(function () {
      return (this.req.params = {
        project_id: this.project_id,
        doc_id: this.doc_id,
      })
    })

    describe('when the doc lines exist and were updated', function () {
      beforeEach(function () {
        this.req.body = {
          lines: (this.lines = ['hello', 'world']),
          version: (this.version = 42),
          ranges: (this.ranges = { changes: 'mock' }),
        }
        this.DocManager.updateDoc = sinon
          .stub()
          .yields(null, true, (this.rev = 5))
        return this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should update the document', function () {
        return this.DocManager.updateDoc
          .calledWith(
            this.project_id,
            this.doc_id,
            this.lines,
            this.version,
            this.ranges
          )
          .should.equal(true)
      })

      return it('should return a modified status', function () {
        return this.res.json
          .calledWith({ modified: true, rev: this.rev })
          .should.equal(true)
      })
    })
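
    // .yields(...) makes the stub invoke the first callback it receives,
    // here updateDoc's (error, modified, rev) callback, with those values.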

    describe('when the doc lines exist and were not updated', function () {
      beforeEach(function () {
        this.req.body = {
          lines: (this.lines = ['hello', 'world']),
          version: (this.version = 42),
          ranges: {},
        }
        this.DocManager.updateDoc = sinon
          .stub()
          .yields(null, false, (this.rev = 5))
        return this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      return it('should return a modified status', function () {
        return this.res.json
          .calledWith({ modified: false, rev: this.rev })
          .should.equal(true)
      })
    })

    describe('when the doc lines are not provided', function () {
      beforeEach(function () {
        this.req.body = { version: 42, ranges: {} }
        this.DocManager.updateDoc = sinon.stub().yields(null, false)
        return this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should not update the document', function () {
        return this.DocManager.updateDoc.called.should.equal(false)
      })

      return it('should return a 400 (bad request) response', function () {
        return this.res.sendStatus.calledWith(400).should.equal(true)
      })
    })

    describe('when the doc version is not provided', function () {
      beforeEach(function () {
        // omit `version` so the 400 comes from the missing field under test
        this.req.body = { lines: ['hello world'], ranges: {} }
        this.DocManager.updateDoc = sinon.stub().yields(null, false)
        return this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should not update the document', function () {
        return this.DocManager.updateDoc.called.should.equal(false)
      })

      return it('should return a 400 (bad request) response', function () {
        return this.res.sendStatus.calledWith(400).should.equal(true)
      })
    })

    describe('when the doc ranges are not provided', function () {
      beforeEach(function () {
        this.req.body = { lines: ['foo'], version: 42 }
        this.DocManager.updateDoc = sinon.stub().yields(null, false)
        return this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should not update the document', function () {
        return this.DocManager.updateDoc.called.should.equal(false)
      })

      return it('should return a 400 (bad request) response', function () {
        return this.res.sendStatus.calledWith(400).should.equal(true)
      })
    })

    return describe('when the doc body is too large', function () {
      beforeEach(function () {
        this.req.body = {
          lines: (this.lines = Array(2049).fill('a'.repeat(1024))),
          version: (this.version = 42),
          ranges: (this.ranges = { changes: 'mock' }),
        }
        return this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should return a 413 (too large) response', function () {
        return sinon.assert.calledWith(this.res.status, 413)
      })

      return it('should report that the document body is too large', function () {
        return sinon.assert.calledWith(this.res.send, 'document body too large')
      })
    })
  })
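
  // Size check: 2049 lines of 1024 characters is 2049 * 1024 = 2,098,176
  // bytes before newlines, just over the stubbed max_doc_length of
  // 2 * 1024 * 1024 = 2,097,152, so the request must be rejected up front.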

  describe('patchDoc', function () {
    beforeEach(function () {
      this.req.params = {
        project_id: this.project_id,
        doc_id: this.doc_id,
      }
      this.req.body = { name: 'foo.tex' }
      this.DocManager.patchDoc = sinon.stub().yields(null)
      this.HttpController.patchDoc(this.req, this.res, this.next)
    })

    it('should patch the document', function () {
      expect(this.DocManager.patchDoc).to.have.been.calledWith(
        this.project_id,
        this.doc_id
      )
    })

    it('should return a 204 (No Content)', function () {
      expect(this.res.sendStatus).to.have.been.calledWith(204)
    })

    describe('with an invalid payload', function () {
      beforeEach(function () {
        this.req.body = { cannot: 'happen' }

        this.DocManager.patchDoc = sinon.stub().yields(null)
        this.HttpController.patchDoc(this.req, this.res, this.next)
      })

      it('should log a message', function () {
        // 'pathDoc' (sic) mirrors the exact message the controller logs
        expect(this.logger.fatal).to.have.been.calledWith(
          { field: 'cannot' },
          'joi validation for pathDoc is broken'
        )
      })

      it('should not pass the invalid field along', function () {
        expect(this.DocManager.patchDoc).to.have.been.calledWith(
          this.project_id,
          this.doc_id,
          {}
        )
      })
    })
  })

  describe('archiveAllDocs', function () {
    beforeEach(function () {
      this.req.params = { project_id: this.project_id }
      this.DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1)
      return this.HttpController.archiveAllDocs(this.req, this.res, this.next)
    })

    it('should archive the project', function () {
      return this.DocArchiveManager.archiveAllDocs
        .calledWith(this.project_id)
        .should.equal(true)
    })

    return it('should return a 204 (No Content)', function () {
      return this.res.sendStatus.calledWith(204).should.equal(true)
    })
  })
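
  // callsArg(1) fires the argument at index 1 (the completion callback) with
  // no arguments, simulating the archive/destroy finishing without error.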

  return describe('destroyAllDocs', function () {
    beforeEach(function () {
      this.req.params = { project_id: this.project_id }
      this.DocArchiveManager.destroyAllDocs = sinon.stub().callsArg(1)
      return this.HttpController.destroyAllDocs(this.req, this.res, this.next)
    })

    it('should destroy the docs', function () {
      return sinon.assert.calledWith(
        this.DocArchiveManager.destroyAllDocs,
        this.project_id
      )
    })

    return it('should return 204', function () {
      return sinon.assert.calledWith(this.res.sendStatus, 204)
    })
  })
})

375
services/docstore/test/unit/js/MongoManagerTests.js
Normal file

@ -0,0 +1,375 @@

/* eslint-disable
    handle-callback-err,
    no-return-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const modulePath = require('path').join(
  __dirname,
  '../../../app/js/MongoManager'
)
const { ObjectId } = require('mongodb')
const { assert } = require('chai')
const Errors = require('../../../app/js/Errors')

describe('MongoManager', function () {
  beforeEach(function () {
    this.MongoManager = SandboxedModule.require(modulePath, {
      requires: {
        './mongodb': {
          db: (this.db = { docs: {}, docOps: {} }),
          ObjectId,
        },
        '@overleaf/metrics': { timeAsyncMethod: sinon.stub() },
        '@overleaf/settings': { max_deleted_docs: 42 },
        './Errors': Errors,
      },
    })
    this.project_id = ObjectId().toString()
    this.doc_id = ObjectId().toString()
    this.callback = sinon.stub()
    return (this.stubbedErr = new Error('hello world'))
  })

  describe('findDoc', function () {
    beforeEach(function () {
      this.doc = { name: 'mock-doc' }
      this.db.docs.findOne = sinon.stub().callsArgWith(2, null, this.doc)
      this.filter = { lines: true }
      return this.MongoManager.findDoc(
        this.project_id,
        this.doc_id,
        this.filter,
        this.callback
      )
    })

    it('should find the doc', function () {
      this.db.docs.findOne
        .calledWith(
          {
            _id: ObjectId(this.doc_id),
            project_id: ObjectId(this.project_id),
          },
          {
            projection: this.filter,
          }
        )
        .should.equal(true)
    })

    return it('should call the callback with the doc', function () {
      return this.callback.calledWith(null, this.doc).should.equal(true)
    })
  })
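
  // The assertions above rely on the manager converting the string ids it is
  // given into ObjectId values before querying, hence the ObjectId(...)
  // wrappers around this.doc_id and this.project_id.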

  describe('patchDoc', function () {
    beforeEach(function (done) {
      this.db.docs.updateOne = sinon.stub().yields(null)
      this.meta = { name: 'foo.tex' }
      this.callback.callsFake(done)
      this.MongoManager.patchDoc(
        this.project_id,
        this.doc_id,
        this.meta,
        this.callback
      )
    })

    it('should pass the parameter along', function () {
      this.db.docs.updateOne.should.have.been.calledWith(
        {
          _id: ObjectId(this.doc_id),
          project_id: ObjectId(this.project_id),
        },
        {
          $set: this.meta,
        },
        this.callback
      )
    })
  })

  describe('getProjectsDocs', function () {
    beforeEach(function () {
      this.filter = { lines: true }
      this.doc1 = { name: 'mock-doc1' }
      this.doc2 = { name: 'mock-doc2' }
      this.doc3 = { name: 'mock-doc3' }
      this.doc4 = { name: 'mock-doc4' }
      this.db.docs.find = sinon.stub().returns({
        toArray: sinon
          .stub()
          .callsArgWith(0, null, [this.doc1, this.doc3, this.doc4]),
      })
    })

    describe('with include_deleted = false', function () {
      beforeEach(function () {
        return this.MongoManager.getProjectsDocs(
          this.project_id,
          { include_deleted: false },
          this.filter,
          this.callback
        )
      })

      it('should find the non-deleted docs via the project_id', function () {
        return this.db.docs.find
          .calledWith(
            {
              project_id: ObjectId(this.project_id),
              deleted: { $ne: true },
            },
            {
              projection: this.filter,
            }
          )
          .should.equal(true)
      })

      return it('should call the callback with the docs', function () {
        return this.callback
          .calledWith(null, [this.doc1, this.doc3, this.doc4])
          .should.equal(true)
      })
    })

    return describe('with include_deleted = true', function () {
      beforeEach(function () {
        return this.MongoManager.getProjectsDocs(
          this.project_id,
          { include_deleted: true },
          this.filter,
          this.callback
        )
      })

      it('should find all via the project_id', function () {
        return this.db.docs.find
          .calledWith(
            {
              project_id: ObjectId(this.project_id),
            },
            {
              projection: this.filter,
            }
          )
          .should.equal(true)
      })

      return it('should call the callback with the docs', function () {
        return this.callback
          .calledWith(null, [this.doc1, this.doc3, this.doc4])
          .should.equal(true)
      })
    })
  })
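
  // deleted: { $ne: true } also matches documents with no deleted field at
  // all, so docs written before the flag existed still count as non-deleted.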

  describe('getProjectsDeletedDocs', function () {
    beforeEach(function (done) {
      this.filter = { name: true }
      this.doc1 = { _id: '1', name: 'mock-doc1.tex' }
      this.doc2 = { _id: '2', name: 'mock-doc2.tex' }
      this.doc3 = { _id: '3', name: 'mock-doc3.tex' }
      this.db.docs.find = sinon.stub().returns({
        toArray: sinon.stub().yields(null, [this.doc1, this.doc2, this.doc3]),
      })
      this.callback.callsFake(done)
      this.MongoManager.getProjectsDeletedDocs(
        this.project_id,
        this.filter,
        this.callback
      )
    })

    it('should find the deleted docs via the project_id', function () {
      this.db.docs.find
        .calledWith({
          project_id: ObjectId(this.project_id),
          deleted: true,
        })
        .should.equal(true)
    })

    it('should filter, sort by deletedAt and limit', function () {
      this.db.docs.find
        .calledWith(sinon.match.any, {
          projection: this.filter,
          sort: { deletedAt: -1 },
          limit: 42,
        })
        .should.equal(true)
    })

    it('should call the callback with the docs', function () {
      this.callback
        .calledWith(null, [this.doc1, this.doc2, this.doc3])
        .should.equal(true)
    })
  })
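
  // The limit: 42 asserted above comes straight from the stubbed
  // '@overleaf/settings' value max_deleted_docs: 42 injected in the
  // top-level beforeEach.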

  describe('upsertIntoDocCollection', function () {
    beforeEach(function () {
      this.db.docs.updateOne = sinon.stub().callsArgWith(3, this.stubbedErr)
      return (this.oldRev = 77)
    })

    it('should upsert the document', function (done) {
      return this.MongoManager.upsertIntoDocCollection(
        this.project_id,
        this.doc_id,
        { lines: this.lines },
        err => {
          const args = this.db.docs.updateOne.args[0]
          assert.deepEqual(args[0], { _id: ObjectId(this.doc_id) })
          assert.equal(args[1].$set.lines, this.lines)
          assert.equal(args[1].$inc.rev, 1)
          assert.deepEqual(args[1].$set.project_id, ObjectId(this.project_id))
          return done()
        }
      )
    })

    return it('should return the error', function (done) {
      return this.MongoManager.upsertIntoDocCollection(
        this.project_id,
        this.doc_id,
        { lines: this.lines },
        err => {
          err.should.equal(this.stubbedErr)
          return done()
        }
      )
    })
  })
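
  // The upsert shape pinned down above: $set writes the new lines and
  // project_id while $inc bumps rev by one in the same atomic update.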

  describe('destroyDoc', function () {
    beforeEach(function (done) {
      this.db.docs.deleteOne = sinon.stub().yields()
      this.db.docOps.deleteOne = sinon.stub().yields()
      return this.MongoManager.destroyDoc('123456789012', done)
    })

    it('should destroy the doc', function () {
      return sinon.assert.calledWith(this.db.docs.deleteOne, {
        _id: ObjectId('123456789012'),
      })
    })

    return it('should destroy the docOps', function () {
      return sinon.assert.calledWith(this.db.docOps.deleteOne, {
        doc_id: ObjectId('123456789012'),
      })
    })
  })

  describe('getDocVersion', function () {
    describe('when the doc exists', function () {
      beforeEach(function () {
        this.doc = { version: (this.version = 42) }
        this.db.docOps.findOne = sinon.stub().callsArgWith(2, null, this.doc)
        return this.MongoManager.getDocVersion(this.doc_id, this.callback)
      })

      it('should look for the doc in the database', function () {
        return this.db.docOps.findOne
          .calledWith(
            { doc_id: ObjectId(this.doc_id) },
            {
              projection: { version: 1 },
            }
          )
          .should.equal(true)
      })

      return it('should call the callback with the version', function () {
        return this.callback.calledWith(null, this.version).should.equal(true)
      })
    })

    return describe("when the doc doesn't exist", function () {
      beforeEach(function () {
        this.db.docOps.findOne = sinon.stub().callsArgWith(2, null, null)
        return this.MongoManager.getDocVersion(this.doc_id, this.callback)
      })

      return it('should call the callback with 0', function () {
        return this.callback.calledWith(null, 0).should.equal(true)
      })
    })
  })

  describe('setDocVersion', function () {
    beforeEach(function () {
      this.version = 42
      this.db.docOps.updateOne = sinon.stub().callsArg(3)
      return this.MongoManager.setDocVersion(
        this.doc_id,
        this.version,
        this.callback
      )
    })

    it('should update the doc version', function () {
      return this.db.docOps.updateOne
        .calledWith(
          {
            doc_id: ObjectId(this.doc_id),
          },
          {
            $set: {
              version: this.version,
            },
          },
          {
            upsert: true,
          }
        )
        .should.equal(true)
    })

    return it('should call the callback', function () {
      return this.callback.called.should.equal(true)
    })
  })
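
  // Versions live in the separate docOps collection, keyed by doc_id rather
  // than _id; upsert: true creates the record on first write, which is why a
  // missing record reads back as version 0 in getDocVersion above.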

  describe('withRevCheck', function () {
    beforeEach(function () {
      this.doc = { _id: ObjectId(), name: 'mock-doc', rev: 1 }
      this.testFunction = sinon.stub().yields(null, 'foo')
    })

    it('should call the callback when the rev has not changed', function (done) {
      this.db.docs.findOne = sinon.stub().callsArgWith(2, null, { rev: 1 })
      this.MongoManager.withRevCheck(
        this.doc,
        this.testFunction,
        (err, result) => {
          result.should.equal('foo')
          assert.isNull(err)
          done()
        }
      )
    })

    it('should return an error when the rev has changed', function (done) {
      this.db.docs.findOne = sinon.stub().callsArgWith(2, null, { rev: 2 })
      this.MongoManager.withRevCheck(
        this.doc,
        this.testFunction,
        (err, result) => {
          err.should.be.instanceof(Errors.DocModifiedError)
          done()
        }
      )
    })
  })
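
  // withRevCheck is an optimistic-concurrency guard: it compares the rev
  // currently stored in the database with the rev on the in-memory doc and
  // yields Errors.DocModifiedError on mismatch, rejecting stale writes.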
})

255
services/docstore/test/unit/js/RangeManagerTests.js
Normal file

@ -0,0 +1,255 @@

/* eslint-disable
    camelcase,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { assert, expect } = require('chai')
const modulePath = require('path').join(
  __dirname,
  '../../../app/js/RangeManager'
)
const { ObjectId } = require('mongodb')
const _ = require('underscore')

describe('RangeManager', function () {
  beforeEach(function () {
    return (this.RangeManager = SandboxedModule.require(modulePath, {
      requires: {
        './mongodb': {
          ObjectId,
        },
      },
    }))
  })

  describe('jsonRangesToMongo', function () {
    it('should convert ObjectIds and dates to proper objects', function () {
      const change_id = ObjectId().toString()
      const comment_id = ObjectId().toString()
      const user_id = ObjectId().toString()
      const thread_id = ObjectId().toString()
      const ts = new Date().toJSON()
      return this.RangeManager.jsonRangesToMongo({
        changes: [
          {
            id: change_id,
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id,
              ts,
            },
          },
        ],
        comments: [
          {
            id: comment_id,
            op: { c: 'foo', p: 3, t: thread_id },
          },
        ],
      }).should.deep.equal({
        changes: [
          {
            id: ObjectId(change_id),
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: ObjectId(user_id),
              ts: new Date(ts),
            },
          },
        ],
        comments: [
          {
            id: ObjectId(comment_id),
            op: { c: 'foo', p: 3, t: ObjectId(thread_id) },
          },
        ],
      })
    })

    it('should leave malformed ObjectIds as they are', function () {
      const change_id = 'foo'
      const comment_id = 'bar'
      const user_id = 'baz'
      return this.RangeManager.jsonRangesToMongo({
        changes: [
          {
            id: change_id,
            metadata: {
              user_id,
            },
          },
        ],
        comments: [
          {
            id: comment_id,
          },
        ],
      }).should.deep.equal({
        changes: [
          {
            id: change_id,
            metadata: {
              user_id,
            },
          },
        ],
        comments: [
          {
            id: comment_id,
          },
        ],
      })
    })

    return it('should be consistent when transformed through json -> mongo -> json', function () {
      const change_id = ObjectId().toString()
      const comment_id = ObjectId().toString()
      const user_id = ObjectId().toString()
      const thread_id = ObjectId().toString()
      const ts = new Date().toJSON()
      const ranges1 = {
        changes: [
          {
            id: change_id,
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id,
              ts,
            },
          },
        ],
        comments: [
          {
            id: comment_id,
            op: { c: 'foo', p: 3, t: thread_id },
          },
        ],
      }
      const ranges1_copy = JSON.parse(JSON.stringify(ranges1)) // jsonRangesToMongo modifies in place
      const ranges2 = JSON.parse(
        JSON.stringify(this.RangeManager.jsonRangesToMongo(ranges1_copy))
      )
      return ranges1.should.deep.equal(ranges2)
    })
  })
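
  // The round trip is lossless because both ObjectId and Date serialise back
  // to the exact strings they were parsed from (a hex string and an ISO
  // timestamp respectively) when passed through JSON.stringify.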

  return describe('shouldUpdateRanges', function () {
    beforeEach(function () {
      this.ranges = {
        changes: [
          {
            id: ObjectId(),
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: ObjectId(),
              ts: new Date(),
            },
          },
        ],
        comments: [
          {
            id: ObjectId(),
            op: { c: 'foo', p: 3, t: ObjectId() },
          },
        ],
      }
      return (this.ranges_copy = this.RangeManager.jsonRangesToMongo(
        JSON.parse(JSON.stringify(this.ranges))
      ))
    })

    describe('with a blank new range', function () {
      return it('should throw an error', function () {
        return expect(() => {
          return this.RangeManager.shouldUpdateRanges(this.ranges, null)
        }).to.throw(Error)
      })
    })

    describe('with a blank old range', function () {
      return it('should treat it like {}', function () {
        this.RangeManager.shouldUpdateRanges(null, {}).should.equal(false)
        return this.RangeManager.shouldUpdateRanges(
          null,
          this.ranges
        ).should.equal(true)
      })
    })
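
    // this.ranges_copy is a deep copy normalised through jsonRangesToMongo,
    // so it is value-equal to this.ranges without sharing object identity;
    // the 'with changes' cases below mutate single fields on the copy to
    // flip shouldUpdateRanges from false to true.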

    describe('with no changes', function () {
      return it('should return false', function () {
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(false)
      })
    })

    return describe('with changes', function () {
      it('should return true when the change id changes', function () {
        this.ranges_copy.changes[0].id = ObjectId()
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change user id changes', function () {
        this.ranges_copy.changes[0].metadata.user_id = ObjectId()
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change ts changes', function () {
        this.ranges_copy.changes[0].metadata.ts = new Date(Date.now() + 1000)
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change op changes', function () {
        this.ranges_copy.changes[0].op.i = 'bar'
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the comment id changes', function () {
        this.ranges_copy.comments[0].id = ObjectId()
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the comment offset changes', function () {
        this.ranges_copy.comments[0].op.p = 17
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      return it('should return true when the comment content changes', function () {
        this.ranges_copy.comments[0].op.c = 'bar'
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })
    })
  })
})